diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile new file mode 100644 index 000000000000..352c2766ae16 --- /dev/null +++ b/.devcontainer/Dockerfile @@ -0,0 +1,29 @@ +# Source: https://github.com/a5chin/python-uv +FROM debian:bookworm-slim AS builder + +ENV CARGO_HOME="/opt/.cargo" + +SHELL [ "/bin/bash", "-o", "pipefail", "-c" ] + +WORKDIR /opt + +# The installer requires curl (and certificates) to download the release archive +# hadolint ignore=DL3008 +RUN apt-get update && \ + apt-get install -y --no-install-recommends ca-certificates curl + +# Run uv installer +RUN curl -LsSf https://astral.sh/uv/install.sh | sh + + +FROM mcr.microsoft.com/vscode/devcontainers/base:bookworm + + +ENV CARGO_HOME="/opt/.cargo" +ENV PATH="$CARGO_HOME/bin/:$PATH" +ENV PYTHONUNBUFFERED=True +ENV UV_LINK_MODE=copy + +WORKDIR /opt + +COPY --from=builder --chown=vscode: $CARGO_HOME $CARGO_HOME \ No newline at end of file diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 4df8df96b0f2..88c16501327c 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -2,13 +2,14 @@ // README at: https://github.com/devcontainers/templates/tree/main/src/universal { "name": "Langflow Dev Container", - // Or use a Dockerfile or Docker Compose file. More info: https://containers.dev/guide/dockerfile - "image": "mcr.microsoft.com/devcontainers/python:3.10", - + "build": { + "context": "..", + "dockerfile": "Dockerfile" + }, // Features to add to the dev container. More info: https://containers.dev/features. "features": { "ghcr.io/devcontainers/features/node": {}, - "ghcr.io/devcontainers-contrib/features/poetry": {} + "ghcr.io/dhoeric/features/hadolint:1": {} }, // Use 'forwardPorts' to make a list of ports inside the container available locally. @@ -16,26 +17,35 @@ // Use 'postCreateCommand' to run commands after the container is created. "postCreateCommand": "make install_frontend && make install_backend", - - "containerEnv": { - "POETRY_VIRTUALENVS_IN_PROJECT": "true" - }, + "postStartCommand": "make init", // Configure tool-specific properties. "customizations": { "vscode": { "extensions": [ + "charliermarsh.ruff", + "njpwerner.autodocstring", + "oderwat.indent-rainbow", + "exiasr.hadolint", "actboy168.tasks", "GitHub.copilot", "ms-python.python", - "sourcery.sourcery", "eamodio.gitlens", "ms-vscode.makefile-tools", "GitHub.vscode-pull-request-github" - ] + ], + "settings": { + "terminal.integrated.defaultProfile.linux": "zsh", + "terminal.integrated.profiles.linux": { + "zsh": { + "path": "/bin/zsh" + } + } + } } - } + }, // Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root. 
// "remoteUser": "root" + "remoteUser": "vscode" } diff --git a/.github/actions/setup-uv/action.yml b/.github/actions/setup-uv/action.yml new file mode 100644 index 000000000000..1b75878f3f83 --- /dev/null +++ b/.github/actions/setup-uv/action.yml @@ -0,0 +1,25 @@ +name: "Setup uv" +description: "Checks out code, installs uv, and sets up Python environment" + +runs: + using: "composite" + steps: + - name: Install uv + uses: astral-sh/setup-uv@v3 + with: + enable-cache: true + cache-dependency-glob: "uv.lock" + + - name: "Set up Python" + uses: actions/setup-python@v5 + with: + python-version-file: "pyproject.toml" + + - name: Restore uv cache + uses: actions/cache@v4 + with: + path: /tmp/.uv-cache + key: uv-${{ runner.os }}-${{ hashFiles('uv.lock') }} + restore-keys: | + uv-${{ runner.os }}-${{ hashFiles('uv.lock') }} + uv-${{ runner.os }} diff --git a/.github/changes-filter.yaml b/.github/changes-filter.yaml index f3df3db58882..36eb19a62a1b 100644 --- a/.github/changes-filter.yaml +++ b/.github/changes-filter.yaml @@ -5,8 +5,11 @@ python: - "pyproject.toml" - "poetry.lock" - "**/python_test.yml" -tests: - - "tests/**" +components-changes: + - "src/backend/base/langflow/components/**" +starter-projects-changes: + - "src/backend/base/langflow/initial_setup/**" +frontend-tests: - "src/frontend/tests/**" frontend: - "src/frontend/**" diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index f09031e6aae7..a53481d2ba8d 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -12,7 +12,7 @@ on: description: "Frontend Tests Folder" required: false type: string - default: "tests/end-to-end" + default: "tests/core" workflow_dispatch: inputs: branch: @@ -35,7 +35,6 @@ on: pull_request: types: [synchronize, labeled] - concurrency: group: ${{ github.workflow }}-${{ github.ref }} cancel-in-progress: true @@ -55,14 +54,15 @@ jobs: path-filter: needs: set-ci-condition if: ${{ needs.set-ci-condition.outputs.should-run-ci == 'true' }} - name: Filter Paths runs-on: ubuntu-latest outputs: python: ${{ steps.filter.outputs.python }} frontend: ${{ steps.filter.outputs.frontend }} docs: ${{ steps.filter.outputs.docs }} - tests: ${{ steps.filter.outputs.tests }} + frontend-tests: ${{ steps.filter.outputs.frontend-tests }} + components-changes: ${{ steps.filter.outputs.components-changes }} + starter-projects-changes: ${{ steps.filter.outputs.starter-projects-changes }} steps: - name: Checkout code uses: actions/checkout@v4 @@ -77,7 +77,7 @@ jobs: test-backend: needs: path-filter name: Run Backend Tests - if: ${{ needs.path-filter.outputs.python == 'true' || needs.path-filter.outputs.tests == 'true' }} + if: ${{ needs.path-filter.outputs.python == 'true'}} uses: ./.github/workflows/python_test.yml with: python-versions: ${{ inputs.python-versions || '["3.10"]' }} @@ -85,7 +85,7 @@ jobs: test-frontend: needs: path-filter name: Run Frontend Tests - if: ${{ needs.path-filter.outputs.python == 'true' || needs.path-filter.outputs.frontend == 'true' || needs.path-filter.outputs.tests == 'true' }} + if: ${{ needs.path-filter.outputs.frontend == 'true' || needs.path-filter.outputs.frontend-tests == 'true' || needs.path-filter.outputs.components-changes == 'true' || needs.path-filter.outputs.starter-projects-changes == 'true' }} uses: ./.github/workflows/typescript_test.yml with: tests_folder: ${{ inputs.frontend-tests-folder }} @@ -95,10 +95,9 @@ jobs: lint-backend: needs: path-filter - if: ${{ needs.path-filter.outputs.python == 'true' || needs.path-filter.outputs.tests == 'true' }} + if: ${{ 
needs.path-filter.outputs.python == 'true'}} name: Lint Backend uses: ./.github/workflows/lint-py.yml - # Run only if there are python files changed test-docs-build: needs: path-filter @@ -109,7 +108,14 @@ jobs: # https://github.com/langchain-ai/langchain/blob/master/.github/workflows/check_diffs.yml ci_success: name: "CI Success" - needs: [test-backend,test-frontend,lint-backend,test-docs-build,set-ci-condition] + needs: + [ + test-backend, + test-frontend, + lint-backend, + test-docs-build, + set-ci-condition, + ] if: always() runs-on: ubuntu-latest @@ -123,4 +129,4 @@ jobs: echo $JOBS_JSON echo $RESULTS_JSON echo "Exiting with $EXIT_CODE" - exit $EXIT_CODE \ No newline at end of file + exit $EXIT_CODE diff --git a/.github/workflows/codspeed.yml b/.github/workflows/codspeed.yml new file mode 100644 index 000000000000..1d6618176b42 --- /dev/null +++ b/.github/workflows/codspeed.yml @@ -0,0 +1,44 @@ +name: Run benchmarks + +on: + push: + paths: + - "src/backend/base/**" + - "src/backend/tests/performance/**" + branches: + - "main" # or "master" + pull_request: + paths: + - "src/backend/base/**" + - "src/backend/tests/performance/**" + workflow_dispatch: + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + codspeed: + name: Run benchmarks + runs-on: ubuntu-latest + strategy: + matrix: + python-version: + - "3.12" + steps: + - name: Check out the code at a specific ref + uses: actions/checkout@v4 + - name: "Setup Environment" + uses: ./.github/actions/setup-uv + - name: Run benchmarks + uses: CodSpeedHQ/action@v3 + with: + token: ${{ secrets.CODSPEED_TOKEN }} + run: | + uv run pytest src/backend/tests \ + --ignore=src/backend/tests/integration \ + --codspeed \ + -m "not api_key_required" \ + -n auto + - name: Minimize uv cache + run: uv cache prune --ci diff --git a/.github/workflows/conventional-labels.yml b/.github/workflows/conventional-labels.yml index 676c87564817..06c9fc46064b 100644 --- a/.github/workflows/conventional-labels.yml +++ b/.github/workflows/conventional-labels.yml @@ -15,7 +15,6 @@ jobs: uses: Namchee/conventional-pr@v0.15.4 with: access_token: ${{ secrets.GITHUB_TOKEN }} - verbose: true issue: false label: diff --git a/.github/workflows/create-release.yml b/.github/workflows/create-release.yml deleted file mode 100644 index e1b806ccf074..000000000000 --- a/.github/workflows/create-release.yml +++ /dev/null @@ -1,64 +0,0 @@ -name: Create Release -on: - workflow_dispatch: - inputs: - version: - description: "Version to release" - required: true - type: string - release_type: - description: "Type of release (base or main)" - required: true - type: choice - options: - - base - - main - -env: - POETRY_VERSION: "1.8.2" -jobs: - release: - name: Build Langflow - runs-on: ubuntu-latest - outputs: - version: ${{ steps.check-version.outputs.version }} - steps: - - uses: actions/checkout@v4 - - name: Install poetry - run: pipx install poetry==${{ env.POETRY_VERSION }} - - name: Set up Python 3.12 - uses: actions/setup-python@v5 - with: - python-version: "3.12" - cache: "poetry" - - name: Build project for distribution - run: | - if [ "${{ inputs.release_type }}" == "base" ]; then - make build base=true - else - make build main=true - fi - - name: Upload Artifact - uses: actions/upload-artifact@v4 - with: - name: dist${{ inputs.release_type }} - path: ${{ inputs.release_type == 'base' && 'src/backend/base/dist' || 'dist' }} - create_release: - name: Create Release Job - runs-on: ubuntu-latest - needs: release - steps: - - uses: 
actions/download-artifact@v4 - with: - name: dist${{ inputs.release_type }} - path: dist - - name: Create Release Notes - uses: ncipollo/release-action@v1 - with: - artifacts: "dist/*" - token: ${{ secrets.GITHUB_TOKEN }} - draft: false - generateReleaseNotes: true - prerelease: true - tag: v${{ inputs.version }} - commit: dev diff --git a/.github/workflows/docker-build.yml b/.github/workflows/docker-build.yml index 9c77e9813c3f..acd6d1aacc82 100644 --- a/.github/workflows/docker-build.yml +++ b/.github/workflows/docker-build.yml @@ -2,32 +2,51 @@ name: Docker Build and Push on: workflow_call: inputs: - version: + main_version: + required: true + type: string + description: "Main version to tag images with. Required for both main and base releases." + base_version: required: false type: string + description: "Base version to tag images with. Required for base release type." release_type: required: true type: string + description: "Release type. One of 'main', 'main-ep', 'base', 'nightly-main', 'nightly-base'." pre_release: required: false type: boolean default: false + ref: + required: false + type: string + description: "Ref to check out. If not specified, will default to the main version or current branch." workflow_dispatch: inputs: - version: - required: true + main_version: + description: "Main version to tag images with. Required for both main and base releases." + required: false + type: string + base_version: + description: "Base version to tag images with. Required for base release type." + required: false type: string release_type: + description: "Type of release. One of 'main', 'main-ep', 'base', 'nightly-main', 'nightly-base'." required: true - type: choice - options: - - base - - main + type: string pre_release: required: false type: boolean - default: true + default: false + ref: + required: false + type: string + description: "Ref to check out. If not specified, will default to the main version or current branch." + + env: POETRY_VERSION: "1.8.2" TEST_TAG: "langflowai/langflow:test" @@ -36,111 +55,239 @@ jobs: get-version: name: Get Version runs-on: ubuntu-latest - outputs: version: ${{ steps.get-version-input.outputs.version || steps.get-version-base.outputs.version || steps.get-version-main.outputs.version }} steps: - - uses: actions/checkout@v4 - - name: Set up Python 3.12 + Poetry ${{ env.POETRY_VERSION }} - uses: "./.github/actions/poetry_caching" + - name: Verify a main version exists + if: ${{ inputs.main_version == '' }} + run: | + # due to our how we split packages, we need to have a main version to check out. + echo "Must specify a main version to check out." + exit 1 + + - name: Check out the code at a specific ref + uses: actions/checkout@v4 with: - python-version: "3.12" - poetry-version: ${{ env.POETRY_VERSION }} - cache-key: ${{ runner.os }}-poetry-${{ env.POETRY_VERSION }}-${{ hashFiles('**/poetry.lock') }} - - name: Get Version from Input - if : ${{ inputs.version != '' }} + ref: ${{ inputs.ref || inputs.main_version || github.ref }} + persist-credentials: true + + - name: Get Version to Tag + if: ${{ inputs.main_version != '' }} id: get-version-input run: | - version=${{ inputs.version }} - echo version=$version >> $GITHUB_OUTPUT - - name: Get Version Main - if : ${{ inputs.version == '' && inputs.release_type == 'base' }} + # Produces the versions we will use to tag the docker images with. + + if [[ "${{ inputs.release_type }}" == "base" && "${{ inputs.base_version }}" == '' ]]; then + echo "Must specify a base version for base release type." 
+ exit 1 + fi + + if [[ "${{ inputs.release_type }}" == "nightly-base" && "${{ inputs.base_version }}" == '' ]]; then + echo "Must specify a base version for nightly-base release type." + exit 1 + fi + + if [[ "${{ inputs.release_type }}" == "main" && "${{ inputs.main_version }}" == '' ]]; then + echo "Must specify a main version for main release type." + exit 1 + fi + + if [[ "${{ inputs.release_type }}" == "main-ep" && "${{ inputs.main_version }}" == '' ]]; then + echo "Must specify a main version for main-ep release type." + exit 1 + fi + + if [[ "${{ inputs.release_type }}" == "nightly-main" && "${{ inputs.main_version }}" == '' ]]; then + echo "Must specify a main version for nightly-main release type." + exit 1 + fi + + if [[ "${{ inputs.release_type }}" == "base" || "${{ inputs.release_type }}" == "nightly-base" ]]; then + version=${{ inputs.base_version }} + echo "base version=${{ inputs.base_version }}" + echo version=$version + echo version=$version >> $GITHUB_OUTPUT + elif [[ "${{ inputs.release_type }}" == "main" || "${{ inputs.release_type }}" == "main-ep" || "${{ inputs.release_type }}" == "nightly-main" ]]; then + version=${{ inputs.main_version }} + echo version=$version + echo version=$version >> $GITHUB_OUTPUT + else + echo "No version or ref specified. Exiting the workflow." + exit 1 + fi + - name: Get Version Base + if: ${{ inputs.base_version == '' && (inputs.release_type == 'base' || inputs.release_type == 'nightly-base') }} id: get-version-base run: | - version=$(cd src/backend/base && poetry version --short) + version=$(uv tree | grep 'langflow-base' | awk '{print $3}' | sed 's/^v//') + if [ -z "$version" ]; then + echo "Failed to extract version from uv tree output" + exit 1 + fi + echo version=$version echo version=$version >> $GITHUB_OUTPUT - - name: Get Version Base - if : ${{ inputs.version == '' && inputs.release_type == 'main' }} + - name: Get Version Main + if: ${{ inputs.main_version == '' && (inputs.release_type == 'main' || inputs.release_type == 'main-ep' || inputs.release_type == 'nightly-main') }} id: get-version-main run: | - version=$(poetry version --short) + version=$(uv tree | grep 'langflow' | grep -v 'langflow-base' | awk '{print $2}' | sed 's/^v//') + echo version=$version echo version=$version >> $GITHUB_OUTPUT setup: runs-on: ubuntu-latest needs: get-version outputs: - tags: ${{ steps.set-vars.outputs.tags }} + docker_tags: ${{ steps.set-vars.outputs.docker_tags }} + ghcr_tags: ${{ steps.set-vars.outputs.ghcr_tags }} file: ${{ steps.set-vars.outputs.file }} steps: - - uses: actions/checkout@v4 - name: Set Dockerfile and Tags id: set-vars run: | - if [[ "${{ inputs.release_type }}" == "base" ]]; then - echo "tags=langflowai/langflow:base-${{ needs.get-version.outputs.version }},langflowai/langflow:base-latest" >> $GITHUB_OUTPUT + nightly_suffix='' + if [[ "${{ inputs.release_type }}" == "nightly-base" || "${{ inputs.release_type }}" == "nightly-main" ]]; then + nightly_suffix="-nightly" + fi + + if [[ "${{ inputs.release_type }}" == "base" || "${{ inputs.release_type }}" == "nightly-base" ]]; then + # LANGFLOW-BASE RELEASE + echo "docker_tags=langflowai/langflow${nightly_suffix}:base-${{ needs.get-version.outputs.version }},langflowai/langflow${nightly_suffix}:base-latest" >> $GITHUB_OUTPUT + echo "ghcr_tags=ghcr.io/langflow-ai/langflow${nightly_suffix}:base-${{ needs.get-version.outputs.version }},ghcr.io/langflow-ai/langflow${nightly_suffix}:base-latest" >> $GITHUB_OUTPUT echo "file=./docker/build_and_push_base.Dockerfile" >> $GITHUB_OUTPUT 
else if [[ "${{ inputs.pre_release }}" == "true" ]]; then - echo "tags=langflowai/langflow:${{ needs.get-version.outputs.version }}" >> $GITHUB_OUTPUT + # LANGFLOW-MAIN PRE-RELEASE + echo "docker_tags=langflowai/langflow${nightly_suffix}:${{ needs.get-version.outputs.version }}" >> $GITHUB_OUTPUT + echo "ghcr_tags=ghcr.io/langflow-ai/langflow${nightly_suffix}:${{ needs.get-version.outputs.version }}" >> $GITHUB_OUTPUT + echo "file=./docker/build_and_push.Dockerfile" >> $GITHUB_OUTPUT + elif [[ "${{ inputs.release_type }}" == "main-ep" ]]; then + # LANGFLOW-MAIN (ENTRYPOINT) RELEASE + echo "docker_tags=langflowai/langflow-ep${nightly_suffix}:${{ needs.get-version.outputs.version }},langflowai/langflow-ep${nightly_suffix}:latest" >> $GITHUB_OUTPUT + echo "ghcr_tags=ghcr.io/langflow-ai/langflow-ep${nightly_suffix}:${{ needs.get-version.outputs.version }},ghcr.io/langflow-ai/langflow-ep${nightly_suffix}:latest" >> $GITHUB_OUTPUT + echo "file=./docker/build_and_push_ep.Dockerfile" >> $GITHUB_OUTPUT + elif [[ "${{ inputs.release_type }}" == "main" || "${{ inputs.release_type }}" == "nightly-main" ]]; then + # LANGFLOW-MAIN RELEASE + echo "docker_tags=langflowai/langflow${nightly_suffix}:${{ needs.get-version.outputs.version }},langflowai/langflow${nightly_suffix}:latest" >> $GITHUB_OUTPUT + echo "ghcr_tags=ghcr.io/langflow-ai/langflow${nightly_suffix}:${{ needs.get-version.outputs.version }},ghcr.io/langflow-ai/langflow${nightly_suffix}:latest" >> $GITHUB_OUTPUT + echo "file=./docker/build_and_push.Dockerfile" >> $GITHUB_OUTPUT else - echo "tags=langflowai/langflow:${{ needs.get-version.outputs.version }},langflowai/langflow:latest" >> $GITHUB_OUTPUT + echo "Invalid release type. Exiting the workflow." + exit 1 fi - echo "file=./docker/build_and_push.Dockerfile" >> $GITHUB_OUTPUT fi build: runs-on: ubuntu-latest - needs: setup + needs: [get-version, setup] steps: - - uses: actions/checkout@v4 + - name: Check out the code at a specific ref + uses: actions/checkout@v4 + with: + ref: ${{ inputs.ref || inputs.main_version || github.ref }} + persist-credentials: true + - name: "Setup Environment" + uses: ./.github/actions/setup-uv + - name: Install the project + run: | + if [[ "${{ inputs.release_type }}" == "base" || "${{ inputs.release_type }}" == "nightly-base" ]]; then + uv sync --directory src/backend/base --no-dev --no-sources + else + uv sync --no-dev --no-sources + fi + - name: Set up Docker Buildx uses: docker/setup-buildx-action@v3 + - name: Login to Docker Hub uses: docker/login-action@v3 with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} - - name: Build and Push Docker Image + - name: Build and Push to Docker Hub uses: docker/build-push-action@v6 with: context: . push: true file: ${{ needs.setup.outputs.file }} - tags: ${{ needs.setup.outputs.tags }} + tags: ${{ needs.setup.outputs.docker_tags }} + # provenance: false will result in a single manifest for all platforms which makes the image pullable from arm64 machines via the emulation (e.g. Apple Silicon machines) + provenance: false + + - name: Login to Github Container Registry + uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.TEMP_GHCR_TOKEN}} + - name: Build and push to Github Container Registry + uses: docker/build-push-action@v6 + with: + context: . 
+ push: true + file: ${{ needs.setup.outputs.file }} + tags: ${{ needs.setup.outputs.ghcr_tags }} # provenance: false will result in a single manifest for all platforms which makes the image pullable from arm64 machines via the emulation (e.g. Apple Silicon machines) provenance: false build_components: if: ${{ inputs.release_type == 'main' }} runs-on: ubuntu-latest + permissions: + packages: write needs: [build, get-version] strategy: matrix: - component: [backend, frontend] + component: [docker-backend, docker-frontend, ghcr-backend, ghcr-frontend] include: - - component: backend + - component: docker-backend dockerfile: ./docker/build_and_push_backend.Dockerfile - tags: ${{ inputs.pre_release == 'true' && format('langflowai/langflow-backend:{0}', needs.get-version.outputs.version) || format('langflowai/langflow-backend:{0},langflowai/langflow-backend:latest', needs.get-version.outputs.version) }} - - component: frontend + tags: langflowai/langflow-backend:${{ needs.get-version.outputs.version }},langflowai/langflow-backend:latest + langflow_image: langflowai/langflow:${{ needs.get-version.outputs.version }} + - component: docker-frontend dockerfile: ./docker/frontend/build_and_push_frontend.Dockerfile - tags: ${{ inputs.pre_release == 'true' && format('langflowai/langflow-frontend:{0}', needs.get-version.outputs.version) || format('langflowai/langflow-frontend:{0},langflowai/langflow-frontend:latest', needs.get-version.outputs.version) }} + tags: langflowai/langflow-frontend:${{ needs.get-version.outputs.version }},langflowai/langflow-frontend:latest + langflow_image: langflowai/langflow:${{ needs.get-version.outputs.version }} + - component: ghcr-backend + dockerfile: ./docker/build_and_push_backend.Dockerfile + tags: ghcr.io/langflow-ai/langflow-backend:${{ needs.get-version.outputs.version }},ghcr.io/langflow-ai/langflow-backend:latest + langflow_image: ghcr.io/langflow-ai/langflow:${{ needs.get-version.outputs.version }} + - component: ghcr-frontend + dockerfile: ./docker/frontend/build_and_push_frontend.Dockerfile + tags: ghcr.io/langflow-ai/langflow-frontend:${{ needs.get-version.outputs.version }},ghcr.io/langflow-ai/langflow-frontend:latest + langflow_image: ghcr.io/langflow-ai/langflow:${{ needs.get-version.outputs.version }} steps: - - uses: actions/checkout@v4 + - name: Check out the code at a specific ref + uses: actions/checkout@v4 + with: + ref: ${{ inputs.ref || inputs.main_version || github.ref }} + - name: Set up Docker Buildx uses: docker/setup-buildx-action@v3 + - name: Login to Docker Hub + if: ${{ matrix.component == 'docker-backend' }} || ${{ matrix.component == 'docker-frontend' }} uses: docker/login-action@v3 with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} - - name: Wait for Docker Hub to propagate (for backend) + + - name: Login to Github Container Registry + if: ${{ matrix.component == 'ghcr-backend' }} || ${{ matrix.component == 'ghcr-frontend' }} + uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.TEMP_GHCR_TOKEN}} + + - name: Wait for propagation (for backend) run: sleep 120 + - name: Build and push ${{ matrix.component }} uses: docker/build-push-action@v6 with: context: . 
push: true build-args: | - LANGFLOW_IMAGE=langflowai/langflow:${{ needs.get-version.outputs.version }} + LANGFLOW_IMAGE=${{ matrix.langflow_image }} file: ${{ matrix.dockerfile }} tags: ${{ matrix.tags }} # provenance: false will result in a single manifest for all platforms which makes the image pullable from arm64 machines via the emulation (e.g. Apple Silicon machines) @@ -150,24 +297,22 @@ jobs: name: Restart HuggingFace Spaces if: ${{ inputs.release_type == 'main' }} runs-on: ubuntu-latest - needs: build + needs: [build, get-version] strategy: matrix: python-version: - "3.12" steps: - - uses: actions/checkout@v4 - - name: Set up Python ${{ matrix.python-version }} + Poetry ${{ env.POETRY_VERSION }} - uses: "./.github/actions/poetry_caching" + - name: Check out the code at a specific ref + uses: actions/checkout@v4 with: - python-version: ${{ matrix.python-version }} - poetry-version: ${{ env.POETRY_VERSION }} - cache-key: ${{ runner.os }}-poetry-${{ env.POETRY_VERSION }}-${{ hashFiles('**/poetry.lock') }} - - name: Install Python dependencies - run: | - poetry env use ${{ matrix.python-version }} - poetry install + ref: ${{ inputs.ref || inputs.main_version || github.ref }} + - name: "Setup Environment" + uses: ./.github/actions/setup-uv - name: Restart HuggingFace Spaces Build run: | - poetry run python ./scripts/factory_restart_space.py --space "Langflow/Langflow" --token ${{ secrets.HUGGINGFACE_API_TOKEN }} + uv run ./scripts/factory_restart_space.py --space "Langflow/Langflow" --token ${{ secrets.HUGGINGFACE_API_TOKEN }} + + + diff --git a/.github/workflows/docker_test.yml b/.github/workflows/docker_test.yml index 07b826c588ef..9564ccd271b1 100644 --- a/.github/workflows/docker_test.yml +++ b/.github/workflows/docker_test.yml @@ -35,7 +35,7 @@ jobs: - name: Test image run: | expected_version=$(cat pyproject.toml | grep version | head -n 1 | cut -d '"' -f 2) - version=$(docker run --rm --entrypoint bash langflowai/langflow:latest-dev -c 'python -c "from langflow.version import __version__ as langflow_version; print(langflow_version)"') + version=$(docker run --rm --entrypoint bash langflowai/langflow:latest-dev -c 'python -c "from langflow.version.version import get_version; print(get_version())"') if [ "$expected_version" != "$version" ]; then echo "Expected version: $expected_version" echo "Actual version: $version" @@ -51,7 +51,7 @@ jobs: - name: Test backend image run: | expected_version=$(cat pyproject.toml | grep version | head -n 1 | cut -d '"' -f 2) - version=$(docker run --rm --entrypoint bash langflowai/langflow-backend:latest-dev -c 'python -c "from langflow.version import __version__ as langflow_version; print(langflow_version)"') + version=$(docker run --rm --entrypoint bash langflowai/langflow-backend:latest-dev -c 'python -c "from langflow.version.version import get_version; print(get_version())"') if [ "$expected_version" != "$version" ]; then echo "Expected version: $expected_version" echo "Actual version: $version" diff --git a/.github/workflows/fetch_docs_notion.yml b/.github/workflows/fetch_docs_notion.yml index b11f566bfe2f..fcc572cb5038 100644 --- a/.github/workflows/fetch_docs_notion.yml +++ b/.github/workflows/fetch_docs_notion.yml @@ -50,7 +50,7 @@ jobs: - name: Create Pull Request id: create_pr - uses: peter-evans/create-pull-request@v6 + uses: peter-evans/create-pull-request@v7 with: token: ${{ secrets.GITHUB_TOKEN }} commit-message: Update docs from Notion diff --git a/.github/workflows/integration_tests.yml b/.github/workflows/integration_tests.yml new 
file mode 100644 index 000000000000..df5480ac3606 --- /dev/null +++ b/.github/workflows/integration_tests.yml @@ -0,0 +1,50 @@ +name: Integration Tests + +on: + workflow_dispatch: + inputs: + ref: + description: "(Optional) ref to checkout" + required: false + type: string + workflow_call: + inputs: + python-versions: + description: "(Optional) Python versions to test" + required: true + type: string + default: "['3.10', '3.11', '3.12']" + ref: + description: "(Optional) ref to checkout" + required: false + type: string + +env: + POETRY_VERSION: "1.8.2" + +jobs: + integration-tests: + name: Run Integration Tests + runs-on: ubuntu-latest + strategy: + max-parallel: 1 # Currently, we can only run at a time for collection-per-db-constraints + matrix: + python-version: + - "3.12" + - "3.11" + - "3.10" + env: + OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} + ASTRA_DB_API_ENDPOINT: ${{ secrets.ASTRA_DB_API_ENDPOINT }} + ASTRA_DB_APPLICATION_TOKEN: ${{ secrets.ASTRA_DB_APPLICATION_TOKEN }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ inputs.ref || github.ref }} + - name: "Setup Environment" + uses: ./.github/actions/setup-uv + - name: Run integration tests with api keys + timeout-minutes: 20 + run: | + make integration_tests_api_keys diff --git a/.github/workflows/lint-py.yml b/.github/workflows/lint-py.yml index e63a0e82960e..937de410dd83 100644 --- a/.github/workflows/lint-py.yml +++ b/.github/workflows/lint-py.yml @@ -23,21 +23,19 @@ jobs: - "3.11" - "3.10" steps: - - uses: actions/checkout@v4 + - name: Check out the code at a specific ref + uses: actions/checkout@v4 with: ref: ${{ inputs.branch || github.ref }} - - name: Set up Python ${{ matrix.python-version }} + Poetry ${{ env.POETRY_VERSION }} - uses: "./.github/actions/poetry_caching" - with: - python-version: ${{ matrix.python-version }} - poetry-version: ${{ env.POETRY_VERSION }} - cache-key: ${{ runner.os }}-poetry-${{ env.POETRY_VERSION }}-${{ hashFiles('**/poetry.lock') }} - - name: Install Python dependencies - run: | - poetry env use ${{ matrix.python-version }} - poetry install + persist-credentials: true + - name: "Setup Environment" + uses: ./.github/actions/setup-uv + - name: Install the project + run: uv sync --dev - name: Run Mypy run: | - make lint + uv run mypy --namespace-packages -p "langflow" env: GITHUB_TOKEN: ${{ secrets.github_token }} + - name: Minimize uv cache + run: uv cache prune --ci diff --git a/.github/workflows/nightly_build.yml b/.github/workflows/nightly_build.yml index 9d8179635443..9890ee3e43e4 100644 --- a/.github/workflows/nightly_build.yml +++ b/.github/workflows/nightly_build.yml @@ -3,15 +3,199 @@ name: Nightly Build on: workflow_dispatch: schedule: - - cron: "0 0 * * *" # Run every day at midnight (UTC) + # Run job at 6:30 UTC, 10.30pm PST, or 11.30pm PDT + - cron: "30 6 * * *" env: - POETRY_VERSION: "1.8.2" + POETRY_VERSION: "1.8.3" + PYTHON_VERSION: "3.12" jobs: - hello-world: + create-nightly-tag: + if: github.repository == 'langflow-ai/langflow' runs-on: ubuntu-latest + defaults: + run: + shell: bash -ex -o pipefail {0} + permissions: + # Required to create tag + contents: write + outputs: + main_tag: ${{ steps.generate_main_tag.outputs.main_tag }} + base_tag: ${{ steps.set_base_tag.outputs.base_tag }} steps: - - name: Run hello world + - name: Checkout code + uses: actions/checkout@v4 + with: + persist-credentials: true + - name: "Setup Environment" + uses: ./.github/actions/setup-uv + - name: Install the project + run: uv sync --dev + + - name: Generate main 
nightly tag + id: generate_main_tag + run: | + # NOTE: This outputs the tag with the `v` prefix. + MAIN_TAG="$(uv run ./scripts/ci/pypi_nightly_tag.py main)" + echo "main_tag=$MAIN_TAG" >> $GITHUB_OUTPUT + echo "main_tag=$MAIN_TAG" + + - name: Check if main tag already exists + id: check_main_tag + run: | + git fetch --tags + if git rev-parse -q --verify "refs/tags/${{ steps.generate_main_tag.outputs.main_tag }}" >/dev/null; then + echo "main_tag_exists=true" >> $GITHUB_OUTPUT + else + echo "main_tag_exists=false" >> $GITHUB_OUTPUT + fi + + - name: Generate base nightly tag + id: generate_base_tag + if: ${{ steps.check_main_tag.outputs.main_tag_exists == 'false' }} + run: | + # NOTE: This outputs the tag with the `v` prefix. + BASE_TAG="$(uv run ./scripts/ci/pypi_nightly_tag.py base)" + echo "base_tag=$BASE_TAG" >> $GITHUB_OUTPUT + echo "base_tag=$BASE_TAG" + + - name: Commit tag + id: commit_tag + if: ${{ steps.check_main_tag.outputs.main_tag_exists == 'false' }} + run: | + # If the main tag does not exist in GH, we create the base tag from the existing codebase. + + git config --global user.email "bot-nightly-builds@langflow.org" + git config --global user.name "Langflow Bot" + + MAIN_TAG="${{ steps.generate_main_tag.outputs.main_tag }}" + BASE_TAG="${{ steps.generate_base_tag.outputs.base_tag }}" + echo "Updating base project version to $BASE_TAG and updating main project version to $MAIN_TAG" + uv run ./scripts/ci/update_pyproject_combined.py main $MAIN_TAG $BASE_TAG + + uv lock + cd src/backend/base && uv lock && cd ../../.. + + git add pyproject.toml src/backend/base/pyproject.toml uv.lock src/backend/base/uv.lock + git commit -m "Update version and project name" + + echo "Tagging main with $MAIN_TAG" + if ! git tag -a $MAIN_TAG -m "Langflow nightly $MAIN_TAG"; then + echo "Tag creation failed. Exiting the workflow." + exit 1 + fi + + echo "Pushing main tag $MAIN_TAG" + if ! git push origin $MAIN_TAG; then + echo "Tag push failed. Check if the tag already exists. Exiting the workflow." + exit 1 + fi + # TODO: notify on failure + + - name: Checkout main nightly tag + uses: actions/checkout@v4 + if: ${{ steps.check_main_tag.outputs.main_tag_exists == 'true' }} + with: + ref: ${{ steps.generate_main_tag.outputs.main_tag }} + + - name: Retrieve Base Tag + id: retrieve_base_tag + if: ${{ steps.check_main_tag.outputs.main_tag_exists == 'true' }} + working-directory: src/backend/base + run: | + # If the main tag already exists, we need to retrieve the base version from the main tag codebase. + version=$(uv tree | grep 'langflow-base' | awk '{print $3}') + echo "base_tag=$version" >> $GITHUB_OUTPUT + echo "base_tag=$version" + + - name: Set Base Tag + id: set_base_tag run: | - echo "Hello, world!" + if [ "${{ steps.retrieve_base_tag.conclusion }}" != "skipped" ] && [ "${{ steps.retrieve_base_tag.outputs.base_tag }}" ]; then + BASE_TAG="${{ steps.retrieve_base_tag.outputs.base_tag }}" + echo "base_tag=$BASE_TAG" >> $GITHUB_OUTPUT + echo "base_tag=$BASE_TAG" + elif [ "${{ steps.commit_tag.conclusion }}" != "skipped" ] && [ "${{ steps.generate_base_tag.outputs.base_tag }}" ]; then + BASE_TAG="${{ steps.generate_base_tag.outputs.base_tag }}" + echo "base_tag=$BASE_TAG" >> $GITHUB_OUTPUT + echo "base_tag=$BASE_TAG" + else + echo "No base tag found. Exiting the workflow." 
+ exit 1 + fi + + frontend-tests: + if: github.repository == 'langflow-ai/langflow' + name: Run Frontend Tests + needs: create-nightly-tag + uses: ./.github/workflows/typescript_test.yml + with: + tests_folder: "tests" + secrets: + OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} + STORE_API_KEY: ${{ secrets.STORE_API_KEY }} + + backend-unit-tests: + if: github.repository == 'langflow-ai/langflow' + name: Run Backend Unit Tests + needs: create-nightly-tag + uses: ./.github/workflows/python_test.yml + with: + python-versions: '["3.10", "3.11", "3.12"]' + + # Not making nightly builds dependent on integration test success + # due to inherent flakiness of 3rd party integrations + # Revisit when https://github.com/langflow-ai/langflow/pull/3607 is merged. + # backend-integration-tests: + # name: Run Backend Integration Tests + # needs: create-nightly-tag + # uses: ./.github/workflows/integration_tests.yml + # with: + # python-versions: '["3.10", "3.11", "3.12"]' + # ref: ${{ needs.create-nightly-tag.outputs.tag }} + + release-nightly-build: + if: github.repository == 'langflow-ai/langflow' + name: Run Nightly Langflow Build + needs: [frontend-tests, backend-unit-tests, create-nightly-tag] + uses: ./.github/workflows/release_nightly.yml + with: + build_docker_base: true + build_docker_main: true + nightly_tag_main: ${{ needs.create-nightly-tag.outputs.main_tag }} + nightly_tag_base: ${{ needs.create-nightly-tag.outputs.base_tag }} + secrets: inherit + + # slack-notification: + # name: Send Slack Notification + # needs: run-nightly-build + # runs-on: ubuntu-latest + # steps: + # - name: Send success notification to Slack + # if: success() + # uses: slackapi/slack-github-action@v1.26.0 + # with: + # payload: | + # { + # "channel": "#langflow-nightly-builds", + # "username": "GitHub Actions", + # "text": "Nightly Build Successful :white_check_mark:", + # "icon_emoji": ":rocket:" + # } + # env: + # SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }} + + # - name: Send failure notification to Slack + # if: failure() + # uses: slackapi/slack-github-action@v1.26.0 + # with: + # payload: | + # { + # "channel": "#langflow-nightly-builds", + # "username": "GitHub Actions", + # "text": "Nightly Build Failed :x:", + # "icon_emoji": ":warning:" + # } + # env: + # SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }} diff --git a/.github/workflows/py_autofix.yml b/.github/workflows/py_autofix.yml index eabed5afbf4d..db901491bed7 100644 --- a/.github/workflows/py_autofix.yml +++ b/.github/workflows/py_autofix.yml @@ -2,9 +2,7 @@ name: autofix.ci on: pull_request: paths: - - "poetry.lock" - - "pyproject.toml" - - "src/backend/**" + - "**/*.py" env: POETRY_VERSION: "1.8.2" @@ -14,31 +12,10 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - - uses: install-pinned/ruff@48a5818c5f7ce30e2822b67fb9c26d3e25d31fab - - run: ruff check --fix-only . - - run: ruff format . + - name: "Setup Environment" + uses: ./.github/actions/setup-uv + - run: uv run ruff check --fix-only . + - run: uv run ruff format . 
- uses: autofix-ci/action@dd55f44df8f7cdb7a6bf74c78677eb8acd40cd0a - lock: - name: Check Poetry lock - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - name: Set up Python 3.12 - uses: "./.github/actions/poetry_caching" - with: - python-version: "3.12" - poetry-version: ${{ env.POETRY_VERSION }} - cache-key: ${{ runner.os }}-poetry-${{ env.POETRY_VERSION }}-${{ hashFiles('**/poetry.lock') }} - - name: Check poetry.lock - id: check - run: | - poetry check --lock - continue-on-error: true - - - name: Run lock - if : steps.check.outcome == 'failure' - run: | - make lock - - uses: autofix-ci/action@dd55f44df8f7cdb7a6bf74c78677eb8acd40cd0a - - name: Diff poetry.lock - uses: nborrmann/diff-poetry-lock@main + - name: Minimize uv cache + run: uv cache prune --ci diff --git a/.github/workflows/python_test.yml b/.github/workflows/python_test.yml index 1c3039a54af9..7ad466f738f2 100644 --- a/.github/workflows/python_test.yml +++ b/.github/workflows/python_test.yml @@ -8,12 +8,17 @@ on: required: true type: string default: "['3.10', '3.11', '3.12']" - workflow_dispatch: - inputs: - branch: - description: "(Optional) Branch to checkout" + ref: + description: "(Optional) ref to checkout" required: false type: string + nightly: + description: "Whether run is from the nightly build" + required: false + type: boolean + default: false + workflow_dispatch: + inputs: python-versions: description: "(Optional) Python versions to test" required: true @@ -22,6 +27,7 @@ on: env: POETRY_VERSION: "1.8.2" NODE_VERSION: "21" + PYTEST_RUN_PATH: "src/backend/tests" jobs: build: @@ -32,34 +38,75 @@ jobs: python-version: ${{ fromJson(inputs.python-versions || '["3.10", "3.11", "3.12"]' ) }} splitCount: [5] group: [1, 2, 3, 4, 5] - env: - OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} steps: - uses: actions/checkout@v4 with: - ref: ${{ inputs.branch || github.ref }} + ref: ${{ inputs.ref || github.ref }} - name: Setup Node.js uses: actions/setup-node@v4 id: setup-node with: node-version: ${{ env.NODE_VERSION }} - - name: Set up Python ${{ matrix.python-version }} + Poetry ${{ env.POETRY_VERSION }} - uses: "./.github/actions/poetry_caching" + - name: Install uv + uses: astral-sh/setup-uv@v3 with: - python-version: ${{ matrix.python-version }} - poetry-version: ${{ env.POETRY_VERSION }} - cache-key: ${{ runner.os }}-poetry-${{ env.POETRY_VERSION }}-${{ hashFiles('**/poetry.lock') }} - - name: Install Python dependencies - run: | - poetry env use ${{ matrix.python-version }} - poetry install + enable-cache: true + cache-dependency-glob: "uv.lock" + - name: "Set up Python" + uses: actions/setup-python@v5 + with: + python-version-file: "pyproject.toml" + - name: Restore uv cache + uses: actions/cache@v4 + with: + path: /tmp/.uv-cache + key: uv-${{ runner.os }}-${{ hashFiles('uv.lock') }} + restore-keys: | + uv-${{ runner.os }}-${{ hashFiles('uv.lock') }} + uv-${{ runner.os }} + - name: Install the project + run: uv sync --dev - name: Run unit tests uses: nick-fields/retry@v3 with: timeout_minutes: 12 max_attempts: 2 - command: make unit_tests async=false args="--splits ${{ matrix.splitCount }} --group ${{ matrix.group }}" - + command: make unit_tests async=false args="-x --splits ${{ matrix.splitCount }} --group ${{ matrix.group }}" + - name: Minimize uv cache + run: uv cache prune --ci + integration-tests: + name: Integration Tests - Python ${{ matrix.python-version }} + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ${{ fromJson(inputs.python-versions || '["3.10", "3.11", "3.12"]' ) }} + steps: 
+ - uses: actions/checkout@v4 + with: + ref: ${{ inputs.ref || github.ref }} + - name: Install uv + uses: astral-sh/setup-uv@v3 + with: + enable-cache: true + cache-dependency-glob: "uv.lock" + - name: "Set up Python" + uses: actions/setup-python@v5 + with: + python-version-file: "pyproject.toml" + - name: Restore uv cache + uses: actions/cache@v4 + with: + path: /tmp/.uv-cache + key: uv-${{ runner.os }}-${{ hashFiles('uv.lock') }} + restore-keys: | + uv-${{ runner.os }}-${{ hashFiles('uv.lock') }} + uv-${{ runner.os }} + - name: Install the project + run: uv sync --dev + - name: Run integration tests + run: make integration_tests_no_api_keys + - name: Minimize uv cache + run: uv cache prune --ci test-cli: name: Test CLI - Python ${{ matrix.python-version }} runs-on: ubuntu-latest @@ -67,18 +114,25 @@ jobs: matrix: python-version: ${{ fromJson(inputs.python-versions || '["3.10", "3.11", "3.12"]') }} steps: - - uses: actions/checkout@v4 - - name: Set up Python ${{ matrix.python-version }} + Poetry ${{ env.POETRY_VERSION }} - uses: "./.github/actions/poetry_caching" + - name: Check out the code at a specific ref + uses: actions/checkout@v4 with: - python-version: ${{ matrix.python-version }} - poetry-version: ${{ env.POETRY_VERSION }} - cache-key: ${{ runner.os }}-poetry-${{ env.POETRY_VERSION }}-${{ hashFiles('**/poetry.lock') }} + ref: ${{ inputs.ref || github.ref }} + - name: "Setup Environment" + uses: ./.github/actions/setup-uv + - name: Check Version id: check-version + # We need to print $3 because langflow-base is a dependency of langflow + # For langlow we'd use print $2 run: | - version=$(cd src/backend/base && poetry version --short) - last_released_version=$(curl -s "https://pypi.org/pypi/langflow-base/json" | jq -r '.releases | keys | .[]' | sort -V | tail -n 1) + version=$(uv tree | grep 'langflow-base' | awk '{print $3}' | sed 's/^v//') + url="https://pypi.org/pypi/langflow-base/json" + if [ ${{ inputs.nightly }} == true ]; then + url="https://pypi.org/pypi/langflow-base-nightly/json" + fi + + last_released_version=$(curl -s $url | jq -r '.releases | keys | .[]' | sort -V | tail -n 1) if [ "$version" != "$last_released_version" ]; then echo "Version $version has not been released yet. Skipping the rest of the job." 
echo skipped=true >> $GITHUB_OUTPUT @@ -90,7 +144,6 @@ jobs: - name: Build wheel if: steps.check-version.outputs.skipped == 'false' run: | - poetry env use ${{ matrix.python-version }} make build main=true - name: Install wheel if: steps.check-version.outputs.skipped == 'false' @@ -105,11 +158,13 @@ jobs: timeout 120 bash -c 'until curl -f http://127.0.0.1:7860/api/v1/auto_login; do sleep 5; done' || (echo "Server did not start in time" && kill $SERVER_PID && exit 1) # Terminate the server kill $SERVER_PID || (echo "Failed to terminate the server" && exit 1) - sleep 10 # give the server some time to terminate + sleep 20 # give the server some time to terminate # Check if the server is still running if kill -0 $SERVER_PID 2>/dev/null; then echo "Failed to terminate the server" - exit 1 + exit 0 else echo "Server terminated successfully" fi + - name: Minimize uv cache + run: uv cache prune --ci diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 90dd618f50a9..61a3e2dee79c 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -24,16 +24,23 @@ on: required: true type: boolean default: false + build_docker_ep: + description: "Build Docker Image for Langflow with Entrypoint" + required: false + type: boolean + default: false pre_release: description: "Pre-release" required: false type: boolean default: false + create_release: + description: "Whether to create a gh release" + required: false + type: boolean + default: true -env: - POETRY_VERSION: "1.8.2" - jobs: ci: if: ${{ github.event.inputs.release_package_base == 'true' || github.event.inputs.release_package_main == 'true' }} @@ -52,22 +59,16 @@ jobs: version: ${{ steps.check-version.outputs.version }} skipped: ${{ steps.check-version.outputs.skipped }} steps: - - uses: actions/checkout@v4 - - name: Install poetry - run: pipx install poetry==${{ env.POETRY_VERSION }} - - name: Set up Python 3.12 - uses: actions/setup-python@v5 - with: - python-version: "3.12" - cache: "poetry" - - name: Set up Nodejs 20 - uses: actions/setup-node@v4 - with: - node-version: "20" + - name: Checkout code + uses: actions/checkout@v4 + - name: Setup Environment + uses: ./.github/actions/setup-uv + - name: Install the project + run: uv sync --dev - name: Check Version id: check-version run: | - version=$(cd src/backend/base && poetry version --short) + version=$(uv tree | grep 'langflow-base' | awk '{print $3}' | sed 's/^v//') last_released_version=$(curl -s "https://pypi.org/pypi/langflow-base/json" | jq -r '.releases | keys | .[]' | sort -V | tail -n 1) if [ "$version" = "$last_released_version" ]; then echo "Version $version is already released. Skipping release." @@ -79,30 +80,33 @@ jobs: fi - name: Build project for distribution if: steps.check-version.outputs.skipped == 'false' - run: make build base=true + run: make build base=true args="--wheel" - name: Test CLI if: steps.check-version.outputs.skipped == 'false' run: | - python -m pip install src/backend/base/dist/*.whl - python -m langflow run --host 127.0.0.1 --port 7860 & + # TODO: Unsure why the whl is not built in src/backend/base/dist + mkdir src/backend/base/dist + mv dist/*.whl src/backend/base/dist + uv pip install src/backend/base/dist/*.whl + uv run python -m langflow run --host 127.0.0.1 --port 7860 --backend-only & SERVER_PID=$! 
# Wait for the server to start timeout 120 bash -c 'until curl -f http://127.0.0.1:7860/api/v1/auto_login; do sleep 2; done' || (echo "Server did not start in time" && kill $SERVER_PID && exit 1) # Terminate the server kill $SERVER_PID || (echo "Failed to terminate the server" && exit 1) - sleep 10 # give the server some time to terminate + sleep 20 # give the server some time to terminate # Check if the server is still running if kill -0 $SERVER_PID 2>/dev/null; then echo "Failed to terminate the server" - exit 1 + exit 0 else echo "Server terminated successfully" fi - name: Publish to PyPI - if: steps.check-version.outputs.skipped == 'false' env: - POETRY_PYPI_TOKEN_PYPI: ${{ secrets.PYPI_API_TOKEN }} - run: make publish base=true + UV_PUBLISH_TOKEN: ${{ secrets.PYPI_API_TOKEN }} + run: | + make publish base=true - name: Upload Artifact if: steps.check-version.outputs.skipped == 'false' uses: actions/upload-artifact@v4 @@ -118,24 +122,19 @@ jobs: outputs: version: ${{ steps.check-version.outputs.version }} steps: - - uses: actions/checkout@v4 - - name: Install poetry - run: pipx install poetry==${{ env.POETRY_VERSION }} - - name: Set up Python 3.12 - uses: actions/setup-python@v5 - with: - python-version: "3.12" - cache: "poetry" - - name: Set up Nodejs 20 - uses: actions/setup-node@v4 - with: - node-version: "20" + - name: Checkout code + uses: actions/checkout@v4 + - name: Setup Environment + uses: ./.github/actions/setup-uv + - name: Install the project + run: uv sync --dev + # If pre-release is true, we need to check if ["a", "b", "rc", "dev", "post"] is in the version string # if the version string is incorrect, we need to exit the workflow - name: Check if pre-release if: inputs.pre_release == 'true' run: | - version=$(poetry version --short) + version=$(uv tree | grep 'langflow' | grep -v 'langflow-base' | awk '{print $2}' | sed 's/^v//') if [[ "${version}" =~ ^([0-9]+\.)?([0-9]+\.)?[0-9]+((a|b|rc|dev|post)([0-9]+))$ ]]; then echo "Pre-release version detected. Continuing with the release." else @@ -145,7 +144,7 @@ jobs: - name: Check Version id: check-version run: | - version=$(poetry version --short) + version=$(uv tree | grep 'langflow' | grep -v 'langflow-base' | awk '{print $2}' | sed 's/^v//') last_released_version=$(curl -s "https://pypi.org/pypi/langflow/json" | jq -r '.releases | keys | .[]' | sort -V | tail -n 1) if [ "$version" = "$last_released_version" ]; then echo "Version $version is already released. Skipping release." @@ -155,31 +154,32 @@ jobs: fi - name: Wait for PyPI Propagation if: needs.release-base.outputs.skipped == 'false' - run: sleep 300 # wait for 5 minutes to ensure PyPI propagation + run: sleep 300 # wait for 5 minutes to ensure PyPI propagation - name: Build project for distribution - run: make build main=true + run: make build main=true args="--no-sources --wheel" - name: Test CLI run: | - python -m pip install dist/*.whl - python -m langflow run --host 127.0.0.1 --port 7860 --backend-only & + uv pip install dist/*.whl + uv run python -m langflow run --host 127.0.0.1 --port 7860 --backend-only & SERVER_PID=$! 
# Wait for the server to start timeout 120 bash -c 'until curl -f http://127.0.0.1:7860/health_check; do sleep 2; done' || (echo "Server did not start in time" && kill $SERVER_PID && exit 1) # Terminate the server kill $SERVER_PID || (echo "Failed to terminate the server" && exit 1) - sleep 10 # give the server some time to terminate + sleep 20 # give the server some time to terminate # Check if the server is still running if kill -0 $SERVER_PID 2>/dev/null; then echo "Failed to terminate the server" - exit 1 + exit 0 else echo "Server terminated successfully" fi - name: Publish to PyPI env: - POETRY_PYPI_TOKEN_PYPI: ${{ secrets.PYPI_API_TOKEN }} - run: make publish main=true + UV_PUBLISH_TOKEN: ${{ secrets.PYPI_API_TOKEN }} + run: | + make publish main=true - name: Upload Artifact uses: actions/upload-artifact@v4 with: @@ -188,36 +188,38 @@ jobs: call_docker_build_base: name: Call Docker Build Workflow for Langflow Base - if : inputs.build_docker_base == true + if: inputs.build_docker_base == true + needs: [release-base, release-main] uses: ./.github/workflows/docker-build.yml - strategy: - matrix: - release_type: - - base with: - # version should be needs.release-base.outputs.version if release_type is base - # version should be needs.release-main.outputs.version if release_type is main - version: '' - release_type: ${{ matrix.release_type }} + base_version: ${{ needs.release-base.outputs.version }} + main_version: ${{ needs.release-main.outputs.version }} + release_type: base pre_release: ${{ inputs.pre_release }} secrets: inherit call_docker_build_main: name: Call Docker Build Workflow for Langflow - if : inputs.build_docker_main == true + if: inputs.build_docker_main == true + needs: [release-main] uses: ./.github/workflows/docker-build.yml - strategy: - matrix: - release_type: - - main with: - # version should be needs.release-base.outputs.version if release_type is base - # version should be needs.release-main.outputs.version if release_type is main - version: '' - release_type: ${{ matrix.release_type }} + main_version: ${{ needs.release-main.outputs.version }} + release_type: main pre_release: ${{ inputs.pre_release }} secrets: inherit + call_docker_build_main_ep: + name: Call Docker Build Workflow for Langflow with Entrypoint + if: inputs.build_docker_ep == true + needs: [release-main] + uses: ./.github/workflows/docker-build.yml + with: + main_version: ${{ needs.release-main.outputs.version }} + release_type: main-ep + pre_release: False + secrets: inherit + create_release: name: Create Release runs-on: ubuntu-latest @@ -236,4 +238,4 @@ jobs: generateReleaseNotes: true prerelease: ${{ inputs.pre_release }} tag: v${{ needs.release-main.outputs.version }} - commit: ${{ github.ref }} \ No newline at end of file + commit: ${{ github.ref }} diff --git a/.github/workflows/release_nightly.yml b/.github/workflows/release_nightly.yml new file mode 100644 index 000000000000..88c1e0420cd3 --- /dev/null +++ b/.github/workflows/release_nightly.yml @@ -0,0 +1,233 @@ +name: Langflow Nightly Build +run-name: Langflow Nightly Release by @${{ github.actor }} + +on: + workflow_dispatch: + inputs: + build_docker_base: + description: "Build Docker Image for Langflow Nightly Base" + required: true + type: boolean + default: false + build_docker_main: + description: "Build Docker Image for Langflow Nightly" + required: true + type: boolean + default: false + build_docker_ep: + description: "Build Docker Image for Langflow Nightly with Entrypoint" + required: false + type: boolean + default: false + 
nightly_tag_main: + description: "Tag for the nightly main build" + required: true + type: string + nightly_tag_base: + description: "Tag for the nightly base build" + required: true + type: string + workflow_call: + inputs: + build_docker_base: + description: "Build Docker Image for Langflow Nightly Base" + required: true + type: boolean + default: false + build_docker_main: + description: "Build Docker Image for Langflow Nightly" + required: true + type: boolean + default: false + build_docker_ep: + description: "Build Docker Image for Langflow Nightly with Entrypoint" + required: false + type: boolean + default: false + nightly_tag_main: + description: "Tag for the nightly main build" + required: true + type: string + nightly_tag_base: + description: "Tag for the nightly base build" + required: true + type: string + +env: + POETRY_VERSION: "1.8.3" + PYTHON_VERSION: "3.12" + +jobs: + release-nightly-base: + name: Release Langflow Nightly Base + runs-on: ubuntu-latest + defaults: + run: + shell: bash + outputs: + version: ${{ steps.verify.outputs.version }} + steps: + - name: Check out the code at a specific ref + uses: actions/checkout@v4 + with: + ref: ${{ inputs.nightly_tag_main }} + persist-credentials: true + - name: "Setup Environment" + uses: ./.github/actions/setup-uv + - name: Install the project + run: uv sync --dev + + - name: Verify Nightly Name and Version + id: verify + run: | + name=$(uv tree | grep 'langflow-base' | awk '{print $2}') + version=$(uv tree | grep 'langflow-base' | awk '{print $3}') + if [ "$name" != "langflow-base-nightly" ]; then + echo "Name $name does not match langflow-base-nightly. Exiting the workflow." + exit 1 + fi + if [ "$version" != "${{ inputs.nightly_tag_base }}" ]; then + echo "Version $version does not match nightly tag ${{ inputs.nightly_tag_base }}. Exiting the workflow." + exit 1 + fi + # Strip the leading `v` from the version + version=$(echo $version | sed 's/^v//') + echo "version=$version" >> $GITHUB_OUTPUT + + - name: Build project for distribution + run: make build base=true args="--wheel" + + - name: Test CLI + run: | + # TODO: Unsure why the whl is not built in src/backend/base/dist + mkdir src/backend/base/dist + mv dist/*.whl src/backend/base/dist/ + uv pip install src/backend/base/dist/*.whl + uv run python -m langflow run --host 127.0.0.1 --port 7860 --backend-only & + SERVER_PID=$! 
+ # Wait for the server to start + timeout 120 bash -c 'until curl -f http://127.0.0.1:7860/api/v1/auto_login; do sleep 2; done' || (echo "Server did not start in time" && kill $SERVER_PID && exit 1) + # Terminate the server + kill $SERVER_PID || (echo "Failed to terminate the server" && exit 1) + sleep 20 # give the server some time to terminate + # Check if the server is still running + if kill -0 $SERVER_PID 2>/dev/null; then + echo "Failed to terminate the server" + exit 0 + else + echo "Server terminated successfully" + fi + + - name: Publish to PyPI + env: + POETRY_PYPI_TOKEN_PYPI: ${{ secrets.PYPI_API_TOKEN }} + UV_PUBLISH_TOKEN: ${{ secrets.PYPI_API_TOKEN }} + run: | + make publish base=true + + - name: Upload Artifact + uses: actions/upload-artifact@v4 + with: + name: dist-base + path: src/backend/base/dist + + release-nightly-main: + name: Release Langflow Nightly Main + needs: [release-nightly-base] + runs-on: ubuntu-latest + outputs: + version: ${{ steps.verify.outputs.version }} + defaults: + run: + shell: bash + steps: + - name: Check out the code at a specific ref + uses: actions/checkout@v4 + with: + ref: ${{ inputs.nightly_tag_main}} + persist-credentials: true + - name: "Setup Environment" + uses: ./.github/actions/setup-uv + - name: Install the project + run: uv sync --dev + + - name: Verify Nightly Name and Version + id: verify + run: | + name=$(uv tree | grep 'langflow' | grep -v 'langflow-base' | awk '{print $1}') + version=$(uv tree | grep 'langflow' | grep -v 'langflow-base' | awk '{print $2}') + if [ "$name" != "langflow-nightly" ]; then + echo "Name $name does not match langflow-nightly. Exiting the workflow." + exit 1 + fi + if [ "$version" != "${{ inputs.nightly_tag_main }}" ]; then + echo "Version $version does not match nightly tag ${{ inputs.nightly_tag_main }}. Exiting the workflow." + exit 1 + fi + # Strip the leading `v` from the version + version=$(echo $version | sed 's/^v//') + echo "version=$version" >> $GITHUB_OUTPUT + - name: Wait for PyPI Propagation + run: sleep 300 # wait for 5 minutes to ensure PyPI propagation of base + + - name: Build project for distribution + run: make build main=true args="--no-sources --wheel" + - name: Test CLI + run: | + uv pip install dist/*.whl + uv run python -m langflow run --host 127.0.0.1 --port 7860 --backend-only & + SERVER_PID=$! 
+ # Wait for the server to start + timeout 120 bash -c 'until curl -f http://127.0.0.1:7860/health_check; do sleep 2; done' || (echo "Server did not start in time" && kill $SERVER_PID && exit 1) + # Terminate the server + kill $SERVER_PID || (echo "Failed to terminate the server" && exit 1) + sleep 20 # give the server some time to terminate + # Check if the server is still running + if kill -0 $SERVER_PID 2>/dev/null; then + echo "Failed to terminate the server" + exit 1 + else + echo "Server terminated successfully" + fi + - name: Publish to PyPI + env: + POETRY_PYPI_TOKEN_PYPI: ${{ secrets.PYPI_API_TOKEN }} + UV_PUBLISH_TOKEN: ${{ secrets.PYPI_API_TOKEN }} + run: | + make publish main=true + - name: Upload Artifact + uses: actions/upload-artifact@v4 + with: + name: dist-main + path: dist + + call_docker_build_base: + name: Call Docker Build Workflow for Langflow Base + if: ${{ always() && inputs.build_docker_base }} + needs: [release-nightly-base, release-nightly-main] + uses: ./.github/workflows/docker-build.yml + with: + release_type: nightly-base + base_version: ${{ inputs.nightly_tag_base }} + main_version: ${{ inputs.nightly_tag_main }} + secrets: inherit + + call_docker_build_main: + name: Call Docker Build Workflow for Langflow + if: ${{ always() && inputs.build_docker_main }} + needs: [release-nightly-main] + uses: ./.github/workflows/docker-build.yml + with: + release_type: nightly-main + main_version: ${{ inputs.nightly_tag_main }} + secrets: inherit + + call_docker_build_main_ep: + name: Call Docker Build Workflow for Langflow with Entrypoint + if: ${{ always() && inputs.build_docker_ep }} + needs: [release-nightly-main] + uses: ./.github/workflows/docker-build.yml + with: + release_type: main-ep + main_version: ${{ inputs.nightly_tag_main }} + secrets: inherit diff --git a/.github/workflows/scheduled_integration_test.yml b/.github/workflows/scheduled_integration_test.yml deleted file mode 100644 index 56afc035f2ea..000000000000 --- a/.github/workflows/scheduled_integration_test.yml +++ /dev/null @@ -1,49 +0,0 @@ -name: Integration tests - -on: - workflow_dispatch: - inputs: - branch: - description: "(Optional) Branch to checkout" - required: false - type: string - schedule: - - cron: "0 0 */2 * *" # Run every 2 days - -env: - POETRY_VERSION: "1.8.2" - -jobs: - test-integration: - name: Run Integration Tests - runs-on: ubuntu-latest - strategy: - max-parallel: 1 # Currently, we can only run at a time for collection-per-db-constraints - matrix: - python-version: - - "3.12" - - "3.11" - - "3.10" - env: - OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} - ASTRA_DB_API_ENDPOINT: ${{ secrets.ASTRA_DB_API_ENDPOINT }} - ASTRA_DB_APPLICATION_TOKEN: ${{ secrets.ASTRA_DB_APPLICATION_TOKEN }} - steps: - - name: Checkout code - uses: actions/checkout@v4 - with: - ref: ${{ inputs.branch || github.ref }} - name: Set up Python ${{ matrix.python-version }} + Poetry ${{ env.POETRY_VERSION }} - uses: "./.github/actions/poetry_caching" - with: - python-version: ${{ matrix.python-version }} - poetry-version: ${{ env.POETRY_VERSION }} - cache-key: ${{ runner.os }}-poetry-${{ env.POETRY_VERSION }}-${{ hashFiles('**/poetry.lock') }} - name: Install Python dependencies - run: | - poetry env use ${{ matrix.python-version }} - poetry install - name: Run integration tests - timeout-minutes: 12 - run: | - make integration_tests diff --git a/.github/workflows/style-check-py.yml b/.github/workflows/style-check-py.yml index dfab075ce67e..25abc5e4319d 100644 ---
a/.github/workflows/style-check-py.yml +++ b/.github/workflows/style-check-py.yml @@ -3,11 +3,12 @@ name: Ruff Style Check on: pull_request: types: [opened, synchronize, reopened, auto_merge_enabled] + paths: + - "**/*.py" + -env: - POETRY_VERSION: "1.8.2" jobs: lint: @@ -18,18 +19,13 @@ jobs: python-version: - "3.12" steps: - - uses: actions/checkout@v4 - - name: Set up Python ${{ matrix.python-version }} + Poetry ${{ env.POETRY_VERSION }} - uses: "./.github/actions/poetry_caching" - with: - python-version: ${{ matrix.python-version }} - poetry-version: ${{ env.POETRY_VERSION }} - cache-key: ${{ runner.os }}-poetry-${{ env.POETRY_VERSION }}-${{ hashFiles('**/poetry.lock') }} - - name: Install Python dependencies - run: | - poetry env use ${{ matrix.python-version }} - poetry install + - name: Check out the code at a specific ref + uses: actions/checkout@v4 + - name: "Setup Environment" + uses: ./.github/actions/setup-uv - name: Register problem matcher run: echo "::add-matcher::.github/workflows/matchers/ruff.json" - name: Run Ruff Check - run: poetry run ruff check --output-format=github . + run: uv run --only-dev ruff check --output-format=github . + - name: Minimize uv cache + run: uv cache prune --ci diff --git a/.github/workflows/typescript_test.yml b/.github/workflows/typescript_test.yml index 4bcd9834dcfc..2ef7e6579f29 100644 --- a/.github/workflows/typescript_test.yml +++ b/.github/workflows/typescript_test.yml @@ -13,12 +13,12 @@ on: required: false type: string default: "tests" - workflow_dispatch: - inputs: - branch: - description: "(Optional) Branch to checkout" + ref: + description: "(Optional) ref to checkout" required: false type: string + workflow_dispatch: + inputs: tests_folder: description: "(Optional) Tests to run" required: false @@ -26,7 +26,6 @@ on: default: "tests" env: - POETRY_VERSION: "1.8.3" NODE_VERSION: "21" PYTHON_VERSION: "3.12" # Define the directory where Playwright browsers will be installed. 
@@ -42,10 +41,11 @@ jobs: matrix: shardIndex: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] shardTotal: [10] + env: OPENAI_API_KEY: ${{ inputs.openai_api_key || secrets.OPENAI_API_KEY }} STORE_API_KEY: ${{ inputs.store_api_key || secrets.STORE_API_KEY }} - BRAVE_SEARCH_API_KEY: "${{ secrets.BRAVE_SEARCH_API_KEY }}" + SEARCH_API_KEY: "${{ secrets.SEARCH_API_KEY }}" ASTRA_DB_APPLICATION_TOKEN: "${{ secrets.ASTRA_DB_APPLICATION_TOKEN }}" ASTRA_DB_API_ENDPOINT: "${{ secrets.ASTRA_DB_API_ENDPOINT }}" outputs: @@ -54,9 +54,9 @@ jobs: - name: Checkout code uses: actions/checkout@v4 with: - # If branch is passed as input, checkout that branch - # else checkout the default branch - ref: ${{ github.event.inputs.branch || github.ref }} + # If ref is passed as input, checkout that ref + # else checkout the default ref + ref: ${{ inputs.ref || github.ref }} - name: Setup Node.js uses: actions/setup-node@v4 @@ -103,17 +103,10 @@ jobs: npx playwright install-deps if: steps.playwright-cache.outputs.cache-hit != 'true' - - name: Set up Python ${{ env.PYTHON_VERSION }} + Poetry ${{ env.POETRY_VERSION }} - uses: "./.github/actions/poetry_caching" - with: - python-version: ${{ env.PYTHON_VERSION }} - poetry-version: ${{ env.POETRY_VERSION }} - cache-key: tests - - - name: Install Python dependencies - run: | - poetry env use ${{ env.PYTHON_VERSION }} - poetry install + - name: "Setup Environment" + uses: ./.github/actions/setup-uv + - name: Install the project + run: uv sync --dev - name: create .env run: | @@ -127,6 +120,7 @@ jobs: max_attempts: 2 command: | cd src/frontend + npx playwright test ${{ inputs.tests_folder }} --shard ${{ matrix.shardIndex }}/${{ matrix.shardTotal }} --list npx playwright test ${{ inputs.tests_folder }} --trace on --shard ${{ matrix.shardIndex }}/${{ matrix.shardTotal }} --workers 2 - name: Upload blob report to GitHub Actions Artifacts @@ -137,6 +131,9 @@ jobs: path: src/frontend/blob-report retention-days: 1 + - name: Minimize uv cache + run: uv cache prune --ci + merge-reports: # We need to repeat the condition at every step # https://github.com/actions/runner/issues/662 diff --git a/.gitignore b/.gitignore index f3fb37a39f83..a73b28fff385 100644 --- a/.gitignore +++ b/.gitignore @@ -9,6 +9,7 @@ yarn-error.log* lerna-debug.log* qdrant_storage +.dspy_cache # Mac .DS_Store @@ -273,3 +274,6 @@ src/frontend/temp *-shm *-wal .history + +.dspy_cache/ +cache.db diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 16599a199c7b..42b3ae39bba5 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,4 +1,3 @@ -fail_fast: true repos: - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.1.0 @@ -12,3 +11,15 @@ repos: args: - --fix=lf - id: trailing-whitespace + - repo: local + hooks: + - id: ruff-check + name: ruff check + language: system + entry: bash -c "uv run ruff check" + types: [file, python] + - id: ruff-format + name: ruff format + language: system + entry: bash -c "uv run ruff format" + types: [file, python] diff --git a/.vscode/launch.json b/.vscode/launch.json index 1f66413d183f..1bc163fb5815 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -6,6 +6,8 @@ "type": "debugpy", "request": "launch", "module": "uvicorn", + "preLaunchTask": "Install Backend", + "args": [ "--factory", "langflow.main:create_app", diff --git a/.vscode/tasks.json b/.vscode/tasks.json index 25b480b27a28..5142610af1f1 100644 --- a/.vscode/tasks.json +++ b/.vscode/tasks.json @@ -24,7 +24,7 @@ { "label": "Test", "type": "shell", - "command": "make tests" + "command": 
"make unit_tests" }, // make lint { @@ -43,6 +43,12 @@ "label": "Install", "type": "shell", "command": "make install_backend && make install_frontend" + }, + // install backend + { + "label": "Install Backend", + "type": "shell", + "command": "make install_backend" } ] } diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 7247d2b7a6ba..d624c2a23db8 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -6,7 +6,6 @@ to contributions, whether it be in the form of a new feature, improved infra, or To contribute to this project, please follow the [fork and pull request](https://docs.github.com/en/get-started/quickstart/contributing-to-projects) workflow. - ## Reporting bugs or suggesting improvements Our [GitHub issues](https://github.com/langflow-ai/langflow/issues) page is kept up to date @@ -32,10 +31,9 @@ so that more people can benefit from it. [collapses the content](https://developer.mozilla.org/en/docs/Web/HTML/Element/details) so it only becomes visible on click, making the issue easier to read and follow. - ## Contributing code and documentation -You can develop Langflow locally via Poetry + NodeJS or docker-compose. +You can develop Langflow locally via uv + NodeJS. ### Clone the Langflow Repository @@ -50,15 +48,16 @@ git remote add fork https://github.com//langflow.git We also provide a .vscode/launch.json file for debugging the backend in VSCode, which is a lot faster than using docker compose. ### Prepare the environment + Setting up hooks: ```bash make init ``` -This will install the pre-commit hooks, which will run `make format` on every commit. +This will set up the development environment by installing backend and frontend dependencies, building the frontend static files, and initializing the project. It runs `make install_backend`, `make install_frontend`, `make build_frontend`, and finally `uv run langflow run` to start the application. -It is advised to run `make lint` before pushing to the repository. +It is advised to run `make lint`, `make format`, and `make unit_tests` before pushing to the repository. ### Run locally (Poetry and Node.js) @@ -66,7 +65,7 @@ Langflow can run locally by cloning the repository and installing the dependenci Before you start, make sure you have the following installed: -- Poetry (>=1.4) +- uv (>=0.4) - Node.js Then, in the root folder, install the dependencies and start the development server for the backend: @@ -81,17 +80,6 @@ And the frontend: make frontend ``` - -### Run locally (docker compose) - -The following snippet will run the backend and frontend in separate containers. The frontend will be available at `localhost:3000` and the backend at `localhost:7860`. - -```bash -docker compose up --build -# or -make dev build=1 -``` - ### Run documentation The documentation is built using [Docusaurus](https://docusaurus.io/). To run the documentation locally, run the following commands: @@ -104,8 +92,8 @@ npm run start The documentation will be available at `localhost:3000` and all the files are located in the `docs/docs` folder. - ## Opening a pull request + Once you wrote and manually tested your change, you can start sending the patch to the main repository. - Open a new GitHub pull request with the patch against the `main` branch. 
diff --git a/Makefile b/Makefile index 71c7baa56905..cd5aaf9dd4a8 100644 --- a/Makefile +++ b/Makefile @@ -6,7 +6,7 @@ DOCKERFILE=docker/build_and_push.Dockerfile DOCKERFILE_BACKEND=docker/build_and_push_backend.Dockerfile DOCKERFILE_FRONTEND=docker/frontend/build_and_push_frontend.Dockerfile DOCKER_COMPOSE=docker_example/docker-compose.yml -PYTHON_REQUIRED=$(shell grep '^python[[:space:]]*=' pyproject.toml | sed -n 's/.*"\([^"]*\)".*/\1/p') +PYTHON_REQUIRED=$(shell grep '^requires-python[[:space:]]*=' pyproject.toml | sed -n 's/.*"\([^"]*\)".*/\1/p') RED=\033[0;31m NC=\033[0m # No Color GREEN=\033[0;32m @@ -19,6 +19,8 @@ open_browser ?= true path = src/backend/base/langflow/frontend workers ?= 1 async ?= true +lf ?= false +ff ?= true all: help ###################### @@ -35,9 +37,8 @@ patch: ## bump the version in langflow and langflow-base # check for required tools check_tools: - @command -v poetry >/dev/null 2>&1 || { echo >&2 "$(RED)Poetry is not installed. Aborting.$(NC)"; exit 1; } + @command -v uv >/dev/null 2>&1 || { echo >&2 "$(RED)uv is not installed. Aborting.$(NC)"; exit 1; } @command -v npm >/dev/null 2>&1 || { echo >&2 "$(RED)NPM is not installed. Aborting.$(NC)"; exit 1; } - @command -v docker >/dev/null 2>&1 || { echo >&2 "$(RED)Docker is not installed. Aborting.$(NC)"; exit 1; } @command -v pipx >/dev/null 2>&1 || { echo >&2 "$(RED)pipx is not installed. Aborting.$(NC)"; exit 1; } @$(MAKE) check_env @echo "$(GREEN)All required tools are installed.$(NC)" @@ -45,10 +46,7 @@ check_tools: # check if Python version is compatible check_env: ## check if Python version is compatible @chmod +x scripts/setup/check_env.sh - @PYTHON_INSTALLED=$$(scripts/setup/check_env.sh python --version 2>&1 | awk '{print $$2}'); \ - if ! scripts/setup/check_env.sh python -c "import sys; from packaging.specifiers import SpecifierSet; from packaging.version import Version; sys.exit(not SpecifierSet('$(PYTHON_REQUIRED)').contains(Version('$$PYTHON_INSTALLED')))" 2>/dev/null; then \ - echo "$(RED)Error: Python version $$PYTHON_INSTALLED is not compatible with the required version $(PYTHON_REQUIRED). 
Aborting.$(NC)"; exit 1; \ - fi + @scripts/setup/check_env.sh "$(PYTHON_REQUIRED)" help: ## show this help message @echo '----' @@ -61,25 +59,30 @@ help: ## show this help message # INSTALL PROJECT ###################### +reinstall_backend: ## forces reinstall all dependencies (no caching) + @echo 'Installing backend dependencies' + @uv sync -n --reinstall --frozen + install_backend: ## install the backend dependencies @echo 'Installing backend dependencies' - @poetry install + @uv sync --frozen install_frontend: ## install the frontend dependencies @echo 'Installing frontend dependencies' - cd src/frontend && npm install + @cd src/frontend && npm install > /dev/null 2>&1 build_frontend: ## build the frontend static files - cd src/frontend && CI='' npm run build - rm -rf src/backend/base/langflow/frontend - cp -r src/frontend/build src/backend/base/langflow/frontend + @echo 'Building frontend static files' + @cd src/frontend && CI='' npm run build > /dev/null 2>&1 + @rm -rf src/backend/base/langflow/frontend + @cp -r src/frontend/build src/backend/base/langflow/frontend init: check_tools clean_python_cache clean_npm_cache ## initialize the project - make install_backend - make install_frontend - make build_frontend + @make install_backend + @make install_frontend + @make build_frontend @echo "$(GREEN)All requirements are installed.$(NC)" - python -m langflow run + @uv run langflow run ###################### # CLEAN PROJECT @@ -91,6 +94,7 @@ clean_python_cache: find . -type f -name '*.py[cod]' -exec rm -f {} + find . -type f -name '*~' -exec rm -f {} + find . -type f -name '.*~' -exec rm -f {} + + find . -type d -empty -delete @echo "$(GREEN)Python cache cleaned.$(NC)" clean_npm_cache: @@ -102,21 +106,21 @@ clean_npm_cache: clean_all: clean_python_cache clean_npm_cache # clean all caches and temporary directories @echo "$(GREEN)All caches and temporary directories cleaned.$(NC)" -setup_poetry: ## install poetry using pipx - pipx install poetry +setup_uv: ## install poetry using pipx + pipx install uv add: @echo 'Adding dependencies' ifdef devel - cd src/backend/base && poetry add --group dev $(devel) + @cd src/backend/base && uv add --group dev $(devel) endif ifdef main - poetry add $(main) + @uv add $(main) endif ifdef base - cd src/backend/base && poetry add $(base) + @cd src/backend/base && uv add $(base) endif @@ -126,29 +130,39 @@ endif ###################### coverage: ## run the tests and generate a coverage report - @poetry run coverage run - @poetry run coverage erase + @uv run coverage run + @uv run coverage erase unit_tests: ## run unit tests -ifeq ($(async), true) - poetry run pytest src/backend/tests \ - --ignore=src/backend/tests/integration \ - --instafail -n auto -ra -m "not api_key_required" \ - --durations-path src/backend/tests/.test_durations \ - --splitting-algorithm least_duration \ + @uv sync --extra dev --frozen + @EXTRA_ARGS="" + @if [ "$(async)" = "true" ]; then \ + EXTRA_ARGS="$$EXTRA_ARGS --instafail -n auto"; \ + fi; \ + if [ "$(lf)" = "true" ]; then \ + EXTRA_ARGS="$$EXTRA_ARGS --lf"; \ + fi; \ + if [ "$(ff)" = "true" ]; then \ + EXTRA_ARGS="$$EXTRA_ARGS --ff"; \ + fi; \ + uv run pytest src/backend/tests --ignore=src/backend/tests/integration $$EXTRA_ARGS --instafail -ra -m 'not api_key_required' --durations-path src/backend/tests/.test_durations --splitting-algorithm least_duration $(args) + +unit_tests_looponfail: + @make unit_tests args="-f" + +integration_tests: + uv run pytest src/backend/tests/integration \ + --instafail -ra \ $(args) -else - poetry run 
pytest src/backend/tests \ - --ignore=src/backend/tests/integration \ + +integration_tests_no_api_keys: + uv run pytest src/backend/tests/integration \ --instafail -ra -m "not api_key_required" \ - --durations-path src/backend/tests/.test_durations \ - --splitting-algorithm least_duration \ $(args) -endif -integration_tests: ## run integration tests - poetry run pytest src/backend/tests/integration \ - --instafail -ra \ +integration_tests_api_keys: + uv run pytest src/backend/tests/integration \ + --instafail -ra -m "api_key_required" \ $(args) tests: ## run unit, integration, coverage tests @@ -172,37 +186,35 @@ fix_codespell: ## run codespell to fix spelling errors poetry run codespell --toml pyproject.toml --write format: ## run code formatters - poetry run ruff check . --fix - poetry run ruff format . - cd src/frontend && npm run format + @uv run ruff check . --fix + @uv run ruff format . + @cd src/frontend && npm run format + +unsafe_fix: + @uv run ruff check . --fix --unsafe-fixes -lint: ## run linters - poetry run mypy --namespace-packages -p "langflow" +lint: install_backend ## run linters + @uv run mypy --namespace-packages -p "langflow" install_frontendci: - cd src/frontend && npm ci + @cd src/frontend && npm ci > /dev/null 2>&1 install_frontendc: - cd src/frontend && rm -rf node_modules package-lock.json && npm install + @cd src/frontend && rm -rf node_modules package-lock.json && npm install > /dev/null 2>&1 run_frontend: ## run the frontend @-kill -9 `lsof -t -i:3000` - cd src/frontend && npm start + @cd src/frontend && npm start tests_frontend: ## run frontend tests ifeq ($(UI), true) - cd src/frontend && npx playwright test --ui --project=chromium + @cd src/frontend && npx playwright test --ui --project=chromium else - cd src/frontend && npx playwright test --project=chromium + @cd src/frontend && npx playwright test --project=chromium endif -run_cli: +run_cli: install_frontend install_backend build_frontend ## run the CLI @echo 'Running the CLI' - @make install_frontend > /dev/null - @echo 'Install backend dependencies' - @make install_backend > /dev/null - @echo 'Building the frontend' - @make build_frontend > /dev/null ifdef env @make start env=$(env) host=$(host) port=$(port) log_level=$(log_level) else @@ -226,16 +238,16 @@ start: @echo 'Running the CLI' ifeq ($(open_browser),false) - @make install_backend && poetry run langflow run \ - --path $(path) \ + @make install_backend && uv run langflow run \ + --frontend-path $(path) \ --log-level $(log_level) \ --host $(host) \ --port $(port) \ --env-file $(env) \ --no-open-browser else - @make install_backend && poetry run langflow run \ - --path $(path) \ + @make install_backend && uv run langflow run \ + --frontend-path $(path) \ --log-level $(log_level) \ --host $(host) \ --port $(port) \ @@ -246,104 +258,84 @@ setup_devcontainer: ## set up the development container make install_backend make install_frontend make build_frontend - poetry run langflow --path src/frontend/build + uv run langflow --frontend-path src/frontend/build setup_env: ## set up the environment @sh ./scripts/setup/setup_env.sh -frontend: ## run the frontend in development mode - make install_frontend +frontend: install_frontend ## run the frontend in development mode make run_frontend -frontendc: - make install_frontendc +frontendc: install_frontendc make run_frontend - -backend: ## run the backend in development mode - @echo 'Setting up the environment' - @make setup_env - make install_backend - @-kill -9 $$(lsof -t -i:7860) +backend: setup_env 
install_backend ## run the backend in development mode + @-kill -9 $$(lsof -t -i:7860) || true ifdef login @echo "Running backend autologin is $(login)"; - LANGFLOW_AUTO_LOGIN=$(login) poetry run uvicorn \ + LANGFLOW_AUTO_LOGIN=$(login) uv run uvicorn \ --factory langflow.main:create_app \ --host 0.0.0.0 \ --port $(port) \ - --reload \ + $(if $(filter-out 1,$(workers)),, --reload) \ --env-file $(env) \ --loop asyncio \ - --workers $(workers) + $(if $(workers),--workers $(workers),) else @echo "Running backend respecting the $(env) file"; - poetry run uvicorn \ + uv run uvicorn \ --factory langflow.main:create_app \ --host 0.0.0.0 \ --port $(port) \ - --reload \ + $(if $(filter-out 1,$(workers)),, --reload) \ --env-file $(env) \ --loop asyncio \ - --workers $(workers) + $(if $(workers),--workers $(workers),) endif -build_and_run: ## build the project and run it - @echo 'Removing dist folder' - @make setup_env +build_and_run: setup_env ## build the project and run it rm -rf dist rm -rf src/backend/base/dist make build - poetry run pip install dist/*.tar.gz - poetry run langflow run + uv run pip install dist/*.tar.gz + uv run langflow run build_and_install: ## build the project and install it @echo 'Removing dist folder' rm -rf dist rm -rf src/backend/base/dist - make build && poetry run pip install dist/*.whl && pip install src/backend/base/dist/*.whl --force-reinstall + make build && uv run pip install dist/*.whl && pip install src/backend/base/dist/*.whl --force-reinstall -build: ## build the frontend static files and package the project - @echo 'Building the project' - @make setup_env +build: setup_env ## build the frontend static files and package the project ifdef base make install_frontendci make build_frontend - make build_langflow_base + make build_langflow_base args="$(args)" endif ifdef main make install_frontendci make build_frontend - make build_langflow_base - make build_langflow + make build_langflow_base args="$(args)" + make build_langflow args="$(args)" endif build_langflow_base: - cd src/backend/base && poetry build + cd src/backend/base && uv build $(args) rm -rf src/backend/base/langflow/frontend build_langflow_backup: - poetry lock && poetry build + uv lock && uv build build_langflow: - cd ./scripts && poetry run python update_dependencies.py - poetry lock --no-update - poetry build + uv lock --no-upgrade + uv build $(args) ifdef restore mv pyproject.toml.bak pyproject.toml - mv poetry.lock.bak poetry.lock + mv uv.lock.bak uv.lock endif -dev: ## run the project in development mode with docker compose - make install_frontend -ifeq ($(build),1) - @echo 'Running docker compose up with build' - docker compose $(if $(debug),-f docker-compose.debug.yml) up --build -else - @echo 'Running docker compose up without build' - docker compose $(if $(debug),-f docker-compose.debug.yml) up -endif docker_build: dockerfile_build clear_dockerimage ## build DockerFile @@ -391,26 +383,34 @@ dcdev_up: docker compose -f docker/dev.docker-compose.yml up --remove-orphans lock_base: - cd src/backend/base && poetry lock + cd src/backend/base && uv lock lock_langflow: - poetry lock + uv lock lock: ## lock dependencies @echo 'Locking dependencies' - cd src/backend/base && poetry lock --no-update - poetry lock --no-update + cd src/backend/base && uv lock + uv lock update: ## update dependencies @echo 'Updating dependencies' - cd src/backend/base && poetry update - poetry update + cd src/backend/base && uv sync --upgrade + uv sync --upgrade publish_base: - cd src/backend/base && poetry publish 
--skip-existing + cd src/backend/base && uv publish publish_langflow: - poetry publish + uv publish + +publish_base_testpypi: + # TODO: update this to use the test-pypi repository + cd src/backend/base && uv publish -r test-pypi + +publish_langflow_testpypi: + # TODO: update this to use the test-pypi repository + uv publish -r test-pypi publish: ## build the frontend static files and package the project and publish it to PyPI @echo 'Publishing the project' @@ -421,3 +421,49 @@ endif ifdef main make publish_langflow endif + +publish_testpypi: ## build the frontend static files and package the project and publish it to PyPI + @echo 'Publishing the project' + +ifdef base + #TODO: replace with uvx twine upload dist/* + poetry config repositories.test-pypi https://test.pypi.org/legacy/ + make publish_base_testpypi +endif + +ifdef main + #TODO: replace with uvx twine upload dist/* + poetry config repositories.test-pypi https://test.pypi.org/legacy/ + make publish_langflow_testpypi +endif + + +# example make alembic-revision message="Add user table" +alembic-revision: ## generate a new migration + @echo 'Generating a new Alembic revision' + cd src/backend/base/langflow/ && uv run alembic revision --autogenerate -m "$(message)" + + +alembic-upgrade: ## upgrade database to the latest version + @echo 'Upgrading database to the latest version' + cd src/backend/base/langflow/ && uv run alembic upgrade head + +alembic-downgrade: ## downgrade database by one version + @echo 'Downgrading database by one version' + cd src/backend/base/langflow/ && uv run alembic downgrade -1 + +alembic-current: ## show current revision + @echo 'Showing current Alembic revision' + cd src/backend/base/langflow/ && uv run alembic current + +alembic-history: ## show migration history + @echo 'Showing Alembic migration history' + cd src/backend/base/langflow/ && uv run alembic history --verbose + +alembic-check: ## check migration status + @echo 'Running alembic check' + cd src/backend/base/langflow/ && uv run alembic check + +alembic-stamp: ## stamp the database with a specific revision + @echo 'Stamping the database with revision $(revision)' + cd src/backend/base/langflow/ && uv run alembic stamp $(revision) diff --git a/README.ES.md b/README.ES.md index acf8c6342bd1..3c2fd27a6f58 100644 --- a/README.ES.md +++ b/README.ES.md @@ -132,7 +132,7 @@ Cada opción se detalla a continuación: - `--help`: Muestra todas las opciones disponibles. - `--host`: Establece el host al que vincular el servidor. Se puede configurar usando la variable de entorno `LANGFLOW_HOST`. El valor predeterminado es `127.0.0.1`. - `--workers`: Establece el número de procesos. Se puede configurar usando la variable de entorno `LANGFLOW_WORKERS`. El valor predeterminado es `1`. -- `--timeout`: Establece el tiempo de espera del worker en segundos. El valor predeterminado es `60`. +- `--worker-timeout`: Establece el tiempo de espera del worker en segundos. El valor predeterminado es `60`. - `--port`: Establece el puerto en el que escuchar. Se puede configurar usando la variable de entorno `LANGFLOW_PORT`. El valor predeterminado es `7860`. - `--env-file`: Especifica la ruta al archivo .env que contiene variables de entorno. El valor predeterminado es `.env`. - `--log-level`: Establece el nivel de registro. Se puede configurar usando la variable de entorno `LANGFLOW_LOG_LEVEL`. El valor predeterminado es `critical`. diff --git a/README.KR.md b/README.KR.md index 52fea323de11..1744a5a95f70 100644 --- a/README.KR.md +++ b/README.KR.md @@ -33,7 +33,7 @@
README in English README in Portuguese - README in Spanish + README in Spanish README in Simplified Chinese README in Japanese README in KOREAN @@ -154,7 +154,7 @@ langflow run [OPTIONS] - `--help`: 사용 가능한 모든 옵션을 표시합니다. - `--host`: 서버를 바인딩할 호스트를 정의합니다. `LANGFLOW_HOST` 환경 변수를 사용하여 설정할 수 있습니다. 기본 값은 `127.0.0.1`입니다. - `--workers`: 작업자 프로세스 수를 설정합니다. `LANGFLOW_WORKERS` 환경 변수를 사용하여 설정할 수 있습니다. 기본 값은 `1`입니다. -- `--timeout`: 작업자 시간 제한을 초 단위로 설정합니다. 기본 값은 `60`입니다. +- `--worker-timeout`: 작업자 시간 제한을 초 단위로 설정합니다. 기본 값은 `60`입니다. - `--port`: 수신할 포트를 설정합니다. `LANGFLOW_PORT` 환경 변수를 사용하여 설정할 수 있습니다. 기본 값은 `7860`입니다. - `--env-file`: 환경 변수가 포함된 .env 파일의 경로를 지정합니다. 기본 값은 `.env`입니다. - `--log-level`: 로깅 수준을 정의합니다. `LANGFLOW_LOG_LEVEL` 환경 변수를 사용하여 설정할 수 있습니다. 기본 값은 `critical`입니다. diff --git a/README.PT.md b/README.PT.md index 10a33e21aac5..df605107424b 100644 --- a/README.PT.md +++ b/README.PT.md @@ -134,7 +134,7 @@ Cada opção é detalhada abaixo: - `--help`: Exibe todas as opções disponíveis. - `--host`: Define o host para vincular o servidor. Pode ser configurado usando a variável de ambiente `LANGFLOW_HOST`. O padrão é `127.0.0.1`. - `--workers`: Define o número de processos. Pode ser configurado usando a variável de ambiente `LANGFLOW_WORKERS`. O padrão é `1`. -- `--timeout`: Define o tempo limite do worker em segundos. O padrão é `60`. +- `--worker-timeout`: Define o tempo limite do worker em segundos. O padrão é `60`. - `--port`: Define a porta para escutar. Pode ser configurado usando a variável de ambiente `LANGFLOW_PORT`. O padrão é `7860`. - `--env-file`: Especifica o caminho para o arquivo .env contendo variáveis de ambiente. O padrão é `.env`. - `--log-level`: Define o nível de log. Pode ser configurado usando a variável de ambiente `LANGFLOW_LOG_LEVEL`. O padrão é `critical`. diff --git a/README.ja.md b/README.ja.md index 579a826a48b5..5205ff395913 100644 --- a/README.ja.md +++ b/README.ja.md @@ -33,7 +33,7 @@
README in English README in Portuguese - README in Spanish + README in Spanish README in Simplified Chinese README in Japanese README in KOREAN @@ -152,7 +152,7 @@ langflow run [OPTIONS] - `--help`: 利用可能なすべてのオプションを表示します。 - `--host`: サーバーをバインドするホストを定義します。`LANGFLOW_HOST`環境変数を使用して設定できます。デフォルトは`127.0.0.1`です。 - `--workers`: ワーカープロセスの数を設定します。`LANGFLOW_WORKERS`環境変数を使用して設定できます。デフォルトは`1`です。 -- `--timeout`: ワーカーのタイムアウトを秒単位で設定します。デフォルトは`60`です。 +- `--worker-timeout`: ワーカーのタイムアウトを秒単位で設定します。デフォルトは`60`です。 - `--port`: リッスンするポートを設定します。`LANGFLOW_PORT`環境変数を使用して設定できます。デフォルトは`7860`です。 - `--env-file`: 環境変数を含む.env ファイルのパスを指定します。デフォルトは`.env`です。 - `--log-level`: ログレベルを定義します。`LANGFLOW_LOG_LEVEL`環境変数を使用して設定できます。デフォルトは`critical`です。 diff --git a/README.zh_CN.md b/README.zh_CN.md index 613c84daeb65..d860319473c7 100644 --- a/README.zh_CN.md +++ b/README.zh_CN.md @@ -28,7 +28,7 @@
README in English README in Portuguese - README in Spanish + README in Spanish README in Simplified Chinese README in Japanese README in KOREAN @@ -134,7 +134,7 @@ langflow run [OPTIONS] - `--help`: 显示所有可用参数。 - `--host`: 定义绑定服务器的主机 host 参数,可以使用 LANGFLOW_HOST 环境变量设置,默认值为 127.0.0.1。 - `--workers`: 设置工作进程的数量,可以使用 LANGFLOW_WORKERS 环境变量设置,默认值为 1。 -- `--timeout`: 设置工作进程的超时时间(秒),默认值为 60。 +- `--worker-timeout`: 设置工作进程的超时时间(秒),默认值为 60。 - `--port`: 设置服务监听的端口,可以使用 LANGFLOW_PORT 环境变量设置,默认值为 7860。 - `--config`: 定义配置文件的路径,默认值为 config.yaml。 - `--env-file`: 指定包含环境变量的 .env 文件路径,默认值为 .env。 diff --git a/docker/build_and_push.Dockerfile b/docker/build_and_push.Dockerfile index aeb948b5f5c1..5259ce501414 100644 --- a/docker/build_and_push.Dockerfile +++ b/docker/build_and_push.Dockerfile @@ -1,7 +1,6 @@ # syntax=docker/dockerfile:1 # Keep this syntax directive! It's used to enable Docker BuildKit - ################################ # BUILDER-BASE # Used to build deps + create our virtual environment @@ -9,76 +8,67 @@ # 1. use python:3.12.3-slim as the base image until https://github.com/pydantic/pydantic-core/issues/1292 gets resolved # 2. do not add --platform=$BUILDPLATFORM because the pydantic binaries must be resolved for the final architecture -FROM python:3.12.3-slim as builder-base - -ENV PYTHONDONTWRITEBYTECODE=1 \ - \ - # pip - PIP_DISABLE_PIP_VERSION_CHECK=on \ - PIP_DEFAULT_TIMEOUT=100 \ - \ - # poetry - # https://python-poetry.org/docs/configuration/#using-environment-variables - POETRY_VERSION=1.8.2 \ - # make poetry install to this location - POETRY_HOME="/opt/poetry" \ - # make poetry create the virtual environment in the project's root - # it gets named `.venv` - POETRY_VIRTUALENVS_IN_PROJECT=true \ - # do not ask any interactive question - POETRY_NO_INTERACTION=1 \ - \ - # paths - # this is where our requirements + virtual environment will live - PYSETUP_PATH="/opt/pysetup" \ - VENV_PATH="/opt/pysetup/.venv" +# Use a Python image with uv pre-installed +FROM ghcr.io/astral-sh/uv:python3.12-bookworm-slim AS builder + +# Install the project into `/app` +WORKDIR /app + +# Enable bytecode compilation +ENV UV_COMPILE_BYTECODE=1 + +# Copy from the cache instead of linking since it's a mounted volume +ENV UV_LINK_MODE=copy RUN apt-get update \ && apt-get install --no-install-recommends -y \ - # deps for installing poetry - curl \ # deps for building python deps - build-essential npm \ + build-essential \ + # npm + npm \ # gcc gcc \ && apt-get clean \ && rm -rf /var/lib/apt/lists/* -RUN --mount=type=cache,target=/root/.cache \ - curl -sSL https://install.python-poetry.org | python3 - +RUN --mount=type=cache,target=/root/.cache/uv \ + --mount=type=bind,source=uv.lock,target=uv.lock \ + --mount=type=bind,source=README.md,target=README.md \ + --mount=type=bind,source=pyproject.toml,target=pyproject.toml \ + --mount=type=bind,source=src/backend/base/README.md,target=src/backend/base/README.md \ + --mount=type=bind,source=src/backend/base/uv.lock,target=src/backend/base/uv.lock \ + --mount=type=bind,source=src/backend/base/pyproject.toml,target=src/backend/base/pyproject.toml \ + uv sync --frozen --no-install-project --no-editable + +ADD ./src /app/src + +COPY src/frontend /tmp/src/frontend +WORKDIR /tmp/src/frontend +RUN --mount=type=cache,target=/root/.npm \ + npm ci \ + && npm run build \ + && cp -r build /app/src/backend/langflow/frontend \ + && rm -rf /tmp/src/frontend WORKDIR /app -COPY pyproject.toml poetry.lock README.md ./ -COPY src/ ./src -COPY scripts/ ./scripts -RUN python -m pip install requests 
--user && cd ./scripts && python update_dependencies.py - -# 1. Install the dependencies using the current poetry.lock file to create reproducible builds -# 2. Do not install dev dependencies -# 3. Install all the extras to ensure all optionals are installed as well -# 4. --sync to ensure nothing else is in the environment -# 5. Build the wheel and install "langflow" package (mainly for version) - -# Note: moving to build and installing the wheel will make the docker images not reproducible. -RUN $POETRY_HOME/bin/poetry lock --no-update \ - # install current lock file with fixed dependencies versions \ - # do not install dev dependencies \ - && $POETRY_HOME/bin/poetry install --without dev --sync -E deploy -E couchbase -E cassio \ - && $POETRY_HOME/bin/poetry build -f wheel \ - && $POETRY_HOME/bin/poetry run pip install dist/*.whl +ADD ./pyproject.toml /app/pyproject.toml +ADD ./uv.lock /app/uv.lock +ADD ./README.md /app/README.md + +RUN --mount=type=cache,target=/root/.cache/uv \ + uv sync --frozen --no-editable ################################ # RUNTIME # Setup user, utilities and copy the virtual environment only ################################ -# 1. use python:3.12.3-slim as the base image until https://github.com/pydantic/pydantic-core/issues/1292 gets resolved -FROM python:3.12.3-slim as runtime +FROM python:3.12.3-slim AS runtime -RUN apt-get -y update \ - && apt-get install --no-install-recommends -y \ - curl \ - && apt-get clean \ - && rm -rf /var/lib/apt/lists/* +RUN useradd user -u 1000 -g 0 --no-create-home --home-dir /app/data +COPY --from=builder --chown=1000 /app/.venv /app/.venv + +# Place executables in the environment at the front of the path +ENV PATH="/app/.venv/bin:$PATH" LABEL org.opencontainers.image.title=langflow LABEL org.opencontainers.image.authors=['Langflow'] @@ -86,14 +76,10 @@ LABEL org.opencontainers.image.licenses=MIT LABEL org.opencontainers.image.url=https://github.com/langflow-ai/langflow LABEL org.opencontainers.image.source=https://github.com/langflow-ai/langflow -RUN useradd user -u 1000 -g 0 --no-create-home --home-dir /app/data -COPY --from=builder-base --chown=1000 /app/.venv /app/.venv -ENV PATH="/app/.venv/bin:${PATH}" - USER user WORKDIR /app ENV LANGFLOW_HOST=0.0.0.0 ENV LANGFLOW_PORT=7860 -CMD ["python", "-m", "langflow", "run"] +CMD ["langflow", "run"] \ No newline at end of file diff --git a/docker/build_and_push_base.Dockerfile b/docker/build_and_push_base.Dockerfile index a996ec4a63af..c20f8b6bd90b 100644 --- a/docker/build_and_push_base.Dockerfile +++ b/docker/build_and_push_base.Dockerfile @@ -1,58 +1,28 @@ - - # syntax=docker/dockerfile:1 # Keep this syntax directive! 
It's used to enable Docker BuildKit -# Based on https://github.com/python-poetry/poetry/discussions/1879?sort=top#discussioncomment-216865 -# but I try to keep it updated (see history) ################################ -# PYTHON-BASE -# Sets up all our shared environment variables +# BUILDER-BASE +# Used to build deps + create our virtual environment ################################ -# use python:3.12.3-slim as the base image until https://github.com/pydantic/pydantic-core/issues/1292 gets resolved -FROM python:3.12.3-slim as python-base - -# python -ENV PYTHONUNBUFFERED=1 \ - # prevents python creating .pyc files - PYTHONDONTWRITEBYTECODE=1 \ - \ - # pip - PIP_DISABLE_PIP_VERSION_CHECK=on \ - PIP_DEFAULT_TIMEOUT=100 \ - \ - # poetry - # https://python-poetry.org/docs/configuration/#using-environment-variables - POETRY_VERSION=1.8.2 \ - # make poetry install to this location - POETRY_HOME="/opt/poetry" \ - # make poetry create the virtual environment in the project's root - # it gets named `.venv` - POETRY_VIRTUALENVS_IN_PROJECT=true \ - # do not ask any interactive question - POETRY_NO_INTERACTION=1 \ - \ - # paths - # this is where our requirements + virtual environment will live - PYSETUP_PATH="/opt/pysetup" \ - VENV_PATH="/opt/pysetup/.venv" - - -# prepend poetry and venv to path -ENV PATH="$POETRY_HOME/bin:$VENV_PATH/bin:$PATH" +# 1. use python:3.12.3-slim as the base image until https://github.com/pydantic/pydantic-core/issues/1292 gets resolved +# 2. do not add --platform=$BUILDPLATFORM because the pydantic binaries must be resolved for the final architecture +# Use a Python image with uv pre-installed +FROM ghcr.io/astral-sh/uv:python3.12-bookworm-slim AS builder +# Install the project into `/app` +WORKDIR /app + +# Enable bytecode compilation +ENV UV_COMPILE_BYTECODE=1 + +# Copy from the cache instead of linking since it's a mounted volume +ENV UV_LINK_MODE=copy -################################ -# BUILDER-BASE -# Used to build deps + create our virtual environment -################################ -FROM python-base as builder-base RUN apt-get update \ && apt-get install --no-install-recommends -y \ - # deps for installing poetry - curl \ # deps for building python deps build-essential \ # npm @@ -62,40 +32,60 @@ RUN apt-get update \ && apt-get clean \ && rm -rf /var/lib/apt/lists/* -RUN --mount=type=cache,target=/root/.cache \ - curl -sSL https://install.python-poetry.org | python3 - +# Install the project's dependencies using the lockfile and settings +# We need to mount the root uv.lock and pyproject.toml to build the base with uv because we're still using uv workspaces +RUN --mount=type=cache,target=/root/.cache/uv \ + --mount=type=bind,source=src/backend/base/README.md,target=src/backend/base/README.md \ + --mount=type=bind,source=src/backend/base/uv.lock,target=src/backend/base/uv.lock \ + --mount=type=bind,source=src/backend/base/pyproject.toml,target=src/backend/base/pyproject.toml \ + --mount=type=bind,source=uv.lock,target=uv.lock \ + --mount=type=bind,source=README.md,target=README.md \ + --mount=type=bind,source=pyproject.toml,target=pyproject.toml \ + cd src/backend/base && uv sync --frozen --no-install-project --no-dev --no-editable + +ADD ./src /app/src + +COPY src/frontend /tmp/src/frontend +WORKDIR /tmp/src/frontend +RUN npm install \ + && npm run build \ + && cp -r build /app/src/backend/base/langflow/frontend \ + && rm -rf /tmp/src/frontend + +ADD ./src/backend/base /app/src/backend/base +WORKDIR /app/src/backend/base +# again we need these because of workspaces 
+ADD ./pyproject.toml /app/pyproject.toml +ADD ./uv.lock /app/uv.lock +ADD ./src/backend/base/pyproject.toml /app/src/backend/base/pyproject.toml +ADD ./src/backend/base/uv.lock /app/src/backend/base/uv.lock +ADD ./src/backend/base/README.md /app/src/backend/base/README.md +RUN --mount=type=cache,target=/root/.cache/uv \ + uv sync --frozen --no-dev --no-editable -# Now we need to copy the entire project into the image -COPY pyproject.toml poetry.lock ./ -COPY src/frontend/package.json /tmp/package.json -RUN cd /tmp && npm install -WORKDIR /app -COPY src/frontend ./src/frontend -RUN rm -rf src/frontend/node_modules -RUN cp -a /tmp/node_modules /app/src/frontend -COPY scripts ./scripts -COPY Makefile ./ -COPY README.md ./ -RUN cd src/frontend && npm run build -COPY src/backend ./src/backend -RUN cp -r src/frontend/build src/backend/base/langflow/frontend -RUN rm -rf src/backend/base/dist -RUN useradd -m -u 1000 user && \ - mkdir -p /app/langflow && \ - chown -R user:user /app && \ - chmod -R u+w /app/langflow - -# Update PATH with home/user/.local/bin -ENV PATH="/home/user/.local/bin:${PATH}" -RUN cd src/backend/base && $POETRY_HOME/bin/poetry build - -# Copy virtual environment and built .tar.gz from builder base +################################ +# RUNTIME +# Setup user, utilities and copy the virtual environment only +################################ +FROM python:3.12.3-slim AS runtime + +RUN useradd user -u 1000 -g 0 --no-create-home --home-dir /app/data +# and we use the venv at the root because workspaces +COPY --from=builder --chown=1000 /app/.venv /app/.venv + +# Place executables in the environment at the front of the path +ENV PATH="/app/.venv/bin:$PATH" + +LABEL org.opencontainers.image.title=langflow +LABEL org.opencontainers.image.authors=['Langflow'] +LABEL org.opencontainers.image.licenses=MIT +LABEL org.opencontainers.image.url=https://github.com/langflow-ai/langflow +LABEL org.opencontainers.image.source=https://github.com/langflow-ai/langflow USER user -# Install the package from the .tar.gz -RUN python -m pip install /app/src/backend/base/dist/*.tar.gz --user +WORKDIR /app ENV LANGFLOW_HOST=0.0.0.0 ENV LANGFLOW_PORT=7860 -CMD ["python", "-m", "langflow", "run"] +CMD ["langflow-base", "run"] diff --git a/docker/build_and_push_ep.Dockerfile b/docker/build_and_push_ep.Dockerfile new file mode 100644 index 000000000000..4c5cc2bfaa5a --- /dev/null +++ b/docker/build_and_push_ep.Dockerfile @@ -0,0 +1,90 @@ +# syntax=docker/dockerfile:1 +# Keep this syntax directive! It's used to enable Docker BuildKit + +################################ +# BUILDER-BASE +# Used to build deps + create our virtual environment +################################ + +# 1. use python:3.12.3-slim as the base image until https://github.com/pydantic/pydantic-core/issues/1292 gets resolved +# 2. 
do not add --platform=$BUILDPLATFORM because the pydantic binaries must be resolved for the final architecture +# Use a Python image with uv pre-installed +FROM ghcr.io/astral-sh/uv:python3.12-bookworm-slim AS builder + +# Install the project into `/app` +WORKDIR /app + +# Enable bytecode compilation +ENV UV_COMPILE_BYTECODE=1 + +# Copy from the cache instead of linking since it's a mounted volume +ENV UV_LINK_MODE=copy + +RUN apt-get update \ + && apt-get install --no-install-recommends -y \ + # deps for building python deps + build-essential \ + # npm + npm \ + # gcc + gcc \ + && apt-get clean \ + && rm -rf /var/lib/apt/lists/* + +RUN --mount=type=cache,target=/root/.cache/uv \ + --mount=type=bind,source=uv.lock,target=uv.lock \ + --mount=type=bind,source=README.md,target=README.md \ + --mount=type=bind,source=pyproject.toml,target=pyproject.toml \ + --mount=type=bind,source=src/backend/base/README.md,target=src/backend/base/README.md \ + --mount=type=bind,source=src/backend/base/uv.lock,target=src/backend/base/uv.lock \ + --mount=type=bind,source=src/backend/base/pyproject.toml,target=src/backend/base/pyproject.toml \ + uv sync --frozen --no-install-project --no-editable + +ADD ./src /app/src + +COPY src/frontend /tmp/src/frontend +WORKDIR /tmp/src/frontend +RUN --mount=type=cache,target=/root/.npm \ + npm ci \ + && npm run build \ + && cp -r build /app/src/backend/langflow/frontend \ + && rm -rf /tmp/src/frontend + +WORKDIR /app +ADD ./pyproject.toml /app/pyproject.toml +ADD ./uv.lock /app/uv.lock +ADD ./README.md /app/README.md + +RUN --mount=type=cache,target=/root/.cache/uv \ + uv sync --frozen --no-editable + +################################ +# RUNTIME +# Setup user, utilities and copy the virtual environment only +################################ +FROM python:3.12.3-slim AS runtime + +RUN useradd user -u 1000 -g 0 --no-create-home --home-dir /app/data && \ + mkdir /data && chown -R 1000:0 /data + +COPY --from=builder --chown=1000 /app/.venv /app/.venv + +# curl is required for langflow health checks +RUN apt-get update && apt-get install -y curl && rm -rf /var/lib/apt/lists/* + +# Place executables in the environment at the front of the path +ENV PATH="/app/.venv/bin:$PATH" + +LABEL org.opencontainers.image.title=langflow +LABEL org.opencontainers.image.authors=['Langflow'] +LABEL org.opencontainers.image.licenses=MIT +LABEL org.opencontainers.image.url=https://github.com/langflow-ai/langflow +LABEL org.opencontainers.image.source=https://github.com/langflow-ai/langflow + +WORKDIR /app + +ENV LANGFLOW_HOST=0.0.0.0 +ENV LANGFLOW_PORT=7860 + +USER 1000 +ENTRYPOINT ["python", "-m", "langflow", "run", "--host", "0.0.0.0", "--backend-only"] diff --git a/docker/dev.Dockerfile b/docker/dev.Dockerfile index fe60259b9cad..ad1497ef384f 100644 --- a/docker/dev.Dockerfile +++ b/docker/dev.Dockerfile @@ -1,20 +1,26 @@ -FROM python:3.12-bookworm +FROM ghcr.io/astral-sh/uv:python3.12-bookworm-slim ENV TZ=UTC WORKDIR /app -RUN apt update -y -RUN apt install \ +RUN apt-get update && apt-get install -y \ build-essential \ curl \ npm \ - -y + && apt-get clean \ + && rm -rf /var/lib/apt/lists/* COPY . 
/app -RUN pip install poetry -RUN poetry config virtualenvs.create false -RUN poetry install --no-interaction --no-ansi +# Install dependencies using uv +RUN --mount=type=cache,target=/root/.cache/uv \ + --mount=type=bind,source=uv.lock,target=uv.lock \ + --mount=type=bind,source=README.md,target=README.md \ + --mount=type=bind,source=pyproject.toml,target=pyproject.toml \ + --mount=type=bind,source=src/backend/base/README.md,target=src/backend/base/README.md \ + --mount=type=bind,source=src/backend/base/uv.lock,target=src/backend/base/uv.lock \ + --mount=type=bind,source=src/backend/base/pyproject.toml,target=src/backend/base/pyproject.toml \ + uv sync --frozen --no-install-project --no-dev EXPOSE 7860 EXPOSE 3000 diff --git a/docker/dev.docker-compose.yml b/docker/dev.docker-compose.yml index 31b6505c9e4a..5cad3f5d2a9d 100644 --- a/docker/dev.docker-compose.yml +++ b/docker/dev.docker-compose.yml @@ -23,7 +23,7 @@ services: volumes: - ../:/app depends_on: - - postgres # Dependência no seu banco de dados existente + - postgres networks: - dev-langflow diff --git a/docker_example/docker-compose.yml b/docker_example/docker-compose.yml index 7e7e3406486d..4dda51442253 100644 --- a/docker_example/docker-compose.yml +++ b/docker_example/docker-compose.yml @@ -2,7 +2,8 @@ version: "3.8" services: langflow: - image: langflowai/langflow:latest + image: langflowai/langflow:latest # or another version tag on https://hub.docker.com/r/langflowai/langflow + pull_policy: always # set to 'always' when using 'latest' image ports: - "7860:7860" depends_on: diff --git a/docs/css/custom.css b/docs/css/custom.css index 68d901e0761c..85dba23c945d 100644 --- a/docs/css/custom.css +++ b/docs/css/custom.css @@ -8,7 +8,7 @@ --ifm-navbar-link-hover-color: initial; --ifm-navbar-padding-vertical: 0; --ifm-navbar-item-padding-vertical: 0; - --ifm-font-family-base: -apple-system, BlinkMacSystemFont, Inter, Helvetica, + --ifm-font-family-base: Inter, -apple-system, BlinkMacSystemFont, Helvetica, Arial, sans-serif, "Apple Color Emoji", "Segoe UI emoji"; --ifm-font-family-monospace: "SFMono-Regular", "Roboto Mono", Consolas, "Liberation Mono", Menlo, Courier, monospace; @@ -118,17 +118,15 @@ body { width: 24px; height: 24px; display: flex; - background: url("/logos/gitLight.svg") - no-repeat; + background: url("/logos/gitLight.svg") no-repeat; } -[data-theme='dark'] .header-github-link:before { +[data-theme="dark"] .header-github-link:before { content: ""; width: 24px; height: 24px; display: flex; - background: url("/logos/gitDark.svg") - no-repeat; + background: url("/logos/gitDark.svg") no-repeat; } /* Twitter */ @@ -145,7 +143,7 @@ body { background-size: contain; } -[data-theme='dark'] .header-twitter-link::before { +[data-theme="dark"] .header-twitter-link::before { content: ""; width: 24px; height: 24px; @@ -164,7 +162,7 @@ body { opacity: 0.6; } -[data-theme='dark'] .header-discord-link::before { +[data-theme="dark"] .header-discord-link::before { content: ""; width: 24px; height: 24px; @@ -241,6 +239,8 @@ body { min-height: 70px; } -.theme-doc-sidebar-item-category.theme-doc-sidebar-item-category-level-2.menu__list-item:not(:first-child) { - margin-top: 0.25rem!important; -} \ No newline at end of file +.theme-doc-sidebar-item-category.theme-doc-sidebar-item-category-level-2.menu__list-item:not( + :first-child + ) { + margin-top: 0.25rem !important; +} diff --git a/docs/docs/Components/components-agents.md b/docs/docs/Components/components-agents.md new file mode 100644 index 000000000000..778e33567c33 --- 
/dev/null +++ b/docs/docs/Components/components-agents.md @@ -0,0 +1,307 @@ +# Agents + +Agent components are used to define the behavior and capabilities of AI agents in your flow. Agents can interact with APIs, databases, and other services, but can also use LLMs as a reasoning engine to decide which course to take in your flow. + +## CSV Agent + +This component creates a CSV agent from a CSV file and LLM. + +### Parameters + +#### Inputs + +| Name | Type | Description | +|------|------|-------------| +| llm | LanguageModel | Language model to use for the agent | +| path | File | Path to the CSV file | +| agent_type | String | Type of agent to create (zero-shot-react-description, openai-functions, or openai-tools) | + +#### Outputs + +| Name | Type | Description | +|------|------|-------------| +| agent | AgentExecutor | CSV agent instance | + +## CrewAI Agent + +This component represents an Agent of CrewAI, allowing for the creation of specialized AI agents with defined roles, goals, and capabilities within a crew. + +For more information, see the [CrewAI documentation](https://docs.crewai.com/core-concepts/Agents/). + +### Parameters + +#### Inputs + +| Name | Display Name | Info | +|------|--------------|------| +| role | Role | The role of the agent | +| goal | Goal | The objective of the agent | +| backstory | Backstory | The backstory of the agent | +| tools | Tools | Tools at agent's disposal | +| llm | Language Model | Language model that will run the agent | +| memory | Memory | Whether the agent should have memory or not | +| verbose | Verbose | Enables verbose output | +| allow_delegation | Allow Delegation | Whether the agent is allowed to delegate tasks to other agents | +| allow_code_execution | Allow Code Execution | Whether the agent is allowed to execute code | +| kwargs | kwargs | Additional keyword arguments for the agent | + +#### Outputs + +| Name | Display Name | Info | +|------|--------------|------| +| output | Agent | The constructed CrewAI Agent object | + +## Hierarchical Crew + +This component represents a group of agents, managing how they should collaborate and the tasks they should perform in a hierarchical structure. This component allows for the creation of a crew with a manager overseeing the task execution. + +For more information, see the [CrewAI documentation](https://docs.crewai.com/how-to/Hierarchical/). + +### Parameters + +#### Inputs + +| Name | Display Name | Info | +|------|--------------|------| +| agents | Agents | List of Agent objects representing the crew members | +| tasks | Tasks | List of HierarchicalTask objects representing the tasks to be executed | +| manager_llm | Manager LLM | Language model for the manager agent (optional) | +| manager_agent | Manager Agent | Specific agent to act as the manager (optional) | +| verbose | Verbose | Enables verbose output for detailed logging | +| memory | Memory | Specifies the memory configuration for the crew | +| use_cache | Use Cache | Enables caching of results | +| max_rpm | Max RPM | Sets the maximum requests per minute | +| share_crew | Share Crew | Determines if the crew information is shared among agents | +| function_calling_llm | Function Calling LLM | Specifies the language model for function calling | + +#### Outputs + +| Name | Display Name | Info | +|------|--------------|------| +| crew | Crew | The constructed Crew object with hierarchical task execution | + +## JSON Agent + +This component creates a JSON agent from a JSON or YAML file and an LLM. 
+ +### Parameters + +#### Inputs + +| Name | Type | Description | +|------|------|-------------| +| llm | LanguageModel | Language model to use for the agent | +| path | File | Path to the JSON or YAML file | + +#### Outputs + +| Name | Type | Description | +|------|------|-------------| +| agent | AgentExecutor | JSON agent instance | + +## OpenAI Tools Agent + +This component creates an OpenAI Tools Agent using LangChain. + +For more information, see the [LangChain documentation](https://python.langchain.com/v0.1/docs/modules/agents/agent_types/openai_tools/). + +### Parameters + +#### Inputs + +| Name | Type | Description | +|------|------|-------------| +| llm | LanguageModel | Language model to use for the agent (must be tool-enabled) | +| system_prompt | String | System prompt for the agent | +| user_prompt | String | User prompt template (must contain 'input' key) | +| chat_history | List[Data] | Optional chat history for the agent | +| tools | List[Tool] | List of tools available to the agent | + +#### Outputs + +| Name | Type | Description | +|------|------|-------------| +| agent | AgentExecutor | OpenAI Tools Agent instance | + +## OpenAPI Agent + +This component creates an OpenAPI Agent to interact with APIs defined by OpenAPI specifications. + +For more information, see the LangChain documentation on OpenAPI Agents. + +### Parameters + +#### Inputs + +| Name | Type | Description | +|------|------|-------------| +| llm | LanguageModel | Language model to use for the agent | +| path | File | Path to the OpenAPI specification file (JSON or YAML) | +| allow_dangerous_requests | Boolean | Whether to allow potentially dangerous API requests | + +#### Outputs + +| Name | Type | Description | +|------|------|-------------| +| agent | AgentExecutor | OpenAPI Agent instance | + +## SQL Agent + +This component creates a SQL Agent to interact with SQL databases. + +### Parameters + +#### Inputs + +| Name | Type | Description | +|------|------|-------------| +| llm | LanguageModel | Language model to use for the agent | +| database_uri | String | URI of the SQL database to connect to | +| extra_tools | List[Tool] | Additional tools to provide to the agent (optional) | + +#### Outputs + +| Name | Type | Description | +|------|------|-------------| +| agent | AgentExecutor | SQL Agent instance | + +## Sequential Crew + +This component represents a group of agents with tasks that are executed sequentially. This component allows for the creation of a crew that performs tasks in a specific order. + +For more information, see the [CrewAI documentation](https://docs.crewai.com/how-to/Sequential/). 
+ +### Parameters + +#### Inputs + +| Name | Display Name | Info | +|------|--------------|------| +| tasks | Tasks | List of SequentialTask objects representing the tasks to be executed | +| verbose | Verbose | Enables verbose output for detailed logging | +| memory | Memory | Specifies the memory configuration for the crew | +| use_cache | Use Cache | Enables caching of results | +| max_rpm | Max RPM | Sets the maximum requests per minute | +| share_crew | Share Crew | Determines if the crew information is shared among agents | +| function_calling_llm | Function Calling LLM | Specifies the language model for function calling | + +#### Outputs + +| Name | Display Name | Info | +|------|--------------|------| +| crew | Crew | The constructed Crew object with sequential task execution | + +## Sequential task agent + +This component creates a CrewAI Task and its associated Agent, allowing for the definition of sequential tasks with specific agent roles and capabilities. + +For more information, see the [CrewAI documentation](https://docs.crewai.com/how-to/Sequential/). + +### Parameters + +#### Inputs + +| Name | Display Name | Info | +|------|--------------|------| +| role | Role | The role of the agent | +| goal | Goal | The objective of the agent | +| backstory | Backstory | The backstory of the agent | +| tools | Tools | Tools at agent's disposal | +| llm | Language Model | Language model that will run the agent | +| memory | Memory | Whether the agent should have memory or not | +| verbose | Verbose | Enables verbose output | +| allow_delegation | Allow Delegation | Whether the agent is allowed to delegate tasks to other agents | +| allow_code_execution | Allow Code Execution | Whether the agent is allowed to execute code | +| agent_kwargs | Agent kwargs | Additional kwargs for the agent | +| task_description | Task Description | Descriptive text detailing task's purpose and execution | +| expected_output | Expected Task Output | Clear definition of expected task outcome | +| async_execution | Async Execution | Boolean flag indicating asynchronous task execution | +| previous_task | Previous Task | The previous task in the sequence (for chaining) | + +#### Outputs + +| Name | Display Name | Info | +|------|--------------|------| +| task_output | Sequential Task | List of SequentialTask objects representing the created task(s) | + +## Tool Calling Agent + +This component creates a Tool Calling Agent using LangChain. + +### Parameters + +#### Inputs + +| Name | Type | Description | +|------|------|-------------| +| llm | LanguageModel | Language model to use for the agent | +| system_prompt | String | System prompt for the agent | +| user_prompt | String | User prompt template (must contain 'input' key) | +| chat_history | List[Data] | Optional chat history for the agent | +| tools | List[Tool] | List of tools available to the agent | + +#### Outputs + +| Name | Type | Description | +|------|------|-------------| +| agent | AgentExecutor | Tool Calling Agent instance | + +## Vector Store Agent + +This component creates a Vector Store Agent using LangChain. 
+ +### Parameters + +#### Inputs + +| Name | Type | Description | +|------|------|-------------| +| llm | LanguageModel | Language model to use for the agent | +| vectorstore | VectorStoreInfo | Vector store information for the agent to use | + +#### Outputs + +| Name | Type | Description | +|------|------|-------------| +| agent | AgentExecutor | Vector Store Agent instance | + +## Vector Store Router Agent + +This component creates a Vector Store Router Agent using LangChain. + +### Parameters + +#### Inputs + +| Name | Type | Description | +|------|------|-------------| +| llm | LanguageModel | Language model to use for the agent | +| vectorstores | List[VectorStoreInfo] | List of vector store information for the agent to route between | + +#### Outputs + +| Name | Type | Description | +|------|------|-------------| +| agent | AgentExecutor | Vector Store Router Agent instance | + +## XML Agent + +This component creates an XML Agent using LangChain. + +The agent uses XML formatting for tool instructions to the Language Model. + +### Parameters + +#### Inputs + +| Name | Type | Description | +|------|------|-------------| +| llm | LanguageModel | Language model to use for the agent | +| user_prompt | String | Custom prompt template for the agent (includes XML formatting instructions) | +| tools | List[Tool] | List of tools available to the agent | + +#### Outputs + +| Name | Type | Description | +|------|------|-------------| +| agent | AgentExecutor | XML Agent instance | \ No newline at end of file diff --git a/docs/docs/Components/components-custom-components.md b/docs/docs/Components/components-custom-components.md index 56a2de5ff775..1f0f535bb58e 100644 --- a/docs/docs/Components/components-custom-components.md +++ b/docs/docs/Components/components-custom-components.md @@ -4,105 +4,175 @@ sidebar_position: 8 slug: /components-custom-components --- +# Custom Components +Custom components are created within Langflow and extend the platform's functionality with custom, resusable Python code. -Langflow components can be created from within the platform, allowing users to extend the platform's functionality using Python code. They encapsulate are designed to be independent units, reusable across different workflows. +Since Langflow operates with Python behind the scenes, you can implement any Python function within a Custom Component. This means you can leverage the power of libraries such as Pandas, Scikit-learn, Numpy, and thousands of other packages to create components that handle data processing in unlimited ways. You can use any type as long as the type is properly annotated in the output methods (e.g., `> list[int]`). +Custom Components create reusable and configurable components to enhance the capabilities of Langflow, making it a powerful tool for developing complex processing between user and AI messages. -These components can be easily connected within a language model pipeline, adding freedom and flexibility to what can be included in between user and AI messages. +## How to Create Custom Components - -![](./238089171.png) - - -Since Langflow operates with Python behind the scenes, you can implement any Python function within a Custom Component. This means you can leverage the power of libraries such as Pandas, Scikit-learn, Numpy, and thousands of packages to create components that handle data processing in unlimited ways. +Creating custom components in Langflow involves creating a Python class that defines the component's functionality, inputs, and outputs. 
+The default code provides a working structure for your custom component. +```python +# from langflow.field_typing import Data +from langflow.custom import Component +from langflow.io import MessageTextInput, Output +from langflow.schema import Data -Custom Components are not just about extending functionality; they also streamline the development process. By creating reusable and configurable components, you can enhance the capabilities of Langflow, making it a powerful tool for developing complex workflows. +class CustomComponent(Component): + display_name = "Custom Component" + description = "Use as a template to create your own component." + documentation: str = "http://docs.langflow.org/components/custom" + icon = "custom_components" + name = "CustomComponent" + inputs = [ + MessageTextInput(name="input_value", display_name="Input Value", value="Hello, World!"), + ] -### Key Characteristics: {#d3a151089a9e4584bd420461cd1432c6} + outputs = [ + Output(display_name="Output", name="output", method="build_output"), + ] -1. **Modular and Reusable**: Designed as independent units, components encapsulate specific functionality, making them reusable across different projects and workflows. -2. **Integration with Python Libraries**: You can import libraries like Pandas, Scikit-learn, Numpy, etc., to build components that handle data processing, machine learning, numerical computations, and more. -3. **Flexible Inputs and Outputs**: While Langflow offers native input and output types, you can use any type as long as they are properly annotated in the output methods (e.g., `> list[int]`). -4. **Python-Powered**: Since Langflow operates with Python behind the scenes, any Python function can be implemented within a custom component. -5. **Enhanced Workflow**: Custom components serve as reusable building blocks, enabling you to create pre-processing visual blocks with ease and integrate them into your language model pipeline. + def build_output(self) -> Data: + data = Data(value=self.input_value) + self.status = data + return data -### Why Use Custom Components? {#827a2b5acec94426a4a2106a8332622d} +``` -- **Customization**: Tailor the functionality to your specific needs by writing Python code that suits your workflow. -- **Flexibility**: Add any Python-based logic or processing step between user/AI messages, enhancing the flexibility of Langflow. -- **Efficiency**: Streamline your development process by creating reusable, configurable components that can be easily deployed. +You can create your class in your favorite text editor outside of Langflow and paste it in later, or just follow along in the code pane. -### How to Write Them {#2088ade519514bb3923cdf7f2ac2089a} +1. In Langflow, from under **Helpers**, drag a **Custom Component** into the workspace. +2. Open the component's code pane. +3. Import dependencies. +Your custom component inherits from the langflow `Component` class so you need to include it. +```python +from langflow.custom import Component +from langflow.io import MessageTextInput, Output +from langflow.schema import Data +``` +4. **Define the Class**: Start by defining a Python class that inherits from `Component`. This class will encapsulate the functionality of your custom component. +```python +class CustomComponent(Component): + display_name = "Custom Component" + description = "Use as a template to create your own component." + documentation: str = "http://docs.langflow.org/components/custom" + icon = "custom_components" + name = "CustomComponent" +``` +5. 
**Specify Inputs and Outputs**: Use Langflow's input and output classes to define the inputs and outputs of your component. They should be declared as class attributes. +```python + inputs = [ + MessageTextInput(name="input_value", display_name="Input Value", value="Hello, World!"), + ] ---- + outputs = [ + Output(display_name="Output", name="output", method="build_output"), + ] +``` +6. **Implement Output Methods**: Implement methods for each output, which contains the logic of your component. These methods can access input values using `self.` , return processed values and define what to be displayed in the component with the `self.status` attribute. +```python + def build_output(self) -> Data: + data = Data(value=self.input_value) + self.status = data + return data +``` +7. **Use Proper Annotations**: Ensure that output methods are properly annotated with their types. Langflow uses these annotations to validate and handle data correctly. For example, this method is annotated to output `Data`. +```python + def build_output(self) -> Data: +``` +8. Click **Check & Save** to confirm your component works. +You now have an operational custom component. +![](./238089171.png) -Writing custom components in Langflow involves creating a Python class that defines the component's functionality, inputs, and outputs. The process involves a few key steps: -1. **Define the Class**: Start by defining a Python class that inherits from `Component`. This class will encapsulate the functionality of your custom component. -2. **Specify Inputs and Outputs**: Use Langflow's input and output classes to define the inputs and outputs of your component. They should be declared as class attributes. -3. **Implement Output Methods**: Implement methods for each output, which contains the logic of your component. These methods can access input values using `self.` , return processed values and define what to be displayed in the component with the `self.status` attribute. -4. **Use Proper Annotations**: Ensure that output methods are properly annotated with their types. Langflow uses these annotations to validate and handle data correctly. +## Add inputs and modify output methods -Here's a basic structure of a custom component: +This code defines a custom component that accepts 5 inputs and outputs a Message. +Copy and paste it into the Custom Component code pane and click **Check & Save.** ```python from langflow.custom import Component -from langflow.inputs import StrInput, IntInput -from langflow.template import Output +from langflow.inputs import StrInput, MultilineInput, SecretStrInput, IntInput, DropdownInput +from langflow.template import Output, Input +from langflow.schema.message import Message class MyCustomComponent(Component): - icon = "coffee" # check lucide.dev/icons or pass an emoji + display_name = "My Custom Component" + description = "An example of a custom component with various input types." inputs = [ StrInput( - name="input_text", - display_name="Input Text", - info="Text to be processed.", + name="username", + display_name="Username", + info="Enter your username." + ), + SecretStrInput( + name="password", + display_name="Password", + info="Enter your password." + ), + MessageTextInput( + name="special_message", + display_name="special_message", + info="Enter a special message.", ), IntInput( - name="input_number", - display_name="Input Number", - info="Number to be processed.", + name="age", + display_name="Age", + info="Enter your age." 
), + DropdownInput( + name="gender", + display_name="Gender", + options=["Male", "Female", "Other"], + info="Select your gender." + ) ] outputs = [ - Output(display_name="Processed Text", name="processed_text", method="process_text"), + Output(display_name="Result", name="result", method="process_inputs"), ] - def process_text(self) -> str: - input_text = self.input_text - input_number = self.input_number - # Implement your logic here - processed_text = f"{input_text} processed with number {input_number}" - self.status = processed_text - return processed_text - - + def process_inputs(self) -> Message: + """ + Process the user inputs and return a Message object. + + Returns: + Message: A Message object containing the processed information. + """ + try: + processed_text = f"User {self.username} (Age: {self.age}, Gender: {self.gender}) " \ + f"sent the following special message: {self.special_message}" + return Message(text=processed_text) + except AttributeError as e: + return Message(text=f"Error processing inputs: {str(e)}") ``` +Since the component outputs a `Message`, you can wire it into a chat and pass messages to yourself. -Paste that code into the Custom Component code snippet and click **Check & Save.** +Your Custom Component accepts the Chat Input message through `MessageTextInput`, fills in the variables with the `process_inputs` method, and finally passes the message `User Username (Age: 49, Gender: Male) sent the following special message: Hello!` to Chat Output. +![](./custom-component-chat.png) -![](./1028644105.png) - - -You should see something like the component below. Double click the name or description areas to edit them. - +By defining inputs this way, Langflow can automatically handle the validation and display of these fields in the user interface, making it easier to create robust and user-friendly custom components. -![](./241280398.png) +All of the types detailed above derive from a general class that can also be accessed through the generic `Input` class. +:::tip +Use `MessageInput` to get the entire Message object instead of just the text. +::: ## Input Types {#3815589831f24ab792328ed233c8b00d} - --- @@ -224,62 +294,6 @@ Represents a file input field. - **Attributes:** `file_types` to specify the types of files that can be uploaded. - **Input Types:** `["File"]` -Here is an example of how these inputs can be defined in a custom component: - - -```python -from langflow.custom import Component -from langflow.inputs import StrInput, MultilineInput, SecretStrInput, IntInput, DropdownInput -from langflow.template import Output, Input - -class MyCustomComponent(Component): - display_name = "My Custom Component" - description = "An example of a custom component with various input types." - - inputs = [ - StrInput( - name="username", - display_name="Username", - info="Enter your username." - ), - SecretStrInput( - name="password", - display_name="Password", - info="Enter your password." - ), - MultilineInput( - name="description", - display_name="Description", - info="Enter a detailed description.", - ), - IntInput( - name="age", - display_name="Age", - info="Enter your age." - ), - DropdownInput( - name="gender", - display_name="Gender", - options=["Male", "Female", "Other"], - info="Select your gender." 
- ) - ] - - outputs = [ - Output(display_name="Result", name="result", method="process_inputs"), - ] - - def process_inputs(self): - # Your processing logic here - return "Processed" -``` - - -By defining inputs this way, Langflow can automatically handle the validation and display of these fields in the user interface, making it easier to create robust and user-friendly custom components. - - -All of the types detailed above derive from a general class that can also be accessed through the generic `Input` class. - ### Generic Input {#278e2027493e45b68746af0a5b6c06f6} @@ -316,74 +330,106 @@ The `Input` class is highly customizable, allowing you to specify a wide range - `load_from_db`: Boolean indicating if the field should load from the database. Default is `False`. - `title_case`: Boolean indicating if the display name should be in title case. Default is `True`. -Below is an example of how to define inputs for a component using the `Input` class: +## Create a Custom Component with Generic Input +Here is an example of how to define inputs for a component using the `Input` class. + +Copy and paste it into the Custom Component code pane and click **Check & Save.** ```python from langflow.template import Input, Output from langflow.custom import Component from langflow.field_typing import Text +from langflow.schema.message import Message +from typing import Dict, Any -class ExampleComponent(Component): - display_name = "Example Component" - description = "An example component demonstrating input fields." +class TextAnalyzerComponent(Component): + display_name = "Text Analyzer" + description = "Analyzes input text and provides basic statistics." inputs = [ Input( name="input_text", display_name="Input Text", - field_type="str", + field_type="Message", required=True, - placeholder="Enter some text", + placeholder="Enter text to analyze", multiline=True, - info="This is a required text input.", + info="The text you want to analyze.", input_types=["Text"] ), Input( - name="max_length", - display_name="Max Length", - field_type="int", + name="include_word_count", + display_name="Include Word Count", + field_type="bool", required=False, - placeholder="Maximum length", - info="Enter the maximum length of the text.", - range_spec={"min": 0, "max": 1000}, + info="Whether to include word count in the analysis.", ), Input( - name="options", - display_name="Options", - field_type="str", - is_list=True, - options=["Option 1", "Option 2", "Option 3"], - info="Select one or more options." 
+ name="perform_sentiment_analysis", + display_name="Perform Sentiment Analysis", + field_type="bool", + required=False, + info="Whether to perform basic sentiment analysis.", ), ] outputs = [ - Output(display_name="Result", name="result", method="process_input"), + Output(display_name="Analysis Results", name="results", method="analyze_text"), ] - def process_input(self) -> Text: - # Process the inputs and generate output - return Text(value=f"Processed: {self.input_text}, Max Length: {self.max_length}, Options: {self.options}") + def analyze_text(self) -> Message: + # Extract text from the Message object + if isinstance(self.input_text, Message): + text = self.input_text.text + else: + text = str(self.input_text) -# Define how to use the inputs and outputs -component = ExampleComponent() + results = { + "character_count": len(text), + "sentence_count": text.count('.') + text.count('!') + text.count('?') + } + + if self.include_word_count: + results["word_count"] = len(text.split()) + + if self.perform_sentiment_analysis: + # Basic sentiment analysis + text_lower = text.lower() + if "happy" in text_lower or "good" in text_lower: + sentiment = "positive" + elif "sad" in text_lower or "bad" in text_lower: + sentiment = "negative" + else: + sentiment = "neutral" + + results["sentiment"] = sentiment + # Convert the results dictionary to a formatted string + formatted_results = "\n".join([f"{key}: {value}" for key, value in results.items()]) + # Return a Message object + return Message(text=formatted_results) + +# Define how to use the inputs and outputs +component = TextAnalyzerComponent() ``` +In this custom component: -In this example: +- The `input_text` input is a required multi-line text field that accepts a Message object or a string. It's used to provide the text for analysis. -- The `input_text` input is a required multi-line text field. -- The `max_length` input is an optional integer field with a range specification. -- The `options` input is a list of strings with predefined options. +- The `include_word_count` input is an optional boolean field. When set to True, it adds a word count to the analysis results. -These attributes allow for a high degree of customization, making it easy to create input fields that suit the needs of your specific component. +- The `perform_sentiment_analysis` input is an optional boolean field. When set to True, it triggers a basic sentiment analysis of the input text. +The component performs basic text analysis, including character count and sentence count (based on punctuation marks). If word count is enabled, it splits the text and counts the words. If sentiment analysis is enabled, it performs a simple keyword-based sentiment classification (positive, negative, or neutral). -### Multiple Outputs {#6f225be8a142450aa19ee8e46a3b3c8c} +Since the component inputs and outputs a `Message`, you can wire the component into a chat and see how the basic custom component logic interacts with your input. +![](./custom-component-inputs-chat.png) + +## Create a Custom Component with Multiple Outputs {#6f225be8a142450aa19ee8e46a3b3c8c} --- @@ -393,7 +439,7 @@ In Langflow, custom components can have multiple outputs. Each output can be ass 1. **Definition of Outputs**: Each output is defined in the `outputs` list of the component. Each output is associated with a display name, an internal name, and a method that gets called to generate the output. 2. 
**Output Methods**: The methods associated with outputs are responsible for generating the data for that particular output. These methods are called when the component is executed, and each method can independently produce its result. -Below is an example of a component with two outputs: +This example component has two outputs: - `process_data`: Processes the input text (e.g., converts it to uppercase) and returns it. - `get_processing_function`: Returns the `process_data` method itself to be reused in composition. @@ -434,18 +480,12 @@ class DualOutputComponent(Component): return self.process_data ``` - This example shows how to define multiple outputs in a custom component. The first output returns the processed data, while the second output returns the processing function itself. - The `processing_function` output can be used in scenarios where the function itself is needed for further processing or dynamic flow control. Notice how both outputs are properly annotated with their respective types, ensuring clarity and type safety. -## Special Operations {#b1ef2d18e2694b93927ae9403d24b96b} - - ---- - +## Special Operations Advanced methods and attributes offer additional control and functionality. Understanding how to leverage these can enhance your custom components' capabilities. @@ -454,3 +494,7 @@ Advanced methods and attributes offer additional control and functionality. Unde - `self.status`: Use this to update the component's status or intermediate results. It helps track the component's internal state or store temporary data. - `self.graph.flow_id`: Retrieve the flow ID, useful for maintaining context or debugging. - `self.stop("output_name")`: Use this method within an output function to prevent data from being sent through other components. This method stops next component execution and is particularly useful for specific operations where a component should stop from running based on specific conditions. + +## Contribute Custom Components to Langflow + +See [How to Contribute](/contributing-how-to-contribute#submitting-components) to contribute your custom component to Langflow. \ No newline at end of file diff --git a/docs/docs/Components/components-data.md b/docs/docs/Components/components-data.md index 65d95326fec9..87ea75ac7314 100644 --- a/docs/docs/Components/components-data.md +++ b/docs/docs/Components/components-data.md @@ -4,95 +4,157 @@ sidebar_position: 3 slug: /components-data --- +## API Request +This component sends HTTP requests to the specified URLs. -:::info +Use this component to interact with external APIs or services and retrieve data. Ensure that the URLs are valid and that you configure the method, headers, body, and timeout correctly. -This page may contain outdated information. It will be updated as soon as possible. +### Parameters -::: +#### Inputs +| Name | Display Name | Info | +| ------- | ------------ | -------------------------------------------------------------------------- | +| URLs | URLs | The URLs to target | +| curl | curl | Paste a curl command to fill in the dictionary fields for headers and body | +| Method | HTTP Method | The HTTP method to use, such as GET or POST | +| Headers | Headers | The headers to include with the request | +| Body | Request Body | The data to send with the request (for methods like POST, PATCH, PUT) | +| Timeout | Timeout | The maximum time to wait for a response | +## Directory +This component recursively loads files from a directory, with options for file types, depth, and concurrency. 
-## API Request {#23da589293f74016a1f70d6d7c0fdc55} +### Parameters +| Input | Type | Description | +| ------------------ | ---------------- | -------------------------------------------------- | +| path | MessageTextInput | Path to the directory to load files from | +| types | MessageTextInput | File types to load (leave empty to load all types) | +| depth | IntInput | Depth to search for files | +| max_concurrency | IntInput | Maximum concurrency for loading files | +| load_hidden | BoolInput | If true, hidden files will be loaded | +| recursive | BoolInput | If true, the search will be recursive | +| silent_errors | BoolInput | If true, errors will not raise an exception | +| use_multithreading | BoolInput | If true, multithreading will be used | ---- +| Output | Type | Description | +| ------ | ---------- | ----------------------------------- | +| data | List[Data] | Loaded file data from the directory | +## File -This component sends HTTP requests to the specified URLs. - +The FileComponent is a class that loads and parses text files of various supported formats, converting the content into a Data object. It supports multiple file types and provides an option for silent error handling. -Use this component to interact with external APIs or services and retrieve data. Ensure that the URLs are valid and that you configure the method, headers, body, and timeout correctly. +### Parameters +#### Inputs -**Parameters:** +| Name | Display Name | Info | +| ------------- | ------------- | -------------------------------------------- | +| path | Path | File path to load. | +| silent_errors | Silent Errors | If true, errors will not raise an exception. | -- **URLs:** The URLs to target. -- **Method:** The HTTP method, such as GET or POST. -- **Headers:** The headers to include with the request. -- **Body:** The data to send with the request (for methods like POST, PATCH, PUT). -- **Timeout:** The maximum time to wait for a response. +#### Outputs -## Directory {#4fe56acaaac847029ace173dc793f8f4} +| Name | Display Name | Info | +| ---- | ------------ | -------------------------------------------- | +| data | Data | Parsed content of the file as a Data object. | +## URL ---- +The URLComponent is a class that fetches content from one or more URLs, processes the content, and returns it as a list of Data objects. It ensures that the provided URLs are valid and uses WebBaseLoader to fetch the content. +### Parameters -This component recursively retrieves files from a specified directory. +#### Inputs +| Name | Display Name | Info | +| ---- | ------------ | ---------------------- | +| urls | URLs | Enter one or more URLs | -Use this component to retrieve various file types, such as text or JSON files, from a directory. Make sure to provide the correct path and configure the other parameters as needed. +#### Outputs +| Name | Display Name | Info | +| ---- | ------------ | ------------------------------------------------------------ | +| data | Data | List of Data objects containing fetched content and metadata | -**Parameters:** +## Gmail Loader -- **Path:** The directory path. -- **Types:** The types of files to retrieve. Leave this blank to retrieve all file types. -- **Depth:** The level of directory depth to search. -- **Max Concurrency:** The maximum number of simultaneous file loading operations. -- **Load Hidden:** Set to true to include hidden files. -- **Recursive:** Set to true to enable recursive search. -- **Silent Errors:** Set to true to suppress exceptions on errors. 
-- **Use Multithreading:** Set to true to use multithreading in file loading. +This component loads emails from Gmail using provided credentials and filters. -## File {#d5d4bb78ce0a473d8a3b6a296d3e8383} +For more on creating a service account JSON, see [Service Account JSON](https://developers.google.com/identity/protocols/oauth2/service-account). +### Parameters ---- +| Input | Type | Description | +| ----------- | ---------------- | ------------------------------------------------------------------------------------ | +| json_string | SecretStrInput | JSON string containing OAuth 2.0 access token information for service account access | +| label_ids | MessageTextInput | Comma-separated list of label IDs to filter emails | +| max_results | MessageTextInput | Maximum number of emails to load | +| Output | Type | Description | +| ------ | ---- | ----------------- | +| data | Data | Loaded email data | -This component loads a file. +## Google Drive Loader +This component loads documents from Google Drive using provided credentials and a single document ID. -Use this component to load files, such as text or JSON files. Ensure you specify the correct path and configure other parameters as necessary. +For more on creating a service account JSON, see [Service Account JSON](https://developers.google.com/identity/protocols/oauth2/service-account). +### Parameters -**Parameters:** +| Input | Type | Description | +| ----------- | ---------------- | ------------------------------------------------------------------------------------ | +| json_string | SecretStrInput | JSON string containing OAuth 2.0 access token information for service account access | +| document_id | MessageTextInput | Single Google Drive document ID | -- **Path:** The file path. -- **Silent Errors:** Set to true to prevent exceptions on errors. +| Output | Type | Description | +| ------ | ---- | -------------------- | +| docs | Data | Loaded document data | -## URL {#1cc513827a0942d6885b3a9168eabc97} +## Google Drive Search +This component searches Google Drive files using provided credentials and query parameters. ---- +For more on creating a service account JSON, see [Service Account JSON](https://developers.google.com/identity/protocols/oauth2/service-account). +### Parameters -This component retrieves content from specified URLs. +| Input | Type | Description | +| -------------- | ---------------- | ------------------------------------------------------------------------------------ | +| token_string | SecretStrInput | JSON string containing OAuth 2.0 access token information for service account access | +| query_item | DropdownInput | The field to query | +| valid_operator | DropdownInput | Operator to use in the query | +| search_term | MessageTextInput | The value to search for in the specified query item | +| query_string | MessageTextInput | The query string used for searching (can be edited manually) | +| Output | Type | Description | +| ---------- | --------- | ----------------------------------------------- | +| doc_urls | List[str] | URLs of the found documents | +| doc_ids | List[str] | IDs of the found documents | +| doc_titles | List[str] | Titles of the found documents | +| Data | Data | Document titles and URLs in a structured format | -Ensure the URLs are valid and adjust other parameters as needed. **Parameters:** +## Webhook -- **URLs:** The URLs to retrieve content from. +This component defines a webhook input for the flow. 
The flow can be triggered by an external HTTP POST request (webhook) sending a JSON payload. -## Create Data {#aac4cad0cd38426191c2e7516285877b} +If the input is not valid JSON, the component will wrap it in a "payload" field. The component's status will reflect any errors or the processed data. +### Parameters ---- +#### Inputs +| Name | Type | Description | +| ---- | ------ | ---------------------------------------------- | +| data | String | JSON payload for testing the webhook component | -This component allows you to create a `Data` from a number of inputs. You can add as many key-value pairs as you want (as long as it is less than 15). Once you've picked that number you'll need to write the name of the Key and can pass `Text` values from other components to it. +#### Outputs +| Name | Type | Description | +| ----------- | ---- | ------------------------------------- | +| output_data | Data | Processed data from the webhook input | diff --git a/docs/docs/Components/components-embedding-models.md b/docs/docs/Components/components-embedding-models.md index 74e9e446c1a3..b44394ced6bd 100644 --- a/docs/docs/Components/components-embedding-models.md +++ b/docs/docs/Components/components-embedding-models.md @@ -4,170 +4,345 @@ sidebar_position: 6 slug: /components-embedding-models --- +# Embedding Models +Embeddings models are used to convert text into numerical vectors. These vectors can be used for various tasks such as similarity search, clustering, and classification. -:::info +## AI/ML -This page may contain outdated information. It will be updated as soon as possible. +This component generates embeddings using the [AI/ML API](https://docs.aimlapi.com/api-overview/embeddings). -::: +### Parameters + +#### Inputs + +| Name | Type | Description | +|------|------|-------------| +| model_name | String | The name of the AI/ML embedding model to use | +| aiml_api_key | SecretString | API key for authenticating with the AI/ML service | + +#### Outputs + +| Name | Type | Description | +|------|------|-------------| +| embeddings | Embeddings | An instance of AIMLEmbeddingsImpl for generating embeddings | + +## Amazon Bedrock Embeddings + +This component is used to load embedding models from [Amazon Bedrock](https://aws.amazon.com/bedrock/). + +### Parameters + +#### Inputs + +| Name | Type | Description | +|------|------|-------------| +| credentials_profile_name | String | Name of the AWS credentials profile in ~/.aws/credentials or ~/.aws/config, which has access keys or role information | +| model_id | String | ID of the model to call, e.g., `amazon.titan-embed-text-v1`. This is equivalent to the `modelId` property in the `list-foundation-models` API | +| endpoint_url | String | URL to set a specific service endpoint other than the default AWS endpoint | +| region_name | String | AWS region to use, e.g., `us-west-2`. Falls back to `AWS_DEFAULT_REGION` environment variable or region specified in ~/.aws/config if not provided | + +#### Outputs + +| Name | Type | Description | +|------|------|-------------| +| embeddings | Embeddings | An instance for generating embeddings using Amazon Bedrock | + +## Astra DB vectorize + +Connect this component to the **Embeddings** port of the [Astra DB vector store component](components-vector-stores#astra-db-serverless) to generate embeddings. + +This component requires that your Astra DB database has a collection that uses a vectorize embedding provider integration. 
+For more information and instructions, see [Embedding Generation](https://docs.datastax.com/en/astra-db-serverless/databases/embedding-generation.html). + +### Parameters + +#### Inputs + +| Name | Display Name | Info | +|------|--------------|------| +| provider | Embedding Provider | The embedding provider to use | +| model_name | Model Name | The embedding model to use | +| authentication | Authentication | The name of the API key in Astra that stores your [vectorize embedding provider credentials](https://docs.datastax.com/en/astra-db-serverless/databases/embedding-generation.html#embedding-provider-authentication). (Not required if using an [Astra-hosted embedding provider](https://docs.datastax.com/en/astra-db-serverless/databases/embedding-generation.html#supported-embedding-providers).) | +| provider_api_key | Provider API Key | As an alternative to `authentication`, directly provide your embedding provider credentials. | +| model_parameters | Model Parameters | Additional model parameters | + +#### Outputs + +| Name | Type | Description | +|------|------|-------------| +| embeddings | Embeddings | An instance for generating embeddings using Astra vectorize | | | + +## Azure OpenAI Embeddings + +This component generates embeddings using Azure OpenAI models. + +### Parameters +#### Inputs +| Name | Type | Description | +|------|------|-------------| +| Model | String | Name of the model to use (default: `text-embedding-3-small`) | +| Azure Endpoint | String | Your Azure endpoint, including the resource. Example: `https://example-resource.azure.openai.com/` | +| Deployment Name | String | The name of the deployment | +| API Version | String | The API version to use, options include various dates | +| API Key | String | The API key to access the Azure OpenAI service | +#### Outputs -## Amazon Bedrock Embeddings {#4ddcfde8c1664e358d3f16d718e944d8} +| Name | Type | Description | +|------|------|-------------| +| embeddings | Embeddings | An instance for generating embeddings using Azure OpenAI | +## Cohere Embeddings + +This component is used to load embedding models from [Cohere](https://cohere.com/). + +### Parameters + +#### Inputs + +| Name | Type | Description | +|------|------|-------------| +| cohere_api_key | String | API key required to authenticate with the Cohere service | +| model | String | Language model used for embedding text documents and performing queries (default: `embed-english-v2.0`) | +| truncate | Boolean | Whether to truncate the input text to fit within the model's constraints (default: `False`) | + +#### Outputs + +| Name | Type | Description | +|------|------|-------------| +| embeddings | Embeddings | An instance for generating embeddings using Cohere | + +## Embedding similarity + +This component computes selected forms of similarity between two embedding vectors. + +### Parameters + +#### Inputs + +| Name | Display Name | Info | +|------|--------------|------| +| embedding_vectors | Embedding Vectors | A list containing exactly two data objects with embedding vectors to compare. | +| similarity_metric | Similarity Metric | Select the similarity metric to use. Options: "Cosine Similarity", "Euclidean Distance", "Manhattan Distance". | + +#### Outputs + +| Name | Display Name | Info | +|------|--------------|------| +| similarity_data | Similarity Data | Data object containing the computed similarity score and additional information. 
| + +## Google generative AI embeddings + +This component connects to Google's generative AI embedding service using the GoogleGenerativeAIEmbeddings class from the `langchain-google-genai` package. + +### Parameters + +#### Inputs + +| Name | Display Name | Info | +|------|--------------|------| +| api_key | API Key | Secret API key for accessing Google's generative AI service (required) | +| model_name | Model Name | Name of the embedding model to use (default: "models/text-embedding-004") | + +#### Outputs + +| Name | Display Name | Info | +|------|--------------|------| +| embeddings | Embeddings | Built GoogleGenerativeAIEmbeddings object | + +## Hugging Face Embeddings + +:::note +This component is deprecated as of Langflow version 1.0.18. +Instead, use the [Hugging Face API Embeddings component](#hugging-face-embeddings-inference-api). +::: -Used to load embedding models from [Amazon Bedrock](https://aws.amazon.com/bedrock/). +This component loads embedding models from HuggingFace. +Use this component to generate embeddings using locally downloaded Hugging Face models. Ensure you have sufficient computational resources to run the models. -| **Parameter** | **Type** | **Description** | **Default** | -| -------------------------- | -------- | --------------------------------------------------------------------------------------------------------------------------------------------------- | ----------- | -| `credentials_profile_name` | `str` | Name of the AWS credentials profile in ~/.aws/credentials or ~/.aws/config, which has access keys or role information. | | -| `model_id` | `str` | ID of the model to call, e.g., `amazon.titan-embed-text-v1`. This is equivalent to the `modelId` property in the `list-foundation-models` API. | | -| `endpoint_url` | `str` | URL to set a specific service endpoint other than the default AWS endpoint. | | -| `region_name` | `str` | AWS region to use, e.g., `us-west-2`. Falls back to `AWS_DEFAULT_REGION` environment variable or region specified in ~/.aws/config if not provided. | | +### Parameters +#### Inputs -## Astra vectorize {#c1e6d1373824424ea130e052ba0f46af} +| Name | Display Name | Info | +|------|--------------|------| +| Cache Folder | Cache Folder | Folder path to cache HuggingFace models | +| Encode Kwargs | Encoding Arguments | Additional arguments for the encoding process | +| Model Kwargs | Model Arguments | Additional arguments for the model | +| Model Name | Model Name | Name of the HuggingFace model to use | +| Multi Process | Multi-Process | Whether to use multiple processes | +## Hugging Face embeddings Inference API -Used to generate server-side embeddings using [DataStax Astra](https://docs.datastax.com/en/astra-db-serverless/databases/embedding-generation.html). +This component generates embeddings using Hugging Face Inference API models. +Use this component to create embeddings with Hugging Face's hosted models. Ensure you have a valid Hugging Face API key. -| **Parameter** | **Type** | **Description** | **Default** | -| ------------------ | -------- | --------------------------------------------------------------------------------------------------------------------- | ----------- | -| `provider` | `str` | The embedding provider to use. | | -| `model_name` | `str` | The embedding model to use. | | -| `authentication` | `dict` | Authentication parameters. Use the Astra Portal to add the embedding provider integration to your Astra organization. 
| | -| `provider_api_key` | `str` | An alternative to the Astra Authentication that let you use directly the API key of the provider. | | -| `model_parameters` | `dict` | Additional model parameters. | | +### Parameters +#### Inputs -## Cohere Embeddings {#0c5b7b8790da448fabd4c5ddba1fcbde} +| Name | Display Name | Info | +|------|--------------|------| +| API Key | API Key | API key for accessing the Hugging Face Inference API | +| API URL | API URL | URL of the Hugging Face Inference API | +| Model Name | Model Name | Name of the model to use for embeddings | +| Cache Folder | Cache Folder | Folder path to cache Hugging Face models | +| Encode Kwargs | Encoding Arguments | Additional arguments for the encoding process | +| Model Kwargs | Model Arguments | Additional arguments for the model | +| Multi Process | Multi-Process | Whether to use multiple processes | +## MistralAI -Used to load embedding models from [Cohere](https://cohere.com/). +This component generates embeddings using MistralAI models. +### Parameters -| **Parameter** | **Type** | **Description** | **Default** | -| ---------------- | -------- | ------------------------------------------------------------------------- | -------------------- | -| `cohere_api_key` | `str` | API key required to authenticate with the Cohere service. | | -| `model` | `str` | Language model used for embedding text documents and performing queries. | `embed-english-v2.0` | -| `truncate` | `bool` | Whether to truncate the input text to fit within the model's constraints. | `False` | +#### Inputs +| Name | Type | Description | +|------|------|-------------| +| model | String | The MistralAI model to use (default: "mistral-embed") | +| mistral_api_key | SecretString | API key for authenticating with MistralAI | +| max_concurrent_requests | Integer | Maximum number of concurrent API requests (default: 64) | +| max_retries | Integer | Maximum number of retry attempts for failed requests (default: 5) | +| timeout | Integer | Request timeout in seconds (default: 120) | +| endpoint | String | Custom API endpoint URL (default: "https://api.mistral.ai/v1/") | -## Azure OpenAI Embeddings {#8ffb790d5a6c484dab3fe6c777638a44} +#### Outputs +| Name | Type | Description | +|------|------|-------------| +| embeddings | Embeddings | MistralAIEmbeddings instance for generating embeddings | -Generate embeddings using Azure OpenAI models. +## NVIDIA +This component generates embeddings using NVIDIA models. -| **Parameter** | **Type** | **Description** | **Default** | -| ----------------- | -------- | -------------------------------------------------------------------------------------------------- | ----------- | -| `Azure Endpoint` | `str` | Your Azure endpoint, including the resource. Example: `https://example-resource.azure.openai.com/` | | -| `Deployment Name` | `str` | The name of the deployment. | | -| `API Version` | `str` | The API version to use, options include various dates. | | -| `API Key` | `str` | The API key to access the Azure OpenAI service. 
| | +### Parameters +#### Inputs -## Hugging Face API Embeddings {#8536e4ee907b48688e603ae9bf7822cb} +| Name | Type | Description | +|------|------|-------------| +| model | String | The NVIDIA model to use for embeddings (e.g., nvidia/nv-embed-v1) | +| base_url | String | Base URL for the NVIDIA API (default: https://integrate.api.nvidia.com/v1) | +| nvidia_api_key | SecretString | API key for authenticating with NVIDIA's service | +| temperature | Float | Model temperature for embedding generation (default: 0.1) | +#### Outputs -Generate embeddings using Hugging Face Inference API models. +| Name | Type | Description | +|------|------|-------------| +| embeddings | Embeddings | NVIDIAEmbeddings instance for generating embeddings | +## Ollama Embeddings -| **Parameter** | **Type** | **Description** | **Default** | -| --------------- | -------- | ----------------------------------------------------- | ------------------------ | -| `API Key` | `str` | API key for accessing the Hugging Face Inference API. | | -| `API URL` | `str` | URL of the Hugging Face Inference API. | `http://localhost:8080` | -| `Model Name` | `str` | Name of the model to use for embeddings. | `BAAI/bge-large-en-v1.5` | -| `Cache Folder` | `str` | Folder path to cache Hugging Face models. | | -| `Encode Kwargs` | `dict` | Additional arguments for the encoding process. | | -| `Model Kwargs` | `dict` | Additional arguments for the model. | | -| `Multi Process` | `bool` | Whether to use multiple processes. | `False` | +This component generates embeddings using Ollama models. +### Parameters -## Hugging Face Embeddings {#b2b74732874743d3be6fdf8aae049e74} +#### Inputs +| Name | Type | Description | +|------|------|-------------| +| Ollama Model | String | Name of the Ollama model to use (default: `llama2`) | +| Ollama Base URL | String | Base URL of the Ollama API (default: `http://localhost:11434`) | +| Model Temperature | Float | Temperature parameter for the model. Adjusts the randomness in the generated embeddings | -Used to load embedding models from [HuggingFace](https://huggingface.co/). +#### Outputs +| Name | Type | Description | +|------|------|-------------| +| embeddings | Embeddings | An instance for generating embeddings using Ollama | -| **Parameter** | **Type** | **Description** | **Default** | -| --------------- | -------- | ---------------------------------------------- | ----------------------------------------- | -| `Cache Folder` | `str` | Folder path to cache HuggingFace models. | | -| `Encode Kwargs` | `dict` | Additional arguments for the encoding process. | | -| `Model Kwargs` | `dict` | Additional arguments for the model. | | -| `Model Name` | `str` | Name of the HuggingFace model to use. | `sentence-transformers/all-mpnet-base-v2` | -| `Multi Process` | `bool` | Whether to use multiple processes. | `False` | +## OpenAI Embeddings +This component is used to load embedding models from [OpenAI](https://openai.com/). -## OpenAI Embeddings {#af7630df05a245d1a632e1bf6db2a4c5} +### Parameters +#### Inputs -Used to load embedding models from [OpenAI](https://openai.com/). 
+| Name | Type | Description | +|------|------|-------------| +| OpenAI API Key | String | The API key to use for accessing the OpenAI API | +| Default Headers | Dict | Default headers for the HTTP requests | +| Default Query | NestedDict | Default query parameters for the HTTP requests | +| Allowed Special | List | Special tokens allowed for processing (default: `[]`) | +| Disallowed Special | List | Special tokens disallowed for processing (default: `["all"]`) | +| Chunk Size | Integer | Chunk size for processing (default: `1000`) | +| Client | Any | HTTP client for making requests | +| Deployment | String | Deployment name for the model (default: `text-embedding-3-small`) | +| Embedding Context Length | Integer | Length of embedding context (default: `8191`) | +| Max Retries | Integer | Maximum number of retries for failed requests (default: `6`) | +| Model | String | Name of the model to use (default: `text-embedding-3-small`) | +| Model Kwargs | NestedDict | Additional keyword arguments for the model | +| OpenAI API Base | String | Base URL of the OpenAI API | +| OpenAI API Type | String | Type of the OpenAI API | +| OpenAI API Version | String | Version of the OpenAI API | +| OpenAI Organization | String | Organization associated with the API key | +| OpenAI Proxy | String | Proxy server for the requests | +| Request Timeout | Float | Timeout for the HTTP requests | +| Show Progress Bar | Boolean | Whether to show a progress bar for processing (default: `False`) | +| Skip Empty | Boolean | Whether to skip empty inputs (default: `False`) | +| TikToken Enable | Boolean | Whether to enable TikToken (default: `True`) | +| TikToken Model Name | String | Name of the TikToken model | +#### Outputs -| **Parameter** | **Type** | **Description** | **Default** | -| -------------------------- | ---------------- | ------------------------------------------------ | ------------------------ | -| `OpenAI API Key` | `str` | The API key to use for accessing the OpenAI API. | | -| `Default Headers` | `Dict[str, str]` | Default headers for the HTTP requests. | | -| `Default Query` | `NestedDict` | Default query parameters for the HTTP requests. | | -| `Allowed Special` | `List[str]` | Special tokens allowed for processing. | `[]` | -| `Disallowed Special` | `List[str]` | Special tokens disallowed for processing. | `["all"]` | -| `Chunk Size` | `int` | Chunk size for processing. | `1000` | -| `Client` | `Any` | HTTP client for making requests. | | -| `Deployment` | `str` | Deployment name for the model. | `text-embedding-3-small` | -| `Embedding Context Length` | `int` | Length of embedding context. | `8191` | -| `Max Retries` | `int` | Maximum number of retries for failed requests. | `6` | -| `Model` | `str` | Name of the model to use. | `text-embedding-3-small` | -| `Model Kwargs` | `NestedDict` | Additional keyword arguments for the model. | | -| `OpenAI API Base` | `str` | Base URL of the OpenAI API. | | -| `OpenAI API Type` | `str` | Type of the OpenAI API. | | -| `OpenAI API Version` | `str` | Version of the OpenAI API. | | -| `OpenAI Organization` | `str` | Organization associated with the API key. | | -| `OpenAI Proxy` | `str` | Proxy server for the requests. | | -| `Request Timeout` | `float` | Timeout for the HTTP requests. | | -| `Show Progress Bar` | `bool` | Whether to show a progress bar for processing. | `False` | -| `Skip Empty` | `bool` | Whether to skip empty inputs. | `False` | -| `TikToken Enable` | `bool` | Whether to enable TikToken. 
| `True` | -| `TikToken Model Name` | `str` | Name of the TikToken model. | | +| Name | Type | Description | +|------|------|-------------| +| embeddings | Embeddings | An instance for generating embeddings using OpenAI | +## Text embedder -## Ollama Embeddings {#a26d2cb92e6d44669c2cfff71a5e9431} +This component generates embeddings for a given message using a specified embedding model. +### Parameters -Generate embeddings using Ollama models. +#### Inputs +| Name | Display Name | Info | +|------|--------------|------| +| embedding_model | Embedding Model | The embedding model to use for generating embeddings. | +| message | Message | The message for which to generate embeddings. | -| **Parameter** | **Type** | **Description** | **Default** | -| ------------------- | -------- | ---------------------------------------------------------------------------------------- | ------------------------ | -| `Ollama Model` | `str` | Name of the Ollama model to use. | `llama2` | -| `Ollama Base URL` | `str` | Base URL of the Ollama API. | `http://localhost:11434` | -| `Model Temperature` | `float` | Temperature parameter for the model. Adjusts the randomness in the generated embeddings. | | +#### Outputs +| Name | Display Name | Info | +|------|--------------|------| +| embeddings | Embedding Data | Data object containing the original text and its embedding vector. | -## VertexAI Embeddings {#707b38c23cb9413fbbaab1ae7b872311} +## VertexAI Embeddings +This component is a wrapper around [Google Vertex AI](https://cloud.google.com/vertex-ai) [Embeddings API](https://cloud.google.com/vertex-ai/docs/generative-ai/embeddings/get-text-embeddings). -Wrapper around [Google Vertex AI](https://cloud.google.com/vertex-ai) [Embeddings API](https://cloud.google.com/vertex-ai/docs/generative-ai/embeddings/get-text-embeddings). +### Parameters +#### Inputs -| **Parameter** | **Type** | **Description** | **Default** | -| --------------------- | ------------- | ------------------------------------------------------------------------------------------------------------------------------------ | ------------- | -| `credentials` | `Credentials` | The default custom credentials to use. | | -| `location` | `str` | The default location to use when making API calls. | `us-central1` | -| `max_output_tokens` | `int` | Token limit determines the maximum amount of text output from one prompt. | `128` | -| `model_name` | `str` | The name of the Vertex AI large language model. | `text-bison` | -| `project` | `str` | The default GCP project to use when making Vertex API calls. | | -| `request_parallelism` | `int` | The amount of parallelism allowed for requests issued to VertexAI models. | `5` | -| `temperature` | `float` | Tunes the degree of randomness in text generations. Should be a non-negative value. | `0` | -| `top_k` | `int` | How the model selects tokens for output, the next token is selected from the top `k` tokens. | `40` | -| `top_p` | `float` | Tokens are selected from the most probable to least until the sum of their probabilities exceeds the top `p` value. | `0.95` | -| `tuned_model_name` | `str` | The name of a tuned model. If provided, `model_name` is ignored. | | -| `verbose` | `bool` | This parameter controls the level of detail in the output. When set to `True`, it prints internal states of the chain to help debug. 
| `False` | +| Name | Type | Description | +|------|------|-------------| +| credentials | Credentials | The default custom credentials to use | +| location | String | The default location to use when making API calls (default: `us-central1`) | +| max_output_tokens | Integer | Token limit determines the maximum amount of text output from one prompt (default: `128`) | +| model_name | String | The name of the Vertex AI large language model (default: `text-bison`) | +| project | String | The default GCP project to use when making Vertex API calls | +| request_parallelism | Integer | The amount of parallelism allowed for requests issued to VertexAI models (default: `5`) | +| temperature | Float | Tunes the degree of randomness in text generations. Should be a non-negative value (default: `0`) | +| top_k | Integer | How the model selects tokens for output, the next token is selected from the top `k` tokens (default: `40`) | +| top_p | Float | Tokens are selected from the most probable to least until the sum of their probabilities exceeds the top `p` value (default: `0.95`) | +| tuned_model_name | String | The name of a tuned model. If provided, `model_name` is ignored | +| verbose | Boolean | This parameter controls the level of detail in the output. When set to `True`, it prints internal states of the chain to help debug (default: `False`) | +#### Outputs -[Previous Vector Stores](/components-vector-stores) +| Name | Type | Description | +|------|------|-------------| +| embeddings | Embeddings | An instance for generating embeddings using VertexAI | diff --git a/docs/docs/Components/components-helpers.md b/docs/docs/Components/components-helpers.md index ef90af07e4ca..77234609a2d5 100644 --- a/docs/docs/Components/components-helpers.md +++ b/docs/docs/Components/components-helpers.md @@ -4,167 +4,244 @@ sidebar_position: 4 slug: /components-helpers --- +# Helpers +Helper components provide utility functions to help manage data, tasks, and other components in your flow. -:::info +## Chat Memory -This page may contain outdated information. It will be updated as soon as possible. +This component retrieves and manages chat messages from Langflow tables or an external memory. -::: +### Parameters +#### Inputs +| Name | Display Name | Info | +|------|--------------|------| +| memory | External Memory | Retrieve messages from an external memory. If empty, it will use the Langflow tables. | +| sender | Sender Type | Filter by sender type. | +| sender_name | Sender Name | Filter by sender name. | +| n_messages | Number of Messages | Number of messages to retrieve. | +| session_id | Session ID | The session ID of the chat. If empty, the current session ID parameter will be used. | +| order | Order | Order of the messages. | +| template | Template | The template to use for formatting the data. It can contain the keys `{text}`, `{sender}` or any other key in the message data. | +#### Outputs -## Chat memory {#304dc4a3bea74efb9068093ff18a56ad} +| Name | Display Name | Info | +|------|--------------|------| +| messages | Messages (Data) | Retrieved messages as Data objects. | +| messages_text | Messages (Text) | Retrieved messages formatted as text. | +| lc_memory | Memory | Built LangChain memory object. | +## Combine Text -This component retrieves stored chat messages based on a specific session ID. - - -### Parameters {#e0af57d97f844ce99789958161d19767} - -- **Sender type:** Choose the sender type from options like "Machine", "User", or "Both". -- **Sender name:** (Optional) The name of the sender. 
-- **Number of messages:** Number of messages to retrieve. -- **Session ID:** The session ID of the chat history. -- **Order:** Choose the message order, either "Ascending" or "Descending". -- **Data template:** (Optional) Template to convert a record to text. If left empty, the system dynamically sets it to the record's text key. - ---- - - -### Combine text {#13443183e6054d0694d65f8df08833d5} +This component concatenates two text sources into a single text chunk using a specified delimiter. +### Parameters -This component concatenates two text sources into a single text chunk using a specified delimiter. +#### Inputs +| Name | Display Name | Info | +|------|--------------|------| +| first_text | First Text | The first text input to concatenate. | +| second_text | Second Text | The second text input to concatenate. | +| delimiter | Delimiter | A string used to separate the two text inputs. Defaults to a space. | -### Parameters {#246676d119604fc5bf1be85fe93044aa} +## Create List -- **First text:** The first text input to concatenate. -- **Second text:** The second text input to concatenate. -- **Delimiter:** A string used to separate the two text inputs. Defaults to a space. +This component dynamically creates a record with a specified number of fields. ---- +### Parameters +#### Inputs -### Create record {#506f43345854473b8199631bf68a3b4a} +| Name | Display Name | Info | +|------|--------------|------| +| n_fields | Number of Fields | Number of fields to be added to the record. | +| text_key | Text Key | Key used as text. | +## Custom Component -This component dynamically creates a record with a specified number of fields. +Use this component as a template to create your custom component. +For more, see [Custom Components](components-custom-components). -### Parameters {#08735e90bd10406695771bad8a95976a} +## Filter Data -- **Number of fields:** Number of fields to be added to the record. -- **Text key:** Key used as text. +This component filters a Data object based on a list of keys. ---- +### Parameters +#### Inputs -### Custom component {#cda421d4bccb4e7db2e48615884ed753} +| Name | Display Name | Info | +|------|--------------|------| +| data | Data | Data object to filter. | +| filter_criteria | Filter Criteria | List of keys to filter by. | +#### Outputs -Use this component as a template to create your custom component. +| Name | Display Name | Info | +|------|--------------|------| +| filtered_data | Filtered Data | A new Data object containing only the key-value pairs that match the filter criteria. | +## Hierarchical Task -### Parameters {#04f9eb5e6da4431593a5bee8831f2327} +This component creates and manages hierarchical tasks for CrewAI agents in a Playground environment. -- **Parameter:** Describe the purpose of this parameter. +For more information, see the [CrewAI documentation](https://docs.crewai.com/how-to/Hierarchical/). -INFO +### Parameters +#### Inputs -Customize the `build_config` and `build` methods according to your requirements. +| Name | Display Name | Info | +|------|--------------|------| +| task_description | Description | Descriptive text detailing task's purpose and execution. | +| expected_output | Expected Output | Clear definition of expected task outcome. | +| tools | Tools | List of tools/resources limited for task execution. Uses the Agent tools by default. | +#### Outputs -Learn more about creating custom components at [Custom Component](http://docs.langflow.org/components/custom). 
+| Name | Display Name | Info | +|------|--------------|------| +| task_output | Task | The built hierarchical task. | +## ID Generator ---- +This component generates a unique ID. +### Parameters -### Documents to Data {#53a6a99a54f0435e9209169cf7730c55} +#### Outputs +| Name | Display Name | Info | +|------|--------------|------| +| value | Value | Unique ID generated. | -Convert LangChain documents into Data. +## Parse JSON +This component converts and extracts JSON fields using JQ queries. -### Parameters {#0eb5fce528774c2db4a3677973e75cf8} +### Parameters -- **Documents:** Documents to be converted into Data. +#### Inputs ---- +| Name | Display Name | Info | +|------|--------------|------| +| input_value | Input | Data object to filter. Can be a Message or Data object. | +| query | JQ Query | JQ Query to filter the data. The input is always a JSON list. | +#### Outputs -### ID generator {#4a8fbfb95ebe44ee8718725546db5393} +| Name | Display Name | Info | +|------|--------------|------| +| filtered_data | Filtered Data | Filtered data as a list of Data objects. | +## Merge Data -Generates a unique ID. +This component combines multiple data sources into a single unified Data object. +The component iterates through the input list of Data objects, merging them into a single Data object. If the input list is empty, it returns an empty Data object. If there's only one input Data object, it returns that object unchanged. The merging process uses the addition operator to combine Data objects. -### Parameters {#4629dd15594c47399c97d9511060e114} +### Parameters -- **Value:** Unique ID generated. +#### Inputs ---- +| Name | Display Name | Info | +|------|--------------|------| +| data | Data | A list of Data objects to be merged | +#### Outputs -### Message history {#6a1a60688641490197c6443df573960e} +| Name | Display Name | Info | +|------|--------------|------| +| merged_data | Merged Data | A single Data object containing the combined information from all input Data objects | -Retrieves stored chat messages based on a specific session ID. +## Parse Data +The ParseData component converts Data objects into plain text using a specified template. +This component transforms structured data into human-readable text formats, allowing for customizable output through the use of templates. -### Parameters {#31c7fc2a3e8c4f7c89f923e700f4ea34} +### Parameters -- **Sender type:** Options for the sender type. -- **Sender name:** Sender name. -- **Number of messages:** Number of messages to retrieve. -- **Session ID:** Session ID of the chat history. -- **Order:** Order of the messages. +#### Inputs ---- +| Name | Display Name | Info | +|------|--------------|------| +| data | Data | The data to convert to text | +| template | Template | The template to use for formatting the data. It can contain the keys `{text}`, `{data}` or any other key in the Data | +| sep | Separator | The separator to use between multiple data items | +#### Outputs -### Data to text {#f60ab5bbc0db4b27b427897eba97fe29} +| Name | Display Name | Info | +|------|--------------|------| +| text | Text | The resulting formatted text string as a Message object | +## Sequential Task -Convert Data into plain text following a specified template. +This component creates and manage sequential tasks for CrewAI agents. It builds a SequentialTask object with the provided description, expected output, and agent, allowing for the specification of tools and asynchronous execution. 
+For more information, see the [CrewAI documentation](https://docs.crewai.com/how-to/Sequential/). -### Parameters {#01b91376569149a49cfcfd9321323688} +### Parameters -- **Data:** The Data to convert to text. -- **Template:** The template used for formatting the Data. It can contain keys like `{text}`, `{data}`, or any other key in the record. +#### Inputs ---- +| Name | Display Name | Info | +|------|--------------|------| +| task_description | Description | Descriptive text detailing task's purpose and execution. | +| expected_output | Expected Output | Clear definition of expected task outcome. | +| tools | Tools | List of tools/resources limited for task execution. Uses the Agent tools by default. | +| agent | Agent | CrewAI Agent that will perform the task. | +| task | Task | CrewAI Task that will perform the task. | +| async_execution | Async Execution | Boolean flag indicating asynchronous task execution. | +#### Outputs -### Split text {#210be0ae518d411695d6caafdd7700eb} +| Name | Display Name | Info | +|------|--------------|------| +| task_output | Task | The built sequential task or list of tasks. | +## Split Text -Split text into chunks of a specified length. +This component splits text into chunks of a specified length. +### Parameters -### Parameters {#04197fcd05e64e10b189de1171a32682} +#### Inputs -- **Texts:** Texts to split. -- **Separators:** Characters to split on. Defaults to a space. -- **Max chunk size:** The maximum length (in characters) of each chunk. -- **Chunk overlap:** The amount of character overlap between chunks. -- **Recursive:** Whether to split recursively. +| Name | Display Name | Info | +|------|--------------|------| +| texts | Texts | Texts to split. | +| separators | Separators | Characters to split on. Defaults to a space. | +| max_chunk_size | Max Chunk Size | The maximum length (in characters) of each chunk. | +| chunk_overlap | Chunk Overlap | The amount of character overlap between chunks. | +| recursive | Recursive | Whether to split recursively. | ---- +## Store Message +This component stores chat messages or text into Langflow tables or an external memory. -### Update record {#d3b6116dfd8d4af080ad01bc8fd2b0b3} +It provides flexibility in managing message storage and retrieval within a chat system. +### Parameters -Update a record with text-based key/value pairs, similar to updating a Python dictionary. +#### Inputs +| Name | Display Name | Info | +|------|--------------|------| +| message | Message | The chat message to be stored. (Required) | +| memory | External Memory | The external memory to store the message. If empty, it will use the Langflow tables. | +| sender | Sender | The sender of the message. Can be Machine or User. If empty, the current sender parameter will be used. | +| sender_name | Sender Name | The name of the sender. Can be AI or User. If empty, the current sender parameter will be used. | +| session_id | Session ID | The session ID of the chat. If empty, the current session ID parameter will be used. | -### Parameters {#c830224edc1d486aaaa5e2889f4f6689} +#### Outputs -- **Data:** The record to update. -- **New data:** The new data to update the record with. +| Name | Display Name | Info | +|------|--------------|------| +| stored_messages | Stored Messages | The list of stored messages after the current message has been added. 
| diff --git a/docs/docs/Components/components-io.md b/docs/docs/Components/components-io.md index 1a9862c1689c..2b8f22c8d507 100644 --- a/docs/docs/Components/components-io.md +++ b/docs/docs/Components/components-io.md @@ -4,126 +4,86 @@ sidebar_position: 1 slug: /components-io --- +# Inputs & Outputs +This category of components defines where data enters and exits your flow. They dynamically alter the Playground and can be renamed to facilitate building and maintaining your flows. -Inputs and Outputs are a category of components that are used to define where data comes in and out of your flow. They also dynamically change the Playground and can be renamed to facilitate building and maintaining your flows. +## Inputs +Inputs are components used to define where data enters your flow. They can receive data from various sources, such as users, databases, or any other source that can be converted to Text or Data. -## Inputs {#6b1421ec66994d5ebe9fcce000829328} - - ---- - - -Inputs are components used to define where data enters your flow. They can receive data from the user, a database, or any other source that can be converted to Text or Data. +### Chat Input +This component collects user input from the chat. The difference between Chat Input and other Input components is the output format, the number of configurable fields, and the way they are displayed in the Playground. +Chat Input components can output Text or Data. When you want to pass the sender name or sender to the next component, use the Data output. To pass only the message, use the Text output. Passing only the message is useful when saving the message to a database or a memory system like Zep. -Chat Input components can output `Text` or `Data`. When you want to pass the sender name or sender to the next component, use the `Data` output. To pass only the message, use the `Text` output, useful when saving the message to a database or memory system like Zep. - - -You can find out more about Chat Input and other Inputs [here](/components-io). - - -### Chat Input {#2a5f02262f364f8fb75bcfa246e7bb26} - - -This component collects user input from the chat. - - -**Parameters** +#### Parameters -- **Sender Type:** Specifies the sender type. Defaults to `User`. Options are `Machine` and `User`. -- **Sender Name:** Specifies the name of the sender. Defaults to `User`. -- **Message:** Specifies the message text. It is a multiline text input. -- **Session ID:** Specifies the session ID of the chat history. If provided, the message will be saved in the Message History. +| Name | Display Name | Info | +|--------------|--------------|---------------------------------------------------------------------| +| Sender Type | Sender Type | Specifies the sender type (User or Machine). Defaults to User | +| Sender Name | Sender Name | Specifies the name of the sender. Defaults to User | +| Message | Message | Specifies the message text. Multiline text input | +| Session ID | Session ID | Specifies the session ID of the chat history | :::note - -If `As Data` is `true` and the `Message` is a `Data`, the data of the `Data` will be updated with the `Sender`, `Sender Name`, and `Session ID`. - +If "As Data" is true and the "Message" is a Data, the data will be updated with the Sender, Sender Name, and Session ID. ::: +### Text Input +This component adds an Input field on the Playground, allowing parameter definition while running and testing your flow. +The Data Template field specifies how a Data should be converted into Text. 
This is particularly useful when you want to extract specific information from a Data and pass it as text to the next component in the sequence. -One significant capability of the Chat Input component is its ability to transform the Playground into a chat window. This feature is particularly valuable for scenarios requiring user input to initiate or influence the flow. - - -### Text Input {#260aef3726834896b496b56cdefb6d4a} - - -The **Text Input** component adds an **Input** field on the Playground. This enables you to define parameters while running and testing your flow. - - -**Parameters** - -- **Value:** Specifies the text input value. This is where the user inputs text data that will be passed to the next component in the sequence. If no value is provided, it defaults to an empty string. -- **Data Template:** Specifies how a `Data` should be converted into `Text`. +For example, if you have a Data with the following structure: -The **Data Template** field is used to specify how a `Data` should be converted into `Text`. This is particularly useful when you want to extract specific information from a `Data` and pass it as text to the next component in the sequence. +```json +{ "name": "John Doe", "age": 30, "email": "johndoe@email.com"} +``` +A template with Name: `{name}, Age: {age}` will convert the Data into a text string of `Name: John Doe, Age: 30`. -For example, if you have a `Data` with the following structure: +If you pass more than one Data, the text will be concatenated with a new line separator. +#### Parameters -`{ "name": "John Doe", "age": 30, "email": "johndoe@email.com"}` +| Name | Display Name | Info | +|---------------|---------------|--------------------------------------------------------------------| +| Value | Value | Specifies the text input value. Defaults to an empty string | +| Data Template | Data Template | Specifies how a Data should be converted into Text | +## Outputs -A template with `Name: {name}, Age: {age}` will convert the `Data` into a text string of `Name: John Doe, Age: 30`. - - -If you pass more than one `Data`, the text will be concatenated with a new line separator. - - -## Outputs {#f62c5ad37a6f45a39b463c9b35ce7842} - - ---- - - -Outputs are components that are used to define where data comes out of your flow. They can be used to send data to the user, to the Playground, or to define how the data will be displayed in the Playground. - - -The Chat Output works similarly to the Chat Input but does not have a field that allows for written input. It is used as an Output definition and can be used to send data to the user. - - -You can find out more about it and the other Outputs [here](/components-io). - - -### Chat Output {#1edd49b72781432ea29d70acbda4e7e7} +Outputs define where data exits your flow. They can send data to the user, the Playground, or define how data will be displayed in the Playground. +### Chat Output This component sends a message to the chat. +#### Parameters -**Parameters** - -- **Sender Type:** Specifies the sender type. Default is `"Machine"`. Options are `"Machine"` and `"User"`. -- **Sender Name:** Specifies the sender's name. Default is `"AI"`. -- **Session ID:** Specifies the session ID of the chat history. If provided, messages are saved in the Message History. -- **Message:** Specifies the text of the message. +| Name | Display Name | Info | +|--------------|--------------|---------------------------------------------------------------------| +| Sender Type | Sender Type | Specifies the sender type (Machine or User). 
Defaults to Machine | +| Sender Name | Sender Name | Specifies the sender's name. Defaults to AI | +| Session ID | Session ID | Specifies the session ID of the chat history | +| Message | Message | Specifies the text of the message | :::note - -If `As Data` is `true` and the `Message` is a `Data`, the data in the `Data` is updated with the `Sender`, `Sender Name`, and `Session ID`. - +If "As Data" is true and the "Message" is a Data, the data will be updated with the Sender, Sender Name, and Session ID. ::: +### Text Output +This component displays text data to the user without sending it to the chat. Defaults to an empty string. +#### Parameters -### Text Output {#b607000bc0c5402db0433c1a7d734d01} - - -This component displays text data to the user. It is useful when you want to show text without sending it to the chat. - - -**Parameters** - -- **Value:** Specifies the text data to be displayed. Defaults to an empty string. - -The `TextOutput` component provides a simple way to display text data. It allows textual data to be visible in the chat window during your interaction flow. +| Name | Display Name | Info | +|-------|--------------|----------------------------------------------------------| +| Value | Value | Specifies the text data to be displayed | diff --git a/docs/docs/Components/components-loaders.md b/docs/docs/Components/components-loaders.md new file mode 100644 index 000000000000..d0d9cd910672 --- /dev/null +++ b/docs/docs/Components/components-loaders.md @@ -0,0 +1,74 @@ +--- +title: Loaders +sidebar_position: 10 +slug: /components-loaders +--- + +# Loaders + +Loaders are components used to load documents from various sources, such as databases, websites, and local files. They can be used to fetch data from external sources and convert it into a format that can be processed by other components. + +## Confluence + +The Confluence component integrates with the Confluence wiki collaboration platform to load and process documents. It utilizes the ConfluenceLoader from LangChain to fetch content from a specified Confluence space. + +### Parameters + +#### Inputs: + +| Name | Display Name | Info | +| --- | --- | --- | +| url | Site URL | The base URL of the Confluence Space (e.g., https://company.atlassian.net/wiki) | +| username | Username | Atlassian User E-mail (e.g., email@example.com) | +| api_key | API Key | Atlassian API Key (Create at: https://id.atlassian.com/manage-profile/security/api-tokens) | +| space_key | Space Key | The key of the Confluence space to access | +| cloud | Use Cloud? | Whether to use Confluence Cloud (default: true) | +| content_format | Content Format | Specify content format (default: STORAGE) | +| max_pages | Max Pages | Maximum number of pages to retrieve (default: 1000) | + +#### Outputs: + +| Name | Display Name | Info | +| --- | --- | --- | +| data | Data | List of Data objects containing the loaded Confluence documents | + +## GitLoader + +The GitLoader component uses the GitLoader from LangChain to fetch and load documents from a specified Git repository. 
+ +### Parameters + +#### Inputs: + +| Name | Display Name | Info | +| --- | --- | --- | +| repo_path | Repository Path | The local path to the Git repository | +| clone_url | Clone URL | The URL to clone the Git repository from (optional) | +| branch | Branch | The branch to load files from (default: 'main') | +| file_filter | File Filter | Patterns to filter files (e.g., '.py' to include only .py files, '!.py' to exclude .py files) | +| content_filter | Content Filter | A regex pattern to filter files based on their content | + +#### Outputs: + +| Name | Display Name | Info | +| --- | --- | --- | +| data | Data | List of Data objects containing the loaded Git repository documents | + +## Unstructured + +This component uses the [Unstructured](https://unstructured.io/) library to load and parse PDF, DOCX, and TXT files into structured data. This component works with both the open-source library and the Unstructured API. + +### Parameters + +#### Inputs: + +| Name | Display Name | Info | +| --- | --- | --- | +| file | File | The path to the file to be parsed (supported types: pdf, docx, txt) | +| api_key | API Key | Unstructured API Key (optional, if not provided, open-source library will be used) | + +#### Outputs: + +| Name | Display Name | Info | +| --- | --- | --- | +| data | Data | List of Data objects containing the parsed content from the input file | \ No newline at end of file diff --git a/docs/docs/Components/components-memories.md b/docs/docs/Components/components-memories.md new file mode 100644 index 000000000000..5dfd45d0a436 --- /dev/null +++ b/docs/docs/Components/components-memories.md @@ -0,0 +1,70 @@ +# Memories + +Chat memory components store and retrieve chat messages by `session_id`. + +## AstraDBChatMemory Component + +This component creates an `AstraDBChatMessageHistory` instance, which allows for storing and retrieving chat messages using Astra DB, a cloud-native database service. + +### Parameters + +#### Inputs + +| Name | Type | Description | +|------------------|---------------|-----------------------------------------------------------------------| +| collection_name | String | Name of the Astra DB collection for storing messages. Required. | +| token | SecretString | Authentication token for Astra DB access. Required. | +| api_endpoint | SecretString | API endpoint URL for the Astra DB service. Required. | +| namespace | String | Optional namespace within Astra DB for the collection. | +| session_id | MessageText | Chat session ID. Uses current session ID if not provided. | + +#### Outputs + +| Name | Type | Description | +|-----------------|-------------------------|-----------------------------------------------------------| +| message_history | BaseChatMessageHistory | An instance of AstraDBChatMessageHistory for the session. | + +## CassandraChatMemory Component + +This component creates a `CassandraChatMessageHistory` instance, enabling storage and retrieval of chat messages using Apache Cassandra or DataStax Astra DB. + +### Parameters + +#### Inputs + +| Name | Type | Description | +|----------------|---------------|-------------------------------------------------------------------------------| +| database_ref | MessageText | Contact points for Cassandra or Astra DB database ID. Required. | +| username | MessageText | Username for Cassandra (leave empty for Astra DB). | +| token | SecretString | Password for Cassandra or token for Astra DB. Required. | +| keyspace | MessageText | Keyspace in Cassandra or namespace in Astra DB. Required. 
| +| table_name | MessageText | Name of the table or collection for storing messages. Required. | +| session_id | MessageText | Unique identifier for the chat session. Optional. | +| cluster_kwargs | Dictionary | Additional keyword arguments for Cassandra cluster configuration. Optional. | + +#### Outputs + +| Name | Type | Description | +|-----------------|-------------------------|--------------------------------------------------------------| +| message_history | BaseChatMessageHistory | An instance of CassandraChatMessageHistory for the session. | + +## ZepChatMemory Component + +This component creates a `ZepChatMessageHistory` instance, enabling storage and retrieval of chat messages using Zep, a memory server for Large Language Models (LLMs). + +### Parameters + +#### Inputs + +| Name | Type | Description | +|---------------|---------------|-----------------------------------------------------------| +| url | MessageText | URL of the Zep instance. Required. | +| api_key | SecretString | API Key for authentication with the Zep instance. | +| api_base_path | Dropdown | API version to use. Options: "api/v1" or "api/v2". | +| session_id | MessageText | Unique identifier for the chat session. Optional. | + +#### Outputs + +| Name | Type | Description | +|-----------------|-------------------------|-------------------------------------------------------| +| message_history | BaseChatMessageHistory | An instance of ZepChatMessageHistory for the session. | \ No newline at end of file diff --git a/docs/docs/Components/components-models.md b/docs/docs/Components/components-models.md index e30383dd5ac8..bcc85cb83ed9 100644 --- a/docs/docs/Components/components-models.md +++ b/docs/docs/Components/components-models.md @@ -4,315 +4,359 @@ sidebar_position: 5 slug: /components-models --- +# Models +Model components are used to generate text using language models. These components can be used to generate text for various tasks such as chatbots, content generation, and more. -:::info +## AI/ML API -This page may contain outdated information. It will be updated as soon as possible. +This component creates a ChatOpenAI model instance using the AIML API. -::: +For more information, see [AIML documentation](https://docs.aimlapi.com/). +### Parameters +#### Inputs +| Name | Type | Description | +|--------------|-------------|---------------------------------------------------------------------------------------------| +| max_tokens | Integer | The maximum number of tokens to generate. Set to 0 for unlimited tokens. Range: 0-128000. | +| model_kwargs | Dictionary | Additional keyword arguments for the model. | +| model_name | String | The name of the AIML model to use. Options are predefined in AIML_CHAT_MODELS. | +| aiml_api_base| String | The base URL of the AIML API. Defaults to https://api.aimlapi.com. | +| api_key | SecretString| The AIML API Key to use for the model. | +| temperature | Float | Controls randomness in the output. Default: 0.1. | +| seed | Integer | Controls reproducibility of the job. | -## Amazon Bedrock {#3b8ceacef3424234814f95895a25bf43} +#### Outputs +| Name | Type | Description | +|-------|---------------|------------------------------------------------------------------| +| model | LanguageModel | An instance of ChatOpenAI configured with the specified parameters. | -This component facilitates the generation of text using the LLM (Large Language Model) model from Amazon Bedrock. +## Amazon Bedrock +This component generates text using Amazon Bedrock LLMs. 
-**Params** +For more information, see [Amazon Bedrock documentation](https://docs.aws.amazon.com/bedrock). -- **Input Value:** Specifies the input text for text generation. -- **System Message (Optional):** A system message to pass to the model. -- **Model ID (Optional):** Specifies the model ID to be used for text generation. Defaults to `"anthropic.claude-instant-v1"`. Available options include: - - `"ai21.j2-grande-instruct"` - - `"ai21.j2-jumbo-instruct"` - - `"ai21.j2-mid"` - - `"ai21.j2-mid-v1"` - - `"ai21.j2-ultra"` - - `"ai21.j2-ultra-v1"` - - `"anthropic.claude-instant-v1"` - - `"anthropic.claude-v1"` - - `"anthropic.claude-v2"` - - `"cohere.command-text-v14"` -- **Credentials Profile Name (Optional):** Specifies the name of the credentials profile. -- **Region Name (Optional):** Specifies the region name. -- **Model Kwargs (Optional):** Additional keyword arguments for the model. -- **Endpoint URL (Optional):** Specifies the endpoint URL. -- **Streaming (Optional):** Specifies whether to stream the response from the model. Defaults to `False`. -- **Cache (Optional):** Specifies whether to cache the response. -- **Stream (Optional):** Specifies whether to stream the response from the model. Defaults to `False`. +### Parameters -NOTE +#### Inputs +| Name | Type | Description | +|------------------------|--------------|-------------------------------------------------------------------------------------| +| model_id | String | The ID of the Amazon Bedrock model to use. Options include various models. | +| aws_access_key | SecretString | AWS Access Key for authentication. | +| aws_secret_key | SecretString | AWS Secret Key for authentication. | +| credentials_profile_name | String | Name of the AWS credentials profile to use (advanced). | +| region_name | String | AWS region name. Default: "us-east-1". | +| model_kwargs | Dictionary | Additional keyword arguments for the model (advanced). | +| endpoint_url | String | Custom endpoint URL for the Bedrock service (advanced). | -Ensure that necessary credentials are provided to connect to the Amazon Bedrock API. If connection fails, a ValueError will be raised. +#### Outputs +| Name | Type | Description | +|-------|---------------|-------------------------------------------------------------------| +| model | LanguageModel | An instance of ChatBedrock configured with the specified parameters. | ---- +## Anthropic +This component allows the generation of text using Anthropic Chat and Language models. -## Anthropic {#a6ae46f98c4c4d389d44b8408bf151a1} +For more information, see the [Anthropic documentation](https://docs.anthropic.com/en/docs/welcome). +### Parameters -This component allows the generation of text using Anthropic Chat&Completion large language models. +#### Inputs +| Name | Type | Description | +|---------------------|-------------|----------------------------------------------------------------------------------------| +| max_tokens | Integer | The maximum number of tokens to generate. Set to 0 for unlimited tokens. Default: 4096.| +| model | String | The name of the Anthropic model to use. Options include various Claude 3 models. | +| anthropic_api_key | SecretString| Your Anthropic API key for authentication. | +| temperature | Float | Controls randomness in the output. Default: 0.1. | +| anthropic_api_url | String | Endpoint of the Anthropic API. Defaults to 'https://api.anthropic.com' if not specified (advanced). | +| prefill | String | Prefill text to guide the model's response (advanced). 
| -**Params** +#### Outputs -- **Model Name:** Specifies the name of the Anthropic model to be used for text generation. Available options include (and not limited to): - - `"claude-2.1"` - - `"claude-2.0"` - - `"claude-instant-1.2"` - - `"claude-instant-1"` -- **Anthropic API Key:** Your Anthropic API key. -- **Max Tokens (Optional):** Specifies the maximum number of tokens to generate. Defaults to `256`. -- **Temperature (Optional):** Specifies the sampling temperature. Defaults to `0.7`. -- **API Endpoint (Optional):** Specifies the endpoint of the Anthropic API. Defaults to `"https://api.anthropic.com"`if not specified. -- **Input Value:** Specifies the input text for text generation. -- **Stream (Optional):** Specifies whether to stream the response from the model. Defaults to `False`. -- **System Message (Optional):** A system message to pass to the model. +| Name | Type | Description | +|-------|---------------|------------------------------------------------------------------| +| model | LanguageModel | An instance of ChatAnthropic configured with the specified parameters. | -For detailed documentation and integration guides, please refer to the [Anthropic Component Documentation](https://python.langchain.com/docs/integrations/chat/anthropic). +## Azure OpenAI +This component generates text using Azure OpenAI LLM. ---- +For more information, see the [Azure OpenAI documentation](https://learn.microsoft.com/en-us/azure/ai-services/openai/). +### Parameters -## Azure OpenAI {#7e3bff29ce714479b07feeb4445680cd} +#### Inputs +| Name | Display Name | Info | +|---------------------|---------------------|---------------------------------------------------------------------------------| +| Model Name | Model Name | Specifies the name of the Azure OpenAI model to be used for text generation. | +| Azure Endpoint | Azure Endpoint | Your Azure endpoint, including the resource. | +| Deployment Name | Deployment Name | Specifies the name of the deployment. | +| API Version | API Version | Specifies the version of the Azure OpenAI API to be used. | +| API Key | API Key | Your Azure OpenAI API key. | +| Temperature | Temperature | Specifies the sampling temperature. Defaults to `0.7`. | +| Max Tokens | Max Tokens | Specifies the maximum number of tokens to generate. Defaults to `1000`. | +| Input Value | Input Value | Specifies the input text for text generation. | +| Stream | Stream | Specifies whether to stream the response from the model. Defaults to `False`. | -This component allows the generation of text using the LLM (Large Language Model) model from Azure OpenAI. +## Cohere +This component generates text using Cohere's language models. -**Params** +For more information, see the [Cohere documentation](https://cohere.ai/). -- **Model Name:** Specifies the name of the Azure OpenAI model to be used for text generation. Available options include: - - `"gpt-35-turbo"` - - `"gpt-35-turbo-16k"` - - `"gpt-35-turbo-instruct"` - - `"gpt-4"` - - `"gpt-4-32k"` - - `"gpt-4-vision"` - - `"gpt-4o"` -- **Azure Endpoint:** Your Azure endpoint, including the resource. Example: `https://example-resource.azure.openai.com/`. -- **Deployment Name:** Specifies the name of the deployment. -- **API Version:** Specifies the version of the Azure OpenAI API to be used. Available options include: - - `"2023-03-15-preview"` - - `"2023-05-15"` - - `"2023-06-01-preview"` - - `"2023-07-01-preview"` - - `"2023-08-01-preview"` - - `"2023-09-01-preview"` - - `"2023-12-01-preview"` -- **API Key:** Your Azure OpenAI API key. 
-- **Temperature (Optional):** Specifies the sampling temperature. Defaults to `0.7`. -- **Max Tokens (Optional):** Specifies the maximum number of tokens to generate. Defaults to `1000`. -- **Input Value:** Specifies the input text for text generation. -- **Stream (Optional):** Specifies whether to stream the response from the model. Defaults to `False`. -- **System Message (Optional):** A system message to pass to the model. +### Parameters -For detailed documentation and integration guides, please refer to the [Azure OpenAI Component Documentation](https://python.langchain.com/docs/integrations/llms/azure_openai). +#### Inputs +| Name | Display Name | Info | +|---------------------|--------------------|----------------------------------------------------------| +| Cohere API Key | Cohere API Key | Your Cohere API key. | +| Max Tokens | Max Tokens | Specifies the maximum number of tokens to generate. Defaults to `256`. | +| Temperature | Temperature | Specifies the sampling temperature. Defaults to `0.75`. | +| Input Value | Input Value | Specifies the input text for text generation. | ---- +## Google Generative AI +This component generates text using Google's Generative AI models. -## Cohere {#706396a33bf94894966c95571252d78b} +For more information, see the [Google Generative AI documentation](https://cloud.google.com/ai-platform/training/docs/algorithms/gpt-3). +### Parameters -This component enables text generation using Cohere large language models. +#### Inputs +| Name | Display Name | Info | +|---------------------|--------------------|-----------------------------------------------------------------------| +| Google API Key | Google API Key | Your Google API key to use for the Google Generative AI. | +| Model | Model | The name of the model to use, such as `"gemini-pro"`. | +| Max Output Tokens | Max Output Tokens | The maximum number of tokens to generate. | +| Temperature | Temperature | Run inference with this temperature. | +| Top K | Top K | Consider the set of top K most probable tokens. | +| Top P | Top P | The maximum cumulative probability of tokens to consider when sampling. | +| N | N | Number of chat completions to generate for each prompt. | -**Params** +## Groq -- **Cohere API Key:** Your Cohere API key. -- **Max Tokens (Optional):** Specifies the maximum number of tokens to generate. Defaults to `256`. -- **Temperature (Optional):** Specifies the sampling temperature. Defaults to `0.75`. -- **Input Value:** Specifies the input text for text generation. -- **Stream (Optional):** Specifies whether to stream the response from the model. Defaults to `False`. -- **System Message (Optional):** A system message to pass to the model. +This component generates text using Groq's language models. ---- +For more information, see the [Groq documentation](https://groq.com/). +### Parameters -## Google Generative AI {#074d9623463449f99d41b44699800e8a} +#### Inputs +| Name | Type | Description | +|----------------|---------------|-----------------------------------------------------------------| +| groq_api_key | SecretString | API key for the Groq API. | +| groq_api_base | String | Base URL path for API requests. Default: "https://api.groq.com" (advanced). | +| max_tokens | Integer | The maximum number of tokens to generate (advanced). | +| temperature | Float | Controls randomness in the output. Range: [0.0, 1.0]. Default: 0.1. | +| n | Integer | Number of chat completions to generate for each prompt (advanced). | +| model_name | String | The name of the Groq model to use. 
Options are dynamically fetched from the Groq API. | -This component enables text generation using Google Generative AI. +#### Outputs +| Name | Type | Description | +|-------|---------------|------------------------------------------------------------------| +| model | LanguageModel | An instance of ChatGroq configured with the specified parameters. | -**Params** +## Hugging Face API -- **Google API Key:** Your Google API key to use for the Google Generative AI. -- **Model:** The name of the model to use. Supported examples are `"gemini-pro"` and `"gemini-pro-vision"`. -- **Max Output Tokens (Optional):** The maximum number of tokens to generate. -- **Temperature:** Run inference with this temperature. Must be in the closed interval [0.0, 1.0]. -- **Top K (Optional):** Decode using top-k sampling: consider the set of top_k most probable tokens. Must be positive. -- **Top P (Optional):** The maximum cumulative probability of tokens to consider when sampling. -- **N (Optional):** Number of chat completions to generate for each prompt. Note that the API may not return the full n completions if duplicates are generated. -- **Input Value:** The input to the model. -- **Stream (Optional):** Specifies whether to stream the response from the model. Defaults to `False`. -- **System Message (Optional):** A system message to pass to the model. +This component generates text using Hugging Face's language models. ---- +For more information, see the [Hugging Face documentation](https://huggingface.co/). +### Parameters -## Hugging Face API {#c1267b9a6b36487cb2ee127ce9b64dbb} +#### Inputs +| Name | Display Name | Info | +|---------------------|-------------------|-------------------------------------------| +| Endpoint URL | Endpoint URL | The URL of the Hugging Face Inference API endpoint. | +| Task | Task | Specifies the task for text generation. | +| API Token | API Token | The API token required for authentication.| +| Model Kwargs | Model Kwargs | Additional keyword arguments for the model.| +| Input Value | Input Value | The input text for text generation. | -This component facilitates text generation using LLM models from the Hugging Face Inference API. +## Maritalk +This component generates text using Maritalk LLMs. -**Params** +For more information, see [Maritalk documentation](https://www.maritalk.com/). -- **Endpoint URL:** The URL of the Hugging Face Inference API endpoint. Should be provided along with necessary authentication credentials. -- **Task:** Specifies the task for text generation. Options include `"text2text-generation"`, `"text-generation"`, and `"summarization"`. -- **API Token:** The API token required for authentication with the Hugging Face Hub. -- **Model Keyword Arguments (Optional):** Additional keyword arguments for the model. Should be provided as a Python dictionary. -- **Input Value:** The input text for text generation. -- **Stream (Optional):** Specifies whether to stream the response from the model. Defaults to `False`. -- **System Message (Optional):** A system message to pass to the model. +### Parameters ---- +#### Inputs +| Name | Type | Description | +|----------------|---------------|-----------------------------------------------------------------| +| max_tokens | Integer | The maximum number of tokens to generate. Set to 0 for unlimited tokens. Default: 512. | +| model_name | String | The name of the Maritalk model to use. Options: "sabia-2-small", "sabia-2-medium". Default: "sabia-2-small". 
| +| api_key | SecretString | The Maritalk API Key to use for authentication. | +| temperature | Float | Controls randomness in the output. Range: [0.0, 1.0]. Default: 0.5. | +| endpoint_url | String | The Maritalk API endpoint. Default: https://api.maritalk.com. | -## LiteLLM Model {#9fb59dad3b294a05966320d39f483a50} +#### Outputs +| Name | Type | Description | +|-------|---------------|------------------------------------------------------------------| +| model | LanguageModel | An instance of ChatMaritalk configured with the specified parameters. | -Generates text using the `LiteLLM` collection of large language models. +## Mistral +This component generates text using MistralAI LLMs. -**Parameters** +For more information, see [Mistral AI documentation](https://docs.mistral.ai/). -- **Model name:** The name of the model to use. For example, `gpt-3.5-turbo`. (Type: str) -- **API key:** The API key to use for accessing the provider's API. (Type: str, Optional) -- **Provider:** The provider of the API key. (Type: str, Choices: "OpenAI", "Azure", "Anthropic", "Replicate", "Cohere", "OpenRouter") -- **Temperature:** Controls the randomness of the text generation. (Type: float, Default: 0.7) -- **Model kwargs:** Additional keyword arguments for the model. (Type: Dict, Optional) -- **Top p:** Filter responses to keep the cumulative probability within the top p tokens. (Type: float, Optional) -- **Top k:** Filter responses to only include the top k tokens. (Type: int, Optional) -- **N:** Number of chat completions to generate for each prompt. (Type: int, Default: 1) -- **Max tokens:** The maximum number of tokens to generate for each chat completion. (Type: int, Default: 256) -- **Max retries:** Maximum number of retries for failed requests. (Type: int, Default: 6) -- **Verbose:** Whether to print verbose output. (Type: bool, Default: False) -- **Input:** The input prompt for text generation. (Type: str) -- **Stream:** Whether to stream the output. (Type: bool, Default: False) -- **System message:** System message to pass to the model. (Type: str, Optional) +### Parameters ---- +#### Inputs +| Name | Type | Description | +|---------------------|--------------|-----------------------------------------------------------------------------------------------| +| max_tokens | Integer | The maximum number of tokens to generate. Set to 0 for unlimited tokens (advanced). | +| model_name | String | The name of the Mistral AI model to use. Options include "open-mixtral-8x7b", "open-mixtral-8x22b", "mistral-small-latest", "mistral-medium-latest", "mistral-large-latest", and "codestral-latest". Default: "codestral-latest". | +| mistral_api_base | String | The base URL of the Mistral API. Defaults to https://api.mistral.ai/v1 (advanced). | +| api_key | SecretString | The Mistral API Key to use for authentication. | +| temperature | Float | Controls randomness in the output. Default: 0.5. | +| max_retries | Integer | Maximum number of retries for API calls. Default: 5 (advanced). | +| timeout | Integer | Timeout for API calls in seconds. Default: 60 (advanced). | +| max_concurrent_requests | Integer | Maximum number of concurrent API requests. Default: 3 (advanced). | +| top_p | Float | Nucleus sampling parameter. Default: 1 (advanced). | +| random_seed | Integer | Seed for random number generation. Default: 1 (advanced). | +| safe_mode | Boolean | Enables safe mode for content generation (advanced). 
| +#### Outputs +| Name | Type | Description | +|--------|---------------|-----------------------------------------------------| +| model | LanguageModel | An instance of ChatMistralAI configured with the specified parameters. | -## Ollama {#14e8e411d28d4711add53bfc3e52c6cd} - - -Generate text using Ollama Local LLMs. - - -**Parameters** - -- **Base URL:** Endpoint of the Ollama API. Defaults to '[http://localhost:11434](http://localhost:11434/)' if not specified. -- **Model Name:** The model name to use. Refer to [Ollama Library](https://ollama.ai/library) for more models. -- **Temperature:** Controls the creativity of model responses. (Default: 0.8) -- **Cache:** Enable or disable caching. (Default: False) -- **Format:** Specify the format of the output (e.g., json). (Advanced) -- **Metadata:** Metadata to add to the run trace. (Advanced) -- **Mirostat:** Enable/disable Mirostat sampling for controlling perplexity. (Default: Disabled) -- **Mirostat Eta:** Learning rate for Mirostat algorithm. (Default: None) (Advanced) -- **Mirostat Tau:** Controls the balance between coherence and diversity of the output. (Default: None) (Advanced) -- **Context Window Size:** Size of the context window for generating tokens. (Default: None) (Advanced) -- **Number of GPUs:** Number of GPUs to use for computation. (Default: None) (Advanced) -- **Number of Threads:** Number of threads to use during computation. (Default: None) (Advanced) -- **Repeat Last N:** How far back the model looks to prevent repetition. (Default: None) (Advanced) -- **Repeat Penalty:** Penalty for repetitions in generated text. (Default: None) (Advanced) -- **TFS Z:** Tail free sampling value. (Default: None) (Advanced) -- **Timeout:** Timeout for the request stream. (Default: None) (Advanced) -- **Top K:** Limits token selection to top K. (Default: None) (Advanced) -- **Top P:** Works together with top-k. (Default: None) (Advanced) -- **Verbose:** Whether to print out response text. -- **Tags:** Tags to add to the run trace. (Advanced) -- **Stop Tokens:** List of tokens to signal the model to stop generating text. (Advanced) -- **System:** System to use for generating text. (Advanced) -- **Template:** Template to use for generating text. (Advanced) -- **Input:** The input text. -- **Stream:** Whether to stream the response. -- **System Message:** System message to pass to the model. (Advanced) +## NVIDIA ---- +This component generates text using NVIDIA LLMs. +For more information, see [NVIDIA AI Foundation Models documentation](https://developer.nvidia.com/ai-foundation-models). -## OpenAI {#fe6cd793446748eda6eaad72e30f70b3} +### Parameters +#### Inputs +| Name | Type | Description | +|---------------------|--------------|-----------------------------------------------------------------------------------------------| +| max_tokens | Integer | The maximum number of tokens to generate. Set to 0 for unlimited tokens (advanced). | +| model_name | String | The name of the NVIDIA model to use. Default: "mistralai/mixtral-8x7b-instruct-v0.1". | +| base_url | String | The base URL of the NVIDIA API. Default: "https://integrate.api.nvidia.com/v1". | +| nvidia_api_key | SecretString | The NVIDIA API Key for authentication. | +| temperature | Float | Controls randomness in the output. Default: 0.1. | +| seed | Integer | The seed controls the reproducibility of the job (advanced). Default: 1. | -This component facilitates text generation using OpenAI's models. 
+#### Outputs +| Name | Type | Description | +|--------|---------------|-----------------------------------------------------| +| model | LanguageModel | An instance of ChatNVIDIA configured with the specified parameters. | +## Ollama -**Params** +This component generates text using Ollama's language models. -- **Input Value:** The input text for text generation. -- **Max Tokens (Optional):** The maximum number of tokens to generate. Defaults to `256`. -- **Model Kwargs (Optional):** Additional keyword arguments for the model. Should be provided as a nested dictionary. -- **Model Name (Optional):** The name of the model to use. Defaults to `gpt-4-1106-preview`. Supported options include: `gpt-4-turbo-preview`, `gpt-4-0125-preview`, `gpt-4-1106-preview`, `gpt-4-vision-preview`, `gpt-3.5-turbo-0125`, `gpt-3.5-turbo-1106`. -- **OpenAI API Base (Optional):** The base URL of the OpenAI API. Defaults to `https://api.openai.com/v1`. -- **OpenAI API Key (Optional):** The API key for accessing the OpenAI API. -- **Temperature:** Controls the creativity of model responses. Defaults to `0.7`. -- **Stream (Optional):** Specifies whether to stream the response from the model. Defaults to `False`. -- **System Message (Optional):** System message to pass to the model. +For more information, see [Ollama documentation](https://ollama.com/). ---- +### Parameters +#### Inputs +| Name | Display Name | Info | +|---------------------|---------------|---------------------------------------------| +| Base URL | Base URL | Endpoint of the Ollama API. | +| Model Name | Model Name | The model name to use. | +| Temperature | Temperature | Controls the creativity of model responses. | -## Qianfan {#6e4a6b2370ee4b9f8beb899e7cf9c8f6} +## OpenAI +This component generates text using OpenAI's language models. -This component facilitates the generation of text using Baidu Qianfan chat models. +For more information, see [OpenAI documentation](https://beta.openai.com/docs/). +### Parameters -**Params** +#### Inputs -- **Model Name:** Specifies the name of the Qianfan chat model to be used for text generation. Available options include: - - `"ERNIE-Bot"` - - `"ERNIE-Bot-turbo"` - - `"BLOOMZ-7B"` - - `"Llama-2-7b-chat"` - - `"Llama-2-13b-chat"` - - `"Llama-2-70b-chat"` - - `"Qianfan-BLOOMZ-7B-compressed"` - - `"Qianfan-Chinese-Llama-2-7B"` - - `"ChatGLM2-6B-32K"` - - `"AquilaChat-7B"` -- **Qianfan Ak:** Your Baidu Qianfan access key, obtainable from [here](https://cloud.baidu.com/product/wenxinworkshop). -- **Qianfan Sk:** Your Baidu Qianfan secret key, obtainable from [here](https://cloud.baidu.com/product/wenxinworkshop). -- **Top p (Optional):** Model parameter. Specifies the top-p value. Only supported in ERNIE-Bot and ERNIE-Bot-turbo models. Defaults to `0.8`. -- **Temperature (Optional):** Model parameter. Specifies the sampling temperature. Only supported in ERNIE-Bot and ERNIE-Bot-turbo models. Defaults to `0.95`. -- **Penalty Score (Optional):** Model parameter. Specifies the penalty score. Only supported in ERNIE-Bot and ERNIE-Bot-turbo models. Defaults to `1.0`. -- **Endpoint (Optional):** Endpoint of the Qianfan LLM, required if custom model is used. -- **Input Value:** Specifies the input text for text generation. -- **Stream (Optional):** Specifies whether to stream the response from the model. Defaults to `False`. -- **System Message (Optional):** A system message to pass to the model. 
+| Name | Type | Description |
+|---------------------|---------------|------------------------------------------------------------------|
+| api_key | SecretString | Your OpenAI API Key. |
+| model | String | The name of the OpenAI model to use. Options include "gpt-3.5-turbo" and "gpt-4". |
+| max_tokens | Integer | The maximum number of tokens to generate. Set to 0 for unlimited tokens. |
+| temperature | Float | Controls randomness in the output. Range: [0.0, 1.0]. Default: 0.7. |
+| top_p | Float | Controls nucleus sampling. Range: [0.0, 1.0]. Default: 1.0. |
+| frequency_penalty | Float | Controls the frequency penalty. Range: [0.0, 2.0]. Default: 0.0. |
+| presence_penalty | Float | Controls the presence penalty. Range: [0.0, 2.0]. Default: 0.0. |
----
+#### Outputs
+
+| Name | Type | Description |
+|-------|---------------|------------------------------------------------------------------|
+| model | LanguageModel | An instance of an OpenAI model configured with the specified parameters. |
+
+## Qianfan
+
+This component generates text using Qianfan's language models.
+
+For more information, see [Qianfan documentation](https://github.com/baidubce/bce-qianfan-sdk).
+
+## Perplexity
+
+This component generates text using Perplexity's language models.
+
+For more information, see [Perplexity documentation](https://perplexity.ai/).
+
+### Parameters
+
+#### Inputs
+| Name | Type | Description |
+|---------------------|--------------|-----------------------------------------------------------------------------------------------|
+| model_name | String | The name of the Perplexity model to use. Options include various Llama 3.1 models. |
+| max_output_tokens | Integer | The maximum number of tokens to generate. |
+| api_key | SecretString | The Perplexity API Key for authentication. |
+| temperature | Float | Controls randomness in the output. Default: 0.75. |
+| top_p | Float | The maximum cumulative probability of tokens to consider when sampling (advanced). |
+| n | Integer | Number of chat completions to generate for each prompt (advanced). |
+| top_k | Integer | Number of top tokens to consider for top-k sampling. Must be positive (advanced). |
+
+#### Outputs
+| Name | Type | Description |
+|--------|---------------|-----------------------------------------------------|
+| model | LanguageModel | An instance of ChatPerplexity configured with the specified parameters. |
+## Vertex AI

-## Vertex AI {#86b7d539e17c436fb758c47ec3ffb084}

+This component generates text using Vertex AI LLMs.
+For more information, see [Google Vertex AI documentation](https://cloud.google.com/vertex-ai).

-The `ChatVertexAI` is a component for generating text using Vertex AI Chat large language models API.

+### Parameters
+#### Inputs
+| Name | Type | Description |
+|---------------------|--------------|-----------------------------------------------------------------------------------------------|
+| credentials | File | JSON credentials file. Leave empty to fall back to environment variables. File type: JSON. |
+| model_name | String | The name of the Vertex AI model to use. Default: "gemini-1.5-pro". |
+| project | String | The project ID (advanced). |
+| location | String | The location for the Vertex AI API. Default: "us-central1" (advanced). |
+| max_output_tokens | Integer | The maximum number of tokens to generate (advanced). |
+| max_retries | Integer | Maximum number of retries for API calls. Default: 1 (advanced). |
+| temperature | Float | Controls randomness in the output. Default: 0.0. |
+| top_k | Integer | The number of highest probability vocabulary tokens to keep for top-k filtering (advanced). |
+| top_p | Float | The cumulative probability of the highest probability vocabulary tokens to keep for nucleus sampling. Default: 0.95 (advanced). |
+| verbose | Boolean | Whether to print verbose output. Default: False (advanced). |

-**Params**

+#### Outputs
+| Name | Type | Description |
+|--------|---------------|-----------------------------------------------------|
+| model | LanguageModel | An instance of ChatVertexAI configured with the specified parameters. |

-- **Credentials:** The JSON file containing the credentials for accessing the Vertex AI Chat API.
-- **Project:** The name of the project associated with the Vertex AI Chat API.
-- **Examples (Optional):** List of examples to provide context for text generation.
-- **Location:** The location of the Vertex AI Chat API service. Defaults to `us-central1`.
-- **Max Output Tokens:** The maximum number of tokens to generate. Defaults to `128`.
-- **Model Name:** The name of the model to use. Defaults to `chat-bison`.
-- **Temperature:** Controls the creativity of model responses. Defaults to `0.0`.
-- **Input Value:** The input text for text generation.
-- **Top K:** Limits token selection to top K. Defaults to `40`.
-- **Top P:** Works together with top-k. Defaults to `0.95`.
-- **Verbose:** Whether to print out response text. Defaults to `False`.
-- **Stream (Optional):** Specifies whether to stream the response from the model. Defaults to `False`.
-- **System Message (Optional):** System message to pass to the model.
diff --git a/docs/docs/Components/components-overview.md b/docs/docs/Components/components-overview.md
new file mode 100644
index 000000000000..f1c28f7b39ed
--- /dev/null
+++ b/docs/docs/Components/components-overview.md
@@ -0,0 +1,82 @@
+---
+title: How to build flows with components
+sidebar_position: 0
+slug: /components-overview
+---
+
+A component is a single building block within a flow. It consists of inputs, outputs, and parameters that define its functionality. These elements provide a convenient and straightforward way to compose LLM-based applications. Learn more about components and how they work below.
+
+
+During the flow creation process, you will notice handles (colored circles) attached to one or both sides of a component. These handles use distinct colors to indicate the types of inputs and outputs that can be interconnected. Hover over a handle to see connection details.
+
+
+![](./565424296.png)
+
+
+On the top right corner of the component, you'll find a play button to run the component. Once it runs, a status icon appears; hover over it to see success or error messages. Start interacting with your AI by clicking the **Playground** at the bottom right of the workspace.
+
+
+## Component menu {#7e3f2f8ff5074b2fb3eee97c9cfaabe7}
+
+
+Each component is unique, but they all have a menu bar at the top that looks something like this.
+
+
+![](./938852908.png)
+
+
+It consists of options such as:
+
+- **Code** — displays the component's Python code. You can modify the code and save it.
+- **Advanced** — see and adjust all of a component's parameters.
+- **Freeze** — after a component runs, lock its previous output state to prevent it from re-running.
+
+Click **All** (the "..." button) to see all options.
+
+
+## Output preview {#ed7b3c34e0774b8a916b0e68821c9a7a}
+
+
+Langflow includes an output visualizer for components that opens a pop-up screen. This allows you to easily inspect and monitor transmissions between components, providing instant feedback on your workflows.
+
+
+![](./987204819.png)
+
+
+## Advanced settings {#b6430d4903df44f0ba4618a558c83d7b}
+
+
+Langflow components can be edited by clicking the **Advanced Settings** button.
+
+
+Use the **Show** button to hide parameters, reducing complexity and keeping the workspace clean and intuitive for experimentation.
+
+
+You can also double-click a component's name and description to modify them. Component descriptions accept markdown syntax.
+
+
+## Group components {#c3f5ed818e3b40ceb6534dc358e1a5f2}
+
+
+Multiple components can be grouped into a single component for reuse. This is useful when combining large flows into single components (like RAG with a vector database, for example) and saving space.
+
+1. Hold **Shift** and drag to select components.
+2. Select **Group**.
+3. The components merge into a single component.
+4. Double-click the name and description to change them.
+5. Save your grouped component to the sidebar for later use!
+
+[group video here]
+
+
+## Component version {#887fd587589448dc8c27336d1c235b9b}
+
+
+A component's state is stored in a database, while sidebar components are like starter templates. As soon as you drag a component from the sidebar to the workspace, the two components are no longer in parity.
+
+
+The component keeps the version it had when it was added to the workspace. Click the **Update Component** icon (exclamation mark) to bring the component up to the `latest` version. This changes the component's code in place, so you can confirm the update by comparing the component's Python code before and after.
+
+
+![](./263391508.png)
+
diff --git a/docs/docs/Components/components-prompts.md b/docs/docs/Components/components-prompts.md
index 9a3baa8c7af0..55ff7d747362 100644
--- a/docs/docs/Components/components-prompts.md
+++ b/docs/docs/Components/components-prompts.md
@@ -4,36 +4,43 @@ sidebar_position: 2
 slug: /components-prompts
 ---
+# Prompts
+A prompt serves as the input to a language model, comprising multiple components that can be parameterized using prompt templates.
-:::info
+Prompt templates provide a systematic approach for generating prompts, allowing for reproducible customization through defined input variables.
-This page may contain outdated information. It will be updated as soon as possible.
+### Parameters
-:::
+#### Inputs
+| Name | Display Name | Info |
+|----------|--------------|-------------------------------------------------------------------|
+| template | Template | Create a prompt template with dynamic variables. |
+#### Outputs
+| Name | Display Name | Info |
+|--------|----------------|--------------------------------------------------------|
+| prompt | Prompt Message | The built prompt message returned by the `build_prompt` method. |
-A prompt is the input provided to a language model, consisting of multiple components and can be parameterized using prompt templates. A prompt template offers a reproducible method for generating prompts, enabling easy customization through input variables.
+## LangChain Hub Prompt Template
+This component fetches prompts from the [LangChain Hub](https://docs.smith.langchain.com/old/category/prompt-hub).
-### Prompt {#c852d1761e6c46b19ce72e5f7c70958c}
+When a prompt is loaded, the component generates input fields for custom variables.
For example, the default prompt "efriis/my-first-prompt" generates fields for `profession` and `question`. +### Parameters -This component creates a prompt template with dynamic variables. This is useful for structuring prompts and passing dynamic data to a language model. +#### Inputs +| Name | Display Name | Info | +|--------------------|---------------------------|------------------------------------------| +| langchain_api_key | Your LangChain API Key | The LangChain API Key to use. | +| langchain_hub_prompt| LangChain Hub Prompt | The LangChain Hub prompt to use. | -**Parameters** +#### Outputs -- **Template:** The template for the prompt. This field allows you to create other fields dynamically by using curly brackets `{}`. For example, if you have a template like `Hello {name}, how are you?`, a new field called `name` will be created. Prompt variables can be created with any name inside curly brackets, e.g. `{variable_name}`. - -### PromptTemplate {#6e32412f062b42efbdf56857eafb3651} - - -The `PromptTemplate` component enables users to create prompts and define variables that control how the model is instructed. Users can input a set of variables which the template uses to generate the prompt when a conversation starts. - - -After defining a variable in the prompt template, it acts as its own component input. - -- **template:** The template used to format an individual request. +| Name | Display Name | Info | +|--------|--------------|-------------------------------------------------------------------| +| prompt | Build Prompt | The built prompt message returned by the `build_prompt` method. | diff --git a/docs/docs/Components/components-rag.md b/docs/docs/Components/components-rag.md index df4b5691feb3..9af9468fbfa4 100644 --- a/docs/docs/Components/components-rag.md +++ b/docs/docs/Components/components-rag.md @@ -6,23 +6,31 @@ slug: /components-rag RAG (Retrieval-Augmented Generation) components process a user query by retrieving relevant documents and generating a concise summary that addresses the user's question. -### Vectara - -`Vectara` performs RAG using a Vectara corpus, including document retrieval, reranking results, and summary generation. - -**Parameters:** - -- **Vectara Customer ID:** Customer ID. -- **Vectara Corpus ID:** Corpus ID. -- **Vectara API Key:** API key. -- **Search Query:** User query. -- **Lexical Interpolation:** How much to weigh lexical vs. embedding scores. -- **Metadata Filters:** Filters to narrow down the search documents and parts. -- **Reranker Type:** How to rerank the retrieved results. -- **Number of Results to Rerank:** Maximum reranked results. -- **Diversity Bias:** How much to diversify retrieved results (only for MMR reranker). -- **Max Results to Summarize:** Maximum search results to provide to summarizer. -- **Response Language:** The language code (use ISO 639-1 or 639-3 codes) of the summary. -- **Prompt Name:** The summarizer prompt. - -For more information, consult the [Vectara documentation](https://docs.vectara.com/docs) +## Vectara RAG + +This component leverages Vectara's Retrieval Augmented Generation (RAG) capabilities to search and summarize documents based on the provided input. For more information, see the [Vectara documentation](https://docs.vectara.com/docs/). 
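+
+As a rough sketch of what this component wraps, the snippet below queries a Vectara corpus through the `langchain-community` integration. This is an assumption about the underlying client rather than the Langflow component code, and the credential values are placeholders:
+
+```python
+# Minimal sketch using langchain-community's Vectara integration.
+from langchain_community.vectorstores import Vectara
+
+vectara = Vectara(
+    vectara_customer_id="YOUR_CUSTOMER_ID",  # placeholder
+    vectara_corpus_id="YOUR_CORPUS_ID",      # placeholder
+    vectara_api_key="YOUR_API_KEY",          # placeholder
+)
+
+# Retrieve the documents most similar to the search query.
+docs = vectara.similarity_search("What does our refund policy cover?", k=5)
+for doc in docs:
+    print(doc.page_content[:200])
+```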
+
+### Parameters
+
+#### Inputs
+
+| Name | Type | Description |
+|-----------------------|--------------|------------------------------------------------------------|
+| vectara_customer_id | String | Vectara customer ID |
+| vectara_corpus_id | String | Vectara corpus ID |
+| vectara_api_key | SecretString | Vectara API key |
+| search_query | String | The query to be answered |
+| lexical_interpolation | Float | Hybrid search factor (0.005 to 0.1) |
+| filter | String | Metadata filters to narrow the search |
+| reranker | String | Reranker type (mmr, rerank_multilingual_v1, none) |
+| reranker_k | Integer | Number of results to rerank (1 to 100) |
+| diversity_bias | Float | Diversity bias for MMR reranker (0 to 1) |
+| max_results | Integer | Maximum number of search results to summarize (1 to 100) |
+| response_lang | String | Language code for the response (e.g., "eng", "auto") |
+| prompt | String | Prompt name for summarization |
+
+#### Outputs
+
+| Name | Type | Description |
+|--------|---------|-----------------------|
+| answer | Message | Generated RAG response |
\ No newline at end of file
diff --git a/docs/docs/Components/components-tools.md b/docs/docs/Components/components-tools.md
new file mode 100644
index 000000000000..a838a9b3ec89
--- /dev/null
+++ b/docs/docs/Components/components-tools.md
@@ -0,0 +1,344 @@
+# Tools
+
+Tool components are used to interact with external services, APIs, and tools. They can be used to search the web, query databases, and perform other tasks.
+
+## Bing Search API
+
+This component allows you to call the Bing Search API.
+
+### Parameters
+
+#### Inputs
+
+| Name | Type | Description |
+|------------------------|--------------|---------------------------------------|
+| bing_subscription_key | SecretString | Bing API subscription key |
+| input_value | String | Search query input |
+| bing_search_url | String | Custom Bing Search URL (optional) |
+| k | Integer | Number of search results to return |
+
+#### Outputs
+
+| Name | Type | Description |
+|---------|-----------|--------------------------------------|
+| results | List[Data] | List of search results |
+| tool | Tool | Bing Search tool for use in LangChain |
+
+## Calculator Tool
+
+This component creates a tool for performing basic arithmetic operations on a given expression. It supports addition, subtraction, multiplication, division, and exponentiation, and uses a secure evaluation method that prevents the execution of arbitrary Python code.
+
+### Parameters
+
+#### Inputs
+
+| Name | Type | Description |
+|------------|--------|--------------------------------------------------------------------|
+| expression | String | The arithmetic expression to evaluate (e.g., `4*4*(33/22)+12-20`). |
+
+#### Outputs
+
+| Name | Type | Description |
+|--------|------|-------------------------------------------------|
+| result | Tool | Calculator tool for use in LangChain |
+
+## Glean Search API
+
+This component allows you to call the Glean Search API.
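+
+For orientation, the sketch below shows the general shape of the REST call that the inputs listed below map onto. The endpoint path and payload keys are assumptions for illustration, not an official Glean client:
+
+```python
+# Hypothetical sketch of a Glean search request; the URL, token, and payload
+# field names are placeholders/assumptions based on this component's inputs.
+import requests
+
+GLEAN_API_URL = "https://your-org-be.glean.com/rest/api/v1"  # placeholder
+GLEAN_ACCESS_TOKEN = "YOUR_ACCESS_TOKEN"  # placeholder
+
+response = requests.post(
+    f"{GLEAN_API_URL}/search",
+    headers={"Authorization": f"Bearer {GLEAN_ACCESS_TOKEN}"},
+    json={"query": "quarterly roadmap", "pageSize": 10},
+    timeout=30,
+)
+response.raise_for_status()
+for result in response.json().get("results", []):
+    print(result.get("title"))
+```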
+
+### Parameters
+
+#### Inputs
+
+| Name | Type | Description |
+|------------------------|--------------|---------------------------------------|
+| glean_api_url | String | URL of the Glean API |
+| glean_access_token | SecretString | Access token for Glean API authentication |
+| query | String | Search query input |
+| page_size | Integer | Number of results per page (default: 10) |
+| request_options | Dict | Additional options for the API request (optional) |
+
+#### Outputs
+
+| Name | Type | Description |
+|---------|-----------|--------------------------------------|
+| results | List[Data] | List of search results |
+| tool | Tool | Glean Search tool for use in LangChain |
+
+## Google Search API
+
+This component allows you to call the Google Search API.
+
+### Parameters
+
+#### Inputs
+
+| Name | Type | Description |
+|------------------------|--------------|---------------------------------------|
+| google_api_key | SecretString | Google API key for authentication |
+| google_cse_id | SecretString | Google Custom Search Engine ID |
+| input_value | String | Search query input |
+| k | Integer | Number of search results to return |
+
+#### Outputs
+
+| Name | Type | Description |
+|---------|-----------|--------------------------------------|
+| results | List[Data] | List of search results |
+| tool | Tool | Google Search tool for use in LangChain |
+
+## Google Serper API
+
+This component allows you to call the Serper.dev Google Search API.
+
+### Parameters
+
+#### Inputs
+
+| Name | Type | Description |
+|------------------------|--------------|---------------------------------------|
+| serper_api_key | SecretString | API key for Serper.dev authentication |
+| input_value | String | Search query input |
+| k | Integer | Number of search results to return |
+
+#### Outputs
+
+| Name | Type | Description |
+|---------|-----------|--------------------------------------|
+| results | List[Data] | List of search results |
+| tool | Tool | Google Serper search tool for use in LangChain |
+
+## Python Code Structured Tool
+
+This component creates a structured tool from Python code using a dataclass.
+
+The component dynamically updates its configuration based on the provided Python code, allowing for custom function arguments and descriptions.
+
+### Parameters
+
+#### Inputs
+
+| Name | Type | Description |
+|------------------------|--------------|---------------------------------------|
+| tool_code | String | Python code for the tool's dataclass |
+| tool_name | String | Name of the tool |
+| tool_description | String | Description of the tool |
+| return_direct | Boolean | Whether to return the function output directly |
+| tool_function | String | Selected function for the tool |
+| global_variables | Dict | Global variables or data for the tool |
+
+#### Outputs
+
+| Name | Type | Description |
+|-------------|-------|-----------------------------------------|
+| result_tool | Tool | Structured tool created from the Python code |
+
+## Python REPL Tool
+
+This component creates a Python REPL (Read-Eval-Print Loop) tool for executing Python code.
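+
+For a feel of the underlying behavior, the sketch below uses the `PythonREPL` utility from `langchain-experimental`, a reasonable stand-in for what this component exposes (an assumption; the component's own execution environment may differ):
+
+```python
+# Minimal REPL sketch; the REPL returns captured stdout, so the executed
+# code must print its result to return anything.
+from langchain_experimental.utilities import PythonREPL
+
+repl = PythonREPL()
+output = repl.run("import math; print(math.sqrt(2))")
+print(output)  # -> 1.4142135623730951
+```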
+ +### Parameters + +#### Inputs + +| Name | Type | Description | +|-----------------|--------------|--------------------------------------------------------| +| name | String | The name of the tool (default: "python_repl") | +| description | String | A description of the tool's functionality | +| global_imports | List[String] | List of modules to import globally (default: ["math"]) | + +#### Outputs + +| Name | Type | Description | +|------|------|--------------------------------------------| +| tool | Tool | Python REPL tool for use in LangChain | + +## Retriever Tool + +This component creates a tool for interacting with a retriever in LangChain. + +### Parameters + +#### Inputs + +| Name | Type | Description | +|-------------|---------------|---------------------------------------------| +| retriever | BaseRetriever | The retriever to interact with | +| name | String | The name of the tool | +| description | String | A description of the tool's functionality | + +#### Outputs + +| Name | Type | Description | +|------|------|--------------------------------------------| +| tool | Tool | Retriever tool for use in LangChain | + +## SearXNG Search Tool + +This component creates a tool for searching using SearXNG, a metasearch engine. + +### Parameters + +#### Inputs + +| Name | Type | Description | +|-------------|--------------|---------------------------------------| +| url | String | The URL of the SearXNG instance | +| max_results | Integer | Maximum number of results to return | +| categories | List[String] | Categories to search in | +| language | String | Language for the search results | + +#### Outputs + +| Name | Type | Description | +|-------------|------|--------------------------------------------| +| result_tool | Tool | SearXNG search tool for use in LangChain | + +## Search API + +This component calls the `searchapi.io` API. It can be used to search the web for information. + +For more information, see the [SearchAPI documentation](https://www.searchapi.io/docs/google). + +### Parameters + +#### Inputs + +| Name | Display Name | Info | +|----------------|---------------------|-----------------------------------------------------| +| engine | Engine | The search engine to use (default: "google") | +| api_key | SearchAPI API Key | The API key for authenticating with SearchAPI | +| input_value | Input | The search query or input for the API call | +| search_params | Search parameters | Additional parameters for customizing the search | + +#### Outputs + +| Name | Display Name | Info | +|------|-----------------|------------------------------------------------------| +| data | Search Results | List of Data objects containing search results | +| tool | Search API Tool | A Tool object for use in LangChain workflows | + +## Serp Search API + +This component creates a tool for searching using the Serp API. + +### Parameters + +#### Inputs + +| Name | Type | Description | +|------------------|--------------|---------------------------------------------| +| serpapi_api_key | SecretString | API key for Serp API authentication | +| input_value | String | Search query input | +| search_params | Dict | Additional search parameters (optional) | + +#### Outputs + +| Name | Type | Description | +|---------|-----------|---------------------------------------------| +| results | List[Data]| List of search results | +| tool | Tool | Serp API search tool for use in LangChain | + +## Wikipedia API + +This component creates a tool for searching and retrieving information from Wikipedia. 
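+
+The inputs below map onto `langchain-community`'s `WikipediaAPIWrapper` (for example, `k` corresponds to the wrapper's `top_k_results`). A minimal sketch, assuming `langchain-community` and the `wikipedia` package are installed:
+
+```python
+# Query Wikipedia the way this tool does under the hood (a sketch,
+# not the exact Langflow component code).
+from langchain_community.utilities import WikipediaAPIWrapper
+
+wiki = WikipediaAPIWrapper(lang="en", top_k_results=2, doc_content_chars_max=2000)
+print(wiki.run("Retrieval-augmented generation"))
+```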
+ +### Parameters + +#### Inputs + +| Name | Type | Description | +|-------------------------|---------|-----------------------------------------------------------| +| input_value | String | Search query input | +| lang | String | Language code for Wikipedia (default: "en") | +| k | Integer | Number of results to return | +| load_all_available_meta | Boolean | Whether to load all available metadata (advanced) | +| doc_content_chars_max | Integer | Maximum number of characters for document content (advanced)| + +#### Outputs + +| Name | Type | Description | +|---------|-----------|---------------------------------------| +| results | List[Data]| List of Wikipedia search results | +| tool | Tool | Wikipedia search tool for use in LangChain | + +## Wolfram Alpha API + +This component creates a tool for querying the Wolfram Alpha API. + +### Parameters + +#### Inputs + +| Name | Type | Description | +|-------------|--------------|--------------------------------| +| input_value | String | Query input for Wolfram Alpha | +| app_id | SecretString | Wolfram Alpha API App ID | + +#### Outputs + +| Name | Type | Description | +|---------|-----------|------------------------------------------------| +| results | List[Data]| List containing the Wolfram Alpha API response | +| tool | Tool | Wolfram Alpha API tool for use in LangChain | + +## Yahoo Finance News Tool + +This component creates a tool for retrieving news from Yahoo Finance. + +### Parameters + +This component does not have any input parameters. + +#### Outputs + +| Name | Type | Description | +|------|------|----------------------------------------------| +| tool | Tool | Yahoo Finance News tool for use in LangChain | + + +## Astra DB Tool + +The `Astra DB Tool` allows agents to connect to and query data from Astra DB Collections. + +### Parameters + +#### Inputs + +| Name | Type | Description | +|-------------------|--------|----------------------------------------------------------------------------------------------------------------------------------| +| Tool Name | String | The name used to reference the tool in the agent's prompt. | +| Tool Description | String | A brief description of the tool. This helps the model decide when to use it. | +| Collection Name | String | The name of the Astra DB collection to query. | +| Token | SecretString | The authentication token for accessing Astra DB. | +| API Endpoint | String | The Astra DB API endpoint. | +| Projection Fields | String | The attributes to return, separated by commas. Default: "*". | +| Tool Parameters | Dict | Parameters the model needs to fill to execute the tool. For required parameters, use an exclamation mark (e.g., "!customer_id"). | +| Static Filters | Dict | Attribute-value pairs used to filter query results. | +| Limit | String | The number of documents to return. | + + + +## Astra DB CQL Tool + +The `Astra DB CQL Tool` allows agents to query data from CQL Tables in Astra DB. + +### Parameters + +#### Inputs + +| Name | Type | Description | +|-------------------|--------|----------------------------------------------------------------------------------------------------------------------------------------------------| +| Tool Name | String | The name used to reference the tool in the agent's prompt. | +| Tool Description | String | A brief description of the tool to guide the model in using it. | +| Keyspace | String | The name of the keyspace. | +| Table Name | String | The name of the Astra DB CQL table to query. 
| +| Token | SecretString | The authentication token for Astra DB. | +| API Endpoint | String | The Astra DB API endpoint. | +| Projection Fields | String | The attributes to return, separated by commas. Default: "*". | +| Partition Keys | Dict | Required parameters that the model must fill to query the tool. | +| Clustering Keys | Dict | Optional parameters the model can fill to refine the query. Required parameters should be marked with an exclamation mark (e.g., "!customer_id"). | +| Static Filters | Dict | Attribute-value pairs used to filter query results. | +| Limit | String | The number of records to return. | diff --git a/docs/docs/Components/components-vector-stores.md b/docs/docs/Components/components-vector-stores.md index 02a219f42448..713841e01b5e 100644 --- a/docs/docs/Components/components-vector-stores.md +++ b/docs/docs/Components/components-vector-stores.md @@ -3,586 +3,619 @@ title: Vector Stores sidebar_position: 7 slug: /components-vector-stores --- +# Vector Stores + +Vector databases are used to store and search for vectors. They can be used to store embeddings, search for similar vectors, and perform other vector operations. + +## Astra DB Vector Store + +This component implements a Vector Store using Astra DB with search capabilities. + +For more information, see the [DataStax documentation](https://docs.datastax.com/en/astra-db-serverless/databases/create-database.html). + +### Parameters + +#### Inputs + +| Name | Display Name | Info | +|------|--------------|------| +| collection_name | Collection Name | The name of the collection within Astra DB where the vectors will be stored (required) | +| token | Astra DB Application Token | Authentication token for accessing Astra DB (required) | +| api_endpoint | API Endpoint | API endpoint URL for the Astra DB service (required) | +| search_input | Search Input | Query string for similarity search | +| ingest_data | Ingest Data | Data to be ingested into the vector store | +| namespace | Namespace | Optional namespace within Astra DB to use for the collection | +| embedding_service | Embedding Model or Astra Vectorize | Determines whether to use an Embedding Model or Astra Vectorize for the collection | +| embedding | Embedding Model | Allows an embedding model configuration (when using Embedding Model) | +| provider | Vectorize Provider | Provider for Astra Vectorize (when using Astra Vectorize) | +| metric | Metric | Optional distance metric for vector comparisons | +| batch_size | Batch Size | Optional number of data to process in a single batch | +| setup_mode | Setup Mode | Configuration mode for setting up the vector store (options: "Sync", "Async", "Off", default: "Sync") | +| pre_delete_collection | Pre Delete Collection | Boolean flag to determine whether to delete the collection before creating a new one | +| number_of_results | Number of Results | Number of results to return in similarity search (default: 4) | +| search_type | Search Type | Search type to use (options: "Similarity", "Similarity with score threshold", "MMR (Max Marginal Relevance)") | +| search_score_threshold | Search Score Threshold | Minimum similarity score threshold for search results | +| search_filter | Search Metadata Filter | Optional dictionary of filters to apply to the search query | + +#### Outputs + +| Name | Display Name | Info | +|------|--------------|------| +| vector_store | Vector Store | Built Astra DB vector store | +| search_results | Search Results | Results of the similarity search as a list of Data objects | + +## 
Cassandra + +This component creates a Cassandra Vector Store with search capabilities. +For more information, see the [Cassandra documentation](https://cassandra.apache.org/doc/latest/cassandra/vector-search/overview.html). + +### Parameters + +#### Inputs + +| Name | Type | Description | +|------|------|-------------| +| database_ref | String | Contact points for the database or AstraDB database ID | +| username | String | Username for the database (leave empty for AstraDB) | +| token | SecretString | User password for the database or AstraDB token | +| keyspace | String | Table Keyspace or AstraDB namespace | +| table_name | String | Name of the table or AstraDB collection | +| ttl_seconds | Integer | Time-to-live for added texts | +| batch_size | Integer | Number of data to process in a single batch | +| setup_mode | String | Configuration mode for setting up the Cassandra table | +| cluster_kwargs | Dict | Additional keyword arguments for the Cassandra cluster | +| search_query | String | Query for similarity search | +| ingest_data | Data | Data to be ingested into the vector store | +| embedding | Embeddings | Embedding function to use | +| number_of_results | Integer | Number of results to return in search | +| search_type | String | Type of search to perform | +| search_score_threshold | Float | Minimum similarity score for search results | +| search_filter | Dict | Metadata filters for search query | +| body_search | String | Document textual search terms | +| enable_body_search | Boolean | Flag to enable body search | + +#### Outputs + +| Name | Type | Description | +|------|------|-------------| +| vector_store | Cassandra | Cassandra vector store instance | +| search_results | List[Data] | Results of similarity search | + +## Cassandra Graph Vector Store + +This component implements a Cassandra Graph Vector Store with search capabilities. 
+ +### Parameters + +#### Inputs + +| Name | Display Name | Info | +|------|--------------|------| +| database_ref | Contact Points / Astra Database ID | Contact points for the database or AstraDB database ID (required) | +| username | Username | Username for the database (leave empty for AstraDB) | +| token | Password / AstraDB Token | User password for the database or AstraDB token (required) | +| keyspace | Keyspace | Table Keyspace or AstraDB namespace (required) | +| table_name | Table Name | The name of the table or AstraDB collection where vectors will be stored (required) | +| setup_mode | Setup Mode | Configuration mode for setting up the Cassandra table (options: "Sync", "Off", default: "Sync") | +| cluster_kwargs | Cluster arguments | Optional dictionary of additional keyword arguments for the Cassandra cluster | +| search_query | Search Query | Query string for similarity search | +| ingest_data | Ingest Data | Data to be ingested into the vector store (list of Data objects) | +| embedding | Embedding | Embedding model to use | +| number_of_results | Number of Results | Number of results to return in similarity search (default: 4) | +| search_type | Search Type | Search type to use (options: "Traversal", "MMR traversal", "Similarity", "Similarity with score threshold", "MMR (Max Marginal Relevance)", default: "Traversal") | +| depth | Depth of traversal | The maximum depth of edges to traverse (for "Traversal" or "MMR traversal" search types, default: 1) | +| search_score_threshold | Search Score Threshold | Minimum similarity score threshold for search results (for "Similarity with score threshold" search type) | +| search_filter | Search Metadata Filter | Optional dictionary of filters to apply to the search query | + +#### Outputs + +| Name | Display Name | Info | +|------|--------------|------| +| vector_store | Vector Store | Built Cassandra Graph vector store | +| search_results | Search Results | Results of the similarity search as a list of Data objects | + +## Chroma DB + +This component creates a Chroma Vector Store with search capabilities. +For more information, see the [Chroma documentation](https://docs.trychroma.com/). + +### Parameters + +#### Inputs + +| Name | Type | Description | +|------------------------------|---------------|--------------------------------------------------| +| collection_name | String | The name of the Chroma collection. Default: "langflow". | +| persist_directory | String | The directory to persist the Chroma database. | +| search_query | String | The query to search for in the vector store. | +| ingest_data | Data | The data to ingest into the vector store (list of Data objects). | +| embedding | Embeddings | The embedding function to use for the vector store. | +| chroma_server_cors_allow_origins | String | CORS allow origins for the Chroma server. | +| chroma_server_host | String | Host for the Chroma server. | +| chroma_server_http_port | Integer | HTTP port for the Chroma server. | +| chroma_server_grpc_port | Integer | gRPC port for the Chroma server. | +| chroma_server_ssl_enabled | Boolean | Enable SSL for the Chroma server. | +| allow_duplicates | Boolean | Allow duplicate documents in the vector store. | +| search_type | String | Type of search to perform: "Similarity" or "MMR". | +| number_of_results | Integer | Number of results to return from the search. Default: 10. | +| limit | Integer | Limit the number of records to compare when Allow Duplicates is False. 
|
+
+#### Outputs
+
+| Name | Type | Description |
+|----------------|---------------|--------------------------------|
+| vector_store | Chroma | Chroma vector store instance |
+| search_results | List[Data] | Results of similarity search |
+
+## Clickhouse
+
+This component implements a Clickhouse Vector Store with search capabilities.
+For more information, see the [Clickhouse documentation](https://clickhouse.com/docs/en/intro).
+
+### Parameters
+
+#### Inputs
+
+| Name | Display Name | Info |
+|------|--------------|------|
+| host | hostname | Clickhouse server hostname (required, default: "localhost") |
+| port | port | Clickhouse server port (required, default: 8123) |
+| database | database | Clickhouse database name (required) |
+| table | Table name | Clickhouse table name (required) |
+| username | The ClickHouse user name. | Username for authentication (required) |
+| password | The password for username. | Password for authentication (required) |
+| index_type | index_type | Type of the index (options: "annoy", "vector_similarity", default: "annoy") |
+| metric | metric | Metric to compute distance (options: "angular", "euclidean", "manhattan", "hamming", "dot", default: "angular") |
+| secure | Use https/TLS | Overrides inferred values from the interface or port arguments (default: false) |
+| index_param | Param of the index | Index parameters (default: "'L2Distance',100") |
+| index_query_params | index query params | Additional index query parameters |
+| search_query | Search Query | Query string for similarity search |
+| ingest_data | Ingest Data | Data to be ingested into the vector store |
+| embedding | Embedding | Embedding model to use |
+| number_of_results | Number of Results | Number of results to return in similarity search (default: 4) |
+| score_threshold | Score threshold | Threshold for similarity scores |
+
+#### Outputs
+
+| Name | Display Name | Info |
+|------|--------------|------|
+| vector_store | Vector Store | Built Clickhouse vector store |
+| search_results | Search Results | Results of the similarity search as a list of Data objects |
+
+## Couchbase
+
+This component creates a Couchbase Vector Store with search capabilities.
+For more information, see the [Couchbase documentation](https://docs.couchbase.com/home/index.html).
+
+### Parameters
+
+#### Inputs
+
+| Name | Type | Description |
+|-------------------------|---------------|--------------------------------------------------|
+| couchbase_connection_string | SecretString | Couchbase Cluster connection string (required). |
+| couchbase_username | String | Couchbase username (required). |
+| couchbase_password | SecretString | Couchbase password (required). |
+| bucket_name | String | Name of the Couchbase bucket (required). |
+| scope_name | String | Name of the Couchbase scope (required). |
+| collection_name | String | Name of the Couchbase collection (required). |
+| index_name | String | Name of the Couchbase index (required). |
+| search_query | String | The query to search for in the vector store. |
+| ingest_data | Data | The data to ingest into the vector store (list of Data objects). |
+| embedding | Embeddings | The embedding function to use for the vector store. |
+| number_of_results | Integer | Number of results to return from the search. Default: 4 (advanced).
| + +#### Outputs + +| Name | Type | Description | +|----------------|------------------------|--------------------------------| +| vector_store | CouchbaseVectorStore | A Couchbase vector store instance configured with the specified parameters. | + +## FAISS + +This component creates a FAISS Vector Store with search capabilities. +For more information, see the [FAISS documentation](https://faiss.ai/index.html). + +### Parameters + +#### Inputs + +| Name | Type | Description | +|---------------------------|---------------|--------------------------------------------------| +| index_name | String | The name of the FAISS index. Default: "langflow_index". | +| persist_directory | String | Path to save the FAISS index. It will be relative to where Langflow is running. | +| search_query | String | The query to search for in the vector store. | +| ingest_data | Data | The data to ingest into the vector store (list of Data objects or documents). | +| allow_dangerous_deserialization | Boolean | Set to True to allow loading pickle files from untrusted sources. Default: True (advanced). | +| embedding | Embeddings | The embedding function to use for the vector store. | +| number_of_results | Integer | Number of results to return from the search. Default: 4 (advanced). | + +#### Outputs + +| Name | Type | Description | +|----------------|------------------------|--------------------------------| +| vector_store | FAISS | A FAISS vector store instance configured with the specified parameters. | + +## Hyper-Converged Database (HCD) Vector Store + +This component implements a Vector Store using HCD. + +### Parameters + +#### Inputs + +| Name | Display Name | Info | +|------|--------------|------| +| collection_name | Collection Name | The name of the collection within HCD where the vectors will be stored (required) | +| username | HCD Username | Authentication username for accessing HCD (default: "hcd-superuser", required) | +| password | HCD Password | Authentication password for accessing HCD (required) | +| api_endpoint | HCD API Endpoint | API endpoint URL for the HCD service (required) | +| search_input | Search Input | Query string for similarity search | +| ingest_data | Ingest Data | Data to be ingested into the vector store | +| namespace | Namespace | Optional namespace within HCD to use for the collection (default: "default_namespace") | +| ca_certificate | CA Certificate | Optional CA certificate for TLS connections to HCD | +| metric | Metric | Optional distance metric for vector comparisons (options: "cosine", "dot_product", "euclidean") | +| batch_size | Batch Size | Optional number of data to process in a single batch | +| bulk_insert_batch_concurrency | Bulk Insert Batch Concurrency | Optional concurrency level for bulk insert operations | +| bulk_insert_overwrite_concurrency | Bulk Insert Overwrite Concurrency | Optional concurrency level for bulk insert operations that overwrite existing data | +| bulk_delete_concurrency | Bulk Delete Concurrency | Optional concurrency level for bulk delete operations | +| setup_mode | Setup Mode | Configuration mode for setting up the vector store (options: "Sync", "Async", "Off", default: "Sync") | +| pre_delete_collection | Pre Delete Collection | Boolean flag to determine whether to delete the collection before creating a new one | +| metadata_indexing_include | Metadata Indexing Include | Optional list of metadata fields to include in the indexing | +| embedding | Embedding or Astra Vectorize | Allows either an embedding model or an Astra 
Vectorize configuration | +| metadata_indexing_exclude | Metadata Indexing Exclude | Optional list of metadata fields to exclude from the indexing | +| collection_indexing_policy | Collection Indexing Policy | Optional dictionary defining the indexing policy for the collection | +| number_of_results | Number of Results | Number of results to return in similarity search (default: 4) | +| search_type | Search Type | Search type to use (options: "Similarity", "Similarity with score threshold", "MMR (Max Marginal Relevance)", default: "Similarity") | +| search_score_threshold | Search Score Threshold | Minimum similarity score threshold for search results (default: 0) | +| search_filter | Search Metadata Filter | Optional dictionary of filters to apply to the search query | + +#### Outputs +| Name | Display Name | Info | +|------|--------------|------| +| vector_store | Vector Store | Built HCD vector store instance | +| search_results | Search Results | Results of similarity search as a list of Data objects | +## Milvus + +This component creates a Milvus Vector Store with search capabilities. +For more information, see the [Milvus documentation](https://milvus.io/docs). + +### Parameters + +#### Inputs + +| Name | Type | Description | +|-------------------------|---------------|--------------------------------------------------| +| collection_name | String | Name of the Milvus collection | +| collection_description | String | Description of the Milvus collection | +| uri | String | Connection URI for Milvus | +| password | SecretString | Password for Milvus | +| username | SecretString | Username for Milvus | +| batch_size | Integer | Number of data to process in a single batch | +| search_query | String | Query for similarity search | +| ingest_data | Data | Data to be ingested into the vector store | +| embedding | Embeddings | Embedding function to use | +| number_of_results | Integer | Number of results to return in search | +| search_type | String | Type of search to perform | +| search_score_threshold | Float | Minimum similarity score for search results | +| search_filter | Dict | Metadata filters for search query | +| setup_mode | String | Configuration mode for setting up the vector store | +| vector_dimensions | Integer | Number of dimensions of the vectors | +| pre_delete_collection | Boolean | Whether to delete the collection before creating a new one | -:::info +#### Outputs -This page may contain outdated information. It will be updated as soon as possible. +| Name | Type | Description | +|----------------|------------------------|--------------------------------| +| vector_store | Milvus | A Milvus vector store instance configured with the specified parameters. | -::: +## MongoDB Atlas + +This component creates a MongoDB Atlas Vector Store with search capabilities. +For more information, see the [MongoDB Atlas documentation](https://www.mongodb.com/docs/atlas/atlas-vector-search/tutorials/vector-search-quick-start/). 
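+
+A minimal sketch of the kind of store this component builds, using the `langchain-community` integration (the connection string, names, and embedding model below are placeholders/assumptions; any LangChain Embeddings implementation works):
+
+```python
+# Build a MongoDB Atlas vector search store and run a similarity search.
+from langchain_community.vectorstores import MongoDBAtlasVectorSearch
+from langchain_openai import OpenAIEmbeddings  # assumes OPENAI_API_KEY is set
+
+vector_store = MongoDBAtlasVectorSearch.from_connection_string(
+    "mongodb+srv://user:pass@cluster.mongodb.net",  # placeholder URI
+    namespace="my_db.my_collection",                # db_name.collection_name
+    embedding=OpenAIEmbeddings(),
+    index_name="my_index",
+)
+
+results = vector_store.similarity_search("What is a vector index?", k=4)
+```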
+ +### Parameters + +#### Inputs + +| Name | Type | Description | +| ------------------------ | ------------ | ----------------------------------------- | +| mongodb_atlas_cluster_uri | SecretString | MongoDB Atlas Cluster URI | +| db_name | String | Database name | +| collection_name | String | Collection name | +| index_name | String | Index name | +| search_query | String | Query for similarity search | +| ingest_data | Data | Data to be ingested into the vector store | +| embedding | Embeddings | Embedding function to use | +| number_of_results | Integer | Number of results to return in search | + +#### Outputs + +| Name | Type | Description | +| ------------- | ---------------------- | ----------------------------------------- | +| vector_store | MongoDBAtlasVectorSearch| MongoDB Atlas vector store instance | +| search_results| List[Data] | Results of similarity search | + + +## PGVector + +This component creates a PGVector Vector Store with search capabilities. +For more information, see the [PGVector documentation](https://github.com/pgvector/pgvector). + +### Parameters + +#### Inputs + +| Name | Type | Description | +| --------------- | ------------ | ----------------------------------------- | +| pg_server_url | SecretString | PostgreSQL server connection string | +| collection_name | String | Table name for the vector store | +| search_query | String | Query for similarity search | +| ingest_data | Data | Data to be ingested into the vector store | +| embedding | Embeddings | Embedding function to use | +| number_of_results | Integer | Number of results to return in search | + +#### Outputs +| Name | Type | Description | +| ------------- | ----------- | ----------------------------------------- | +| vector_store | PGVector | PGVector vector store instance | +| search_results| List[Data] | Results of similarity search | +## Pinecone -### Astra DB {#453bcf5664154e37a920f1b602bd39da} +This component creates a Pinecone Vector Store with search capabilities. +For more information, see the [Pinecone documentation](https://docs.pinecone.io/home). +### Parameters -The `Astra DB` initializes a vector store using Astra DB from Data. It creates Astra DB-based vector indexes to efficiently store and retrieve documents. +#### Inputs +| Name | Type | Description | +| ----------------- | ------------ | ----------------------------------------- | +| index_name | String | Name of the Pinecone index | +| namespace | String | Namespace for the index | +| distance_strategy | String | Strategy for calculating distance between vectors | +| pinecone_api_key | SecretString | API key for Pinecone | +| text_key | String | Key in the record to use as text | +| search_query | String | Query for similarity search | +| ingest_data | Data | Data to be ingested into the vector store | +| embedding | Embeddings | Embedding function to use | +| number_of_results | Integer | Number of results to return in search | -**Parameters:** +#### Outputs -- **Input:** Documents or Data for input. -- **Embedding or Astra vectorize:** External or server-side model Astra DB uses. -- **Collection Name:** Name of the Astra DB collection. -- **Token:** Authentication token for Astra DB. -- **API Endpoint:** API endpoint for Astra DB. -- **Namespace:** Astra DB namespace. -- **Metric:** Metric used by Astra DB. -- **Batch Size:** Batch size for operations. -- **Bulk Insert Batch Concurrency:** Concurrency level for bulk inserts. -- **Bulk Insert Overwrite Concurrency:** Concurrency level for overwriting during bulk inserts. 
-- **Bulk Delete Concurrency:** Concurrency level for bulk deletions. -- **Setup Mode:** Setup mode for the vector store. -- **Pre Delete Collection:** Option to delete the collection before setup. -- **Metadata Indexing Include:** Fields to include in metadata indexing. -- **Metadata Indexing Exclude:** Fields to exclude from metadata indexing. -- **Collection Indexing Policy:** Indexing policy for the collection. +| Name | Type | Description | +| ------------- | ---------- | ----------------------------------------- | +| vector_store | Pinecone | Pinecone vector store instance | +| search_results| List[Data] | Results of similarity search | -NOTE +## Qdrant -Ensure you configure the necessary Astra DB token and API endpoint before starting. +This component creates a Qdrant Vector Store with search capabilities. +For more information, see the [Qdrant documentation](https://qdrant.tech/documentation/). +### Parameters ---- - - -### Astra DB Search {#26f25d1933a9459bad2d6725f87beb11} - - -`Astra DBSearch` searches an existing Astra DB vector store for documents similar to the input. It uses the `Astra DB`component's functionality for efficient retrieval. - - -**Parameters:** - -- **Search Type:** Type of search, such as Similarity or MMR. -- **Input Value:** Value to search for. -- **Embedding or Astra vectorize:** External or server-side model Astra DB uses. -- **Collection Name:** Name of the Astra DB collection. -- **Token:** Authentication token for Astra DB. -- **API Endpoint:** API endpoint for Astra DB. -- **Namespace:** Astra DB namespace. -- **Metric:** Metric used by Astra DB. -- **Batch Size:** Batch size for operations. -- **Bulk Insert Batch Concurrency:** Concurrency level for bulk inserts. -- **Bulk Insert Overwrite Concurrency:** Concurrency level for overwriting during bulk inserts. -- **Bulk Delete Concurrency:** Concurrency level for bulk deletions. -- **Setup Mode:** Setup mode for the vector store. -- **Pre Delete Collection:** Option to delete the collection before setup. -- **Metadata Indexing Include:** Fields to include in metadata indexing. -- **Metadata Indexing Exclude:** Fields to exclude from metadata indexing. -- **Collection Indexing Policy:** Indexing policy for the collection. - ---- - - -### Chroma {#74730795605143cba53e1f4c4f2ef5d6} - - -`Chroma` sets up a vector store using Chroma for efficient vector storage and retrieval within language processing workflows. - - -**Parameters:** - -- **Collection Name:** Name of the collection. -- **Persist Directory:** Directory to persist the Vector Store. -- **Server CORS Allow Origins (Optional):** CORS allow origins for the Chroma server. -- **Server Host (Optional):** Host for the Chroma server. -- **Server Port (Optional):** Port for the Chroma server. -- **Server gRPC Port (Optional):** gRPC port for the Chroma server. -- **Server SSL Enabled (Optional):** SSL configuration for the Chroma server. -- **Input:** Input data for creating the Vector Store. -- **Embedding:** Embeddings used for the Vector Store. - -For detailed documentation and integration guides, please refer to the [Chroma Component Documentation](https://python.langchain.com/docs/integrations/vectorstores/chroma). - - ---- - - -### Chroma Search {#5718072a155441f3a443b944ad4d638f} - - -`ChromaSearch` searches a Chroma collection for documents similar to the input text. It leverages Chroma to ensure efficient document retrieval. - - -**Parameters:** - -- **Input:** Input text for search. 
-- **Search Type:** Type of search, such as Similarity or MMR. -- **Collection Name:** Name of the Chroma collection. -- **Index Directory:** Directory where the Chroma index is stored. -- **Embedding:** Embedding model used for vectorization. -- **Server CORS Allow Origins (Optional):** CORS allow origins for the Chroma server. -- **Server Host (Optional):** Host for the Chroma server. -- **Server Port (Optional):** Port for the Chroma server. -- **Server gRPC Port (Optional):** gRPC port for the Chroma server. -- **Server SSL Enabled (Optional):** SSL configuration for the Chroma server. - ---- - - -### Couchbase {#6900a79347164f35af27ae27f0d64a6d} - - -`Couchbase` builds a Couchbase vector store from Data, streamlining the storage and retrieval of documents. - - -**Parameters:** - -- **Embedding:** Model used by Couchbase. -- **Input:** Documents or Data. -- **Couchbase Cluster Connection String:** Cluster Connection string. -- **Couchbase Cluster Username:** Cluster Username. -- **Couchbase Cluster Password:** Cluster Password. -- **Bucket Name:** Bucket identifier in Couchbase. -- **Scope Name:** Scope identifier in Couchbase. -- **Collection Name:** Collection identifier in Couchbase. -- **Index Name:** Index identifier. - -For detailed documentation and integration guides, please refer to the [Couchbase Component Documentation](https://python.langchain.com/docs/integrations/vectorstores/couchbase). - - ---- - - -### Couchbase Search {#c77bb09425a3426f9677d38d8237d9ba} - - -`CouchbaseSearch` leverages the Couchbase component to search for documents based on similarity metric. - - -**Parameters:** - -- **Input:** Search query. -- **Embedding:** Model used in the Vector Store. -- **Couchbase Cluster Connection String:** Cluster Connection string. -- **Couchbase Cluster Username:** Cluster Username. -- **Couchbase Cluster Password:** Cluster Password. -- **Bucket Name:** Bucket identifier. -- **Scope Name:** Scope identifier. -- **Collection Name:** Collection identifier in Couchbase. -- **Index Name:** Index identifier. - ---- - - -### FAISS {#5b3f4e6592a847b69e07df2f674a03f0} - - -The `FAISS` component manages document ingestion into a FAISS Vector Store, optimizing document indexing and retrieval. - - -**Parameters:** - -- **Embedding:** Model used for vectorizing inputs. -- **Input:** Documents to ingest. -- **Folder Path:** Save path for the FAISS index, relative to Langflow. - -For more details, see the [FAISS Component Documentation](https://faiss.ai/index.html). - - ---- - - -### FAISS Search {#81ff12d7205940a3b14e3ddf304630f8} - - -`FAISSSearch` searches a FAISS Vector Store for documents similar to a given input, using similarity metrics for efficient retrieval. - - -**Parameters:** - -- **Embedding:** Model used in the FAISS Vector Store. -- **Folder Path:** Path to load the FAISS index from, relative to Langflow. -- **Input:** Search query. -- **Index Name:** Index identifier. - ---- - - -### MongoDB Atlas {#eba8892f7a204b97ad1c353e82948149} - - -`MongoDBAtlas` builds a MongoDB Atlas-based vector store from Data, streamlining the storage and retrieval of documents. - - -**Parameters:** - -- **Embedding:** Model used by MongoDB Atlas. -- **Input:** Documents or Data. -- **Collection Name:** Collection identifier in MongoDB Atlas. -- **Database Name:** Database identifier. -- **Index Name:** Index identifier. -- **MongoDB Atlas Cluster URI:** Cluster URI. -- **Search Kwargs:** Additional search parameters. 
- -NOTE - - -Ensure pymongo is installed for using MongoDB Atlas Vector Store. - - ---- - - -### MongoDB Atlas Search {#686ba0e30a54438cbc7153b81ee4b1df} - - -`MongoDBAtlasSearch` leverages the MongoDBAtlas component to search for documents based on similarity metrics. - - -**Parameters:** - -- **Search Type:** Type of search, such as "Similarity" or "MMR". -- **Input:** Search query. -- **Embedding:** Model used in the Vector Store. -- **Collection Name:** Collection identifier. -- **Database Name:** Database identifier. -- **Index Name:** Index identifier. -- **MongoDB Atlas Cluster URI:** Cluster URI. -- **Search Kwargs:** Additional search parameters. - ---- - - -### PGVector {#7ceebdd84ab14f8e8589c13c58370e5b} - - -`PGVector` integrates a Vector Store within a PostgreSQL database, allowing efficient storage and retrieval of vectors. - - -**Parameters:** - -- **Input:** Value for the Vector Store. -- **Embedding:** Model used. -- **PostgreSQL Server Connection String:** Server URL. -- **Table:** Table name in the PostgreSQL database. - -For more details, see the [PGVector Component Documentation](https://python.langchain.com/docs/integrations/vectorstores/pgvector). - - -NOTE - - -Ensure the PostgreSQL server is accessible and configured correctly. +#### Inputs +| Name | Type | Description | +| -------------------- | ------------ | ----------------------------------------- | +| collection_name | String | Name of the Qdrant collection | +| host | String | Qdrant server host | +| port | Integer | Qdrant server port | +| grpc_port | Integer | Qdrant gRPC port | +| api_key | SecretString | API key for Qdrant | +| prefix | String | Prefix for Qdrant | +| timeout | Integer | Timeout for Qdrant operations | +| path | String | Path for Qdrant | +| url | String | URL for Qdrant | +| distance_func | String | Distance function for vector similarity | +| content_payload_key | String | Key for content payload | +| metadata_payload_key | String | Key for metadata payload | +| search_query | String | Query for similarity search | +| ingest_data | Data | Data to be ingested into the vector store | +| embedding | Embeddings | Embedding function to use | +| number_of_results | Integer | Number of results to return in search | ---- - - -### PGVector Search {#196bf22ea2844bdbba971b5082750943} - - -`PGVectorSearch` extends `PGVector` to search for documents based on similarity metrics. - - -**Parameters:** - -- **Input:** Search query. -- **Embedding:** Model used. -- **PostgreSQL Server Connection String:** Server URL. -- **Table:** Table name. -- **Search Type:** Type of search, such as "Similarity" or "MMR". - ---- - - -### Pinecone {#67abbe3e27c34fb4bcb35926ce831727} - - -`Pinecone` constructs a Pinecone wrapper from Data, setting up Pinecone-based vector indexes for document storage and retrieval. - - -**Parameters:** - -- **Input:** Documents or Data. -- **Embedding:** Model used. -- **Index Name:** Index identifier. -- **Namespace:** Namespace used. -- **Pinecone API Key:** API key. -- **Pinecone Environment:** Environment settings. -- **Search Kwargs:** Additional search parameters. -- **Pool Threads:** Number of threads. - -:::info - -Ensure the Pinecone API key and environment are correctly configured. - -::: - - - - ---- - - -### Pinecone Search {#977944558cad4cf2ba332ea4f06bf485} - - -`PineconeSearch` searches a Pinecone Vector Store for documents similar to the input, using advanced similarity metrics. - - -**Parameters:** - -- **Search Type:** Type of search, such as "Similarity" or "MMR". 
-- **Input Value:** Search query. -- **Embedding:** Model used. -- **Index Name:** Index identifier. -- **Namespace:** Namespace used. -- **Pinecone API Key:** API key. -- **Pinecone Environment:** Environment settings. -- **Search Kwargs:** Additional search parameters. -- **Pool Threads:** Number of threads. - ---- - - -### Qdrant {#88df77f3044e4ac6980950835a919fb0} - - -`Qdrant` allows efficient similarity searches and retrieval operations, using a list of texts to construct a Qdrant wrapper. - - -**Parameters:** - -- **Input:** Documents or Data. -- **Embedding:** Model used. -- **API Key:** Qdrant API key. -- **Collection Name:** Collection identifier. -- **Advanced Settings:** Includes content payload key, distance function, gRPC port, host, HTTPS, location, metadata payload key, path, port, prefer gRPC, prefix, search kwargs, timeout, URL. - ---- - - -### Qdrant Search {#5ba5f8dca0f249d7ad00778f49901e6c} - - -`QdrantSearch` extends `Qdrant` to search for documents similar to the input based on advanced similarity metrics. - - -**Parameters:** - -- **Search Type:** Type of search, such as "Similarity" or "MMR". -- **Input Value:** Search query. -- **Embedding:** Model used. -- **API Key:** Qdrant API key. -- **Collection Name:** Collection identifier. -- **Advanced Settings:** Includes content payload key, distance function, gRPC port, host, HTTPS, location, metadata payload key, path, port, prefer gRPC, prefix, search kwargs, timeout, URL. - ---- +#### Outputs +| Name | Type | Description | +| ------------- | -------- | ----------------------------------------- | +| vector_store | Qdrant | Qdrant vector store instance | +| search_results| List[Data] | Results of similarity search | -### Redis {#a0fb8a9d244a40eb8439d0f8c22a2562} +## Redis -`Redis` manages a Vector Store in a Redis database, supporting efficient vector storage and retrieval. +This component creates a Redis Vector Store with search capabilities. +For more information, see the [Redis documentation](https://redis.io/docs/latest/develop/interact/search-and-query/advanced-concepts/vectors/). +### Parameters -**Parameters:** +#### Inputs -- **Index Name:** Default index name. -- **Input:** Data for building the Redis Vector Store. -- **Embedding:** Model used. -- **Schema:** Optional schema file (.yaml) for document structure. -- **Redis Server Connection String:** Server URL. -- **Redis Index:** Optional index name. +| Name | Type | Description | +| ----------------- | ------------ | ----------------------------------------- | +| redis_server_url | SecretString | Redis server connection string | +| redis_index_name | String | Name of the Redis index | +| code | String | Custom code for Redis (advanced) | +| schema | String | Schema for Redis index | +| search_query | String | Query for similarity search | +| ingest_data | Data | Data to be ingested into the vector store | +| number_of_results | Integer | Number of results to return in search | +| embedding | Embeddings | Embedding function to use | -For detailed documentation, refer to the [Redis Documentation](https://python.langchain.com/docs/integrations/vectorstores/redis). +#### Outputs +| Name | Type | Description | +| ------------- | -------- | ----------------------------------------- | +| vector_store | Redis | Redis vector store instance | +| search_results| List[Data]| Results of similarity search | -:::info -Ensure the Redis server URL and index name are configured correctly. Provide a schema if no documents are available. 
+## Supabase -::: +This component creates a connection to a Supabase Vector Store with search capabilities. +For more information, see the [Supabase documentation](https://supabase.com/docs/guides/ai). +### Parameters +#### Inputs +| Name | Type | Description | +| ------------------- | ------------ | ----------------------------------------- | +| supabase_url | String | URL of the Supabase instance | +| supabase_service_key| SecretString | Service key for Supabase authentication | +| table_name | String | Name of the table in Supabase | +| query_name | String | Name of the query to use | +| search_query | String | Query for similarity search | +| ingest_data | Data | Data to be ingested into the vector store | +| embedding | Embeddings | Embedding function to use | +| number_of_results | Integer | Number of results to return in search | ---- - - -### Redis Search {#80aea4da515f490e979c8576099ee880} - - -`RedisSearch` searches a Redis Vector Store for documents similar to the input. - - -**Parameters:** - -- **Search Type:** Type of search, such as "Similarity" or "MMR". -- **Input Value:** Search query. -- **Index Name:** Default index name. -- **Embedding:** Model used. -- **Schema:** Optional schema file (.yaml) for document structure. -- **Redis Server Connection String:** Server URL. -- **Redis Index:** Optional index name. - ---- - - -### Supabase {#e86fb3cc507e4b5494f0a421f94e853b} - - -`Supabase` initializes a Supabase Vector Store from texts and embeddings, setting up an environment for efficient document retrieval. - - -**Parameters:** - -- **Input:** Documents or data. -- **Embedding:** Model used. -- **Query Name:** Optional query name. -- **Search Kwargs:** Advanced search parameters. -- **Supabase Service Key:** Service key. -- **Supabase URL:** Instance URL. -- **Table Name:** Optional table name. - -:::info - -Ensure the Supabase service key, URL, and table name are properly configured. - -::: - - - - ---- - +#### Outputs -### Supabase Search {#fd02d550b9b2457f91f2f4073656cb09} +| Name | Type | Description | +| ------------- | ------------------ | ----------------------------------------- | +| vector_store | SupabaseVectorStore | Supabase vector store instance | +| search_results| List[Data] | Results of similarity search | -`SupabaseSearch` searches a Supabase Vector Store for documents similar to the input. +## Upstash +This component creates an Upstash Vector Store with search capabilities. +For more information, see the [Upstash documentation](https://upstash.com/docs/introduction). -**Parameters:** +### Parameters -- **Search Type:** Type of search, such as "Similarity" or "MMR". -- **Input Value:** Search query. -- **Embedding:** Model used. -- **Query Name:** Optional query name. -- **Search Kwargs:** Advanced search parameters. -- **Supabase Service Key:** Service key. -- **Supabase URL:** Instance URL. -- **Table Name:** Optional table name. - ---- - - -### Upstash Vector - - -`UpstashVector` searches a Upstash Vector Store for documents similar to the input. It has it's own embedding -model which can be used to search documents without needing an external embedding model. - - -**Parameters:** - -- **Index URL:** The URL of the Upstash index. -- **Index Token:** The token for the Upstash index. -- **Text Key:** The key in the record to use as text. -- **Namespace:** The namespace name. A new namespace is created if not found. Leave empty for default namespace. -- **Search Query:** The search query. -- **Metadata Filter:** The metadata filter. 
Filters documents by metadata. Look at the [docs](https://upstash.com/docs/vector/features/filtering) for more information. -- **Embedding:** The embedding model used. To use Upstash's embeddings, don't provide an embedding. -- **Number of Results:** The number of results to return. - ---- - - -### Vectara {#b4e05230b62a47c792a89c5511af97ac} +#### Inputs +| Name | Type | Description | +| --------------- | ------------ | ----------------------------------------- | +| index_url | String | The URL of the Upstash index | +| index_token | SecretString | The token for the Upstash index | +| text_key | String | The key in the record to use as text | +| namespace | String | Namespace for the index | +| search_query | String | Query for similarity search | +| metadata_filter | String | Filters documents by metadata | +| ingest_data | Data | Data to be ingested into the vector store | +| embedding | Embeddings | Embedding function to use (optional) | +| number_of_results | Integer | Number of results to return in search | -`Vectara` sets up a Vectara Vector Store from files or upserted data, optimizing document retrieval. +#### Outputs +| Name | Type | Description | +| ------------- | ---------------- | ----------------------------------------- | +| vector_store | UpstashVectorStore| Upstash vector store instance | +| search_results| List[Data] | Results of similarity search | -**Parameters:** -- **Vectara Customer ID:** Customer ID. -- **Vectara Corpus ID:** Corpus ID. -- **Vectara API Key:** API key. -- **Files Url:** Optional URLs for file initialization. -- **Input:** Optional data for corpus upsert. +## Vectara -For more information, consult the [Vectara Component Documentation](https://python.langchain.com/docs/integrations/vectorstores/vectara). +This component creates a Vectara Vector Store with search capabilities. +For more information, see the [Vectara documentation](https://docs.vectara.com/docs/). +### Parameters -:::info +#### Inputs -If inputs or files_url are provided, they will be processed accordingly. +| Name | Type | Description | +| ---------------- | ------------ | ----------------------------------------- | +| vectara_customer_id | String | Vectara customer ID | +| vectara_corpus_id | String | Vectara corpus ID | +| vectara_api_key | SecretString | Vectara API key | +| embedding | Embeddings | Embedding function to use (optional) | +| ingest_data | List[Document/Data] | Data to be ingested into the vector store | +| search_query | String | Query for similarity search | +| number_of_results | Integer | Number of results to return in search | -::: +#### Outputs +| Name | Type | Description | +| ------------- | ----------------- | ----------------------------------------- | +| vector_store | VectaraVectorStore | Vectara vector store instance | +| search_results| List[Data] | Results of similarity search | +## Vectara Search +This component searches a Vectara Vector Store for documents based on the provided input. +For more information, see the [Vectara documentation](https://docs.vectara.com/docs/). ---- - - -### Vectara Search {#31a47221c23f4fbba4a7465cf1d89eb0} - +### Parameters -`VectaraSearch` searches a Vectara Vector Store for documents based on the provided input. 
+#### Inputs +| Name | Type | Description | +|---------------------|--------------|-------------------------------------------| +| search_type | String | Type of search, such as "Similarity" or "MMR" | +| input_value | String | Search query | +| vectara_customer_id | String | Vectara customer ID | +| vectara_corpus_id | String | Vectara corpus ID | +| vectara_api_key | SecretString | Vectara API key | +| files_url | List[String] | Optional URLs for file initialization | -**Parameters:** - -- **Search Type:** Type of search, such as "Similarity" or "MMR". -- **Input Value:** Search query. -- **Vectara Customer ID:** Customer ID. -- **Vectara Corpus ID:** Corpus ID. -- **Vectara API Key:** API key. -- **Files Url:** Optional URLs for file initialization. - ---- +#### Outputs +| Name | Type | Description | +|----------------|------------|----------------------------| +| search_results | List[Data] | Results of similarity search | -### Weaviate {#57c7969574b1418dbb079ac5fc8cd857} +## Weaviate +This component facilitates a Weaviate Vector Store setup, optimizing text and document indexing and retrieval. +For more information, see the [Weaviate Documentation](https://weaviate.io/developers/weaviate). -`Weaviate` facilitates a Weaviate Vector Store setup, optimizing text and document indexing and retrieval. +### Parameters +#### Inputs -**Parameters:** +| Name | Type | Description | +|---------------|--------------|-------------------------------------------| +| weaviate_url | String | Default instance URL | +| search_by_text| Boolean | Indicates whether to search by text | +| api_key | SecretString | Optional API key for authentication | +| index_name | String | Optional index name | +| text_key | String | Default text extraction key | +| input | Document | Document or record | +| embedding | Embeddings | Model used | +| attributes | List[String] | Optional additional attributes | -- **Weaviate URL:** Default instance URL. -- **Search By Text:** Indicates whether to search by text. -- **API Key:** Optional API key for authentication. -- **Index Name:** Optional index name. -- **Text Key:** Default text extraction key. -- **Input:** Document or record. -- **Embedding:** Model used. -- **Attributes:** Optional additional attributes. +#### Outputs -For more details, see the [Weaviate Component Documentation](https://python.langchain.com/docs/integrations/vectorstores/weaviate). +| Name | Type | Description | +|--------------|------------------|-------------------------------| +| vector_store | WeaviateVectorStore | Weaviate vector store instance | +**Note:** Ensure Weaviate instance is running and accessible. Verify API key, index name, text key, and attributes are set correctly. -NOTE - - -Ensure Weaviate instance is running and accessible. Verify API key, index name, text key, and attributes are set correctly. - - ---- - +## Weaviate Search -### Weaviate Search {#6d4e616dfd6143b28dc055bc1c40ecae} +This component searches a Weaviate Vector Store for documents similar to the input. +For more information, see the [Weaviate Documentation](https://weaviate.io/developers/weaviate). +### Parameters -`WeaviateSearch` searches a Weaviate Vector Store for documents similar to the input. 
+#### Inputs +| Name | Type | Description | +|---------------|--------------|-------------------------------------------| +| search_type | String | Type of search, such as "Similarity" or "MMR" | +| input_value | String | Search query | +| weaviate_url | String | Default instance URL | +| search_by_text| Boolean | Indicates whether to search by text | +| api_key | SecretString | Optional API key for authentication | +| index_name | String | Optional index name | +| text_key | String | Default text extraction key | +| embedding | Embeddings | Model used | +| attributes | List[String] | Optional additional attributes | -**Parameters:** +#### Outputs -- **Search Type:** Type of search, such as "Similarity" or "MMR". -- **Input Value:** Search query. -- **Weaviate URL:** Default instance URL. -- **Search By Text:** Indicates whether to search by text. -- **API Key:** Optional API key for authentication. -- **Index Name:** Optional index name. -- **Text Key:** Default text extraction key. -- **Embedding:** Model used. -- **Attributes:** Optional additional attributes. +| Name | Type | Description | +|----------------|------------|----------------------------| +| search_results | List[Data] | Results of similarity search | diff --git a/docs/docs/Components/components.md b/docs/docs/Components/components.md deleted file mode 100644 index 0598aebc9a0c..000000000000 --- a/docs/docs/Components/components.md +++ /dev/null @@ -1,90 +0,0 @@ ---- -title: Intro to Components -sidebar_position: 0 -slug: /components ---- - - - -## Component {#0323a728d8314767adb907b998036bb4} - - ---- - - -A component is a single building block within a flow. It consists of inputs, outputs, and parameters that define their functionality. These elements provide a convenient and straightforward way to compose LLM-based applications. Learn more about components and how they work below. - - -During the flow creation process, you will notice handles (colored circles) attached to one or both sides of a component. These handles use distinct colors to indicate the types of inputs and outputs that can be interconnected. Hover over a handle to see connection details. - - -![](./565424296.png) - - -On the top right corner of the component, you'll find the a play button to run a component. Once it runs, a status icon appears and you can hover over that to visualize success or error messages. Start interacting with your AI by clicking the **Playground** at the bottom right of the workspace. - - -### Component Menu {#7e3f2f8ff5074b2fb3eee97c9cfaabe7} - - -Each component is unique, but they all have a menu bar at the top that looks something like this. - - -![](./938852908.png) - - -It consists of options such as: - -- **Code** — displays the component's Python code. You can modify the code and save it. -- **Advanced** — See and adjust all parameters of a component. -- **Freeze** — After a component runs, lock its previous output state to prevent it from re-running. - -Click **All** (the "..." button) to see all options. - - -### Output Preview {#ed7b3c34e0774b8a916b0e68821c9a7a} - - -Langflow includes an output visualizer for components that opens a pop-up screen. This allows you to easily inspect and monitor transmissions between components, providing instant feedback on your workflows. - - -![](./987204819.png) - - -### Advanced Settings {#b6430d4903df44f0ba4618a558c83d7b} - - -Langflow components can be edited by clicking the **Advanced Settings** button. 
- - -Hide parameters with the **Show** button to reduce complexity and keep the workspace clean and intuitive for experimentation. - - -You can also double-click a component's name and description to modify those. Component descriptions accept markdown syntax. - - -### Group Components {#c3f5ed818e3b40ceb6534dc358e1a5f2} - - -Multiple components can be grouped into a single component for reuse. This is useful when combining large flows into single components (like RAG with a vector database, for example) and saving space. - -1. Hold **Shift** and drag to select components. -2. Select **Group**. -3. The components merge into a single component. -4. Double-click the name and description to change them. -5. Save your grouped component to in the sidebar for later use! - -[group video here] - - -### Component Version {#887fd587589448dc8c27336d1c235b9b} - - -A component's state is stored in a database, while sidebar components are like starter templates. As soon as you drag a component from the sidebar to the workspace, the two components are no longer in parity. - - -The component will keep the version number it was initialized to the workspace with. Click the **Update Component** icon (exclamation mark) to bring the component up to the `latest` version. This will change the code of the component in place so you can validate that the component was updated by checking its Python code before and after updating it. - - -![](./263391508.png) - diff --git a/docs/docs/Components/custom-component-chat.png b/docs/docs/Components/custom-component-chat.png new file mode 100644 index 000000000000..aaf70c2e3976 Binary files /dev/null and b/docs/docs/Components/custom-component-chat.png differ diff --git a/docs/docs/Components/custom-component-inputs-chat.png b/docs/docs/Components/custom-component-inputs-chat.png new file mode 100644 index 000000000000..91797530438f Binary files /dev/null and b/docs/docs/Components/custom-component-inputs-chat.png differ diff --git a/docs/docs/Configuration/596474918.png b/docs/docs/Configuration/596474918.png deleted file mode 100644 index eb1c8de37171..000000000000 Binary files a/docs/docs/Configuration/596474918.png and /dev/null differ diff --git a/docs/docs/Configuration/My-Collection.md b/docs/docs/Configuration/My-Collection.md deleted file mode 100644 index ba631ae8014f..000000000000 --- a/docs/docs/Configuration/My-Collection.md +++ /dev/null @@ -1,104 +0,0 @@ ---- -title: My Collection -sidebar_position: 3 -slug: /365085a8-a90a-43f9-a779-f8769ec7eca1 ---- - - - -:::info - -This page may contain outdated information. It will be updated as soon as possible. - -::: - - - - -My Collection is a space in Langflow where users can manage, organize, and access their flows and components. Flows and components are displayed as individual cards that provide relevant information. - - -![](./1289235516.png) - -- **Folders**: Users can organize their projects into folders. Default folders include "My Projects" and the ability to create new folders. Hover over a folder to access options to download or delete it. -- **Search Bar** Enables users to quickly search through their flows and components. -- **Select All**: This feature allows users to select all projects displayed on the page for batch actions like moving, deleting, or exporting. - -Click on a flow card to open it in Langflow Workspace or use the **Playground Button** for direct access to execute and interact with the flow’s chatbot interface. 
- - -## Folders {#776a3866273f4efbbbb2febdfc1baa12} - - ---- - - -Folders can help you keep your projects organized in Langflow. They help you manage and categorize your work efficiently, making it easier to find and access the resources you need. - - -![](./1926471667.png) - - -Multiple projects can be stored in **folders**. - - -Folders allow you to categorize flows and components into manageable groups. This makes it easier to find and access specific projects quickly. - - -**My Projects** is a default folder where all new projects and components are initially stored unless specified otherwise. Users can create custom folders to better organize their work according to specific needs. - - -Hovering over a folder in Langflow provides options to either remove or download the entire folder, allowing you to keep an offline copy or migrate projects between environments - - -Create new folders with the **New folder** button. One folder can store multiple projects (as the default My Projects folder does). - - -You can download folders of projects as a single JSON file, and upload files and flows to your folder. - - -Click the **Trash** icon to delete a folder. - - - -### How to Create Folders {#5ba5abe995c843e4a429e41413f9d539} - -1. **Navigate to the Home Screen:** - - Go to your Langflow Home Page (outside of projects). -2. **Create a New Folder:** - - Click on the "New Folder" button - - ![](./1125619904.png) - - - Double-click the new folder created to rename your folder appropriately to reflect its contents. - - ![](./945175915.png) - -3. **Move Files:** - - Drag and drop files into the corresponding folders and subfolders to keep everything organized. - - ![](./711485342.gif) - - -### Best Practices for Organizing Folders {#66f23f8e129a48598a7bb4565a508360} - -- **Categorize by Project:** Create a main folder for each project, then add projects for different aspects such as research, drafts, and final documents. -- **Use Descriptive Names:** Use clear and descriptive names for your folders to easily identify their contents at a glance. - -### Example Structure {#ebe6acad99c24d6f9aaabf18e4a17ff4} - - -Here's an example of how you might organize folders and subfolders for a Langflow project: - - -```text -Langflow -├── Research -│ ├── Articles Project -│ ├── Data Project -│ └── Notes Project -└── Documents - ├── RAG Project - └── Advanced RAG Project -``` - diff --git a/docs/docs/Configuration/configuration-api-keys.md b/docs/docs/Configuration/configuration-api-keys.md index e7f79b84aef7..68ce60752158 100644 --- a/docs/docs/Configuration/configuration-api-keys.md +++ b/docs/docs/Configuration/configuration-api-keys.md @@ -1,51 +1,29 @@ --- -title: API Keys +title: API keys sidebar_position: 1 slug: /configuration-api-keys --- - +Langflow provides an API key functionality that allows users to access their individual components and flows without traditional login authentication. The API key is a user-specific token that can be included in the request header, query parameter, or as a command line argument to authenticate API calls. This documentation outlines how to generate, use, and manage API keys in Langflow. :::info -This page may contain outdated information. It will be updated as soon as possible. +The default user and password are set using the LANGFLOW_SUPERUSER and LANGFLOW_SUPERUSER_PASSWORD environment variables. The default values are `langflow` and `langflow`, respectively. 
::: - - - -Langflow provides an API key functionality that allows users to access their individual components and flows without traditional login authentication. The API key is a user-specific token that can be included in the request header or query parameter to authenticate API calls. This documentation outlines how to generate, use, and manage API keys in Langflow. - - -:::info - -The default user and password are set using the LANGFLOW_SUPERUSER and LANGFLOW_SUPERUSER_PASSWORD environment variables. The default values are langflow and langflow, respectively. - -::: - - - - -## Generate an API key {#c29986a69cad4cdbbe7537e383ea7207} - +## Generate an API key Generate a user-specific token to use with Langflow. +### Generate an API key with the Langflow UI -### Generate an API key with the Langflow UI {#3d90098ddd7c44b6836c0273acf57123} - -1. Click on the "API Key" icon. - - ![](./596474918.png) - -2. Click on "Create new secret key". -3. Give it an optional name. -4. Click on "Create secret key". -5. Copy the API key and store it in a secure location. - -### Generate an API key with the Langflow CLI {#2368f62fc4b8477e8080c9c2d3659d76} +1. Click your user icon and select **Settings**. +2. Click **Langflow API**, and then click **Add New**. +3. Name your key, and then click **Create Secret Key**. +4. Copy the API key and store it in a secure location. +### Generate an API key with the Langflow CLI ```shell langflow api-key @@ -64,148 +42,152 @@ python -m langflow api-key ``` - -## Use the Langflow API key {#ae787e4b0d3846aa9094fac75e0ac04f} - +## Authenticate requests with the Langflow API key Include your API key in API requests to authenticate requests to Langflow. +### Include the API key in the HTTP header -### Use the `x-api-key` header {#70965b3ad24d467ca4f90e7c13a1f394} - - -Include the `x-api-key` in the HTTP header when making API requests: - +To use the API key when making API requests with cURL, include the API key in the HTTP header. ```shell -curl -X POST \\ - \\ - -H 'Content-Type: application/json'\\ - -H 'x-api-key: '\\ +curl -X POST \ + "http://127.0.0.1:7860/api/v1/run/*`YOUR_FLOW_ID`*?stream=false" \ + -H 'Content-Type: application/json' \ + -H 'x-api-key: *`YOUR_API_KEY`*' \ -d '{"inputs": {"text":""}, "tweaks": {}}' - ``` +To instead pass the API key as a query parameter, do the following: -With Python using `requests`: +```shell +curl -X POST \ + "http://127.0.0.1:7860/api/v1/run/*`YOUR_FLOW_ID`*?x-api-key=*`YOUR_API_KEY`*?stream=false" \ + -H 'Content-Type: application/json' \ + -d '{"inputs": {"text":""}, "tweaks": {}}' +``` +To use the API key when making API requests with the Python `requests` library, include the API key as a variable string. ```python +import argparse +import json +from argparse import RawTextHelpFormatter import requests from typing import Optional +import warnings +try: + from langflow.load import upload_file +except ImportError: + warnings.warn("Langflow provides a function to help you upload files to the flow. 
Please install langflow to use it.") + upload_file = None + +BASE_API_URL = "http://127.0.0.1:7860" +FLOW_ID = "*`YOUR_FLOW_ID`*" +ENDPOINT = "" # You can set a specific endpoint name in the flow settings -BASE_API_URL = "" -FLOW_ID = "4441b773-0724-434e-9cee-19d995d8f2df" # You can tweak the flow by adding a tweaks dictionary # e.g {"OpenAI-XXXXX": {"model_name": "gpt-4"}} -TWEAKS = {} - -def run_flow(inputs: dict, - flow_id: str, - tweaks: Optional[dict] = None, - apiKey: Optional[str] = None) -> dict: +TWEAKS = { + "ChatInput-8a86T": {}, + "Prompt-pKfl9": {}, + "ChatOutput-WcGpD": {}, + "OpenAIModel-5UyvQ": {} +} + +def run_flow(message: str, + endpoint: str, + output_type: str = "chat", + input_type: str = "chat", + tweaks: Optional[dict] = None, + api_key: Optional[str] = None) -> dict: """ Run a flow with a given message and optional tweaks. :param message: The message to send to the flow - :param flow_id: The ID of the flow to run + :param endpoint: The ID or the endpoint name of the flow :param tweaks: Optional tweaks to customize the flow :return: The JSON response from the flow """ - api_url = f"{BASE_API_URL}/{flow_id}" - - payload = {"inputs": inputs} - headers = {} - + api_url = f"{BASE_API_URL}/api/v1/run/{endpoint}" + + payload = { + "input_value": message, + "output_type": output_type, + "input_type": input_type, + } + headers = None if tweaks: payload["tweaks"] = tweaks - if apiKey: - headers = {"x-api-key": apiKey} - + if api_key: + headers = {"x-api-key": api_key} response = requests.post(api_url, json=payload, headers=headers) return response.json() -# Setup any tweaks you want to apply to the flow -inputs = {"text":""} -api_key = "" -print(run_flow(inputs, flow_id=FLOW_ID, tweaks=TWEAKS, apiKey=api_key)) +def main(): + parser = argparse.ArgumentParser(description="""Run a flow with a given message and optional tweaks. +Run it like: python .py "your message here" --endpoint "your_endpoint" --tweaks '{"key": "value"}'""", + formatter_class=RawTextHelpFormatter) + parser.add_argument("message", type=str, help="The message to send to the flow") + parser.add_argument("--endpoint", type=str, default=ENDPOINT or FLOW_ID, help="The ID or the endpoint name of the flow") + parser.add_argument("--tweaks", type=str, help="JSON string representing the tweaks to customize the flow", default=json.dumps(TWEAKS)) + parser.add_argument("--api_key", type=str, help="API key for authentication", default=None) + parser.add_argument("--output_type", type=str, default="chat", help="The output type") + parser.add_argument("--input_type", type=str, default="chat", help="The input type") + parser.add_argument("--upload_file", type=str, help="Path to the file to upload", default=None) + parser.add_argument("--components", type=str, help="Components to upload the file to", default=None) + + args = parser.parse_args() + try: + tweaks = json.loads(args.tweaks) + except json.JSONDecodeError: + raise ValueError("Invalid tweaks JSON string") + + if args.upload_file: + if not upload_file: + raise ImportError("Langflow is not installed. 
Please install it to use the upload_file function.") + elif not args.components: + raise ValueError("You need to provide the components to upload the file to.") + tweaks = upload_file(file_path=args.upload_file, host=BASE_API_URL, flow_id=args.endpoint, components=[args.components], tweaks=tweaks) + + response = run_flow( + message=args.message, + endpoint=args.endpoint, + output_type=args.output_type, + input_type=args.input_type, + tweaks=tweaks, + api_key=args.api_key + ) + + print(json.dumps(response, indent=2)) + +if __name__ == "__main__": + main() ``` - -### Use the query parameter {#febb797f3bb5403b9f070afc0fa4f453} - - -Include the API key as a query parameter in the URL: - +To pass the API key to your script with a command line argument, do the following: ```shell -curl -X POST \\ - ?x-api-key= \\ - -H 'Content-Type: application/json'\\ - -d '{"inputs": {"text":""}, "tweaks": {}}' - -``` - - -With Python using `requests`: - - -```python -import requests - -BASE_API_URL = "" -FLOW_ID = "4441b773-0724-434e-9cee-19d995d8f2df" -# You can tweak the flow by adding a tweaks dictionary -# e.g {"OpenAI-XXXXX": {"model_name": "gpt-4"}} -TWEAKS = {} - -def run_flow(inputs: dict, - flow_id: str, - tweaks: Optional[dict] = None, - apiKey: Optional[str] = None) -> dict: - """ - Run a flow with a given message and optional tweaks. - - :param message: The message to send to the flow - :param flow_id: The ID of the flow to run - :param tweaks: Optional tweaks to customize the flow - :return: The JSON response from the flow - """ - api_url = f"{BASE_API_URL}/{flow_id}" - - payload = {"inputs": inputs} - headers = {} - - if tweaks: - payload["tweaks"] = tweaks - if apiKey: - api_url += f"?x-api-key={apiKey}" - - response = requests.post(api_url, json=payload, headers=headers) - return response.json() - -# Setup any tweaks you want to apply to the flow -inputs = {"text":""} -api_key = "" -print(run_flow(inputs, flow_id=FLOW_ID, tweaks=TWEAKS, apiKey=api_key)) - +python your_script.py "*`YOUR_INPUT_MESSAGE`*" --api_key "*`YOUR_API_KEY`*" ``` - -## Security Considerations {#1273eb69a61344d19827b30dba46dfd5} +## Security considerations - **Visibility**: For security reasons, the API key cannot be retrieved again through the UI. - **Scope**: The key allows access only to the flows and components of the specific user to whom it was issued. -## Custom API endpoint {#da933a86690a4fdeac24024472caf8a9} - +## Custom API endpoint -Under **Project Settings** > **Endpoint Name**, you can pick a custom name for the endpoint used to call your flow from the API. +To choose a custom name for your API endpoint, select **Project Settings** > **Endpoint Name** and name your endpoint. +## Revoke an API key -## Revoke an API Key {#f0ea41ea167845cea91bb5e8f90d9df0} +To revoke an API key, delete it from the the list of keys in the **Settings** menu. +1. Click your user icon and select **Settings**. +2. Click **Langflow API**. +3. Select the keys you want to delete and click the trash can icon. -To revoke an API key, delete it from the UI. This action immediately invalidates the key and prevents it from being used again. +This action immediately invalidates the key and prevents it from being used again. 
diff --git a/docs/docs/Configuration/configuration-authentication.md b/docs/docs/Configuration/configuration-authentication.md index 77e6a5c18f10..622ace740713 100644 --- a/docs/docs/Configuration/configuration-authentication.md +++ b/docs/docs/Configuration/configuration-authentication.md @@ -4,149 +4,117 @@ sidebar_position: 0 slug: /configuration-authentication --- +The login functionality in Langflow serves to authenticate users and protect sensitive routes in the application. +Starting from version 0.5, Langflow introduces an enhanced login mechanism that is governed by a few environment variables. This allows new secure features. -:::info +## Create a superuser and new users in Langflow -This page may contain outdated information. It will be updated as soon as possible. +Learn how to create a new superuser, log in to Langflow, and add new users. -::: - - - - -## Sign Up and Sign In {#f480dac5d2094d75a433de0b8e195641} - - ---- +1. Create a `.env` file and open it in your preferred editor. +2. Add the following environment variables to your file. -The login functionality in Langflow serves to authenticate users and protect sensitive routes in the application. Starting from version 0.5, Langflow introduces an enhanced login mechanism that is governed by a few environment variables. This allows new secure features. - - -## Environment Variables {#3ed7cae6f5324ba0ac14783cf2a6cc07} - - -The following environment variables are crucial in configuring the login settings: - -- _`LANGFLOW_AUTO_LOGIN`_: Determines whether Langflow should automatically log users in. Default is `True`. -- _`LANGFLOW_SUPERUSER`_: The username of the superuser. -- _`LANGFLOW_SUPERUSER_PASSWORD`_: The password for the superuser. -- _`LANGFLOW_SECRET_KEY`_: A key used for encrypting the superuser's password. -- _`LANGFLOW_NEW_USER_IS_ACTIVE`_: Determines whether new users are automatically activated. Default is `False`. +```bash +LANGFLOW_AUTO_LOGIN=False +LANGFLOW_SUPERUSER=admin +LANGFLOW_SUPERUSER_PASSWORD=securepassword +LANGFLOW_SECRET_KEY=randomly_generated_secure_key +LANGFLOW_NEW_USER_IS_ACTIVE=False +``` -All of these variables can be passed to the CLI command _`langflow run`_ through the _`--env-file`_ option. For example: +For more information, see [Authentication configuration values](#values). +:::tip +The Langflow project includes a [`.env.example`](https://github.com/langflow-ai/langflow/blob/main/.env.example) file to help you get started. +You can copy the contents of this file into your own `.env` file and replace the example values with your own preferred settings. +::: -```shell -langflow run --env-file .env +3. Save your `.env` file. +4. Run Langflow with the configured environment variables. +```bash +python -m langflow run --env-file .env ``` +5. Sign in with your username `admin` and password `securepassword`. +6. To open the **Admin Page**, click your user profile image, and then select **Admin Page**. +You can also go to `http://127.0.0.1:7861/admin`. +7. To add a new user, click **New User**, and then add the **Username** and **Password**. +8. To activate the new user, select **Active**. +The user can only sign in if you select them as **Active**. +9. To give the user `superuser` priveleges, click **Superuser**. +10. Click **Save**. +11. To confirm your new user has been created, sign out of Langflow, and then sign back in using your new **Username** and **Password**. 
-:::caution +## Manage Superuser with the Langflow CLI - It is critical not to expose these environment variables in your code repository. Always set them securely in your deployment environment, for example, using Docker secrets, Kubernetes ConfigMaps/Secrets, or dedicated secure environment configuration systems like AWS Secrets Manager. +Langflow provides a command-line utility for interactively creating superusers: +1. Enter the CLI command: -::: +```bash +langflow superuser +``` +2. Langflow prompts you for a **Username** and **Password**: +``` +langflow superuser +Username: new_superuser_1 +Password: +Default folder created successfully. +Superuser created successfully. +``` +3. To confirm your new superuser was created successfully, go to the **Admin Page** at `http://127.0.0.1:7861/admin`. +## Authentication configuration values {#values} -### _`LANGFLOW_AUTO_LOGIN`_ {#8b10059e0fbc44f3bc8ce63fe7692e7e} +The following table lists the available authentication configuration variables, their descriptions, and default values: +| Variable | Description | Default | +|----------|-------------|---------| +| `LANGFLOW_AUTO_LOGIN` | Enables automatic login | `True` | +| `LANGFLOW_SUPERUSER` | Superuser username | - | +| `LANGFLOW_SUPERUSER_PASSWORD` | Superuser password | - | +| `LANGFLOW_SECRET_KEY` | Key for encrypting superuser password | - | +| `LANGFLOW_NEW_USER_IS_ACTIVE` | Automatically activates new users | `False` | -By default, this variable is set to `True`. When enabled (`True`), Langflow operates as it did in versions prior to 0.5—automatic login without requiring explicit user authentication. +### LANGFLOW_AUTO_LOGIN +By default, this variable is set to `True`. When enabled, Langflow operates as it did in versions prior to 0.5, including automatic login without requiring explicit user authentication. To disable automatic login and enforce user authentication: - ```shell export LANGFLOW_AUTO_LOGIN=False ``` +### LANGFLOW_SUPERUSER and LANGFLOW_SUPERUSER_PASSWORD -### _`LANGFLOW_SUPERUSER`_ and _`LANGFLOW_SUPERUSER_PASSWORD`_ {#a61a651a0fc7443a82cec93c07a14503} - - -These environment variables are only relevant when `LANGFLOW_AUTO_LOGIN` is set to `False`. They specify the username and password for the superuser, which is essential for administrative tasks. - - +These environment variables are only relevant when LANGFLOW_AUTO_LOGIN is set to False. They specify the username and password for the superuser, which is essential for administrative tasks. To create a superuser manually: - -```shell +```bash export LANGFLOW_SUPERUSER=admin export LANGFLOW_SUPERUSER_PASSWORD=securepassword ``` - -You can also use the CLI command `langflow superuser` to set up a superuser interactively. - - -### _`LANGFLOW_SECRET_KEY`_ {#977aea34e6174c58bd76107990d62a1f} - +### LANGFLOW_SECRET_KEY This environment variable holds a secret key used for encrypting the superuser's password. Make sure to set this to a secure, randomly generated string. - -```shell +```bash export LANGFLOW_SECRET_KEY=randomly_generated_secure_key - -``` - - -### _`LANGFLOW_NEW_USER_IS_ACTIVE`_ {#c8f5df9283be4e20be51e14518f5272e} - - -By default, this variable is set to `False`. When enabled (`True`), new users are automatically activated and can log in without requiring explicit activation by the superuser. 
- - -## Manage superusers with the CLI {#3b0c36a5cc0f4acc95c884d3de858d46} - - -Langflow provides a command-line utility for managing superusers: - - -```shell -langflow superuser ``` +### LANGFLOW_NEW_USER_IS_ACTIVE -This command prompts you to enter the username and password for the superuser, unless they are already set using environment variables. - - -## Sign in {#736ebb8c854b4c268d5e748c119a08ea} - - -With _`LANGFLOW_AUTO_LOGIN`_ set to _`False`_, Langflow requires users to sign up before they can log in. The sign-up page is the default landing page when a user visits Langflow for the first time. - - -![](./1009571828.png) - - -## Profile settings {#dd5926e12471448d99bd6849d2149dc8} - - -Once signed in, you can change your profile settings by clicking on the profile icon in the top right corner of the Langflow dashboard. This opens a dropdown menu with the following options: - -- **Admin Page**: Opens the admin page, which is only accessible to the superuser. -- **Profile Settings**: Opens the profile settings page. -- **Sign Out**: Logs the user out. - - ![](./563306242.png) - - -Select **Profile Settings** to change your password and your profile picture. - +By default, this variable is set to `False`. When enabled, new users are automatically activated and can log in without requiring explicit activation by the superuser. -![](./1813063533.png) -Select **Admin Page** to manage users and groups as the superuser. -![](./383358552.png) diff --git a/docs/docs/Configuration/configuration-backend-only.md b/docs/docs/Configuration/configuration-backend-only.md index 7e6d906ae265..49aa54e9f93a 100644 --- a/docs/docs/Configuration/configuration-backend-only.md +++ b/docs/docs/Configuration/configuration-backend-only.md @@ -1,154 +1,123 @@ --- -title: Backend-Only +title: Run Langflow in backend-only mode sidebar_position: 4 slug: /configuration-backend-only --- +Langflow can run in `--backend-only` mode to expose a Langflow app as an API endpoint, without running the frontend UI. +This is also known as "headless" mode. Running Langflow without the frontend is useful for automation, testing, and situations where you just need to serve a flow as a workload without creating a new flow in the UI. +To run Langflow in backend-only mode, pass the `--backend-only` flag at startup. -:::info - -This page may contain outdated information. It will be updated as soon as possible. - -::: - - - - -You can run Langflow in `--backend-only` mode to expose your Langflow app as an API, without running the frontend UI. - +```python +python3 -m langflow run --backend-only +``` -Start langflow in backend-only mode with `python3 -m langflow run --backend-only`. +The terminal prints `Welcome to ⛓ Langflow`, and Langflow will now serve requests to its API without the frontend running. +## Set up a basic prompting flow in backend-only mode -The terminal prints `Welcome to ⛓ Langflow`, and a blank window opens at `http://127.0.0.1:7864/all`. -Langflow will now serve requests to its API without the frontend running. +This example shows you how to set up a [Basic Prompting flow](/starter-projects-basic-prompting) as an endpoint in backend-only mode. +However, you can use these same instructions as guidelines for using any type of flow in backend-only mode. 
+### Prerequisites -## Prerequisites {#81dfa9407ed648889081b9d08b0e5cfe} +- [Langflow is installed](/getting-started-installation) +- [You have an OpenAI API key](https://platform.openai.com/) +- [You have a Langflow Basic Prompting flow](/starter-projects-basic-prompting) -- [Langflow installed](/getting-started-installation) -- [OpenAI API key](https://platform.openai.com/) -- [A Langflow flow created](/starter-projects-basic-prompting) +### Get your flow's ID -## Download your flow's curl call {#d2cf1b694e4741eca07fd9806516007b} +This guide assumes you have created a [Basic Prompting flow](/starter-projects-basic-prompting) or have another working flow available. -1. Click API. -2. Click **curl** > **Copy code** and save the code to your local machine. +1. In the Langflow UI, click **API**. +2. Click **curl** > **Copy code** to copy the curl command. +This command will POST input to your flow's endpoint. It will look something like this: ```text -curl -X POST \\ - "" \\ - -H 'Content-Type: application/json'\\ +curl -X POST \ + "http://127.0.0.1:7861/api/v1/run/fff8dcaa-f0f6-4136-9df0-b7cb38de42e0?stream=false" \ + -H 'Content-Type: application/json'\ -d '{"input_value": "message", "output_type": "chat", "input_type": "chat", "tweaks": { - "Prompt-kvo86": {}, - "OpenAIModel-MilkD": {}, - "ChatOutput-ktwdw": {}, - "ChatInput-xXC4F": {} + "ChatInput-8a86T": {}, + "Prompt-pKfl9": {}, + "ChatOutput-WcGpD": {}, + "OpenAIModel-5UyvQ": {} }}' - ``` +The flow ID in this example is `fff8dcaa-f0f6-4136-9df0-b7cb38de42e0`, a UUID generated by Langflow and used in the endpoint URL. +See [API](/configuration-api-keys) to change the endpoint. -Note the flow ID of `ef7e0554-69e5-4e3e-ab29-ee83bcd8d9ef`. You can find this ID in the UI as well to ensure you're querying the right flow. +3. To stop Langflow, press **Ctrl+C**. +### Start Langflow in backend-only mode -## Start Langflow in backend-only mode {#f0ba018daf3041c39c0d226dadf78d35} +1. Start Langflow in backend-only mode. -1. Stop Langflow with Ctrl+C. -2. Start langflow in backend-only mode with `python3 -m langflow run --backend-only`. -The terminal prints `Welcome to ⛓ Langflow`, and a blank window opens at `http://127.0.0.1:7864/all`. -Langflow will now serve requests to its API. -3. Run the curl code you copied from the UI. +```python +python3 -m langflow run --backend-only +``` + +The terminal prints `Welcome to ⛓ Langflow`. +Langflow is now serving requests to its API. + +2. Run the curl code you copied from the UI. You should get a result like this: ```shell {"session_id":"ef7e0554-69e5-4e3e-ab29-ee83bcd8d9ef:bf81d898868ac87e1b4edbd96c131c5dee801ea2971122cc91352d144a45b880","outputs":[{"inputs":{"input_value":"hi, are you there?"},"outputs":[{"results":{"result":"Arrr, ahoy matey! Aye, I be here. What be ye needin', me hearty?"},"artifacts":{"message":"Arrr, ahoy matey! Aye, I be here. What be ye needin', me hearty?","sender":"Machine","sender_name":"AI"},"messages":[{"message":"Arrr, ahoy matey! Aye, I be here. What be ye needin', me hearty?","sender":"Machine","sender_name":"AI","component_id":"ChatOutput-ktwdw"}],"component_display_name":"Chat Output","component_id":"ChatOutput-ktwdw","used_frozen_result":false}]}]}% - ``` +This confirms Langflow is receiving your POST request, running the flow, and returning the result without running the frontend. -Again, note that the flow ID matches. -Langflow is receiving your POST request, running the flow, and returning the result, all without running the frontend. Cool! 
+You can interact with this endpoint using the other options in the **API** menu, including the Python and Javascript APIs. +### Query the Langflow endpoint with a Python script -## Download your flow's Python API call {#5923ff9dc40843c7a22a72fa6c66540c} +Using the same flow ID, run a Python sample script to send a query and get a prettified JSON response back. - -Instead of using curl, you can download your flow as a Python API call instead. - -1. Click API. -2. Click **Python API** > **Copy code** and save the code to your local machine. -The code will look something like this: +1. Create a Python file and name it `langflow_api_demo.py`. ```python import requests -from typing import Optional - -BASE_API_URL = "" -FLOW_ID = "ef7e0554-69e5-4e3e-ab29-ee83bcd8d9ef" -# You can tweak the flow by adding a tweaks dictionary -# e.g {"OpenAI-XXXXX": {"model_name": "gpt-4"}} - -def run_flow(message: str, - flow_id: str, - output_type: str = "chat", - input_type: str = "chat", - tweaks: Optional[dict] = None, - api_key: Optional[str] = None) -> dict: - """Run a flow with a given message and optional tweaks. - - :param message: The message to send to the flow - :param flow_id: The ID of the flow to run - :param tweaks: Optional tweaks to customize the flow - :return: The JSON response from the flow - """ - api_url = f"{BASE_API_URL}/{flow_id}" - payload = { - "input_value": message, - "output_type": output_type, - "input_type": input_type, - } - headers = None - if tweaks: - payload["tweaks"] = tweaks - if api_key: - headers = {"x-api-key": api_key} - response = requests.post(api_url, json=payload, headers=headers) - return response.json() - - # Setup any tweaks you want to apply to the flow - - message = "message" - - print(run_flow(message=message, flow_id=FLOW_ID)) +import json -``` +def query_langflow(message): + url = "http://127.0.0.1:7861/api/v1/run/fff8dcaa-f0f6-4136-9df0-b7cb38de42e0" + headers = {"Content-Type": "application/json"} + data = {"input_value": message} + response = requests.post(url, headers=headers, json=data) + return response.json() -3. Run your Python app: +user_input = input("Enter your message: ") +result = query_langflow(user_input) - -```shell -python3 app.py +print(json.dumps(result, indent=2)) ``` +2. Run the script. +```python +python langflow_api_demo.py +``` -The result is similar to the curl call: +3. Enter your message when prompted. +You will get a prettified JSON response back containing a response to your message. +### Configure host and ports in backend-only mode -```json -{'session_id': 'ef7e0554-69e5-4e3e-ab29-ee83bcd8d9ef:bf81d898868ac87e1b4edbd96c131c5dee801ea2971122cc91352d144a45b880', 'outputs': [{'inputs': {'input_value': 'message'}, 'outputs': [{'results': {'result': "Arrr matey! What be yer message for this ol' pirate? Speak up or walk the plank!"}, 'artifacts': {'message': "Arrr matey! What be yer message for this ol' pirate? Speak up or walk the plank!", 'sender': 'Machine', 'sender_name': 'AI'}, 'messages': [{'message': "Arrr matey! What be yer message for this ol' pirate? Speak up or walk the plank!", 'sender': 'Machine', 'sender_name': 'AI', 'component_id': 'ChatOutput-ktwdw'}], 'component_display_name': 'Chat Output', 'component_id': 'ChatOutput-ktwdw', 'used_frozen_result': False}]}]} +To change the host and port, pass the values as additional flags. +```python +python -m langflow run --host 127.0.0.1 --port 7860 --backend-only ``` -Your Python app POSTs to your Langflow server, and the server runs the flow and returns the result. 
-See [API](https://www.notion.so/administration/api) for more ways to interact with your headless Langflow server. diff --git a/docs/docs/Configuration/configuration-cli.md b/docs/docs/Configuration/configuration-cli.md index 770ff17fb228..205604103d04 100644 --- a/docs/docs/Configuration/configuration-cli.md +++ b/docs/docs/Configuration/configuration-cli.md @@ -1,168 +1,170 @@ --- -title: Command Line Interface (CLI) +title: Langflow CLI sidebar_position: 2 slug: /configuration-cli --- -:::info +# Langflow CLI -This page may contain outdated information. It will be updated as soon as possible. +The Langflow command line interface (Langflow CLI) is the main interface for managing and running the Langflow server. -::: - -Langflow's Command Line Interface (CLI) is a powerful tool that allows you to interact with the Langflow server from the command line. The CLI provides a wide range of commands to help you shape Langflow to your needs. +## CLI commands -The available commands are below. Navigate to their individual sections of this page to see the parameters. +The following sections describe the available CLI commands and their options, as well as their corresponding [environment variables](./environment-variables.md). -- [langflow](/configuration-cli) -- [langflow api-key](/configuration-cli) -- [langflow copy-db](/configuration-cli) -- [langflow migration](/configuration-cli) -- [langflow run](/configuration-cli) -- [langflow superuser](/configuration-cli) - -## Overview {#c50e5530289349cf8ed7bee22ba2211a} +### langflow Running the CLI without any arguments displays a list of available options and commands. -```shell -langflow -# or -langflow --help +```bash +langflow [OPTIONS] # or -python -m langflow - +python -m langflow [OPTIONS] ``` -| Command | Description | -| ----------- | ---------------------------------------------------------------------- | -| `api-key` | Creates an API key for the default superuser if AUTO_LOGIN is enabled. | -| `copy-db` | Copy the database files to the current directory (`which langflow`). | -| `migration` | Run or test migrations. | -| `run` | Run the Langflow. | -| `superuser` | Create a superuser. | - -### Options {#8a3b5b7ed55b4774ad6d533bb337ef47} +#### Options -| Option | Description | -| ---------------------- | -------------------------------------------------------------------------------- | -| `--install-completion` | Install completion for the current shell. | -| `--show-completion` | Show completion for the current shell, to copy it or customize the installation. | -| `--help` | Show this message and exit. | +| Option | Default | Values | Description | +|--------|------|-----------|-------------| +| `--install-completion` | *Not applicable* | *Not applicable* | Install auto-completion for the current shell. | +| `--show-completion` | *Not applicable* | *Not applicable* | Show the location of the auto-completion config file (if installed). | +| `--help` | *Not applicable* | *Not applicable* | Display information about the command usage and its options and arguments. | -## langflow api-key {#dbfc8c4c83474b83a38bdc7471bccf41} +### langflow api-key -Run the `api-key` command to create an API key for the default superuser if `LANGFLOW_AUTO_LOGIN` is set to `True`. +Create an API key for the default superuser if the [`LANGFLOW_AUTO_LOGIN` environment variable] is set to `true`. 
-```shell -langflow api-key +```bash +langflow api-key [OPTIONS] # or -python -m langflow api-key -╭─────────────────────────────────────────────────────────────────────╮ -│ API Key Created Successfully: │ -│ │ -│ sk-O0elzoWID1izAH8RUKrnnvyyMwIzHi2Wk-uXWoNJ2Ro │ -│ │ -│ This is the only time the API key will be displayed. │ -│ Make sure to store it in a secure location. │ -│ │ -│ The API key has been copied to your clipboard. Cmd + V to paste it. │ -╰────────────────────────────── - +python -m langflow api-key [OPTIONS] ``` -### Options {#ec2ef993dc984811b25838c8d8230b31} +#### Options -| Option | Type | Description | -| ----------- | ---- | ------------------------------------------------------------- | -| --log-level | TEXT | Logging level. [env var: LANGFLOW_LOG_LEVEL] [default: error] | -| --help | | Show this message and exit. | +| Option | Default | Values | Description | +|--------|---------|--------|-------------| +| `--log-level` | `critical` | `debug`
`info`
`warning`
`error`
`critical` | Set the logging level. | +| `--help` | *Not applicable* | *Not applicable* | Display information about the command usage and its options and arguments. | -## langflow copy-db {#729a13f4847545e5973d8f9c20f8833d} +### langflow copy-db -Run the `copy-db` command to copy the cached `langflow.db` and `langflow-pre.db` database files to the current directory. +Copy the database files to the current directory. +Copy the Langflow database files, `langflow.db` and `langflow-pre.db` (if they exist), from the cache directory to the current directory. -If the files exist in the cache directory, they will be copied to the same directory as `__main__.py`, which can be found with `which langflow`. +:::note +The current directory is the directory containing `__main__.py`. +You can find this directory by running `which langflow`. +::: + +```bash +langflow copy-db +# or +python -m langflow copy-db +``` -### Options {#7b7e6bd02b3243218e1d666711854673} +#### Options -None. +| Option | Default | Values | Description | +|--------|---------|--------|-------------| +| `--help` | *Not applicable* | *Not applicable* | Display information about the command usage and its options and arguments. | -## langflow migration {#7027c1925a444119a7a8ea2bff4bd16d} +### langflow migration -Run or test migrations with the Alembic database tool. +Run or test database migrations. -```shell -langflow migration +```bash +langflow migration [OPTIONS] # or -python -m langflow migration - +python -m langflow migration [OPTIONS] ``` -### Options {#0b38fbe97bb34edeb7740a7db58433e9} +#### Options -| Option | Description | -| ------------------- | -------------------------------------------------------------------------------------------------------------------------- | -| `--test, --no-test` | Run migrations in test mode. [default: test] | -| `--fix, --no-fix` | Fix migrations. This is a destructive operation, and should only be used if you know what you are doing. [default: no-fix] | -| `--help` | Show this message and exit. | +| Option | Default | Values | Description | +|--------|---------|--------|-------------| +| `--test` | `true` | [Boolean](#boolean) | Run migrations in test mode. Use `--no-test` to disable test mode. | +| `--fix` | `false` (`--no-fix`) | [Boolean](#boolean) | Fix migrations. This is a destructive operation, and all affected data will be deleted. Only use this option if you know what you are doing. | +| `--help` | *Not applicable* | *Not applicable* | Display information about the command usage and its options and arguments. | -## langflow run {#fe050aa659cb4d33a560b859d54c94ea} -Run Langflow. +### langflow run -```shell -langflow run -# or -python -m langflow run +Start the Langflow server. +```bash +langflow run [OPTIONS] +# or +python -m langflow run [OPTIONS] ``` -### Options {#4e811481ec9142f1b60309bb1ce5a2ce} - -| Option | Description | -| ---------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `--help` | Displays all available options. | -| `--host` | Defines the host to bind the server to. Can be set using the `LANGFLOW_HOST` environment variable. The default is `127.0.0.1`. | -| `--workers` | Sets the number of worker processes. Can be set using the `LANGFLOW_WORKERS` environment variable. The default is `1`. | -| `--timeout` | Sets the worker timeout in seconds. The default is `60`. 
| -| `--port` | Sets the port to listen on. Can be set using the `LANGFLOW_PORT` environment variable. The default is `7860`. | -| `--env-file` | Specifies the path to the .env file containing environment variables. The default is `.env`. | -| `--log-level` | Defines the logging level. Can be set using the `LANGFLOW_LOG_LEVEL` environment variable. The default is `critical`. | -| `--components-path` | Specifies the path to the directory containing custom components. Can be set using the `LANGFLOW_COMPONENTS_PATH` environment variable. The default is `langflow/components`. | -| `--log-file` | Specifies the path to the log file. Can be set using the `LANGFLOW_LOG_FILE` environment variable. The default is `logs/langflow.log`. | -| `--cache` | Select the type of cache to use. Options are `InMemoryCache` and `SQLiteCache`. Can be set using the `LANGFLOW_LANGCHAIN_CACHE` environment variable. The default is `SQLiteCache`. | -| `--dev`/`--no-dev` | Toggles the development mode. The default is `no-dev`. | -| `--path` | Specifies the path to the frontend directory containing build files. This option is for development purposes only. Can be set using the `LANGFLOW_FRONTEND_PATH` environment variable. | -| `--open-browser`/`--no-open-browser` | Toggles the option to open the browser after starting the server. Can be set using the `LANGFLOW_OPEN_BROWSER` environment variable. The default is `open-browser`. | -| `--remove-api-keys`/`--no-remove-api-keys` | Toggles the option to remove API keys from the projects saved in the database. Can be set using the `LANGFLOW_REMOVE_API_KEYS` environment variable. The default is `no-remove-api-keys`. | -| `--install-completion [bash\|zsh\|fish\|powershell\|pwsh]` | Installs completion for the specified shell. | -| `--show-completion [bash\|zsh\|fish\|powershell\|pwsh]` | Shows completion for the specified shell, allowing you to copy it or customize the installation. | -| `--backend-only` | This parameter, with a default value of `False`, allows running only the backend server without the frontend. It can also be set using the `LANGFLOW_BACKEND_ONLY` environment variable. | -| `--store` | This parameter, with a default value of `True`, enables the store features, use `--no-store` to deactivate it. It can be configured using the `LANGFLOW_STORE` environment variable. | -| `--auto-saving` | This parameter, with a default value of `True`, enables the auto-saving functionality, use `--no-auto-saving` to deactivate it. It can be configured using the `LANGFLOW_AUTO_SAVING` environment variable. | - -### CLI environment variables {#5868aaccfcc74e26968538ef4d07e756} - -You can configure many of the CLI options using environment variables. These can be exported in your operating system or added to a `.env` file and loaded using the `--env-file` option. - -A sample `.env` file named `.env.example` is included with the project. Copy this file to a new file named `.env` and replace the example values with your actual settings. If you're setting values in both your OS and the `.env` file, the `.env` settings will take precedence. - -## langflow superuser {#5944233ce0c942878e928e1f2945d717} - -Create a superuser for Langflow. - -```shell -langflow superuser +#### Options + +| Option | Default | Values | Description | +|--------|---------|--------|-------------| +| `--host` | `127.0.0.1` | String | The host on which the Langflow server will run.
See [`LANGFLOW_HOST` variable](./environment-variables.md#LANGFLOW_HOST). | +| `--workers` | `1` | Integer | Number of worker processes.
See [`LANGFLOW_WORKERS` variable](./environment-variables.md#LANGFLOW_WORKERS). | +| `--worker-timeout` | `300` | Integer | Worker timeout in seconds.
See [`LANGFLOW_WORKER_TIMEOUT` variable](./environment-variables.md#LANGFLOW_WORKER_TIMEOUT). | +| `--port` | `7860` | Integer | The port on which the Langflow server will run. The server automatically selects a free port if the specified port is in use.
See [`LANGFLOW_PORT` variable](./environment-variables.md#LANGFLOW_PORT). | +| `--components-path` | `langflow/components` | String | Path to the directory containing custom components.
See [`LANGFLOW_COMPONENTS_PATH` variable](./environment-variables.md#LANGFLOW_COMPONENTS_PATH). | +| `--env-file` | Not set | String | Path to the `.env` file containing environment variables.
See [Import environment variables from a .env file](./environment-variables.md#configure-variables-env-file). | +| `--log-level` | `critical` | `debug`
`info`
`warning`
`error`
`critical` | Set the logging level.
See [`LANGFLOW_LOG_LEVEL` variable](./environment-variables.md#LANGFLOW_LOG_LEVEL). | +| `--log-file` | `logs/langflow.log` | String | Set the path to the log file for Langflow.
See [`LANGFLOW_LOG_FILE` variable](./environment-variables.md#LANGFLOW_LOG_FILE). | +| `--cache` | `InMemoryCache` | `InMemoryCache`
`SQLiteCache` | Type of cache to use.
See [`LANGFLOW_LANGCHAIN_CACHE` variable](./environment-variables.md#LANGFLOW_LANGCHAIN_CACHE). | +| `--dev` | `false` (`--no-dev`) | [Boolean](#boolean) | Run Langflow in development mode (may contain bugs).
See [`LANGFLOW_DEV` variable](./environment-variables.md#LANGFLOW_DEV). | +| `--frontend-path` | `./frontend` | String | Path to the frontend directory containing build files. This is for development purposes only.
See [`LANGFLOW_FRONTEND_PATH` variable](./environment-variables.md#LANGFLOW_FRONTEND_PATH). | +| `--open-browser` | `true` | [Boolean](#boolean) | Open the system web browser on startup. Use `--no-open-browser` to disable opening the system web browser on startup.
See [`LANGFLOW_OPEN_BROWSER` variable](./environment-variables.md#LANGFLOW_OPEN_BROWSER). | +| `--remove-api-keys` | `false` (`--no-remove-api-keys`) | [Boolean](#boolean) | Remove API keys from the projects saved in the database.
See [`LANGFLOW_REMOVE_API_KEYS` variable](./environment-variables.md#LANGFLOW_REMOVE_API_KEYS). | +| `--backend-only` | `false` (`--no-backend-only`) | [Boolean](#boolean) | Only run Langflow's backend server (no frontend).
See [`LANGFLOW_BACKEND_ONLY` variable](./environment-variables.md#LANGFLOW_BACKEND_ONLY). | +| `--store` | `true` | [Boolean](#boolean) | Enable the Langflow Store features. Use `--no-store` to disable the Langflow Store features.
See [`LANGFLOW_STORE` variable](./environment-variables.md#LANGFLOW_STORE). | +| `--auto-saving` | `true` | [Boolean](#boolean) | Enable flow auto-saving. Use `--no-auto-saving` to disable flow auto-saving.
See [`LANGFLOW_AUTO_SAVING` variable](./environment-variables.md#LANGFLOW_AUTO_SAVING). | +| `--auto-saving-interval` | `1000` | Integer | Set the interval for flow auto-saving in milliseconds.
See [`LANGFLOW_AUTO_SAVING_INTERVAL` variable](./environment-variables.md#LANGFLOW_AUTO_SAVING_INTERVAL). | +| `--health-check-max-retries` | `5` | Integer | Set the maximum number of retries for the health check. Use `--no-health-check-max-retries` to disable the maximum number of retries for the health check.
See [`LANGFLOW_HEALTH_CHECK_MAX_RETRIES` variable](./environment-variables.md#LANGFLOW_HEALTH_CHECK_MAX_RETRIES). | +| `--max-file-size-upload` | `100` | Integer | Set the maximum file size for the upload in megabytes.
See [`LANGFLOW_MAX_FILE_SIZE_UPLOAD` variable](./environment-variables.md#LANGFLOW_MAX_FILE_SIZE_UPLOAD). | +| `--help` | *Not applicable* | *Not applicable* | Display information about the command usage and its options and arguments. | + +### langflow superuser + +Create a superuser account. + +```bash +langflow superuser [OPTIONS] # or -python -m langflow superuser +python -m langflow superuser [OPTIONS] ``` -### Options {#f333c5635ead4c3d95985467bb08cc8f} +#### Options + +| Option | Default | Values | Description | +|--------|---------|--------|-------------| +| `--username` | Required | String | Specify the name for the superuser.
See [`LANGFLOW_SUPERUSER` variable](./environment-variables.md#LANGFLOW_SUPERUSER). | +| `--password` | Required | String | Specify the password for the superuser.
See [`LANGFLOW_SUPERUSER_PASSWORD` variable](./environment-variables.md#LANGFLOW_SUPERUSER_PASSWORD). | +| `--log-level` | `critical` | `debug`
`info`
`warning`
`error`
`critical` | Set the logging level. | + +## Precedence + +Langflow CLI options override the values of corresponding [environment variables](./environment-variables.md). + +For example, if you have `LANGFLOW_PORT=7860` defined as an environment variable, but you run the CLI with `--port 7880`, then Langflow will set the port to **`7880`** (the value passed with the CLI). + +## Assign values + +There are two ways you can assign a value to a CLI option. +You can write the option flag and its value with a single space between them: `--option value`. +Or, you can write them using an equals sign (`=`) between the option flag and the value: `--option=value`. + +Values that contain spaces must be surrounded by quotation marks: `--option 'Value with Spaces'` or `--option='Value with Spaces'`. + +### Boolean values {#boolean} + +Boolean options turn a behavior on or off, and therefore accept no arguments. +To activate a boolean option, type it on the command line. +For example: + +```bash +langflow run --remove-api-keys +``` -| Option | Type | Description | -| ------------- | ---- | ------------------------------------------------------------- | -| `--username` | TEXT | Username for the superuser. [default: None] [required] | -| `--password` | TEXT | Password for the superuser. [default: None] [required] | -| `--log-level` | TEXT | Logging level. [env var: LANGFLOW_LOG_LEVEL] [default: error] | -| `--help` | | Show this message and exit. | +All boolean options have a corresponding option that negates it. +For example, the negating option for `--remove-api-keys` is `--no-remove-api-keys`. +These options let you negate boolean options that you may have set using [environment variables](./environment-variables.md). diff --git a/docs/docs/Configuration/configuration-global-variables.md b/docs/docs/Configuration/configuration-global-variables.md new file mode 100644 index 000000000000..b29f26a03cc8 --- /dev/null +++ b/docs/docs/Configuration/configuration-global-variables.md @@ -0,0 +1,209 @@ +--- +title: Global variables +sidebar_position: 5 +slug: /configuration-global-variables +--- + +import ReactPlayer from "react-player"; +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; + +Global variables let you store and reuse generic input values and credentials across your projects. +You can use a global variable in any text input field that displays the 🌐 icon. + +Langflow stores global variables in its internal database, and encrypts the values using a secret key. + +## Create a global variable {#3543d5ef00eb453aa459b97ba85501e5} + +1. In the Langflow UI, click your profile icon, and then select **Settings**. + +2. Click **Global Variables**. + +3. Click **Add New**. + +4. In the **Create Variable** dialog, enter a name for your variable in the **Variable Name** field. + +5. Optional: Select a **Type** for your global variable. The available types are **Generic** (default) and **Credential**. + + No matter which **Type** you select, Langflow still encrypts the **Value** of the global variable. + +6. Enter the **Value** for your global variable. + +7. Optional: Use the **Apply To Fields** menu to select one or more fields that you want Langflow to automatically apply your global variable to. +For example, if you select **OpenAI API Key**, Langflow will automatically apply the variable to any **OpenAI API Key** field. + +8. Click **Save Variable**. + +You can now select your global variable from any text input field that displays the 🌐 icon. 
+ +:::info +Because values are encrypted, you can't view the actual values of your global variables. +In **Settings > Global Variables**, the **Value** column shows the encrypted hash for **Generic** type variables, and shows nothing for **Credential** type variables. +::: + + + +## Edit a global variable + +1. In the Langflow UI, click your profile icon, and then select **Settings**. + +2. Click **Global Variables**. + +3. Click on the global variable you want to edit. + +4. In the **Update Variable** dialog, you can edit the following fields: **Variable Name**, **Value**, and **Apply To Fields**. + +5. Click **Update Variable**. + +## Delete a global variable + +:::warning +Deleting a global variable permanently deletes any references to it from your existing projects. +::: + +1. In the Langflow UI, click your profile icon, and then select **Settings**. + +2. Click **Global Variables**. + +3. Click the checkbox next to the global variable that you want to delete. + +4. Click the Trash icon. + +The global variable, and any existing references to it, are deleted. + +## Add global variables from the environment {#76844a93dbbc4d1ba551ea1a4a89ccdd} + +You can use the `LANGFLOW_VARIABLES_TO_GET_FROM_ENVIRONMENT` environment variable to source global variables from your runtime environment. + + + + + +If you installed Langflow locally, you must define the `LANGFLOW_VARIABLES_TO_GET_FROM_ENVIRONMENT` environment variable in a `.env` file. + +1. Create a `.env` file and open it in your preferred editor. + +2. Add the `LANGFLOW_VARIABLES_TO_GET_FROM_ENVIRONMENT` environment variable as follows: + + ```plaintext title=".env" + LANGFLOW_VARIABLES_TO_GET_FROM_ENVIRONMENT=VARIABLE1,VARIABLE2 + ``` + + Replace `VARIABLE1,VARIABLE2` with a comma-separated list (no spaces) of variables that you want Langflow to source from the environment. + For example, `my_key,some_string`. + +3. Save and close the file. + +4. Start Langflow with the `.env` file: + + ```bash + VARIABLE1="VALUE1" VARIABLE2="VALUE2" python -m langflow run --env-file .env + ``` + + :::note + In this example, the environment variables (`VARIABLE1="VALUE1"` and `VARIABLE2="VALUE2"`) are prefixed to the startup command. + This is a rudimentary method for exposing environment variables to Python on the command line, and is meant for illustrative purposes. + Make sure to expose your environment variables to Langflow in a manner that best suits your own environment. + ::: + +5. Confirm that Langflow successfully sourced the global variables from the environment. + + 1. In the Langflow UI, click your profile icon, and then select **Settings**. + + 2. Click **Global Variables**. + + The environment variables appear in the list of **Global Variables**. + + + + + +If you're using Docker, you can pass `LANGFLOW_VARIABLES_TO_GET_FROM_ENVIRONMENT` directly from the command line or from a `.env` file. 
+ +To pass `LANGFLOW_VARIABLES_TO_GET_FROM_ENVIRONMENT` directly from the command line: + +```bash +docker run -it --rm \ + -p 7860:7860 \ + -e LANGFLOW_VARIABLES_TO_GET_FROM_ENVIRONMENT="VARIABLE1,VARIABLE2" \ + -e VARIABLE1="VALUE1" \ + -e VARIABLE2="VALUE2" \ + langflowai/langflow:latest +``` + +To pass `LANGFLOW_VARIABLES_TO_GET_FROM_ENVIRONMENT` from a `.env` file: + +```bash +docker run -it --rm \ + -p 7860:7860 \ + --env-file .env \ + -e VARIABLE1="VALUE1" \ + -e VARIABLE2="VALUE2" \ + langflowai/langflow:latest +``` + + + + + +:::info +When adding global variables from the environment, the following limitations apply: + +- You can only source the **Name** and **Value** from the environment. + To add additional parameters, such as the **Apply To Fields** parameter, you must edit the global variables in the Langflow UI. + +- Global variables that you add from the the environment always have the **Credential** type. +::: + +:::tip +If you want to explicitly prevent Langflow from sourcing global variables from the environment, set `LANGFLOW_STORE_ENVIRONMENT_VARIABLES` to `false` in your `.env` file: + +```plaintext title=".env" +LANGFLOW_STORE_ENVIRONMENT_VARIABLES=false +``` + +::: + + +## Precautions + +Even though Langflow stores global variables in its internal database, and encrypts the values using a secret key, you should consider taking extra precautions to ensure the database and secret key are protected. + +### Use a custom secret key + +By default, Langflow generates a random secret key. +However, you should provide your own secret key, as it's more secure to use a key that is already known to you. + +Use the `LANGFLOW_SECRET_KEY` environment variable to provide a custom value for the secret key when you start Langflow. + +### Protect the secret key + +Make sure to store the secret key in a secure location. + +By default, Langflow stores the secret key in its configuration directory. +The location of the configuration directory depends on your operating system: + +- macOS: `~/Library/Caches/langflow/secret_key` +- Linux: `~/.cache/langflow/secret_key` +- Windows: `%USERPROFILE%\AppData\Local\langflow\secret_key` + +To change the location of the the configuration directory, and thus the location of the secret key, set the `LANGFLOW_CONFIG_DIR` environment variable to your preferred storage directory. + +### Protect the database + +Make sure to store Langflow's internal database file in a secure location, and take regular backups to prevent accidental data loss. + +By default, Langflow stores the database file in its installation directory. +The location of the file depends on your operating system and installation method: + +- macOS: `PYTHON_LOCATION/site-packages/langflow/langflow.db` +- Linux: `PYTHON_LOCATION/site-packages/langflow/langflow.db` +- Windows: `PYTHON_LOCATION\Lib\site-packages\langflow\langflow.db` + +To change the location of the database file, follow these steps: + +1. Set the `LANGFLOW_SAVE_DB_IN_CONFIG_DIR` environment variable to `true`. +2. Set the `LANGFLOW_CONFIG_DIR` environment variable to your preferred storage directory. 
+ + + \ No newline at end of file diff --git a/docs/docs/Configuration/environment-variables.md b/docs/docs/Configuration/environment-variables.md new file mode 100644 index 000000000000..fa1d907b6a64 --- /dev/null +++ b/docs/docs/Configuration/environment-variables.md @@ -0,0 +1,144 @@ +--- +title: Environment variables +sidebar_position: 7 +slug: /environment-variables +--- + +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; + +Langflow lets you configure a number of settings using environment variables. + +## Configure environment variables + +Langflow recognizes [supported environment variables](#supported-variables) from the following sources: + +- Environment variables that you've set in your terminal. +- Environment variables that you've imported from a `.env` file using the `--env-file` option in the Langflow CLI. + +You can choose to use one source exclusively, or use both sources together. +If you choose to use both sources together, be aware that environment variables imported from a `.env` file take [precedence](#precedence) over those set in your terminal. + +### Set environment variables in your terminal {#configure-variables-terminal} + +Run the following commands to set environment variables for your current terminal session: + + + + +```bash +export VARIABLE_NAME='VALUE' +``` + + + +``` +set VARIABLE_NAME='VALUE' +``` + + + +```bash +docker run -it --rm \ + -p 7860:7860 \ + -e VARIABLE_NAME='VALUE' \ + langflowai/langflow:latest +``` + + + + +When you start Langflow, it looks for environment variables that you've set in your terminal. +If it detects a supported environment variable, then it automatically adopts the specified value, subject to [precedence rules](#precedence). + +### Import environment variables from a .env file {#configure-variables-env-file} + +1. Create a `.env` file and open it in your preferred editor. + +2. Add your environment variables to the file: + + ```plaintext title=".env" + VARIABLE_NAME='VALUE' + VARIABLE_NAME='VALUE' + ``` + + :::tip + The Langflow project includes a [`.env.example`](https://github.com/langflow-ai/langflow/blob/main/.env.example) file to help you get started. + You can copy the contents of this file into your own `.env` file and replace the example values with your own preferred settings. + ::: + +3. Save and close the file. + +4. Start Langflow using the `--env-file` option to define the path to your `.env` file: + + + + + ```bash + python -m langflow run --env-file .env + ``` + + + + ```bash + docker run -it --rm \ + -p 7860:7860 \ + --env-file .env \ + langflowai/langflow:latest + ``` + + + + +On startup, Langflow imports the environment variables from your `.env` file, as well as any that you [set in your terminal](#configure-variables-terminal), and adopts their specified values. + +## Precedence {#precedence} + +Environment variables [defined in the .env file](#configure-variables-env-file) take precedence over those [set in your terminal](#configure-variables-terminal). +That means, if you happen to set the same environment variable in both your terminal and your `.env` file, Langflow adopts the value from the the `.env` file. + +:::info[CLI precedence] +[Langflow CLI options](./configuration-cli.md) override the value of corresponding environment variables defined in the `.env` file as well as any environment variables set in your terminal. +::: + +## Supported environment variables {#supported-variables} + +The following table lists the environment variables supported by Langflow. 
+ +| Variable | Format / Values | Default | Description | +|----------|---------------|---------|-------------| +| `DO_NOT_TRACK` | Boolean | `false` | If enabled, Langflow will not track telemetry. | +| `LANGFLOW_AUTO_LOGIN` | Boolean | `true` | Enable automatic login for Langflow. Set to `false` to disable automatic login and require the login form to log into the Langflow UI. Setting to `false` requires [`LANGFLOW_SUPERUSER`](#LANGFLOW_SUPERUSER) and [`LANGFLOW_SUPERUSER_PASSWORD`](#LANGFLOW_SUPERUSER_PASSWORD) to be set. | +| `LANGFLOW_AUTO_SAVING` | Boolean | `true` | Enable flow auto-saving.
See [`--auto-saving` option](./configuration-cli.md#run-auto-saving). | +| `LANGFLOW_AUTO_SAVING_INTERVAL` | Integer | `1000` | Set the interval for flow auto-saving in milliseconds.
See [`--auto-saving-interval` option](./configuration-cli.md#run-auto-saving-interval). | +| `LANGFLOW_BACKEND_ONLY` | Boolean | `false` | Only run Langflow's backend server (no frontend).
See [`--backend-only` option](./configuration-cli.md#run-backend-only). | +| `LANGFLOW_CACHE_TYPE` | `async`
`redis`
`memory`
`disk`
`critical` | `async` | Set the cache type for Langflow.
If you set the type to `redis`, then you must also set the following environment variables: [`LANGFLOW_REDIS_HOST`](#LANGFLOW_REDIS_HOST), [`LANGFLOW_REDIS_PORT`](#LANGFLOW_REDIS_PORT), [`LANGFLOW_REDIS_DB`](#LANGFLOW_REDIS_DB), and [`LANGFLOW_REDIS_CACHE_EXPIRE`](#LANGFLOW_REDIS_CACHE_EXPIRE). | +| `LANGFLOW_COMPONENTS_PATH` | String | `langflow/components` | Path to the directory containing custom components.
See [`--components-path` option](./configuration-cli.md#run-components-path). | +| `LANGFLOW_CONFIG_DIR` | String | | Set the Langflow configuration directory where files, logs, and the Langflow database are stored. | +| `LANGFLOW_DATABASE_URL` | String | | Set the database URL for Langflow. If you don't provide one, Langflow uses an SQLite database. | +| `LANGFLOW_DEV` | Boolean | `false` | Run Langflow in development mode (may contain bugs).
See [`--dev` option](./configuration-cli.md#run-dev). | +| `LANGFLOW_FALLBACK_TO_ENV_VAR` | Boolean | `true` | If enabled, [global variables](../Configuration/configuration-global-variables.md) set in the Langflow UI fall back to an environment variable with the same name when Langflow fails to retrieve the variable value. | +| `LANGFLOW_FRONTEND_PATH` | String | `./frontend` | Path to the frontend directory containing build files. This is for development purposes only.
See [`--frontend-path` option](./configuration-cli.md#run-frontend-path). | +| `LANGFLOW_HEALTH_CHECK_MAX_RETRIES` | Integer | `5` | Set the maximum number of retries for the health check.
See [`--health-check-max-retries` option](./configuration-cli.md#run-health-check-max-retries). | +| `LANGFLOW_HOST` | String | `127.0.0.1` | The host on which the Langflow server will run.
See [`--host` option](./configuration-cli.md#run-host). | +| `LANGFLOW_LANGCHAIN_CACHE` | `InMemoryCache`
`SQLiteCache` | `InMemoryCache` | Type of cache to use.
See [`--cache` option](./configuration-cli.md#run-cache). | +| `LANGFLOW_LOG_FILE` | String | `logs/langflow.log` | Set the path to the log file for Langflow.
See [`--log-file` option](./configuration-cli.md#run-log-file). | +| `LANGFLOW_LOG_LEVEL` | `debug`
`info`
`warning`
`error`
`critical` | `critical` | Set the logging level.
See [`--log-level` option](./configuration-cli.md#run-log-level). | +| `LANGFLOW_MAX_FILE_SIZE_UPLOAD` | Integer | `100` | Set the maximum file size for the upload in megabytes.
See [`--max-file-size-upload` option](./configuration-cli.md#run-max-file-size-upload). | +| `LANGFLOW_OPEN_BROWSER` | Boolean | `true` | Open the system web browser on startup.
See [`--open-browser` option](./configuration-cli.md#run-open-browser). | +| `LANGFLOW_PORT` | Integer | `7860` | The port on which the Langflow server will run. The server automatically selects a free port if the specified port is in use.
See [`--port` option](./configuration-cli.md#run-port). | +| `LANGFLOW_PROMETHEUS_ENABLED` | Boolean | `false` | Expose Prometheus metrics. | +| `LANGFLOW_PROMETHEUS_PORT` | Integer | `9090` | Set the port on which Langflow exposes Prometheus metrics. | +| `LANGFLOW_REDIS_CACHE_EXPIRE` | Integer | `3600` | See [`LANGFLOW_CACHE_TYPE`](#LANGFLOW_CACHE_TYPE). | +| `LANGFLOW_REDIS_DB` | Integer | `0` | See [`LANGFLOW_CACHE_TYPE`](#LANGFLOW_CACHE_TYPE). | +| `LANGFLOW_REDIS_HOST` | String | `localhost` | See [`LANGFLOW_CACHE_TYPE`](#LANGFLOW_CACHE_TYPE). | +| `LANGFLOW_REDIS_PORT` | String | `6379` | See [`LANGFLOW_CACHE_TYPE`](#LANGFLOW_CACHE_TYPE). | +| `LANGFLOW_REMOVE_API_KEYS` | Boolean | `false` | Remove API keys from the projects saved in the database.
See [`--remove-api-keys` option](./configuration-cli.md#run-remove-api-keys). | +| `LANGFLOW_SAVE_DB_IN_CONFIG_DIR` | Boolean | `false` | Save the Langflow database in [`LANGFLOW_CONFIG_DIR`](#LANGFLOW_CONFIG_DIR) instead of in the Langflow package directory. Note, when this variable is set to default (`false`), the database isn't shared between different virtual environments and the database is deleted when you uninstall Langflow. | +| `LANGFLOW_STORE` | Boolean | `true` | Enable the Langflow Store.
See [`--store` option](./configuration-cli.md#run-store). | +| `LANGFLOW_STORE_ENVIRONMENT_VARIABLES` | Boolean | `true` | Store environment variables as [global variables](../Configuration/configuration-global-variables.md) in the database. | +| `LANGFLOW_SUPERUSER` | String | Not set | Set the name for the superuser. Required if [`LANGFLOW_AUTO_LOGIN`](#LANGFLOW_AUTO_LOGIN) is set to `false`.
See [`superuser --username` option](./configuration-cli.md#superuser-username). | +| `LANGFLOW_SUPERUSER_PASSWORD` | String | Not set | Set the password for the superuser. Required if [`LANGFLOW_AUTO_LOGIN`](#LANGFLOW_AUTO_LOGIN) is set to `false`.
See [`superuser --password` option](./configuration-cli.md#superuser-password).| +| `LANGFLOW_VARIABLES_TO_GET_FROM_ENVIRONMENT` | String | Not set | Comma-separated list of environment variables to get from the environment and store as [global variables](../Configuration/configuration-global-variables.md). | +| `LANGFLOW_WORKER_TIMEOUT` | Integer | `300` | Worker timeout in seconds.
See [`--worker-timeout` option](./configuration-cli.md#run-worker-timeout). | +| `LANGFLOW_WORKERS` | Integer | `1` | Number of worker processes.
See [`--workers` option](./configuration-cli.md#run-workers). | diff --git a/docs/docs/Contributing/contributing-community.md b/docs/docs/Contributing/contributing-community.md index 7b8a0116e1b1..f37ae5d7e525 100644 --- a/docs/docs/Contributing/contributing-community.md +++ b/docs/docs/Contributing/contributing-community.md @@ -1,52 +1,26 @@ --- -title: Community -sidebar_position: 3 +title: Join the Langflow community +sidebar_position: 5 slug: /contributing-community --- -## 🤖 Join **Langflow** Discord server {#80011e0bda004e83a8012c7ec6eab29a} +## Join the Langflow Discord server +Join the [Langflow Discord Server](https://discord.gg/EqksyE2EX9) to ask questions and showcase your projects. ---- - - -Join us to ask questions and showcase your projects. - - -Let's bring together the building blocks of AI integration! - - -Langflow [Discord](https://discord.gg/EqksyE2EX9) server. - - -## 🐦 Stay tuned for **Langflow** on Twitter {#6a17ba5905ad4f7aa5347af7854779f6} - - ---- +## Follow Langflow on X +Follow [@langflow_ai](https://twitter.com/langflow_ai) on X to get the latest news about Langflow. -Follow [@langflow_ai](https://twitter.com/langflow_ai) on **Twitter** to get the latest news about **Langflow**. +## Star Langflow on GitHub +You can [star Langflow in GitHub](https://github.com/langflow-ai/langflow). -## ⭐️ Star **Langflow** on GitHub {#c903a569934643799bf52b7d1b3514e1} - - ---- - - -You can "star" **Langflow** in [GitHub](https://github.com/langflow-ai/langflow). - - -By adding a star, other users will be able to find it more easily and see that it has been already useful for others. - - -## 👀 Watch the GitHub repository for releases {#d0a089ed717742308bd17430e5ae6309} - - ---- +By adding a star, other users will be able to find Langflow more easily, and see that it has been already useful for others. +## Watch the GitHub repository for releases -You can "watch" **Langflow** in [GitHub](https://github.com/langflow-ai/langflow). If you select "Watching" instead of "Releases only" you will receive notifications when someone creates a new issue or question. You can also specify that you only want to be notified about new issues, discussions, PRs, etc. so you can try and help them solve those questions. +You can [watch Langflow in GitHub](https://github.com/langflow-ai/langflow). If you select **Watching** instead of **Releases only** you will receive notifications when someone creates a new issue or question. You can also specify that you want to be notified only about new issues, discussions, and PRs so you can try to help solve those issues. diff --git a/docs/docs/Contributing/contributing-components.md b/docs/docs/Contributing/contributing-components.md new file mode 100644 index 000000000000..0cac371e49b6 --- /dev/null +++ b/docs/docs/Contributing/contributing-components.md @@ -0,0 +1,24 @@ +--- +title: Contribute components +sidebar_position: 4 +slug: /contributing-components +--- + + +New components are added as objects of the [CustomComponent](https://github.com/langflow-ai/langflow/blob/dev/src/backend/base/langflow/custom/custom_component/custom_component.py) class. + +Any dependencies are added to the [pyproject.toml](https://github.com/langflow-ai/langflow/blob/main/pyproject.toml#L148) file. + +### Contribute an example component to Langflow + +Anyone can contribute an example component. For example, if you created a new document loader called **MyCustomDocumentLoader**, you can follow these steps to contribute it to Langflow. + +1. 
Write your loader as an object of the [CustomComponent](https://github.com/langflow-ai/langflow/blob/dev/src/backend/base/langflow/custom/custom_component/custom_component.py) class. You'll create a new class, `MyCustomDocumentLoader`, that will inherit from `CustomComponent` and override the base class's methods. +2. Define optional attributes like `display_name`, `description`, and `documentation` to provide information about your custom component. +3. Implement the `build_config` method to define the configuration options for your custom component. +4. Implement the `build` method to define the logic for taking input parameters specified in the `build_config` method and returning the desired output. +5. Add the code to the [/components/documentloaders](https://github.com/langflow-ai/langflow/tree/dev/src/backend/base/langflow/components) folder. +6. Add the dependency to [/documentloaders/__init__.py](https://github.com/langflow-ai/langflow/blob/dev/src/backend/base/langflow/components/documentloaders/__init__.py) as `from .MyCustomDocumentLoader import MyCustomDocumentLoader`. +7. Add any new dependencies to the [pyproject.toml](https://github.com/langflow-ai/langflow/blob/main/pyproject.toml#L148) file. +8. Submit documentation for your component. For this example, you'd submit documentation to the [loaders page](https://github.com/langflow-ai/langflow/blob/main/docs/docs/Components/components-loaders.md). +9. Submit your changes as a pull request. The Langflow team will have a look, suggest changes, and add your component to Langflow. \ No newline at end of file diff --git a/docs/docs/Contributing/contributing-github-discussion-board.md b/docs/docs/Contributing/contributing-github-discussion-board.md new file mode 100644 index 000000000000..e17d5366d768 --- /dev/null +++ b/docs/docs/Contributing/contributing-github-discussion-board.md @@ -0,0 +1,13 @@ +--- +title: Ask for help on the Discussions board +sidebar_position: 3 +slug: /contributing-github-discussions +--- + +If you're looking for help with your code, consider posting a question on the Langflow [GitHub Discussions board](https://github.com/langflow-ai/langflow/discussions). The Langflow team cannot provide individual support via email. The team also believes that help is much more valuable if it's shared publicly, so that more people can benefit from it. + +Since the Discussions board is public, please follow this guidance when posting your code questions. + +* When describing your issue, try to provide as many details as possible. What exactly goes wrong? _How_ is it failing? Is there an error? "XY doesn't work" usually isn't that helpful for tracking down problems. Always remember to include the code you ran and if possible, extract only the relevant parts and don't just dump your entire script. This will make it easier for us to reproduce the error. + +* When you include long code, logs, or tracebacks, wrap them in `
` and `
` tags. This [collapses the content](https://developer.mozilla.org/en/docs/Web/HTML/Element/details) so the contents only becomes visible on click, making the issue easier to read and follow. \ No newline at end of file diff --git a/docs/docs/Contributing/contributing-github-issues.md b/docs/docs/Contributing/contributing-github-issues.md index 41e3e63172d8..11105a7e35a0 100644 --- a/docs/docs/Contributing/contributing-github-issues.md +++ b/docs/docs/Contributing/contributing-github-issues.md @@ -1,24 +1,7 @@ --- -title: GitHub Issues +title: Request an enhancement or report a bug sidebar_position: 2 slug: /contributing-github-issues --- - - -Our [issues](https://github.com/langflow-ai/langflow/issues) page is kept up to date with bugs, improvements, and feature requests. There is a taxonomy of labels to help with sorting and discovery of issues of interest. - - -If you're looking for help with your code, consider posting a question on the [GitHub Discussions board](https://github.com/langflow-ai/langflow/discussions). Please understand that we won't be able to provide individual support via email. We also believe that help is much more valuable if it's **shared publicly**, so that more people can benefit from it. - -- **Describing your issue:** Try to provide as many details as possible. What exactly goes wrong? _How_ is it failing? Is there an error? "XY doesn't work" usually isn't that helpful for tracking down problems. Always remember to include the code you ran and if possible, extract only the relevant parts and don't just dump your entire script. This will make it easier for us to reproduce the error. -- **Sharing long blocks of code or logs:** If you need to include long code, logs or tracebacks, you can wrap them in `
` and `
`. This [collapses the content](https://developer.mozilla.org/en/docs/Web/HTML/Element/details) so it only becomes visible on click, making the issue easier to read and follow. - -## Issue labels {#e19eae656c914ce7aedc4f55565cc0bc} - - ---- - - -[See this page](https://github.com/langflow-ai/langflow/labels) for an overview of the system we use to tag our issues and pull requests. - +The [Issues page in the Langflow repo](https://github.com/langflow-ai/langflow/issues) is kept up to date with bugs, improvements, and feature requests. Labels are used to help with sorting and discovery of issues of interest. For an overview of the system Langflow uses to tag issues and pull requests, see the Langflow repo's [labels page](https://github.com/langflow-ai/langflow/labels). \ No newline at end of file diff --git a/docs/docs/Contributing/contributing-how-to-contribute.md b/docs/docs/Contributing/contributing-how-to-contribute.md index 4b93e62cb14e..8a87bcd5d694 100644 --- a/docs/docs/Contributing/contributing-how-to-contribute.md +++ b/docs/docs/Contributing/contributing-how-to-contribute.md @@ -1,160 +1,116 @@ --- -title: How to contribute? +title: Contribute to Langflow sidebar_position: 1 slug: /contributing-how-to-contribute --- +This guide is intended to help you start contributing to Langflow. +As an open-source project in a rapidly developing field, Langflow welcomes contributions, whether it be in the form of a new feature, improved infrastructure, or better documentation. +To contribute code or documentation to this project, follow the [fork and pull request](https://docs.github.com/en/get-started/quickstart/contributing-to-projects) workflow. -:::info +## Contribute code -This page may contain outdated information. It will be updated as soon as possible. +Develop Langflow locally with [uv](https://docs.astral.sh/uv/getting-started/installation/) and [Node.js](https://nodejs.org/en/download/package-manager). -::: +### Prerequisites +* [uv(>=0.4)](https://docs.astral.sh/uv/getting-started/installation/) +* [Node.js](https://nodejs.org/en/download/package-manager) +### Clone the Langflow Repository +1. Navigate to the [Langflow GitHub repository](https://github.com/langflow-ai/langflow), and then click **Fork**. -👋 Hello there! +2. Add the new remote to your local repository on your local machine: -We welcome contributions from developers of all levels to our open-source project on [GitHub](https://github.com/langflow-ai/langflow). If you'd like to contribute, please check our contributing guidelines and help make Langflow more accessible. +```bash +git remote add fork https://github.com//langflow.git +``` +### Prepare the development environment +1. Create development hooks. -As an open-source project in a rapidly developing field, we are extremely open to contributions, whether in the form of a new feature, improved infra, or better documentation. +```bash +make init +``` +This command sets up the development environment by installing backend and frontend dependencies, building the frontend static files, and initializing the project. It runs `make install_backend`, `make install_frontend`, `make build_frontend`, and finally `uv run langflow run` to start the application. +2. Run `make lint`, `make format`, and `make unit_tests` before pushing to the repository. -To contribute to this project, please follow a ["fork and pull request"](https://docs.github.com/en/get-started/quickstart/contributing-to-projects) workflow. 
Please do not try to push directly to this repo unless you are a maintainer. +### Debug +The repo includes a `.vscode/launch.json` file for debugging the backend in VSCode, which is faster than debugging with Docker Compose. To debug Langflow with the `launch.json` file in VSCode: -## Local Development {#0388cc3c758d434d994022863a6bafa9} +1. Open Langflow in VSCode. +2. Press **Ctrl+Shift+D** for Windows **or Cmd+Shift+D** for Mac to open the Run and Debug view. +3. From the **Run and Debug** dropdown, choose a debugging configuration. +4. Click the green **Play** button or press F5 to start debugging. +Use `launch.json` to quickly debug different parts of your application, like the backend, frontend, or CLI, directly from VSCode. ---- - - -You can develop Langflow using docker compose, or locally. - - -We provide a `.vscode/launch.json` file for debugging the backend in VSCode, which is a lot faster than using docker compose. - - -Setting up hooks: - - -`make init` - - -This will install the pre-commit hooks, which will run `make format` on every commit. - - -It is advised to run `make lint` before pushing to the repository. - - -## Run Locally {#5225c2ef0cd6403c9f6c6bbd888115e0} - - ---- - - -Langflow can run locally by cloning the repository and installing the dependencies. We recommend using a virtual environment to isolate the dependencies from your system. - - -Before you start, make sure you have the following installed: - -- Poetry (>=1.4) -- Node.js - -Then, in the root folder, install the dependencies and start the development server for the backend: - - -`make backend` - - -And the frontend: - - -`make frontend` +### Run Langflow locally +After setting up the environment with `make init`, you can run Langflow's backend and frontend separately for development. +Langflow recommends using a virtual environment like [venv](https://docs.python.org/3/library/venv.html) or [conda](https://anaconda.org/anaconda/conda) to isolate dependencies. -## Docker Compose {#b07f359414ff4220ac615afc364ee46e} +Before you begin, ensure you have [uv](https://docs.astral.sh/uv/getting-started/installation/) and [Node.js](https://nodejs.org/en/download/package-manager) installed. +1. In the repository root, install the dependencies and start the development server for the backend: ---- - - -The following snippet will run the backend and frontend in separate containers. The frontend will be available at `localhost:3000` and the backend at `localhost:7860`. - - -`docker compose up --build# ormake dev build=1` - - -## Documentation {#5f34bcaeccdc4489b0c5ee2c4a21354e} - - ---- - - -The documentation is built using [Docusaurus](https://docusaurus.io/). To run the documentation locally, run the following commands: - - -`cd docsnpm installnpm run start` - +```bash +make backend +``` -The documentation will be available at `localhost:3000` and all the files are located in the `docs/docs` folder. Once you are done with your changes, you can create a Pull Request to the `main` branch. +2. Install dependencies and start the frontend: +```bash +make frontend +``` -## Submitting Components {#9676353bc4504551a4014dd572ac8be8} - - ---- - - -New components are added as objects of the [CustomComponent](https://github.com/langflow-ai/langflow/blob/dev/src/backend/base/langflow/interface/custom/custom_component/custom_component.py) class and any dependencies are added to the [pyproject.toml](https://github.com/langflow-ai/langflow/blob/dev/pyproject.toml#L27) file. 
- - -### Add an example component {#8caae106c853465d83183e7f5272e4d8} - - -You have a new document loader called **MyCustomDocumentLoader** and it would look awesome in Langflow. - -1. Write your loader as an object of the [CustomComponent](https://github.com/langflow-ai/langflow/blob/dev/src/backend/base/langflow/interface/custom/custom_component/custom_component.py) class. You'll create a new class, `MyCustomDocumentLoader`, that will inherit from `CustomComponent` and override the base class's methods. -2. Define optional attributes like `display_name`, `description`, and `documentation` to provide information about your custom component. -3. Implement the `build_config` method to define the configuration options for your custom component. -4. Implement the `build` method to define the logic for taking input parameters specified in the `build_config` method and returning the desired output. -5. Add the code to the [/components/documentloaders](https://github.com/langflow-ai/langflow/tree/dev/src/backend/base/langflow/components) folder. -6. Add the dependency to [/documentloaders/__init__.py](https://github.com/langflow-ai/langflow/blob/dev/src/backend/base/langflow/components/documentloaders/__init__.py) as `from .MyCustomDocumentLoader import MyCustomDocumentLoader`. -7. Add any new dependencies to the outer [pyproject.toml](https://github.com/langflow-ai/langflow/blob/dev/pyproject.toml#L27) file. -8. Submit documentation for your component. For this example, you'd submit documentation to the [loaders page](https://github.com/langflow-ai/langflow/blob/dev/docs/docs/components/loaders). -9. Submit your changes as a pull request. The Langflow team will have a look, suggest changes, and add your component to Langflow. - -## User Sharing {#34ac32e11f344eab892b94531a21d2c9} - - ---- - +This approach allows you to work on the backend and frontend independently, with hot-reloading for faster development. -You might want to share and test your custom component with others, but don't need it merged into the main source code. +## Contribute documentation +The documentation is built using [Docusaurus](https://docusaurus.io/) and written in [Markdown](https://docusaurus.io/docs/markdown-features). -If so, you can share your component on the Langflow store. +### Prerequisites +* [Node.js](https://nodejs.org/en/download/package-manager) -1. [Register at the Langflow store](https://www.langflow.store/login/). +### Clone the Langflow repository +1. Navigate to the [Langflow GitHub repository](https://github.com/langflow-ai/langflow), and then click **Fork**. -2. Undergo pre-validation before receiving an API key. +2. Add the new remote to your local repository on your local machine: +```bash +git remote add fork https://github.com//langflow.git +``` -3. To deploy your amazing component directly to the Langflow store, without it being merged into the main source code, navigate to your flow, and then click **Share**. The share window appears: +3. To run the documentation locally, run the following commands: +```bash +cd docs +npm install +npm run start +``` -![](./683296796.png) +The documentation will be available at `localhost:3000` and all the files are located in the `docs/docs` folder. +## Open a pull request -4. Choose whether you want to flow to be public or private. You can also **Export** your flow as a JSON file from this window. When you're ready to share the flow, click **Share Flow**. You should see a **Flow shared successfully** popup. 
+Once you have written and manually tested your changes with `make lint` and `make unit_tests`, open a pull request to send your changes upstream to the main Langflow repository. +1. Open a new GitHub pull request with your patch against the `main` branch. +2. Ensure the PR title follows semantic commit conventions. For example, features are `feat: add new feature` and fixes are `fix: correct issue with X`. +3. A Langflow maintainer will review your pull request. Thanks for your contribution! -5. To confirm, navigate to the **Langflow Store** and filter results by **Created By Me**. You should see your new flow on the **Langflow Store**. +Some additional guidance on pull request titles: +* Ensure the pull request description clearly describes the problem and solution. If the PR fixes an issue, include a link to the fixed issue in the PR description with `Fixes #1234`. +* Pull request titles appear in Langflow's release notes, so they should explain what the PR does as explicitly as possible. +* Pull requests should strive to fix one thing **only**, and should contain a good description of what is being fixed. +For more information, see the [Python Developer's Guide](https://devguide.python.org/getting-started/pull-request-lifecycle/index.html#making-good-commits). \ No newline at end of file diff --git a/docs/docs/Contributing/contributing-telemetry.md b/docs/docs/Contributing/contributing-telemetry.md index 21c77bc8e2d8..c374565caace 100644 --- a/docs/docs/Contributing/contributing-telemetry.md +++ b/docs/docs/Contributing/contributing-telemetry.md @@ -4,33 +4,15 @@ sidebar_position: 0 slug: /contributing-telemetry --- - - -:::info - -This page may contain outdated information. It will be updated as soon as possible. - -::: - - - - -Our system uses anonymous telemetry to collect essential usage statistics to enhance functionality and user experience. This data helps us identify commonly used features and areas needing improvement, ensuring our development efforts align with what you need. - - -:::note +Langflow uses anonymous telemetry to collect essential usage statistics to enhance functionality and the user experience. This data helps us identify popular features and areas that need improvement, and ensures development efforts align with what you need. We respect your privacy and are committed to protecting your data. We do not collect any personal information or sensitive data. All telemetry data is anonymized and used solely for improving Langflow. -You can opt-out of telemetry by setting the `LANGFLOW_DO_NOT_TRACK` or `DO_NOT_TRACK` environment variable to `true` before running Langflow. This will disable telemetry data collection. - -::: - +## Opt out of telemetry +To opt out of telemetry, set the `LANGFLOW_DO_NOT_TRACK` or `DO_NOT_TRACK` environment variable to `true` before running Langflow. This disables telemetry data collection. - -## Data Collected Includes {#1734ed50fb4a4a45aaa84185b44527ca} - +## Data that Langflow collects ### Run {#2d427dca4f0148ae867997f6789e8bfb} @@ -66,5 +48,3 @@ You can opt-out of telemetry by setting the `LANGFLOW_DO_NOT_TRACK` or `DO_NO - **Success**: Whether the component operated successfully, which helps in quality control. - **ErrorMessage**: Details of any errors encountered, crucial for debugging and improvement. -This telemetry data is crucial for enhancing Langflow and ensuring that our development efforts align with your needs. 
Your feedback and suggestions are invaluable in shaping the future of Langflow, and we appreciate your support in making Langflow better for everyone. - diff --git a/docs/docs/Deployment/cloud_deploy.svg b/docs/docs/Deployment/cloud_deploy.svg new file mode 100644 index 000000000000..b55ef3c6d456 --- /dev/null +++ b/docs/docs/Deployment/cloud_deploy.svg @@ -0,0 +1,5 @@ + + + + + \ No newline at end of file diff --git a/docs/docs/Deployment/deployment-gcp.md b/docs/docs/Deployment/deployment-gcp.md index 70644a429902..167c6ef3589b 100644 --- a/docs/docs/Deployment/deployment-gcp.md +++ b/docs/docs/Deployment/deployment-gcp.md @@ -4,43 +4,37 @@ sidebar_position: 3 slug: /deployment-gcp --- -:::info +# Deploy on Google Cloud Platform -This page may contain outdated information. It will be updated as soon as possible. +To deploy Langflow on Google Cloud Platform using Cloud Shell, use the below script. +The script will guide you through setting up a Debian-based VM with the Langflow package, Nginx, and the necessary configurations to run the Langflow dev environment in GCP. -::: +## Prerequisites -## Deploy on Google Cloud Platform {#4ee01cda736c4f7396936409f23cdb52} +* A GCP account with the necessary permissions to create resources +* A project on GCP where you want to deploy Langflow ---- +## Deploy Langflow in GCP -### Run Langflow from a New Google Cloud Project {#ce729796d7404ccdb627bee47d6a4399} +1. Click below to launch Cloud Shell. -This guide will help you set up a Langflow development VM in a Google Cloud Platform project using Google Cloud Shell. +[![GCP Deploy](./cloud_deploy.svg) Deploy to Google Cloud](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/langflow-ai/langflow&working_dir=scripts/gcp&shellonly=true&tutorial=walkthroughtutorial.md) -:::info - -When Cloud Shell opens, be sure to select Trust repo. Some gcloud commands might not run in an ephemeral Cloud Shell environment. - -::: +2. Click **Trust repo**. Some gcloud commands might not run in an ephemeral Cloud Shell environment. +3. Click **Start** and follow the tutorial to deploy Langflow. -### Standard VM {#245b47b450dd4159a5c56a5124bab84f} +## Spot/Preemptible Instance -[![GCP Deploy](https://camo.githubusercontent.com/c1a4a499c1d93d7038fd7af8c4f3fce222050f3bbdf4275dafbfde2491e4b8c4/68747470733a2f2f677374617469632e636f6d2f636c6f75647373682f696d616765732f6f70656e2d62746e2e737667)](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/langflow-ai/langflow&working_dir=scripts/gcp&shellonly=true&tutorial=walkthroughtutorial.md) +When running a [spot (preemptible) instance](https://cloud.google.com/compute/docs/instances/preemptible), the code and VM will behave the same way as in a regular instance, executing the startup script to configure the environment, install necessary dependencies, and run the Langflow application. However, **due to the nature of spot instances, the VM may be terminated at any time if Google Cloud needs to reclaim the resources**. This makes spot instances suitable for fault-tolerant, stateless, or interruptible workloads that can handle unexpected terminations and restarts. -This script sets up a Debian-based VM with the Langflow package, Nginx, and the necessary configurations to run the Langflow Dev environment. 
+## Pricing (approximate) ---- - -## Spot/Preemptible Instance {#de9b8f7c71284cbb98e8137a3c44553d} - -When running as a [spot (preemptible) instance](https://cloud.google.com/compute/docs/instances/preemptible), the code and VM will behave the same way as in a regular instance, executing the startup script to configure the environment, install necessary dependencies, and run the Langflow application. However, **due to the nature of spot instances, the VM may be terminated at any time if Google Cloud needs to reclaim the resources**. This makes spot instances suitable for fault-tolerant, stateless, or interruptible workloads that can handle unexpected terminations and restarts. +:::info ---- +For more information, see the [GCP Pricing Calculator](https://cloud.google.com/products/calculator?hl=en). -## Pricing (approximate) {#2289f4ba9f544e6e9d4b915ef5aacd24} +::: -> For a more accurate breakdown of costs, please use the GCP Pricing Calculator | Component | Regular Cost (Hourly) | Regular Cost (Monthly) | Spot/Preemptible Cost (Hourly) | Spot/Preemptible Cost (Monthly) | Notes | | ------------------ | --------------------- | ---------------------- | ------------------------------ | ------------------------------- | -------------------------------------------------------------------------- | diff --git a/docs/docs/Getting-Started/getting-started-common-installation-issues.md b/docs/docs/Getting-Started/getting-started-common-installation-issues.md deleted file mode 100644 index 4be683cce422..000000000000 --- a/docs/docs/Getting-Started/getting-started-common-installation-issues.md +++ /dev/null @@ -1,53 +0,0 @@ ---- -title: ❗️ Common Installation Issues -sidebar_position: 3 -slug: /getting-started-common-installation-issues ---- - - - -This is a list of possible issues that you may encounter when installing Langflow and how to solve them. - - ---- - - -```bash -> No module named 'langflow.__main__' -``` - -1. Run `python -m langflow run` instead of `langflow run`. -2. If that doesn't work, reinstall Langflow with `_python -m pip install langflow --pre -U`. -3. If that doesn't work, reinstall Langflow and its dependencies with `python -m pip install langflow --pre -U --force-reinstall`. - -When you try to run Langflow using the command `langflow run`, you may encounter the following error: - - -```bash -> langflow runTraceback (most recent call last): File ".../langflow", line 5, in from langflow.__main__ import mainModuleNotFoundError: No module named 'langflow.__main__' -``` - - -There are two possible reasons for this error: - -1. You've installed Langflow using `pip install langflow` but you already had a previous version of Langflow installed in your system. In this case, you might be running the wrong executable. To solve this issue, run the correct executable by running `python -m langflow run` instead of `langflow run`. If that doesn't work, try uninstalling and reinstalling Langflow with `python -m pip install langflow --pre -U`. -2. Some version conflicts might have occurred during the installation process. Run `python -m pip install langflow --pre -U --force-reinstall` to reinstall Langflow and its dependencies. - -```bash -> Something went wrong running migrations. Please, run 'langflow migration --fix' -``` - - -Clear the cache by deleting the contents of the cache folder. 
- - -This folder can be found at: - -- **Linux or WSL2 on Windows**: `home//.cache/langflow/` -- **MacOS**: `/Users//Library/Caches/langflow/` - -This error can occur during Langflow upgrades when the new version can't override `langflow-pre.db` in `.cache/langflow/`. Clearing the cache removes this file but will also erase your settings. - - -If you wish to retain your files, back them up before clearing the folder. - diff --git a/docs/docs/Getting-Started/getting-started-installation.md b/docs/docs/Getting-Started/getting-started-installation.md index 9a6872da3eb0..b60412ea3d59 100644 --- a/docs/docs/Getting-Started/getting-started-installation.md +++ b/docs/docs/Getting-Started/getting-started-installation.md @@ -1,97 +1,115 @@ --- -title: 📦 Installation +title: Install Langflow sidebar_position: 1 slug: /getting-started-installation --- +You can deploy Langflow either locally or as a hosted service with [**Datastax Langflow**](#datastax-langflow). +## Install Langflow locally -## **DataStax Langflow** {#a3b4bd8dec5a49ebbfca4828492133e9} +Install Langflow locally with [pip](https://pypi.org/project/pip/) or [pipx](https://pipx.pypa.io/stable/installation/). +### Prerequisites ---- - - -The easiest way to get started with Langflow is through the DataStax Cloud Service! +* [Python 3.10 to 3.12](https://www.python.org/downloads/release/python-3100/) installed +* [pip](https://pypi.org/project/pip/) or [pipx](https://pipx.pypa.io/stable/installation/) installed +* Before installing Langflow, we recommend creating a virtual environment to isolate your Python dependencies with [venv](https://docs.python.org/3/library/venv.html) or [conda](https://anaconda.org/anaconda/conda) +### Install Langflow with pip or pipx +Install Langflow with pip: -**DataStax Langflow** is a hosted version of Langflow integrated with [Astra DB](https://www.datastax.com/products/datastax-astra). Be up and running in minutes with no installation or setup required. [Sign up for free](https://astra.datastax.com/signup?type=langflow). - - -![](./602374500.png) - - -## **Install Langflow Locally** {#ef364ee864c545649d248113ad7d3038} - +```bash +python -m pip install langflow +``` ---- +Install Langflow with pipx using the Python 3.10 executable: +```bash +pipx install langflow --python python3.10 +``` -:::caution +## Run Langflow -Langflow **requires** Python version 3.10 or greater and [pip](https://pypi.org/project/pip/) or [pipx](https://pipx.pypa.io/stable/installation/) to be installed on your system. +1. To run Langflow, enter the following command. -::: +```bash +python -m langflow run +``` +2. Confirm that a local Langflow instance starts by visiting `http://127.0.0.1:7860` in a Chromium-based browser. +Now that Langflow is running, follow the [Quickstart](/getting-started-quickstart) to create your first flow. +## Manage Langflow versions -Install Langflow with pip: - +To upgrade Langflow to the latest version, use the pip upgrade command. ```bash python -m pip install langflow -U ``` - -Install Langflow with pipx: - +To install a specific verison of the Langflow package, add the required version to the command. ```bash -pipx install langflow --python python3.10 --fetch-missing-python +python -m pip install langflow==1.1 ``` +To reinstall Langflow and all of its dependencies, add the `--force-reinstall` flag to the command. -Pipx can fetch the missing Python version for you with `--fetch-missing-python`, but you can also install the Python version manually. 
Use `--force-reinstall` to ensure you have the latest version of Langflow and its dependencies. - - -## Having a problem? {#86a16dad1d6e481cafb90efea2b9ff93} - - ---- +```bash +python -m pip install langflow --force-reinstall +``` +## DataStax Langflow {#datastax-langflow} -If you encounter a problem, see [Common Installation Issues](/getting-started-common-installation-issues). +**DataStax Langflow** is a hosted version of Langflow integrated with [Astra DB](https://www.datastax.com/products/datastax-astra). Be up and running in minutes with no installation or setup required. [Sign up for free](https://astra.datastax.com/signup?type=langflow). +## Common installation issues -To get help in the Langflow CLI: +This is a list of possible issues that you may encounter when installing and running Langflow. +### No `langflow.__main__` module +When you try to run Langflow with the command `langflow run`, you encounter the following error: ```bash -python -m langflow --help +> No module named 'langflow.__main__' ``` +1. Run `python -m langflow run` instead of `langflow run`. +2. If that doesn't work, reinstall the latest Langflow version with `python -m pip install langflow -U`. +3. If that doesn't work, reinstall Langflow and its dependencies with `python -m pip install langflow --pre -U --force-reinstall`. +### Langflow runTraceback -## ⛓️ Run Langflow {#d318c4d486b74f5383c45b4f6859dcaa} - +When you try to run Langflow using the command `langflow run`, you encounter the following error: ---- +```bash +> langflow runTraceback (most recent call last): File ".../langflow", line 5, in from langflow.__main__ import mainModuleNotFoundError: No module named 'langflow.__main__' +``` +There are two possible reasons for this error: -1. To run Langflow, enter the following command. +1. You've installed Langflow using `pip install langflow` but you already had a previous version of Langflow installed in your system. In this case, you might be running the wrong executable. To solve this issue, run the correct executable by running `python -m langflow run` instead of `langflow run`. If that doesn't work, try uninstalling and reinstalling Langflow with `python -m pip install langflow --pre -U`. +2. Some version conflicts might have occurred during the installation process. Run `python -m pip install langflow --pre -U --force-reinstall` to reinstall Langflow and its dependencies. +### Something went wrong running migrations ```bash -python -m langflow run +> Something went wrong running migrations. Please, run 'langflow migration --fix' ``` +Clear the cache by deleting the contents of the cache folder. -2. Confirm that a local Langflow instance starts by visiting `http://127.0.0.1:7860` in a Chromium-based browser. +This folder can be found at: + +- **Linux or WSL2 on Windows**: `home//.cache/langflow/` +- **MacOS**: `/Users//Library/Caches/langflow/` + +This error can occur during Langflow upgrades when the new version can't override `langflow-pre.db` in `.cache/langflow/`. Clearing the cache removes this file but also erases your settings. +If you wish to retain your files, back them up before clearing the folder. -![](./221680153.png) -3. Continue on to the [Quickstart](/getting-started-quickstart). 
diff --git a/docs/docs/Getting-Started/getting-started-quickstart.md b/docs/docs/Getting-Started/getting-started-quickstart.md index 73bc7476350b..46f58d0aa485 100644 --- a/docs/docs/Getting-Started/getting-started-quickstart.md +++ b/docs/docs/Getting-Started/getting-started-quickstart.md @@ -1,5 +1,5 @@ --- -title: ⚡️ Quickstart +title: Quickstart sidebar_position: 2 slug: /getting-started-quickstart --- @@ -79,6 +79,6 @@ Well done! You've built your first prompt in Langflow. 🎉 By dragging Langflow components to your workspace, you can create all sorts of interesting behaviors. Here are a couple of examples: -- [Memory Chatbot](https://docs.langflow.org/starter-projects/memory-chatbot) -- [Blog Writer](https://docs.langflow.org/starter-projects/blog-writer) -- [Document QA](https://docs.langflow.org/starter-projects/document-qa) +- [Memory Chatbot](/starter-projects-memory-chatbot) +- [Blog Writer](/starter-projects-blog-writer) +- [Document QA](/starter-projects-document-qa) diff --git a/docs/docs/Getting-Started/welcome-to-langflow.md b/docs/docs/Getting-Started/welcome-to-langflow.md new file mode 100644 index 000000000000..663615acdf7f --- /dev/null +++ b/docs/docs/Getting-Started/welcome-to-langflow.md @@ -0,0 +1,40 @@ +--- +title: Welcome to Langflow +sidebar_position: 0 +slug: / +--- + +Langflow is a new, visual framework for building multi-agent and RAG applications. It is open-source, Python-powered, fully customizable, and LLM and vector store agnostic. + +Its intuitive interface allows for easy manipulation of AI building blocks, enabling developers to quickly prototype and turn their ideas into powerful, real-world solutions. + +Whether you're a seasoned AI developer or just starting out, Langflow provides the tools you need to bring your AI ideas to life. + +## Visual flow builder + +Langflow is an intuitive visual flow builder. This drag-and-drop interface allows developers to create complex AI workflows without writing extensive code. You can easily connect different components, such as prompts, language models, and data sources, to build sophisticated AI applications. + +![Langflow in action](./1160086633.gif) + +## Use cases + +Langflow can be used for a wide range of AI applications. For example, you can: + +* [Craft intelligent chatbots](/starter-projects-memory-chatbot) +* [Build document analysis systems](/starter-projects-document-qa) +* [Generate compelling content](/starter-projects-blog-writer) +* [Orchestrate multi-agent applications](/starter-projects-simple-agent) + +## Community and support + +Join Langflow's vibrant community of developers and AI enthusiasts.
See the following resources to join discussions, share your projects, and get support: + +* [Contribute to Langflow](contributing-how-to-contribute) +* [Langflow Discord Server](https://discord.gg/EqksyE2EX9) +* [@langflow_ai](https://twitter.com/langflow_ai)  + +## Get started with Langflow + +- [Install Langflow](/getting-started-installation) +- [Quickstart](/getting-started-quickstart) + diff --git "a/docs/docs/Getting-Started/\360\237\221\213\302\240Welcome-to-Langflow.md" "b/docs/docs/Getting-Started/\360\237\221\213\302\240Welcome-to-Langflow.md" deleted file mode 100644 index b914952527c9..000000000000 --- "a/docs/docs/Getting-Started/\360\237\221\213\302\240Welcome-to-Langflow.md" +++ /dev/null @@ -1,34 +0,0 @@ ---- -title: 👋 Welcome to Langflow -sidebar_position: 0 -slug: / ---- - - - -## Introduction {#e12578e9f465459592d89dbe47a54460} - - ---- - - -Langflow is a new, visual framework for building multi-agent and RAG applications. It is open-source, Python-powered, fully customizable, LLM and vector store agnostic. - - -Its intuitive interface allows for easy manipulation of AI building blocks, enabling developers to quickly prototype and turn their ideas into powerful, real-world solutions. - - -![](./1160086633.gif) - - -## 🚀 First steps {#c1ccb5e315b040edaa8d9a67f4960d81} - - ---- - -- [Install Langflow](/getting-started-installation) - Install and start a local Langflow server. -- [Quickstart](/getting-started-quickstart) - Create a flow and run it. -- [Langflow Workspace](/workspace) - Learn more about the Langflow Workspace. - -Learn more about the exciting changes in Langflow 1.0 in [A new chapter for Langflow](/whats-new-a-new-chapter-langflow). - diff --git a/docs/docs/Integrations/AssemblyAI_Flow.json b/docs/docs/Integrations/AssemblyAI_Flow.json new file mode 100644 index 000000000000..195bb1906abf --- /dev/null +++ b/docs/docs/Integrations/AssemblyAI_Flow.json @@ -0,0 +1,1431 @@ +{ + "name": "AssemblyAI Transcription and Speech AI Flow", + "icon": null, + "is_component": false, + "endpoint_name": null, + "data": { + "nodes": [ + { + "id": "Prompt-IO8Cq", + "type": "genericNode", + "position": { + "x": -1376.3296370680628, + "y": 928.8860970980681 + }, + "data": { + "type": "Prompt", + "node": { + "template": { + "_type": "Component", + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.inputs.inputs import DefaultPromptField\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(\n self,\n ) -> Message:\n prompt = Message.from_template_and_variables(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def _update_template(self, frontend_node: dict):\n prompt_template = frontend_node[\"template\"][\"template\"][\"value\"]\n custom_fields = frontend_node[\"custom_fields\"]\n frontend_node_template = frontend_node[\"template\"]\n _ = 
process_prompt_template(\n template=prompt_template,\n name=\"template\",\n custom_fields=custom_fields,\n frontend_node_template=frontend_node_template,\n )\n return frontend_node\n\n def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"\n This function is called after the code validation is done.\n \"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n # Kept it duplicated for backwards compatibility\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n\n def _get_fallback_input(self, **kwargs):\n return DefaultPromptField(**kwargs)\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "template": { + "trace_as_input": true, + "list": false, + "required": false, + "placeholder": "", + "show": true, + "name": "template", + "value": "Provide a brief summary of the transcript.", + "display_name": "Template", + "advanced": false, + "dynamic": false, + "info": "", + "title_case": false, + "type": "prompt", + "_input_type": "PromptInput" + } + }, + "description": "Create a prompt template with dynamic variables.", + "icon": "prompts", + "is_input": null, + "is_output": null, + "is_composition": null, + "base_classes": [ + "Message" + ], + "name": "", + "display_name": "Prompt", + "documentation": "", + "custom_fields": { + "template": [] + }, + "output_types": [], + "full_path": null, + "pinned": false, + "conditional_paths": [], + "frozen": false, + "outputs": [ + { + "types": [ + "Message" + ], + "selected": "Message", + "name": "prompt", + "hidden": null, + "display_name": "Prompt Message", + "method": "build_prompt", + "value": "__UNDEFINED__", + "cache": true + } + ], + "field_order": [ + "template" + ], + "beta": false, + "error": null, + "edited": false, + "lf_version": "1.0.18" + }, + "id": "Prompt-IO8Cq" + }, + "selected": false, + "width": 384, + "height": 324, + "positionAbsolute": { + "x": -1376.3296370680628, + "y": 928.8860970980681 + }, + "dragging": false + }, + { + "id": "AssemblyAITranscriptionJobCreator-Idt7P", + "type": "genericNode", + "position": { + "x": -1957.7132501771657, + "y": 470.79685053457587 + }, + "data": { + "type": "AssemblyAITranscriptionJobCreator", + "node": { + "template": { + "_type": "Component", + "audio_file": { + "trace_as_metadata": true, + "file_path": "fa69381c-d1c4-4535-bc23-bc2fb4956e1e/2024-09-26_16-47-01_sports_injuries.mp3", + "fileTypes": [ + "3ga", + "8svx", + "aac", + "ac3", + "aif", + "aiff", + "alac", + "amr", + "ape", + "au", + "dss", + "flac", + "flv", + "m4a", + "m4b", + "m4p", + "m4r", + "mp3", + "mpga", + "ogg", + "oga", + "mogg", + "opus", + "qcp", + "tta", + "voc", + "wav", + "wma", + "wv", + "webm", + "mts", + "m2ts", + "ts", + "mov", + "mp2", + "mp4", + "m4p", + "m4v", + "mxf" + ], + "list": false, + "required": false, + "placeholder": "", + "show": true, + "name": "audio_file", + "value": "sports_injuries.mp3", + "display_name": "Audio File", 
+ "advanced": false, + "dynamic": false, + "info": "The audio file to transcribe", + "title_case": false, + "type": "file", + "_input_type": "FileInput", + "load_from_db": false + }, + "api_key": { + "load_from_db": false, + "required": false, + "placeholder": "", + "show": true, + "name": "api_key", + "value": null, + "display_name": "Assembly API Key", + "advanced": false, + "input_types": [ + "Message" + ], + "dynamic": false, + "info": "Your AssemblyAI API key. You can get one from https://www.assemblyai.com/", + "title_case": false, + "password": true, + "type": "str", + "_input_type": "SecretStrInput" + }, + "audio_file_url": { + "trace_as_input": true, + "trace_as_metadata": true, + "load_from_db": false, + "list": false, + "required": false, + "placeholder": "", + "show": true, + "name": "audio_file_url", + "value": "", + "display_name": "Audio File URL", + "advanced": true, + "input_types": [ + "Message" + ], + "dynamic": false, + "info": "The URL of the audio file to transcribe (Can be used instead of a File)", + "title_case": false, + "type": "str", + "_input_type": "MessageTextInput" + }, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "import os\n\nimport assemblyai as aai\nfrom loguru import logger\n\nfrom langflow.custom import Component\nfrom langflow.io import BoolInput, DropdownInput, FileInput, MessageTextInput, Output, SecretStrInput\nfrom langflow.schema import Data\n\n\nclass AssemblyAITranscriptionJobCreator(Component):\n display_name = \"AssemblyAI Start Transcript\"\n description = \"Create a transcription job for an audio file using AssemblyAI with advanced options\"\n documentation = \"https://www.assemblyai.com/docs\"\n icon = \"AssemblyAI\"\n\n inputs = [\n SecretStrInput(\n name=\"api_key\",\n display_name=\"Assembly API Key\",\n info=\"Your AssemblyAI API key. You can get one from https://www.assemblyai.com/\",\n ),\n FileInput(\n name=\"audio_file\",\n display_name=\"Audio File\",\n file_types=[\n \"3ga\",\n \"8svx\",\n \"aac\",\n \"ac3\",\n \"aif\",\n \"aiff\",\n \"alac\",\n \"amr\",\n \"ape\",\n \"au\",\n \"dss\",\n \"flac\",\n \"flv\",\n \"m4a\",\n \"m4b\",\n \"m4p\",\n \"m4r\",\n \"mp3\",\n \"mpga\",\n \"ogg\",\n \"oga\",\n \"mogg\",\n \"opus\",\n \"qcp\",\n \"tta\",\n \"voc\",\n \"wav\",\n \"wma\",\n \"wv\",\n \"webm\",\n \"mts\",\n \"m2ts\",\n \"ts\",\n \"mov\",\n \"mp2\",\n \"mp4\",\n \"m4p\",\n \"m4v\",\n \"mxf\",\n ],\n info=\"The audio file to transcribe\",\n ),\n MessageTextInput(\n name=\"audio_file_url\",\n display_name=\"Audio File URL\",\n info=\"The URL of the audio file to transcribe (Can be used instead of a File)\",\n advanced=True,\n ),\n DropdownInput(\n name=\"speech_model\",\n display_name=\"Speech Model\",\n options=[\n \"best\",\n \"nano\",\n ],\n value=\"best\",\n info=\"The speech model to use for the transcription\",\n advanced=True,\n ),\n BoolInput(\n name=\"language_detection\",\n display_name=\"Automatic Language Detection\",\n info=\"Enable automatic language detection\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"language_code\",\n display_name=\"Language\",\n info=\"\"\"\n The language of the audio file. Can be set manually if automatic language detection is disabled. 
\n See https://www.assemblyai.com/docs/getting-started/supported-languages for a list of supported language codes.\n \"\"\",\n advanced=True,\n ),\n BoolInput(\n name=\"speaker_labels\",\n display_name=\"Enable Speaker Labels\",\n info=\"Enable speaker diarization\",\n ),\n MessageTextInput(\n name=\"speakers_expected\",\n display_name=\"Expected Number of Speakers\",\n info=\"Set the expected number of speakers (optional, enter a number)\",\n advanced=True,\n ),\n BoolInput(\n name=\"punctuate\",\n display_name=\"Punctuate\",\n info=\"Enable automatic punctuation\",\n advanced=True,\n value=True,\n ),\n BoolInput(\n name=\"format_text\",\n display_name=\"Format Text\",\n info=\"Enable text formatting\",\n advanced=True,\n value=True,\n ),\n ]\n\n outputs = [\n Output(display_name=\"Transcript ID\", name=\"transcript_id\", method=\"create_transcription_job\"),\n ]\n\n def create_transcription_job(self) -> Data:\n aai.settings.api_key = self.api_key\n\n # Convert speakers_expected to int if it's not empty\n speakers_expected = None\n if self.speakers_expected and self.speakers_expected.strip():\n try:\n speakers_expected = int(self.speakers_expected)\n except ValueError:\n self.status = \"Error: Expected Number of Speakers must be a valid integer\"\n return Data(data={\"error\": \"Error: Expected Number of Speakers must be a valid integer\"})\n\n language_code = self.language_code if self.language_code else None\n\n config = aai.TranscriptionConfig(\n speech_model=self.speech_model,\n language_detection=self.language_detection,\n language_code=language_code,\n speaker_labels=self.speaker_labels,\n speakers_expected=speakers_expected,\n punctuate=self.punctuate,\n format_text=self.format_text,\n )\n\n audio = None\n if self.audio_file:\n if self.audio_file_url:\n logger.warning(\"Both an audio file an audio URL were specified. 
The audio URL was ignored.\")\n\n # Check if the file exists\n if not os.path.exists(self.audio_file):\n self.status = \"Error: Audio file not found\"\n return Data(data={\"error\": \"Error: Audio file not found\"})\n audio = self.audio_file\n elif self.audio_file_url:\n audio = self.audio_file_url\n else:\n self.status = \"Error: Either an audio file or an audio URL must be specified\"\n return Data(data={\"error\": \"Error: Either an audio file or an audio URL must be specified\"})\n\n try:\n transcript = aai.Transcriber().submit(audio, config=config)\n\n if transcript.error:\n self.status = transcript.error\n return Data(data={\"error\": transcript.error})\n else:\n result = Data(data={\"transcript_id\": transcript.id})\n self.status = result\n return result\n except Exception as e:\n self.status = f\"An error occurred: {str(e)}\"\n return Data(data={\"error\": f\"An error occurred: {str(e)}\"})\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "format_text": { + "trace_as_metadata": true, + "list": false, + "required": false, + "placeholder": "", + "show": true, + "name": "format_text", + "value": true, + "display_name": "Format Text", + "advanced": true, + "dynamic": false, + "info": "Enable text formatting", + "title_case": false, + "type": "bool", + "_input_type": "BoolInput" + }, + "language_code": { + "trace_as_input": true, + "trace_as_metadata": true, + "load_from_db": false, + "list": false, + "required": false, + "placeholder": "", + "show": true, + "name": "language_code", + "value": "", + "display_name": "Language", + "advanced": true, + "input_types": [ + "Message" + ], + "dynamic": false, + "info": "\n The language of the audio file. Can be set manually if automatic language detection is disabled. 
\n See https://www.assemblyai.com/docs/getting-started/supported-languages for a list of supported language codes.\n ", + "title_case": false, + "type": "str", + "_input_type": "MessageTextInput" + }, + "language_detection": { + "trace_as_metadata": true, + "list": false, + "required": false, + "placeholder": "", + "show": true, + "name": "language_detection", + "value": false, + "display_name": "Automatic Language Detection", + "advanced": true, + "dynamic": false, + "info": "Enable automatic language detection", + "title_case": false, + "type": "bool", + "_input_type": "BoolInput" + }, + "punctuate": { + "trace_as_metadata": true, + "list": false, + "required": false, + "placeholder": "", + "show": true, + "name": "punctuate", + "value": true, + "display_name": "Punctuate", + "advanced": true, + "dynamic": false, + "info": "Enable automatic punctuation", + "title_case": false, + "type": "bool", + "_input_type": "BoolInput" + }, + "speaker_labels": { + "trace_as_metadata": true, + "list": false, + "required": false, + "placeholder": "", + "show": true, + "name": "speaker_labels", + "value": true, + "display_name": "Enable Speaker Labels", + "advanced": false, + "dynamic": false, + "info": "Enable speaker diarization", + "title_case": false, + "type": "bool", + "_input_type": "BoolInput", + "load_from_db": false + }, + "speakers_expected": { + "trace_as_input": true, + "trace_as_metadata": true, + "load_from_db": false, + "list": false, + "required": false, + "placeholder": "", + "show": true, + "name": "speakers_expected", + "value": "", + "display_name": "Expected Number of Speakers", + "advanced": true, + "input_types": [ + "Message" + ], + "dynamic": false, + "info": "Set the expected number of speakers (optional, enter a number)", + "title_case": false, + "type": "str", + "_input_type": "MessageTextInput" + }, + "speech_model": { + "trace_as_metadata": true, + "options": [ + "best", + "nano" + ], + "combobox": false, + "required": false, + "placeholder": "", + "show": true, + "name": "speech_model", + "value": "best", + "display_name": "Speech Model", + "advanced": true, + "dynamic": false, + "info": "The speech model to use for the transcription", + "title_case": false, + "type": "str", + "_input_type": "DropdownInput" + } + }, + "description": "Create a transcription job for an audio file using AssemblyAI with advanced options", + "icon": "AssemblyAI", + "base_classes": [ + "Data" + ], + "display_name": "AssemblyAI Start Transcript", + "documentation": "https://www.assemblyai.com/docs", + "custom_fields": {}, + "output_types": [], + "pinned": false, + "conditional_paths": [], + "frozen": true, + "outputs": [ + { + "types": [ + "Data" + ], + "selected": "Data", + "name": "transcript_id", + "display_name": "Transcript ID", + "method": "create_transcription_job", + "value": "__UNDEFINED__", + "cache": true + } + ], + "field_order": [ + "api_key", + "audio_file", + "audio_file_url", + "speech_model", + "language_detection", + "language_code", + "speaker_labels", + "speakers_expected", + "punctuate", + "format_text" + ], + "beta": false, + "edited": false, + "lf_version": "1.0.18" + }, + "id": "AssemblyAITranscriptionJobCreator-Idt7P", + "description": "Create a transcription job for an audio file using AssemblyAI with advanced options", + "display_name": "AssemblyAI Start Transcript" + }, + "selected": false, + "width": 384, + "height": 482, + "positionAbsolute": { + "x": -1957.7132501771657, + "y": 470.79685053457587 + }, + "dragging": false + }, + { + "id": 
"AssemblyAITranscriptionJobPoller-F46nf", + "type": "genericNode", + "position": { + "x": -1408.0967182254753, + "y": 461.5039554434261 + }, + "data": { + "type": "AssemblyAITranscriptionJobPoller", + "node": { + "template": { + "_type": "Component", + "transcript_id": { + "trace_as_metadata": true, + "list": false, + "trace_as_input": true, + "required": false, + "placeholder": "", + "show": true, + "name": "transcript_id", + "value": "", + "display_name": "Transcript ID", + "advanced": false, + "input_types": [ + "Data" + ], + "dynamic": false, + "info": "The ID of the transcription job to poll", + "title_case": false, + "type": "other", + "_input_type": "DataInput" + }, + "api_key": { + "load_from_db": false, + "required": false, + "placeholder": "", + "show": true, + "name": "api_key", + "value": null, + "display_name": "Assembly API Key", + "advanced": false, + "input_types": [ + "Message" + ], + "dynamic": false, + "info": "Your AssemblyAI API key. You can get one from https://www.assemblyai.com/", + "title_case": false, + "password": true, + "type": "str", + "_input_type": "SecretStrInput" + }, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "import assemblyai as aai\n\nfrom langflow.custom import Component\nfrom langflow.io import DataInput, FloatInput, Output, SecretStrInput\nfrom langflow.schema import Data\n\n\nclass AssemblyAITranscriptionJobPoller(Component):\n display_name = \"AssemblyAI Poll Transcript\"\n description = \"Poll for the status of a transcription job using AssemblyAI\"\n documentation = \"https://www.assemblyai.com/docs\"\n icon = \"AssemblyAI\"\n\n inputs = [\n SecretStrInput(\n name=\"api_key\",\n display_name=\"Assembly API Key\",\n info=\"Your AssemblyAI API key. 
You can get one from https://www.assemblyai.com/\",\n ),\n DataInput(\n name=\"transcript_id\",\n display_name=\"Transcript ID\",\n info=\"The ID of the transcription job to poll\",\n ),\n FloatInput(\n name=\"polling_interval\",\n display_name=\"Polling Interval\",\n value=3.0,\n info=\"The polling interval in seconds\",\n advanced=True,\n ),\n ]\n\n outputs = [\n Output(display_name=\"Transcription Result\", name=\"transcription_result\", method=\"poll_transcription_job\"),\n ]\n\n def poll_transcription_job(self) -> Data:\n \"\"\"Polls the transcription status until completion and returns the Data.\"\"\"\n aai.settings.api_key = self.api_key\n aai.settings.polling_interval = self.polling_interval\n\n # check if it's an error message from the previous step\n if self.transcript_id.data.get(\"error\"):\n self.status = self.transcript_id.data[\"error\"]\n return self.transcript_id\n\n try:\n transcript = aai.Transcript.get_by_id(self.transcript_id.data[\"transcript_id\"])\n except Exception as e:\n error = f\"Getting transcription failed: {str(e)}\"\n self.status = error\n return Data(data={\"error\": error})\n\n if transcript.status == aai.TranscriptStatus.completed:\n json_response = transcript.json_response\n text = json_response.pop(\"text\", None)\n utterances = json_response.pop(\"utterances\", None)\n transcript_id = json_response.pop(\"id\", None)\n sorted_data = { \"text\": text, \"utterances\": utterances, \"id\": transcript_id}\n sorted_data.update(json_response)\n data = Data(data=sorted_data)\n self.status = data\n return data\n else:\n self.status = transcript.error\n return Data(data={\"error\": transcript.error})\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "polling_interval": { + "trace_as_metadata": true, + "list": false, + "required": false, + "placeholder": "", + "show": true, + "name": "polling_interval", + "value": 3, + "display_name": "Polling Interval", + "advanced": true, + "dynamic": false, + "info": "The polling interval in seconds", + "title_case": false, + "type": "float", + "_input_type": "FloatInput" + } + }, + "description": "Poll for the status of a transcription job using AssemblyAI", + "icon": "AssemblyAI", + "base_classes": [ + "Data" + ], + "display_name": "AssemblyAI Poll Transcript", + "documentation": "https://www.assemblyai.com/docs", + "custom_fields": {}, + "output_types": [], + "pinned": false, + "conditional_paths": [], + "frozen": false, + "outputs": [ + { + "types": [ + "Data" + ], + "selected": "Data", + "name": "transcription_result", + "display_name": "Transcription Result", + "method": "poll_transcription_job", + "value": "__UNDEFINED__", + "cache": true + } + ], + "field_order": [ + "api_key", + "transcript_id", + "polling_interval" + ], + "beta": false, + "edited": false, + "lf_version": "1.0.18" + }, + "id": "AssemblyAITranscriptionJobPoller-F46nf", + "description": "Poll for the status of a transcription job using AssemblyAI", + "display_name": "AssemblyAI Poll Transcript" + }, + "selected": false, + "width": 384, + "height": 368, + "positionAbsolute": { + "x": -1408.0967182254753, + "y": 461.5039554434261 + }, + "dragging": false + }, + { + "id": "AssemblyAIGetSubtitles-3sjU6", + "type": "genericNode", + "position": { + "x": -867.5862690424032, + "y": 368.91683022842676 + }, + "data": { + "type": "AssemblyAIGetSubtitles", + "node": { + "template": { + "_type": "Component", + 
"transcription_result": { + "trace_as_metadata": true, + "list": false, + "trace_as_input": true, + "required": false, + "placeholder": "", + "show": true, + "name": "transcription_result", + "value": "", + "display_name": "Transcription Result", + "advanced": false, + "input_types": [ + "Data" + ], + "dynamic": false, + "info": "The transcription result from AssemblyAI", + "title_case": false, + "type": "other", + "_input_type": "DataInput" + }, + "api_key": { + "load_from_db": false, + "required": false, + "placeholder": "", + "show": true, + "name": "api_key", + "value": null, + "display_name": "Assembly API Key", + "advanced": false, + "input_types": [ + "Message" + ], + "dynamic": false, + "info": "Your AssemblyAI API key. You can get one from https://www.assemblyai.com/", + "title_case": false, + "password": true, + "type": "str", + "_input_type": "SecretStrInput" + }, + "chars_per_caption": { + "trace_as_metadata": true, + "list": false, + "required": false, + "placeholder": "", + "show": true, + "name": "chars_per_caption", + "value": 0, + "display_name": "Characters per Caption", + "advanced": true, + "dynamic": false, + "info": "The maximum number of characters per caption (0 for no limit)", + "title_case": false, + "type": "int", + "_input_type": "IntInput" + }, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "import assemblyai as aai\n\nfrom langflow.custom import Component\nfrom langflow.io import DataInput, DropdownInput, IntInput, Output, SecretStrInput\nfrom langflow.schema import Data\n\n\nclass AssemblyAIGetSubtitles(Component):\n display_name = \"AssemblyAI Get Subtitles\"\n description = \"Export your transcript in SRT or VTT format for subtitles and closed captions\"\n documentation = \"https://www.assemblyai.com/docs\"\n icon = \"AssemblyAI\"\n\n inputs = [\n SecretStrInput(\n name=\"api_key\",\n display_name=\"Assembly API Key\",\n info=\"Your AssemblyAI API key. 
You can get one from https://www.assemblyai.com/\",\n ),\n DataInput(\n name=\"transcription_result\",\n display_name=\"Transcription Result\",\n info=\"The transcription result from AssemblyAI\",\n ),\n DropdownInput(\n name=\"subtitle_format\",\n display_name=\"Subtitle Format\",\n options=[\"srt\", \"vtt\"],\n value=\"srt\",\n info=\"The format of the captions (SRT or VTT)\",\n ),\n IntInput(\n name=\"chars_per_caption\",\n display_name=\"Characters per Caption\",\n info=\"The maximum number of characters per caption (0 for no limit)\",\n value=0,\n advanced=True,\n ),\n ]\n\n outputs = [\n Output(display_name=\"Subtitles\", name=\"subtitles\", method=\"get_subtitles\"),\n ]\n\n def get_subtitles(self) -> Data:\n aai.settings.api_key = self.api_key\n\n # check if it's an error message from the previous step\n if self.transcription_result.data.get(\"error\"):\n self.status = self.transcription_result.data[\"error\"]\n return self.transcription_result\n\n try:\n transcript_id = self.transcription_result.data[\"id\"]\n transcript = aai.Transcript.get_by_id(transcript_id)\n except Exception as e:\n error = f\"Getting transcription failed: {str(e)}\"\n self.status = error\n return Data(data={\"error\": error})\n\n if transcript.status == aai.TranscriptStatus.completed:\n subtitles = None\n chars_per_caption = self.chars_per_caption if self.chars_per_caption > 0 else None\n if self.subtitle_format == \"srt\":\n subtitles = transcript.export_subtitles_srt(chars_per_caption)\n else:\n subtitles = transcript.export_subtitles_vtt(chars_per_caption)\n\n result = Data(\n subtitles=subtitles,\n format=self.subtitle_format,\n transcript_id=transcript_id,\n chars_per_caption=chars_per_caption,\n )\n\n self.status = result\n return result\n else:\n self.status = transcript.error\n return Data(data={\"error\": transcript.error})\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "subtitle_format": { + "trace_as_metadata": true, + "options": [ + "srt", + "vtt" + ], + "combobox": false, + "required": false, + "placeholder": "", + "show": true, + "name": "subtitle_format", + "value": "srt", + "display_name": "Subtitle Format", + "advanced": false, + "dynamic": false, + "info": "The format of the captions (SRT or VTT)", + "title_case": false, + "type": "str", + "_input_type": "DropdownInput" + } + }, + "description": "Export your transcript in SRT or VTT format for subtitles and closed captions", + "icon": "AssemblyAI", + "base_classes": [ + "Data" + ], + "display_name": "AssemblyAI Get Subtitles", + "documentation": "https://www.assemblyai.com/docs", + "custom_fields": {}, + "output_types": [], + "pinned": false, + "conditional_paths": [], + "frozen": false, + "outputs": [ + { + "types": [ + "Data" + ], + "selected": "Data", + "name": "subtitles", + "display_name": "Subtitles", + "method": "get_subtitles", + "value": "__UNDEFINED__", + "cache": true + } + ], + "field_order": [ + "api_key", + "transcription_result", + "subtitle_format", + "chars_per_caption" + ], + "beta": false, + "edited": false, + "lf_version": "1.0.18" + }, + "id": "AssemblyAIGetSubtitles-3sjU6", + "description": "Export your transcript in SRT or VTT format for subtitles and closed captions", + "display_name": "AssemblyAI Get Subtitles" + }, + "selected": false, + "width": 384, + "height": 454, + "positionAbsolute": { + "x": -867.5862690424032, + "y": 368.91683022842676 + }, + "dragging": false + }, + 
{ + "id": "AssemblyAIListTranscripts-3prc4", + "type": "genericNode", + "position": { + "x": -380.99808133361984, + "y": 401.2674645310267 + }, + "data": { + "type": "AssemblyAIListTranscripts", + "node": { + "template": { + "_type": "Component", + "api_key": { + "load_from_db": false, + "required": false, + "placeholder": "", + "show": true, + "name": "api_key", + "value": null, + "display_name": "Assembly API Key", + "advanced": false, + "input_types": [ + "Message" + ], + "dynamic": false, + "info": "Your AssemblyAI API key. You can get one from https://www.assemblyai.com/", + "title_case": false, + "password": true, + "type": "str", + "_input_type": "SecretStrInput" + }, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "import assemblyai as aai\n\nfrom langflow.custom import Component\nfrom langflow.io import BoolInput, DropdownInput, IntInput, MessageTextInput, Output, SecretStrInput\nfrom langflow.schema import Data\n\n\nclass AssemblyAIListTranscripts(Component):\n display_name = \"AssemblyAI List Transcripts\"\n description = \"Retrieve a list of transcripts from AssemblyAI with filtering options\"\n documentation = \"https://www.assemblyai.com/docs\"\n icon = \"AssemblyAI\"\n\n inputs = [\n SecretStrInput(\n name=\"api_key\",\n display_name=\"Assembly API Key\",\n info=\"Your AssemblyAI API key. You can get one from https://www.assemblyai.com/\",\n ),\n IntInput(\n name=\"limit\",\n display_name=\"Limit\",\n info=\"Maximum number of transcripts to retrieve (default: 20, use 0 for all)\",\n value=20,\n ),\n DropdownInput(\n name=\"status_filter\",\n display_name=\"Status Filter\",\n options=[\"all\", \"queued\", \"processing\", \"completed\", \"error\"],\n value=\"all\",\n info=\"Filter by transcript status\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"created_on\",\n display_name=\"Created On\",\n info=\"Only get transcripts created on this date (YYYY-MM-DD)\",\n advanced=True,\n ),\n BoolInput(\n name=\"throttled_only\",\n display_name=\"Throttled Only\",\n info=\"Only get throttled transcripts, overrides the status filter\",\n advanced=True,\n ),\n ]\n\n outputs = [\n Output(display_name=\"Transcript List\", name=\"transcript_list\", method=\"list_transcripts\"),\n ]\n\n def list_transcripts(self) -> list[Data]:\n aai.settings.api_key = self.api_key\n\n params = aai.ListTranscriptParameters()\n if self.limit:\n params.limit = self.limit\n if self.status_filter != \"all\":\n params.status = self.status_filter\n if self.created_on and self.created_on.text:\n params.created_on = self.created_on.text\n if self.throttled_only:\n params.throttled_only = True\n\n try:\n transcriber = aai.Transcriber()\n\n def convert_page_to_data_list(page):\n return [Data(**t.dict()) for t in page.transcripts]\n\n if self.limit == 0:\n # paginate over all pages\n params.limit = 100\n page = transcriber.list_transcripts(params)\n transcripts = convert_page_to_data_list(page)\n\n while page.page_details.before_id_of_prev_url is not None:\n params.before_id = page.page_details.before_id_of_prev_url\n page = transcriber.list_transcripts(params)\n transcripts.extend(convert_page_to_data_list(page))\n else:\n # just one page\n page = transcriber.list_transcripts(params)\n transcripts = convert_page_to_data_list(page)\n\n self.status = transcripts\n return transcripts\n except Exception as e:\n error_data = Data(data={\"error\": f\"An error occurred: {str(e)}\"})\n self.status = [error_data]\n return 
[error_data]\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "created_on": { + "trace_as_input": true, + "trace_as_metadata": true, + "load_from_db": false, + "list": false, + "required": false, + "placeholder": "", + "show": true, + "name": "created_on", + "value": "", + "display_name": "Created On", + "advanced": true, + "input_types": [ + "Message" + ], + "dynamic": false, + "info": "Only get transcripts created on this date (YYYY-MM-DD)", + "title_case": false, + "type": "str", + "_input_type": "MessageTextInput" + }, + "limit": { + "trace_as_metadata": true, + "list": false, + "required": false, + "placeholder": "", + "show": true, + "name": "limit", + "value": 20, + "display_name": "Limit", + "advanced": false, + "dynamic": false, + "info": "Maximum number of transcripts to retrieve (default: 20, use 0 for all)", + "title_case": false, + "type": "int", + "_input_type": "IntInput" + }, + "status_filter": { + "trace_as_metadata": true, + "options": [ + "all", + "queued", + "processing", + "completed", + "error" + ], + "combobox": false, + "required": false, + "placeholder": "", + "show": true, + "name": "status_filter", + "value": "all", + "display_name": "Status Filter", + "advanced": true, + "dynamic": false, + "info": "Filter by transcript status", + "title_case": false, + "type": "str", + "_input_type": "DropdownInput" + }, + "throttled_only": { + "trace_as_metadata": true, + "list": false, + "required": false, + "placeholder": "", + "show": true, + "name": "throttled_only", + "value": false, + "display_name": "Throttled Only", + "advanced": true, + "dynamic": false, + "info": "Only get throttled transcripts, overrides the status filter", + "title_case": false, + "type": "bool", + "_input_type": "BoolInput" + } + }, + "description": "Retrieve a list of transcripts from AssemblyAI with filtering options", + "icon": "AssemblyAI", + "base_classes": [ + "Data" + ], + "display_name": "AssemblyAI List Transcripts", + "documentation": "https://www.assemblyai.com/docs", + "custom_fields": {}, + "output_types": [], + "pinned": false, + "conditional_paths": [], + "frozen": false, + "outputs": [ + { + "types": [ + "Data" + ], + "selected": "Data", + "name": "transcript_list", + "display_name": "Transcript List", + "method": "list_transcripts", + "value": "__UNDEFINED__", + "cache": true + } + ], + "field_order": [ + "api_key", + "limit", + "status_filter", + "created_on", + "throttled_only" + ], + "beta": false, + "edited": false, + "lf_version": "1.0.18" + }, + "id": "AssemblyAIListTranscripts-3prc4", + "description": "Retrieve a list of transcripts from AssemblyAI with filtering options", + "display_name": "AssemblyAI List Transcripts" + }, + "selected": false, + "width": 384, + "height": 410, + "positionAbsolute": { + "x": -380.99808133361984, + "y": 401.2674645310267 + }, + "dragging": false + }, + { + "id": "AssemblyAILeMUR-jzwHZ", + "type": "genericNode", + "position": { + "x": -875.6482330011189, + "y": 887.1705799007382 + }, + "data": { + "type": "AssemblyAILeMUR", + "node": { + "template": { + "_type": "Component", + "transcription_result": { + "trace_as_metadata": true, + "list": false, + "trace_as_input": true, + "required": false, + "placeholder": "", + "show": true, + "name": "transcription_result", + "value": "", + "display_name": "Transcription Result", + "advanced": false, + "input_types": [ + "Data" + ], + "dynamic": false, + "info": "The 
transcription result from AssemblyAI", + "title_case": false, + "type": "other", + "_input_type": "DataInput" + }, + "api_key": { + "load_from_db": false, + "required": false, + "placeholder": "", + "show": true, + "name": "api_key", + "value": null, + "display_name": "Assembly API Key", + "advanced": false, + "input_types": [ + "Message" + ], + "dynamic": false, + "info": "Your AssemblyAI API key. You can get one from https://www.assemblyai.com/", + "title_case": false, + "password": true, + "type": "str", + "_input_type": "SecretStrInput" + }, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "import assemblyai as aai\n\nfrom langflow.custom import Component\nfrom langflow.io import DataInput, DropdownInput, FloatInput, IntInput, MultilineInput, Output, SecretStrInput\nfrom langflow.schema import Data\n\n\nclass AssemblyAILeMUR(Component):\n display_name = \"AssemblyAI LeMUR\"\n description = \"Apply Large Language Models to spoken data using the AssemblyAI LeMUR framework\"\n documentation = \"https://www.assemblyai.com/docs/lemur\"\n icon = \"AssemblyAI\"\n\n inputs = [\n SecretStrInput(\n name=\"api_key\",\n display_name=\"Assembly API Key\",\n info=\"Your AssemblyAI API key. You can get one from https://www.assemblyai.com/\",\n advanced=False,\n ),\n DataInput(\n name=\"transcription_result\",\n display_name=\"Transcription Result\",\n info=\"The transcription result from AssemblyAI\",\n ),\n MultilineInput(\n name=\"prompt\",\n display_name=\"Input Prompt\",\n info=\"The text to prompt the model\",\n ),\n DropdownInput(\n name=\"final_model\",\n display_name=\"Final Model\",\n options=[\"claude3_5_sonnet\", \"claude3_opus\", \"claude3_haiku\", \"claude3_sonnet\"],\n value=\"claude3_5_sonnet\",\n info=\"The model that is used for the final prompt after compression is performed\",\n advanced=True,\n ),\n FloatInput(\n name=\"temperature\",\n display_name=\"Temperature\",\n advanced=True,\n value=0.0,\n info=\"The temperature to use for the model\",\n ),\n IntInput(\n name=\"max_output_size\",\n display_name=\" Max Output Size\",\n advanced=True,\n value=2000,\n info=\"Max output size in tokens, up to 4000\",\n ),\n DropdownInput(\n name=\"endpoint\",\n display_name=\"Endpoint\",\n options=[\"task\", \"summary\", \"question-answer\"],\n value=\"task\",\n info=\"The LeMUR endpoint to use. For 'summary' and 'question-answer', no prompt input is needed. See https://www.assemblyai.com/docs/api-reference/lemur/ for more info.\",\n advanced=True,\n ),\n MultilineInput(\n name=\"questions\",\n display_name=\"Questions\",\n info=\"Comma-separated list of your questions. Only used if Endpoint is 'question-answer'\",\n advanced=True,\n ),\n MultilineInput(\n name=\"transcript_ids\",\n display_name=\"Transcript IDs\",\n info=\"Comma-separated list of transcript IDs. LeMUR can perform actions over multiple transcripts. 
If provided, the Transcription Result is ignored.\",\n advanced=True,\n ),\n ]\n\n outputs = [\n Output(display_name=\"LeMUR Response\", name=\"lemur_response\", method=\"run_lemur\"),\n ]\n\n def run_lemur(self) -> Data:\n \"\"\"Use the LeMUR task endpoint to input the LLM prompt.\"\"\"\n aai.settings.api_key = self.api_key\n\n if not self.transcription_result and not self.transcript_ids:\n error = \"Either a Transcription Result or Transcript IDs must be provided\"\n self.status = error\n return Data(data={\"error\": error})\n elif self.transcription_result and self.transcription_result.data.get(\"error\"):\n # error message from the previous step\n self.status = self.transcription_result.data[\"error\"]\n return self.transcription_result\n elif self.endpoint == \"task\" and not self.prompt:\n self.status = \"No prompt specified for the task endpoint\"\n return Data(data={\"error\": \"No prompt specified\"})\n elif self.endpoint == \"question-answer\" and not self.questions:\n error = \"No Questions were provided for the question-answer endpoint\"\n self.status = error\n return Data(data={\"error\": error})\n\n # Check for valid transcripts\n transcript_ids = None\n if self.transcription_result and \"id\" in self.transcription_result.data:\n transcript_ids = [self.transcription_result.data[\"id\"]]\n elif self.transcript_ids:\n transcript_ids = self.transcript_ids.split(\",\")\n transcript_ids = [t.strip() for t in transcript_ids]\n \n if not transcript_ids:\n error = \"Either a valid Transcription Result or valid Transcript IDs must be provided\"\n self.status = error\n return Data(data={\"error\": error})\n\n # Get TranscriptGroup and check if there is any error\n transcript_group = aai.TranscriptGroup(transcript_ids=transcript_ids)\n transcript_group, failures = transcript_group.wait_for_completion(return_failures=True)\n if failures:\n error = f\"Getting transcriptions failed: {failures[0]}\"\n self.status = error\n return Data(data={\"error\": error})\n \n for t in transcript_group.transcripts:\n if t.status == aai.TranscriptStatus.error:\n self.status = t.error\n return Data(data={\"error\": t.error})\n\n # Perform LeMUR action\n try:\n response = self.perform_lemur_action(transcript_group, self.endpoint)\n result = Data(data=response)\n self.status = result\n return result\n except Exception as e:\n error = f\"An Error happened: {str(e)}\"\n self.status = error\n return Data(data={\"error\": error})\n\n def perform_lemur_action(self, transcript_group: aai.TranscriptGroup, endpoint: str) -> dict:\n print(\"Endpoint:\", endpoint, type(endpoint))\n if endpoint == \"task\":\n result = transcript_group.lemur.task(\n prompt=self.prompt,\n final_model=self.get_final_model(self.final_model),\n temperature=self.temperature,\n max_output_size=self.max_output_size,\n )\n elif endpoint == \"summary\":\n result = transcript_group.lemur.summarize(\n final_model=self.get_final_model(self.final_model),\n temperature=self.temperature,\n max_output_size=self.max_output_size,\n )\n elif endpoint == \"question-answer\":\n questions = self.questions.split(\",\")\n questions = [aai.LemurQuestion(question=q) for q in questions]\n result = transcript_group.lemur.question(\n questions=questions,\n final_model=self.get_final_model(self.final_model),\n temperature=self.temperature,\n max_output_size=self.max_output_size,\n )\n else:\n raise ValueError(f\"Endpoint not supported: {endpoint}\")\n\n return result.dict()\n \n def get_final_model(self, model_name: str) -> aai.LemurModel:\n if model_name == 
\"claude3_5_sonnet\":\n return aai.LemurModel.claude3_5_sonnet\n elif model_name == \"claude3_opus\":\n return aai.LemurModel.claude3_opus\n elif model_name == \"claude3_haiku\":\n return aai.LemurModel.claude3_haiku\n elif model_name == \"claude3_sonnet\":\n return aai.LemurModel.claude3_sonnet\n else:\n raise ValueError(f\"Model name not supported: {model_name}\")\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "endpoint": { + "trace_as_metadata": true, + "options": [ + "task", + "summary", + "question-answer" + ], + "combobox": false, + "required": false, + "placeholder": "", + "show": true, + "name": "endpoint", + "value": "task", + "display_name": "Endpoint", + "advanced": true, + "dynamic": false, + "info": "The LeMUR endpoint to use. For 'summary' and 'question-answer', no prompt input is needed. See https://www.assemblyai.com/docs/api-reference/lemur/ for more info.", + "title_case": false, + "type": "str", + "_input_type": "DropdownInput" + }, + "final_model": { + "trace_as_metadata": true, + "options": [ + "claude3_5_sonnet", + "claude3_opus", + "claude3_haiku", + "claude3_sonnet" + ], + "combobox": false, + "required": false, + "placeholder": "", + "show": true, + "name": "final_model", + "value": "claude3_5_sonnet", + "display_name": "Final Model", + "advanced": true, + "dynamic": false, + "info": "The model that is used for the final prompt after compression is performed", + "title_case": false, + "type": "str", + "_input_type": "DropdownInput" + }, + "max_output_size": { + "trace_as_metadata": true, + "list": false, + "required": false, + "placeholder": "", + "show": true, + "name": "max_output_size", + "value": 2000, + "display_name": " Max Output Size", + "advanced": true, + "dynamic": false, + "info": "Max output size in tokens, up to 4000", + "title_case": false, + "type": "int", + "_input_type": "IntInput" + }, + "prompt": { + "trace_as_input": true, + "multiline": true, + "trace_as_metadata": true, + "load_from_db": false, + "list": false, + "required": false, + "placeholder": "", + "show": true, + "name": "prompt", + "value": "", + "display_name": "Input Prompt", + "advanced": false, + "input_types": [ + "Message" + ], + "dynamic": false, + "info": "The text to prompt the model", + "title_case": false, + "type": "str", + "_input_type": "MultilineInput" + }, + "questions": { + "trace_as_input": true, + "multiline": true, + "trace_as_metadata": true, + "load_from_db": false, + "list": false, + "required": false, + "placeholder": "", + "show": true, + "name": "questions", + "value": "", + "display_name": "Questions", + "advanced": true, + "input_types": [ + "Message" + ], + "dynamic": false, + "info": "Comma-separated list of your questions. 
Only used if Endpoint is 'question-answer'", + "title_case": false, + "type": "str", + "_input_type": "MultilineInput" + }, + "temperature": { + "trace_as_metadata": true, + "list": false, + "required": false, + "placeholder": "", + "show": true, + "name": "temperature", + "value": 0, + "display_name": "Temperature", + "advanced": true, + "dynamic": false, + "info": "The temperature to use for the model", + "title_case": false, + "type": "float", + "_input_type": "FloatInput" + }, + "transcript_ids": { + "trace_as_input": true, + "multiline": true, + "trace_as_metadata": true, + "load_from_db": false, + "list": false, + "required": false, + "placeholder": "", + "show": true, + "name": "transcript_ids", + "value": "", + "display_name": "Transcript IDs", + "advanced": true, + "input_types": [ + "Message" + ], + "dynamic": false, + "info": "Comma-separated list of transcript IDs. LeMUR can perform actions over multiple transcripts. If provided, the Transcription Result is ignored.", + "title_case": false, + "type": "str", + "_input_type": "MultilineInput" + } + }, + "description": "Apply Large Language Models to spoken data using the AssemblyAI LeMUR framework", + "icon": "AssemblyAI", + "base_classes": [ + "Data" + ], + "display_name": "AssemblyAI LeMUR", + "documentation": "https://www.assemblyai.com/docs/lemur", + "custom_fields": {}, + "output_types": [], + "pinned": false, + "conditional_paths": [], + "frozen": false, + "outputs": [ + { + "types": [ + "Data" + ], + "selected": "Data", + "name": "lemur_response", + "display_name": "LeMUR Response", + "method": "run_lemur", + "value": "__UNDEFINED__", + "cache": true + } + ], + "field_order": [ + "api_key", + "transcription_result", + "prompt", + "final_model", + "temperature", + "max_output_size", + "endpoint", + "questions", + "transcript_ids" + ], + "beta": false, + "edited": false, + "lf_version": "1.0.18" + }, + "id": "AssemblyAILeMUR-jzwHZ", + "description": "Apply Large Language Models to spoken data using the AssemblyAI LeMUR framework", + "display_name": "AssemblyAI LeMUR" + }, + "selected": false, + "width": 384, + "height": 454, + "positionAbsolute": { + "x": -875.6482330011189, + "y": 887.1705799007382 + }, + "dragging": false + }, + { + "id": "ParseData-th7JM", + "type": "genericNode", + "position": { + "x": -862.5843195492909, + "y": -56.71774780191424 + }, + "data": { + "type": "ParseData", + "node": { + "template": { + "_type": "Component", + "data": { + "trace_as_metadata": true, + "list": false, + "trace_as_input": true, + "required": false, + "placeholder": "", + "show": true, + "name": "data", + "value": "", + "display_name": "Data", + "advanced": false, + "input_types": [ + "Data" + ], + "dynamic": false, + "info": "The data to convert to text.", + "title_case": false, + "type": "other", + "_input_type": "DataInput" + }, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from langflow.custom import Component\nfrom langflow.helpers.data import data_to_text\nfrom langflow.io import DataInput, MultilineInput, Output, StrInput\nfrom langflow.schema.message import Message\n\n\nclass ParseDataComponent(Component):\n display_name = \"Parse Data\"\n description = \"Convert Data into plain text following a specified template.\"\n icon = \"braces\"\n name = \"ParseData\"\n\n inputs = [\n DataInput(name=\"data\", display_name=\"Data\", info=\"The data to convert to text.\"),\n MultilineInput(\n name=\"template\",\n display_name=\"Template\",\n 
info=\"The template to use for formatting the data. It can contain the keys {text}, {data} or any other key in the Data.\",\n value=\"{text}\",\n ),\n StrInput(name=\"sep\", display_name=\"Separator\", advanced=True, value=\"\\n\"),\n ]\n\n outputs = [\n Output(display_name=\"Text\", name=\"text\", method=\"parse_data\"),\n ]\n\n def parse_data(self) -> Message:\n data = self.data if isinstance(self.data, list) else [self.data]\n template = self.template\n\n result_string = data_to_text(template, data, sep=self.sep)\n self.status = result_string\n return Message(text=result_string)\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "sep": { + "trace_as_metadata": true, + "load_from_db": false, + "list": false, + "required": false, + "placeholder": "", + "show": true, + "name": "sep", + "value": "\n", + "display_name": "Separator", + "advanced": true, + "dynamic": false, + "info": "", + "title_case": false, + "type": "str", + "_input_type": "StrInput" + }, + "template": { + "trace_as_input": true, + "multiline": true, + "trace_as_metadata": true, + "load_from_db": false, + "list": false, + "required": false, + "placeholder": "", + "show": true, + "name": "template", + "value": "{text}", + "display_name": "Template", + "advanced": false, + "input_types": [ + "Message" + ], + "dynamic": false, + "info": "The template to use for formatting the data. It can contain the keys {text}, {data} or any other key in the Data.", + "title_case": false, + "type": "str", + "_input_type": "MultilineInput" + } + }, + "description": "Convert Data into plain text following a specified template.", + "icon": "braces", + "base_classes": [ + "Message" + ], + "display_name": "Parse Data", + "documentation": "", + "custom_fields": {}, + "output_types": [], + "pinned": false, + "conditional_paths": [], + "frozen": false, + "outputs": [ + { + "types": [ + "Message" + ], + "selected": "Message", + "name": "text", + "display_name": "Text", + "method": "parse_data", + "value": "__UNDEFINED__", + "cache": true + } + ], + "field_order": [ + "data", + "template", + "sep" + ], + "beta": false, + "edited": false, + "lf_version": "1.0.18" + }, + "id": "ParseData-th7JM" + }, + "selected": false, + "width": 384, + "height": 368, + "positionAbsolute": { + "x": -862.5843195492909, + "y": -56.71774780191424 + }, + "dragging": false + } + ], + "edges": [ + { + "source": "AssemblyAITranscriptionJobCreator-Idt7P", + "sourceHandle": "{œdataTypeœ:œAssemblyAITranscriptionJobCreatorœ,œidœ:œAssemblyAITranscriptionJobCreator-Idt7Pœ,œnameœ:œtranscript_idœ,œoutput_typesœ:[œDataœ]}", + "target": "AssemblyAITranscriptionJobPoller-F46nf", + "targetHandle": "{œfieldNameœ:œtranscript_idœ,œidœ:œAssemblyAITranscriptionJobPoller-F46nfœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}", + "data": { + "targetHandle": { + "fieldName": "transcript_id", + "id": "AssemblyAITranscriptionJobPoller-F46nf", + "inputTypes": [ + "Data" + ], + "type": "other" + }, + "sourceHandle": { + "dataType": "AssemblyAITranscriptionJobCreator", + "id": "AssemblyAITranscriptionJobCreator-Idt7P", + "name": "transcript_id", + "output_types": [ + "Data" + ] + } + }, + "id": 
"reactflow__edge-AssemblyAITranscriptionJobCreator-Idt7P{œdataTypeœ:œAssemblyAITranscriptionJobCreatorœ,œidœ:œAssemblyAITranscriptionJobCreator-Idt7Pœ,œnameœ:œtranscript_idœ,œoutput_typesœ:[œDataœ]}-AssemblyAITranscriptionJobPoller-F46nf{œfieldNameœ:œtranscript_idœ,œidœ:œAssemblyAITranscriptionJobPoller-F46nfœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}", + "animated": false, + "className": "", + "selected": false + }, + { + "source": "AssemblyAITranscriptionJobPoller-F46nf", + "sourceHandle": "{œdataTypeœ:œAssemblyAITranscriptionJobPollerœ,œidœ:œAssemblyAITranscriptionJobPoller-F46nfœ,œnameœ:œtranscription_resultœ,œoutput_typesœ:[œDataœ]}", + "target": "AssemblyAIGetSubtitles-3sjU6", + "targetHandle": "{œfieldNameœ:œtranscription_resultœ,œidœ:œAssemblyAIGetSubtitles-3sjU6œ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}", + "data": { + "targetHandle": { + "fieldName": "transcription_result", + "id": "AssemblyAIGetSubtitles-3sjU6", + "inputTypes": [ + "Data" + ], + "type": "other" + }, + "sourceHandle": { + "dataType": "AssemblyAITranscriptionJobPoller", + "id": "AssemblyAITranscriptionJobPoller-F46nf", + "name": "transcription_result", + "output_types": [ + "Data" + ] + } + }, + "id": "reactflow__edge-AssemblyAITranscriptionJobPoller-F46nf{œdataTypeœ:œAssemblyAITranscriptionJobPollerœ,œidœ:œAssemblyAITranscriptionJobPoller-F46nfœ,œnameœ:œtranscription_resultœ,œoutput_typesœ:[œDataœ]}-AssemblyAIGetSubtitles-3sjU6{œfieldNameœ:œtranscription_resultœ,œidœ:œAssemblyAIGetSubtitles-3sjU6œ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}", + "animated": false, + "className": "" + }, + { + "source": "AssemblyAITranscriptionJobPoller-F46nf", + "sourceHandle": "{œdataTypeœ:œAssemblyAITranscriptionJobPollerœ,œidœ:œAssemblyAITranscriptionJobPoller-F46nfœ,œnameœ:œtranscription_resultœ,œoutput_typesœ:[œDataœ]}", + "target": "ParseData-th7JM", + "targetHandle": "{œfieldNameœ:œdataœ,œidœ:œParseData-th7JMœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}", + "data": { + "targetHandle": { + "fieldName": "data", + "id": "ParseData-th7JM", + "inputTypes": [ + "Data" + ], + "type": "other" + }, + "sourceHandle": { + "dataType": "AssemblyAITranscriptionJobPoller", + "id": "AssemblyAITranscriptionJobPoller-F46nf", + "name": "transcription_result", + "output_types": [ + "Data" + ] + } + }, + "id": "reactflow__edge-AssemblyAITranscriptionJobPoller-F46nf{œdataTypeœ:œAssemblyAITranscriptionJobPollerœ,œidœ:œAssemblyAITranscriptionJobPoller-F46nfœ,œnameœ:œtranscription_resultœ,œoutput_typesœ:[œDataœ]}-ParseData-th7JM{œfieldNameœ:œdataœ,œidœ:œParseData-th7JMœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}", + "animated": false, + "className": "" + }, + { + "source": "Prompt-IO8Cq", + "sourceHandle": "{œdataTypeœ:œPromptœ,œidœ:œPrompt-IO8Cqœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}", + "target": "AssemblyAILeMUR-jzwHZ", + "targetHandle": "{œfieldNameœ:œpromptœ,œidœ:œAssemblyAILeMUR-jzwHZœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "data": { + "targetHandle": { + "fieldName": "prompt", + "id": "AssemblyAILeMUR-jzwHZ", + "inputTypes": [ + "Message" + ], + "type": "str" + }, + "sourceHandle": { + "dataType": "Prompt", + "id": "Prompt-IO8Cq", + "name": "prompt", + "output_types": [ + "Message" + ] + } + }, + "id": "reactflow__edge-Prompt-IO8Cq{œdataTypeœ:œPromptœ,œidœ:œPrompt-IO8Cqœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-AssemblyAILeMUR-jzwHZ{œfieldNameœ:œpromptœ,œidœ:œAssemblyAILeMUR-jzwHZœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "animated": false, + "className": "" + }, + { + "source": "AssemblyAITranscriptionJobPoller-F46nf", + "sourceHandle": 
"{œdataTypeœ:œAssemblyAITranscriptionJobPollerœ,œidœ:œAssemblyAITranscriptionJobPoller-F46nfœ,œnameœ:œtranscription_resultœ,œoutput_typesœ:[œDataœ]}", + "target": "AssemblyAILeMUR-jzwHZ", + "targetHandle": "{œfieldNameœ:œtranscription_resultœ,œidœ:œAssemblyAILeMUR-jzwHZœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}", + "data": { + "targetHandle": { + "fieldName": "transcription_result", + "id": "AssemblyAILeMUR-jzwHZ", + "inputTypes": [ + "Data" + ], + "type": "other" + }, + "sourceHandle": { + "dataType": "AssemblyAITranscriptionJobPoller", + "id": "AssemblyAITranscriptionJobPoller-F46nf", + "name": "transcription_result", + "output_types": [ + "Data" + ] + } + }, + "id": "reactflow__edge-AssemblyAITranscriptionJobPoller-F46nf{œdataTypeœ:œAssemblyAITranscriptionJobPollerœ,œidœ:œAssemblyAITranscriptionJobPoller-F46nfœ,œnameœ:œtranscription_resultœ,œoutput_typesœ:[œDataœ]}-AssemblyAILeMUR-jzwHZ{œfieldNameœ:œtranscription_resultœ,œidœ:œAssemblyAILeMUR-jzwHZœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}", + "animated": false, + "className": "" + } + ], + "viewport": { + "x": 733.3920447354355, + "y": -42.8262727047815, + "zoom": 0.2612816498236053 + } + }, + "user_id": "9c01eee4-17dd-460e-8c52-bba36d635a9d", + "folder_id": "54fc9211-d42d-4c3f-a932-ee4987f61988", + "description": "Transcribe and analyze audio with AssemblyAI", + "icon_bg_color": null, + "updated_at": "2024-09-26T14:55:47+00:00", + "webhook": false, + "id": "fa69381c-d1c4-4535-bc23-bc2fb4956e1e" +} \ No newline at end of file diff --git a/docs/docs/Integrations/Google/_category_.json b/docs/docs/Integrations/Google/_category_.json new file mode 100644 index 000000000000..8fd84656cebb --- /dev/null +++ b/docs/docs/Integrations/Google/_category_.json @@ -0,0 +1 @@ +{ "position": 2, "label": "Google" } diff --git a/docs/docs/Integrations/Google/integrations-setup-google-oauth-langflow.md b/docs/docs/Integrations/Google/integrations-setup-google-oauth-langflow.md new file mode 100644 index 000000000000..c1d534b1206f --- /dev/null +++ b/docs/docs/Integrations/Google/integrations-setup-google-oauth-langflow.md @@ -0,0 +1,165 @@ +--- +title: "Setup Google OAuth for Langflow Integration" +slug: /integrations-setup-google-oauth-langflow +sidebar_position: 3 +description: "A comprehensive guide on creating a Google OAuth app, obtaining tokens, and integrating them with Langflow's Google components." +--- + +import TOCInline from '@theme/TOCInline'; + +# Setting Up Google OAuth for Langflow + +Quickly set up Google OAuth to integrate Google Gmail and Drive with Langflow. To do this, create an OAuth app in Google Cloud, obtain the necessary credentials and access tokens, and add them to Langflow’s Google components. + +# Overview + +Langflow supports OAuth for seamless integration with Google services. Just follow the setup steps to configure OAuth credentials, retrieve tokens, and connect your Google services to Langflow. + +--- + +## Step 1: Creating an OAuth Application in Google Cloud {#5b8981b15d86192d17b0e5725c1f95e7} + +1. **Access Google Cloud Console** + + - Go to the [Google Cloud Console](https://console.cloud.google.com/). + +2. **Create or Select a Project** + + - Click **Select a project** at the top of the page and choose an existing project or create a new one. + +![OAuth Client ID and Secret](/img/google/create-a-google-cloud-project.gif) + +3. **Enable APIs for the Project** + + - Go to **APIs & Services > Library** and enable the APIs you need (e.g., Google Drive API, Google Gmail API). + +4. 
**Navigate to OAuth consent screen**
+   - Go to **APIs & Services > OAuth consent screen**.
+5. **Set Up OAuth Consent Screen**
+
+   - On the OAuth consent screen, set up essential app details such as the application name, user support email, required [scopes](https://developers.google.com/identity/protocols/oauth2/scopes) (the permissions your app needs), and authorized domains.
+   - Ensure you **publish** the app if it’s not restricted to internal use.
+
+![OAuth Consent Screen](/img/google/setup-oauth-consent-screen.png)
+
+:::info
+
+- Configuring the OAuth consent screen is crucial for obtaining user permissions.
+
+:::
+
+6. **Create OAuth Client ID**
+
+   - Go back to **Credentials** and select **Create Credentials > OAuth Client ID**.
+   - Choose **Desktop app** as the application type.
+
+7. **Save OAuth Client ID and Client Secret**
+
+   - After creating the client, you'll receive a Client ID and Client Secret. You can either view them directly or download them as a JSON file. Download and store this information securely, as Langflow needs it to authenticate with Google.
+
+![OAuth Client ID and Secret](/img/google/create-oauth-client-id.png)
+
+---
+
+## Step 2: Retrieving Access and Refresh Tokens
+
+With your OAuth application configured and your Client ID created, follow these steps to obtain the tokens:
+
+1. **Authenticate the Application**
+
+   - Create a new project in Langflow.
+   - Add a Google OAuth Token component.
+   - In the **Credentials File** field of the Google OAuth Token component, provide the JSON file containing the Client ID credentials you downloaded from Google in the [previous steps](#5b8981b15d86192d17b0e5725c1f95e7).
+   - Run the Google OAuth Token component to authenticate your application.
+
+:::info
+
+- When the component is executed, a new browser tab may open so that you can authenticate with the Google Cloud account that owns the project where you created the OAuth application and credentials and enabled the required APIs.
+- If a new tab does not open automatically, check the Langflow **Logs** for the Google authentication URL. Open this URL in your browser to complete the authentication. Only after authenticating will the JSON token be generated.
+
+:::
+
+2. **Refresh Tokens**
+
+   - After successful authentication, your Langflow application can request and refresh tokens for your app. These tokens enable Langflow to interact with Google services on your behalf and execute the requests you’ve specified.
+   - By default, token validity is managed by Google’s servers. In Langflow, tokens refresh automatically after the initial authentication. However, if your application is inactive for an extended period, the tokens may expire, and you will need to re-authenticate to resume use in Langflow.
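+
+For reference, the **Credentials File** used in step 1 above is the client-secret JSON you downloaded from Google. For a **Desktop app** client it typically has the shape below; all values are placeholders, and the exact fields can vary with the client type you chose:
+
+```json
+{
+  "installed": {
+    "client_id": "YOUR_CLIENT_ID.apps.googleusercontent.com",
+    "project_id": "your-project-id",
+    "auth_uri": "https://accounts.google.com/o/oauth2/auth",
+    "token_uri": "https://oauth2.googleapis.com/token",
+    "client_secret": "YOUR_CLIENT_SECRET",
+    "redirect_uris": ["http://localhost"]
+  }
+}
+```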
+
+---
+
+## Step 3: Configuring Google Components in Langflow
+
+In this example, we use the Google Drive Loader component to load a text file hosted on Google Drive, translate its contents to Spanish, and return the result to a chat output.
+
+1. **Open Langflow and Add the Google Drive Loader Component**
+
+   - In Langflow, go to your flow editor and add a **Google Drive Loader** component.
+
+2. **Enter OAuth Credentials**
+
+   - In the `JSON String of the Service Account Token` or `Token String` field of the Google Drive Loader component, enter the JSON string containing the token returned by the Google OAuth Token component. Remember to convert the data output of the Google OAuth Token component to text using the `Parse Data` component.
+
+3. **Get the File ID from Google Drive**
+
+   To obtain the Google Drive file ID from a URL:
+
+   1. **Copy the Google Drive URL:**
+
+      - Open the document in Google Drive and copy the link from the address bar.
+
+   2. **Identify the Document ID:**
+
+      - The file ID is located between `/d/` and `/edit` in the URL. Example:
+
+        ```
+        https://drive.google.com/file/d/1a2b3c4D5E6F7gHI8J9klmnopQ/edit
+        ```
+
+        Here, the ID is `1a2b3c4D5E6F7gHI8J9klmnopQ`. (For a scripted alternative, see the sketch at the end of this guide.)
+
+   3. **Enter the ID in the Component:**
+
+      - In Langflow, paste the copied ID into the **Document ID** field of the Google Drive Loader component so that the component can access the file.
+
+4. **Test the Connection**
+
+   - After adding the credentials and the Document ID, test the component’s functionality within your flow to confirm a successful connection.
+
+---
+
+## Step 4: Using Google Components in Your Flow
+
+With OAuth successfully configured, you can now use Google components in Langflow to automate tasks:
+
+- **Gmail Loader**
+  Loads emails from Gmail using the provided credentials.
+- **Google Drive Loader**
+  Loads documents from Google Drive using the provided credentials.
+- **Google Drive Search**
+  Searches Google Drive files using the provided credentials and query parameters.
+
+Each component uses your OAuth tokens to perform these actions on your behalf.
+
+## Flow Example
+
+You can use the flow below as a starting point for your tests.
+
+- Flow Google Drive Docs Translations Example -
+  (Download link)
+
+---
+
+## Troubleshooting Common Issues
+
+- **Token Expiration**: Refresh your tokens if you encounter authentication errors; if they have expired, re-authenticate as described in Step 2.
+- **Permission Errors**: Double-check the OAuth consent settings and scopes in your Google Cloud account, as well as your Langflow component settings, to ensure you’ve granted the necessary permissions.
+- **A new window for authentication did not open?** Check the Langflow **Logs** for a line like the one below and open the URL in your browser.
+
+  Example:
+
+  ```
+  Please visit this URL to authorize this application: https://accounts.google.com/o/oauth2/auth?response_type=code&client_id=156549873216-fa86b6a74ff8ee9a69d2b98e0bc478e8.apps.googleusercontent.com&redirect_uri=http%3A%2F%2Flocalhost%3A54899%2F&scope=https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fdrive.readonly&state=75gxTJWwpUZjSWeyWDL81BmJAzGt1Q&access_type=offline
+  ```
+
+---
+
+By following these steps, your Langflow environment will be fully integrated with Google services, giving you a powerful tool for automating workflows that involve Gmail, Google Drive, and more.
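+
+---
+
+## Appendix: Extracting a File ID Programmatically
+
+If you prefer to script the ID extraction from Step 3 instead of copying it by hand, a small helper is enough. This is a minimal sketch in plain Python (standard library only); the function name `extract_drive_file_id` is illustrative and not part of Langflow:
+
+```python
+import re
+
+
+def extract_drive_file_id(url: str) -> str:
+    """Return the file ID from a Google Drive URL.
+
+    Handles links of the form
+    https://drive.google.com/file/d/<FILE_ID>/edit (or /view).
+    """
+    # The ID is the path segment that follows "/d/".
+    match = re.search(r"/d/([A-Za-z0-9_-]+)", url)
+    if match is None:
+        raise ValueError(f"No file ID found in URL: {url}")
+    return match.group(1)
+
+
+print(extract_drive_file_id("https://drive.google.com/file/d/1a2b3c4D5E6F7gHI8J9klmnopQ/edit"))
+# -> 1a2b3c4D5E6F7gHI8J9klmnopQ
+```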
diff --git a/docs/docs/Integrations/Notion/Conversational_Notion_Agent.json b/docs/docs/Integrations/Notion/Conversational_Notion_Agent.json new file mode 100644 index 000000000000..2b8b4063d855 --- /dev/null +++ b/docs/docs/Integrations/Notion/Conversational_Notion_Agent.json @@ -0,0 +1 @@ +{"id":"e070f0be-edc4-4512-bb0f-e53307062a26","data":{"nodes":[{"id":"AddContentToPage-ZezUn","type":"genericNode","position":{"x":1416.217259177943,"y":1709.6205867919527},"data":{"type":"AddContentToPage","node":{"template":{"_type":"Component","block_id":{"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"block_id","value":"","display_name":"Page/Block ID","advanced":true,"dynamic":false,"info":"The ID of the page/block to add the content.","title_case":false,"type":"str","_input_type":"StrInput"},"code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"import json\nfrom typing import Dict, Any, Union\nfrom markdown import markdown\nfrom bs4 import BeautifulSoup\nimport requests\nfrom langflow.io import Output\nfrom langflow.base.langchain_utilities.model import LCToolComponent\nfrom langflow.inputs import SecretStrInput, StrInput, MultilineInput\nfrom langflow.schema import Data\nfrom langflow.field_typing import Tool\nfrom langchain.tools import StructuredTool\nfrom pydantic import BaseModel, Field\n\n\nclass AddContentToPage(LCToolComponent):\n display_name: str = \"Add Content to Page \"\n description: str = \"Convert markdown text to Notion blocks and append them to a Notion page.\"\n documentation: str = \"https://developers.notion.com/reference/patch-block-children\"\n icon = \"NotionDirectoryLoader\"\n\n inputs = [\n MultilineInput(\n name=\"markdown_text\",\n display_name=\"Markdown Text\",\n info=\"The markdown text to convert to Notion blocks.\",\n ),\n StrInput(\n name=\"block_id\",\n display_name=\"Page/Block ID\",\n info=\"The ID of the page/block to add the content.\",\n ),\n SecretStrInput(\n name=\"notion_secret\",\n display_name=\"Notion Secret\",\n info=\"The Notion integration token.\",\n required=True,\n ),\n ]\n outputs = [\n Output(name=\"example_output\", display_name=\"Data\", method=\"run_model\"),\n Output(name=\"example_tool_output\", display_name=\"Tool\", method=\"build_tool\"),\n ]\n\n class AddContentToPageSchema(BaseModel):\n markdown_text: str = Field(..., description=\"The markdown text to convert to Notion blocks.\")\n block_id: str = Field(..., description=\"The ID of the page/block to add the content.\")\n\n def run_model(self) -> Data:\n result = self._add_content_to_page(self.markdown_text, self.block_id)\n return Data(data=result, text=json.dumps(result))\n\n def build_tool(self) -> Tool:\n return StructuredTool.from_function(\n name=\"add_content_to_notion_page\",\n description=\"Convert markdown text to Notion blocks and append them to a Notion page.\",\n func=self._add_content_to_page,\n args_schema=self.AddContentToPageSchema,\n )\n\n def _add_content_to_page(self, markdown_text: str, block_id: str) -> Union[Dict[str, Any], str]:\n try:\n html_text = markdown(markdown_text)\n soup = BeautifulSoup(html_text, \"html.parser\")\n blocks = self.process_node(soup)\n\n url = f\"https://api.notion.com/v1/blocks/{block_id}/children\"\n headers = {\n \"Authorization\": f\"Bearer {self.notion_secret}\",\n \"Content-Type\": \"application/json\",\n \"Notion-Version\": \"2022-06-28\",\n }\n\n data = {\n \"children\": blocks,\n }\n\n response = 
requests.patch(url, headers=headers, json=data)\n response.raise_for_status()\n\n return response.json()\n except requests.exceptions.RequestException as e:\n error_message = f\"Error: Failed to add content to Notion page. {str(e)}\"\n if hasattr(e, \"response\") and e.response is not None:\n error_message += f\" Status code: {e.response.status_code}, Response: {e.response.text}\"\n return error_message\n except Exception as e:\n return f\"Error: An unexpected error occurred while adding content to Notion page. {str(e)}\"\n\n def process_node(self, node):\n blocks = []\n if isinstance(node, str):\n text = node.strip()\n if text:\n if text.startswith(\"#\"):\n heading_level = text.count(\"#\", 0, 6)\n heading_text = text[heading_level:].strip()\n if heading_level == 1:\n blocks.append(self.create_block(\"heading_1\", heading_text))\n elif heading_level == 2:\n blocks.append(self.create_block(\"heading_2\", heading_text))\n elif heading_level == 3:\n blocks.append(self.create_block(\"heading_3\", heading_text))\n else:\n blocks.append(self.create_block(\"paragraph\", text))\n elif node.name == \"h1\":\n blocks.append(self.create_block(\"heading_1\", node.get_text(strip=True)))\n elif node.name == \"h2\":\n blocks.append(self.create_block(\"heading_2\", node.get_text(strip=True)))\n elif node.name == \"h3\":\n blocks.append(self.create_block(\"heading_3\", node.get_text(strip=True)))\n elif node.name == \"p\":\n code_node = node.find(\"code\")\n if code_node:\n code_text = code_node.get_text()\n language, code = self.extract_language_and_code(code_text)\n blocks.append(self.create_block(\"code\", code, language=language))\n elif self.is_table(str(node)):\n blocks.extend(self.process_table(node))\n else:\n blocks.append(self.create_block(\"paragraph\", node.get_text(strip=True)))\n elif node.name == \"ul\":\n blocks.extend(self.process_list(node, \"bulleted_list_item\"))\n elif node.name == \"ol\":\n blocks.extend(self.process_list(node, \"numbered_list_item\"))\n elif node.name == \"blockquote\":\n blocks.append(self.create_block(\"quote\", node.get_text(strip=True)))\n elif node.name == \"hr\":\n blocks.append(self.create_block(\"divider\", \"\"))\n elif node.name == \"img\":\n blocks.append(self.create_block(\"image\", \"\", image_url=node.get(\"src\")))\n elif node.name == \"a\":\n blocks.append(self.create_block(\"bookmark\", node.get_text(strip=True), link_url=node.get(\"href\")))\n elif node.name == \"table\":\n blocks.extend(self.process_table(node))\n\n for child in node.children:\n if isinstance(child, str):\n continue\n blocks.extend(self.process_node(child))\n\n return blocks\n\n def extract_language_and_code(self, code_text):\n lines = code_text.split(\"\\n\")\n language = lines[0].strip()\n code = \"\\n\".join(lines[1:]).strip()\n return language, code\n\n def is_code_block(self, text):\n return text.startswith(\"```\")\n\n def extract_code_block(self, text):\n lines = text.split(\"\\n\")\n language = lines[0].strip(\"`\").strip()\n code = \"\\n\".join(lines[1:]).strip(\"`\").strip()\n return language, code\n\n def is_table(self, text):\n rows = text.split(\"\\n\")\n if len(rows) < 2:\n return False\n\n has_separator = False\n for i, row in enumerate(rows):\n if \"|\" in row:\n cells = [cell.strip() for cell in row.split(\"|\")]\n cells = [cell for cell in cells if cell] # Remove empty cells\n if i == 1 and all(set(cell) <= set(\"-|\") for cell in cells):\n has_separator = True\n elif not cells:\n return False\n\n return has_separator and len(rows) >= 3\n\n def process_list(self, 
node, list_type):\n blocks = []\n for item in node.find_all(\"li\"):\n item_text = item.get_text(strip=True)\n checked = item_text.startswith(\"[x]\")\n is_checklist = item_text.startswith(\"[ ]\") or checked\n\n if is_checklist:\n item_text = item_text.replace(\"[x]\", \"\").replace(\"[ ]\", \"\").strip()\n blocks.append(self.create_block(\"to_do\", item_text, checked=checked))\n else:\n blocks.append(self.create_block(list_type, item_text))\n return blocks\n\n def process_table(self, node):\n blocks = []\n header_row = node.find(\"thead\").find(\"tr\") if node.find(\"thead\") else None\n body_rows = node.find(\"tbody\").find_all(\"tr\") if node.find(\"tbody\") else []\n\n if header_row or body_rows:\n table_width = max(\n len(header_row.find_all([\"th\", \"td\"])) if header_row else 0,\n max(len(row.find_all([\"th\", \"td\"])) for row in body_rows),\n )\n\n table_block = self.create_block(\"table\", \"\", table_width=table_width, has_column_header=bool(header_row))\n blocks.append(table_block)\n\n if header_row:\n header_cells = [cell.get_text(strip=True) for cell in header_row.find_all([\"th\", \"td\"])]\n header_row_block = self.create_block(\"table_row\", header_cells)\n blocks.append(header_row_block)\n\n for row in body_rows:\n cells = [cell.get_text(strip=True) for cell in row.find_all([\"th\", \"td\"])]\n row_block = self.create_block(\"table_row\", cells)\n blocks.append(row_block)\n\n return blocks\n\n def create_block(self, block_type: str, content: str, **kwargs) -> Dict[str, Any]:\n block: dict[str, Any] = {\n \"object\": \"block\",\n \"type\": block_type,\n block_type: {},\n }\n\n if block_type in [\n \"paragraph\",\n \"heading_1\",\n \"heading_2\",\n \"heading_3\",\n \"bulleted_list_item\",\n \"numbered_list_item\",\n \"quote\",\n ]:\n block[block_type][\"rich_text\"] = [\n {\n \"type\": \"text\",\n \"text\": {\n \"content\": content,\n },\n }\n ]\n elif block_type == \"to_do\":\n block[block_type][\"rich_text\"] = [\n {\n \"type\": \"text\",\n \"text\": {\n \"content\": content,\n },\n }\n ]\n block[block_type][\"checked\"] = kwargs.get(\"checked\", False)\n elif block_type == \"code\":\n block[block_type][\"rich_text\"] = [\n {\n \"type\": \"text\",\n \"text\": {\n \"content\": content,\n },\n }\n ]\n block[block_type][\"language\"] = kwargs.get(\"language\", \"plain text\")\n elif block_type == \"image\":\n block[block_type] = {\"type\": \"external\", \"external\": {\"url\": kwargs.get(\"image_url\", \"\")}}\n elif block_type == \"divider\":\n pass\n elif block_type == \"bookmark\":\n block[block_type][\"url\"] = kwargs.get(\"link_url\", \"\")\n elif block_type == \"table\":\n block[block_type][\"table_width\"] = kwargs.get(\"table_width\", 0)\n block[block_type][\"has_column_header\"] = kwargs.get(\"has_column_header\", False)\n block[block_type][\"has_row_header\"] = kwargs.get(\"has_row_header\", False)\n elif block_type == \"table_row\":\n block[block_type][\"cells\"] = [[{\"type\": \"text\", \"text\": {\"content\": cell}} for cell in content]]\n\n return block\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"markdown_text":{"trace_as_input":true,"multiline":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"markdown_text","value":"","display_name":"Markdown Text","advanced":true,"input_types":["Message"],"dynamic":false,"info":"The markdown text to convert to Notion 
blocks.","title_case":false,"type":"str","_input_type":"MultilineInput"},"notion_secret":{"load_from_db":true,"required":true,"placeholder":"","show":true,"name":"notion_secret","value":"","display_name":"Notion Secret","advanced":false,"input_types":["Message"],"dynamic":false,"info":"The Notion integration token.","title_case":false,"password":true,"type":"str","_input_type":"SecretStrInput"}},"description":"Convert markdown text to Notion blocks and append them to a Notion page.","icon":"NotionDirectoryLoader","base_classes":["Data","Tool"],"display_name":"Add Content to Page ","documentation":"https://developers.notion.com/reference/patch-block-children","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Data"],"selected":"Data","name":"example_output","display_name":"Data","method":"run_model","value":"__UNDEFINED__","cache":true,"hidden":true},{"types":["Tool"],"selected":"Tool","name":"example_tool_output","display_name":"Tool","method":"build_tool","value":"__UNDEFINED__","cache":true}],"field_order":["markdown_text","block_id","notion_secret"],"beta":false,"edited":true,"lf_version":"1.0.17"},"id":"AddContentToPage-ZezUn","description":"Convert markdown text to Notion blocks and append them to a Notion page.","display_name":"Add Content to Page "},"selected":false,"width":384,"height":330,"dragging":false,"positionAbsolute":{"x":1416.217259177943,"y":1709.6205867919527}},{"id":"NotionPageCreator-6SCB5","type":"genericNode","position":{"x":1413.9782390799146,"y":2051.645785494985},"data":{"type":"NotionPageCreator","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"import json\nfrom typing import Dict, Any, Union\nimport requests\nfrom pydantic import BaseModel, Field\nfrom langflow.base.langchain_utilities.model import LCToolComponent\nfrom langflow.inputs import SecretStrInput, StrInput, MultilineInput\nfrom langflow.schema import Data\nfrom langflow.field_typing import Tool\nfrom langchain.tools import StructuredTool\nfrom langflow.io import Output\n\nclass NotionPageCreator(LCToolComponent):\n display_name: str = \"Create Page \"\n description: str = \"A component for creating Notion pages.\"\n documentation: str = \"https://docs.langflow.org/integrations/notion/page-create\"\n icon = \"NotionDirectoryLoader\"\n\n inputs = [\n StrInput(\n name=\"database_id\",\n display_name=\"Database ID\",\n info=\"The ID of the Notion database.\",\n ),\n SecretStrInput(\n name=\"notion_secret\",\n display_name=\"Notion Secret\",\n info=\"The Notion integration token.\",\n required=True,\n ),\n MultilineInput(\n name=\"properties_json\",\n display_name=\"Properties (JSON)\",\n info=\"The properties of the new page as a JSON string.\",\n ),\n ]\n outputs = [\n Output(name=\"example_output\", display_name=\"Data\", method=\"run_model\"),\n Output(name=\"example_tool_output\", display_name=\"Tool\", method=\"build_tool\"),\n ]\n\n class NotionPageCreatorSchema(BaseModel):\n database_id: str = Field(..., description=\"The ID of the Notion database.\")\n properties_json: str = Field(..., description=\"The properties of the new page as a JSON string.\")\n\n def run_model(self) -> Data:\n result = self._create_notion_page(self.database_id, self.properties_json)\n if isinstance(result, str):\n # An error occurred, return it as text\n return Data(text=result)\n else:\n # Success, return the created page data\n output = \"Created page 
properties:\\n\"\n for prop_name, prop_value in result.get(\"properties\", {}).items():\n output += f\"{prop_name}: {prop_value}\\n\"\n return Data(text=output, data=result)\n\n def build_tool(self) -> Tool:\n return StructuredTool.from_function(\n name=\"create_notion_page\",\n description=\"Create a new page in a Notion database. IMPORTANT: Use the tool to check the Database properties for more details before using this tool.\",\n func=self._create_notion_page,\n args_schema=self.NotionPageCreatorSchema,\n )\n\n def _create_notion_page(self, database_id: str, properties_json: str) -> Union[Dict[str, Any], str]:\n if not database_id or not properties_json:\n return \"Invalid input. Please provide 'database_id' and 'properties_json'.\"\n\n try:\n properties = json.loads(properties_json)\n except json.JSONDecodeError as e:\n return f\"Invalid properties format. Please provide a valid JSON string. Error: {str(e)}\"\n\n headers = {\n \"Authorization\": f\"Bearer {self.notion_secret}\",\n \"Content-Type\": \"application/json\",\n \"Notion-Version\": \"2022-06-28\",\n }\n\n data = {\n \"parent\": {\"database_id\": database_id},\n \"properties\": properties,\n }\n\n try:\n response = requests.post(\"https://api.notion.com/v1/pages\", headers=headers, json=data)\n response.raise_for_status()\n result = response.json()\n return result\n except requests.exceptions.RequestException as e:\n error_message = f\"Failed to create Notion page. Error: {str(e)}\"\n if hasattr(e, \"response\") and e.response is not None:\n error_message += f\" Status code: {e.response.status_code}, Response: {e.response.text}\"\n return error_message\n\n def __call__(self, *args, **kwargs):\n return self._create_notion_page(*args, **kwargs)\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"database_id":{"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"database_id","value":"","display_name":"Database ID","advanced":true,"dynamic":false,"info":"The ID of the Notion database.","title_case":false,"type":"str","_input_type":"StrInput"},"notion_secret":{"load_from_db":true,"required":true,"placeholder":"","show":true,"name":"notion_secret","value":"","display_name":"Notion Secret","advanced":false,"input_types":["Message"],"dynamic":false,"info":"The Notion integration token.","title_case":false,"password":true,"type":"str","_input_type":"SecretStrInput"},"properties_json":{"trace_as_input":true,"multiline":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"properties_json","value":"","display_name":"Properties (JSON)","advanced":true,"input_types":["Message"],"dynamic":false,"info":"The properties of the new page as a JSON string.","title_case":false,"type":"str","_input_type":"MultilineInput"}},"description":"A component for creating Notion pages.","icon":"NotionDirectoryLoader","base_classes":["Data","Tool"],"display_name":"Create Page 
","documentation":"https://docs.langflow.org/integrations/notion/page-create","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Data"],"selected":"Data","name":"example_output","display_name":"Data","method":"run_model","value":"__UNDEFINED__","cache":true,"hidden":true},{"types":["Tool"],"selected":"Tool","name":"example_tool_output","display_name":"Tool","method":"build_tool","value":"__UNDEFINED__","cache":true}],"field_order":["database_id","notion_secret","properties_json"],"beta":false,"edited":true,"lf_version":"1.0.17"},"id":"NotionPageCreator-6SCB5","description":"A component for creating Notion pages.","display_name":"Create Page "},"selected":false,"width":384,"height":302,"dragging":false,"positionAbsolute":{"x":1413.9782390799146,"y":2051.645785494985}},{"id":"NotionDatabaseProperties-aeWil","type":"genericNode","position":{"x":1004.5753613670959,"y":1713.914531491452},"data":{"type":"NotionDatabaseProperties","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"import requests\nfrom typing import Dict, Union\nfrom pydantic import BaseModel, Field\nfrom langflow.base.langchain_utilities.model import LCToolComponent\nfrom langflow.inputs import SecretStrInput, StrInput\nfrom langflow.schema import Data\nfrom langflow.field_typing import Tool\nfrom langchain.tools import StructuredTool\nfrom langflow.io import Output\n\nclass NotionDatabaseProperties(LCToolComponent):\n display_name: str = \"List Database Properties \"\n description: str = \"Retrieve properties of a Notion database.\"\n documentation: str = \"https://docs.langflow.org/integrations/notion/list-database-properties\"\n icon = \"NotionDirectoryLoader\"\n\n inputs = [\n StrInput(\n name=\"database_id\",\n display_name=\"Database ID\",\n info=\"The ID of the Notion database.\",\n ),\n SecretStrInput(\n name=\"notion_secret\",\n display_name=\"Notion Secret\",\n info=\"The Notion integration token.\",\n required=True,\n ),\n ]\n outputs = [\n Output(name=\"example_output\", display_name=\"Data\", method=\"run_model\"),\n Output(name=\"example_tool_output\", display_name=\"Tool\", method=\"build_tool\"),\n ]\n\n class NotionDatabasePropertiesSchema(BaseModel):\n database_id: str = Field(..., description=\"The ID of the Notion database.\")\n\n def run_model(self) -> Data:\n result = self._fetch_database_properties(self.database_id)\n if isinstance(result, str):\n # An error occurred, return it as text\n return Data(text=result)\n else:\n # Success, return the properties\n return Data(text=str(result), data=result)\n\n def build_tool(self) -> Tool:\n return StructuredTool.from_function(\n name=\"notion_database_properties\",\n description=\"Retrieve properties of a Notion database. 
Input should include the database ID.\",\n func=self._fetch_database_properties,\n args_schema=self.NotionDatabasePropertiesSchema,\n )\n\n def _fetch_database_properties(self, database_id: str) -> Union[Dict, str]:\n url = f\"https://api.notion.com/v1/databases/{database_id}\"\n headers = {\n \"Authorization\": f\"Bearer {self.notion_secret}\",\n \"Notion-Version\": \"2022-06-28\", # Use the latest supported version\n }\n try:\n response = requests.get(url, headers=headers)\n response.raise_for_status()\n data = response.json()\n properties = data.get(\"properties\", {})\n return properties\n except requests.exceptions.RequestException as e:\n return f\"Error fetching Notion database properties: {str(e)}\"\n except ValueError as e:\n return f\"Error parsing Notion API response: {str(e)}\"\n except Exception as e:\n return f\"An unexpected error occurred: {str(e)}\"\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"database_id":{"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"database_id","value":"","display_name":"Database ID","advanced":true,"dynamic":false,"info":"The ID of the Notion database.","title_case":false,"type":"str","_input_type":"StrInput"},"notion_secret":{"load_from_db":true,"required":true,"placeholder":"","show":true,"name":"notion_secret","value":"","display_name":"Notion Secret","advanced":false,"input_types":["Message"],"dynamic":false,"info":"The Notion integration token.","title_case":false,"password":true,"type":"str","_input_type":"SecretStrInput"}},"description":"Retrieve properties of a Notion database.","icon":"NotionDirectoryLoader","base_classes":["Data","Tool"],"display_name":"List Database Properties ","documentation":"https://docs.langflow.org/integrations/notion/list-database-properties","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Data"],"selected":"Data","name":"example_output","display_name":"Data","method":"run_model","value":"__UNDEFINED__","cache":true,"hidden":true},{"types":["Tool"],"selected":"Tool","name":"example_tool_output","display_name":"Tool","method":"build_tool","value":"__UNDEFINED__","cache":true}],"field_order":["database_id","notion_secret"],"beta":false,"edited":true,"lf_version":"1.0.17"},"id":"NotionDatabaseProperties-aeWil","description":"Retrieve properties of a Notion database.","display_name":"List Database Properties "},"selected":false,"width":384,"height":302,"dragging":false,"positionAbsolute":{"x":1004.5753613670959,"y":1713.914531491452}},{"id":"NotionListPages-znA3w","type":"genericNode","position":{"x":1006.1848442547046,"y":2022.7880909242833},"data":{"type":"NotionListPages","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"import requests\nimport json\nfrom typing import Dict, Any, List, Optional\nfrom pydantic import BaseModel, Field\nfrom langflow.io import Output\nfrom langflow.base.langchain_utilities.model import LCToolComponent\nfrom langflow.inputs import SecretStrInput, StrInput, MultilineInput\nfrom langflow.schema import Data\nfrom langflow.field_typing import Tool\nfrom langchain.tools import StructuredTool\n\n\nclass NotionListPages(LCToolComponent):\n display_name: str = \"List Pages \"\n description: str = (\n \"Query a Notion database with filtering and sorting. 
\"\n \"The input should be a JSON string containing the 'filter' and 'sorts' objects. \"\n \"Example input:\\n\"\n '{\"filter\": {\"property\": \"Status\", \"select\": {\"equals\": \"Done\"}}, \"sorts\": [{\"timestamp\": \"created_time\", \"direction\": \"descending\"}]}'\n )\n documentation: str = \"https://docs.langflow.org/integrations/notion/list-pages\"\n icon = \"NotionDirectoryLoader\"\n\n inputs = [\n SecretStrInput(\n name=\"notion_secret\",\n display_name=\"Notion Secret\",\n info=\"The Notion integration token.\",\n required=True,\n ),\n StrInput(\n name=\"database_id\",\n display_name=\"Database ID\",\n info=\"The ID of the Notion database to query.\",\n ),\n MultilineInput(\n name=\"query_json\",\n display_name=\"Database query (JSON)\",\n info=\"A JSON string containing the filters and sorts that will be used for querying the database. Leave empty for no filters or sorts.\",\n ),\n ]\n outputs = [\n Output(name=\"example_output\", display_name=\"Data\", method=\"run_model\"),\n Output(name=\"example_tool_output\", display_name=\"Tool\", method=\"build_tool\"),\n ]\n class NotionListPagesSchema(BaseModel):\n database_id: str = Field(..., description=\"The ID of the Notion database to query.\")\n query_json: Optional[str] = Field(\n default=\"\",\n description=\"A JSON string containing the filters and sorts for querying the database. Leave empty for no filters or sorts.\",\n )\n\n def run_model(self) -> List[Data]:\n result = self._query_notion_database(self.database_id, self.query_json)\n\n if isinstance(result, str):\n # An error occurred, return it as a single record\n return [Data(text=result)]\n\n records = []\n combined_text = f\"Pages found: {len(result)}\\n\\n\"\n\n for page in result:\n page_data = {\n \"id\": page[\"id\"],\n \"url\": page[\"url\"],\n \"created_time\": page[\"created_time\"],\n \"last_edited_time\": page[\"last_edited_time\"],\n \"properties\": page[\"properties\"],\n }\n\n text = (\n f\"id: {page['id']}\\n\"\n f\"url: {page['url']}\\n\"\n f\"created_time: {page['created_time']}\\n\"\n f\"last_edited_time: {page['last_edited_time']}\\n\"\n f\"properties: {json.dumps(page['properties'], indent=2)}\\n\\n\"\n )\n\n combined_text += text\n records.append(Data(text=text, **page_data))\n\n self.status = records\n return records\n\n def build_tool(self) -> Tool:\n return StructuredTool.from_function(\n name=\"notion_list_pages\",\n description=self.description,\n func=self._query_notion_database,\n args_schema=self.NotionListPagesSchema,\n )\n\n def _query_notion_database(self, database_id: str, query_json: Optional[str] = None) -> List[Dict[str, Any]] | str:\n url = f\"https://api.notion.com/v1/databases/{database_id}/query\"\n headers = {\n \"Authorization\": f\"Bearer {self.notion_secret}\",\n \"Content-Type\": \"application/json\",\n \"Notion-Version\": \"2022-06-28\",\n }\n\n query_payload = {}\n if query_json and query_json.strip():\n try:\n query_payload = json.loads(query_json)\n except json.JSONDecodeError as e:\n return f\"Invalid JSON format for query: {str(e)}\"\n\n try:\n response = requests.post(url, headers=headers, json=query_payload)\n response.raise_for_status()\n results = response.json()\n return results[\"results\"]\n except requests.exceptions.RequestException as e:\n return f\"Error querying Notion database: {str(e)}\"\n except KeyError:\n return \"Unexpected response format from Notion API\"\n except Exception as e:\n return f\"An unexpected error occurred: 
{str(e)}\"\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"database_id":{"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"database_id","value":"","display_name":"Database ID","advanced":true,"dynamic":false,"info":"The ID of the Notion database to query.","title_case":false,"type":"str","_input_type":"StrInput"},"notion_secret":{"load_from_db":true,"required":true,"placeholder":"","show":true,"name":"notion_secret","value":"","display_name":"Notion Secret","advanced":false,"input_types":["Message"],"dynamic":false,"info":"The Notion integration token.","title_case":false,"password":true,"type":"str","_input_type":"SecretStrInput"},"query_json":{"trace_as_input":true,"multiline":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"query_json","value":"","display_name":"Database query (JSON)","advanced":true,"input_types":["Message"],"dynamic":false,"info":"A JSON string containing the filters and sorts that will be used for querying the database. Leave empty for no filters or sorts.","title_case":false,"type":"str","_input_type":"MultilineInput"}},"description":"Query a Notion database with filtering and sorting. The input should be a JSON string containing the 'filter' and 'sorts' objects. Example input:\n{\"filter\": {\"property\": \"Status\", \"select\": {\"equals\": \"Done\"}}, \"sorts\": [{\"timestamp\": \"created_time\", \"direction\": \"descending\"}]}","icon":"NotionDirectoryLoader","base_classes":["Data","Tool"],"display_name":"List Pages ","documentation":"https://docs.langflow.org/integrations/notion/list-pages","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Data"],"selected":"Data","name":"example_output","display_name":"Data","method":"run_model","value":"__UNDEFINED__","cache":true,"hidden":true},{"types":["Tool"],"selected":"Tool","name":"example_tool_output","display_name":"Tool","method":"build_tool","value":"__UNDEFINED__","cache":true}],"field_order":["notion_secret","database_id","query_json"],"beta":false,"edited":true,"lf_version":"1.0.17"},"id":"NotionListPages-znA3w","description":"Query a Notion database with filtering and sorting. The input should be a JSON string containing the 'filter' and 'sorts' objects. 
Example input:\n{\"filter\": {\"property\": \"Status\", \"select\": {\"equals\": \"Done\"}}, \"sorts\": [{\"timestamp\": \"created_time\", \"direction\": \"descending\"}]}","display_name":"List Pages "},"selected":false,"width":384,"height":470,"dragging":false,"positionAbsolute":{"x":1006.1848442547046,"y":2022.7880909242833}},{"id":"NotionUserList-C3eGn","type":"genericNode","position":{"x":2260.15497405973,"y":1717.4551881467207},"data":{"type":"NotionUserList","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"import requests\nfrom typing import List, Dict\nfrom pydantic import BaseModel\nfrom langflow.io import Output\nfrom langflow.base.langchain_utilities.model import LCToolComponent\nfrom langflow.inputs import SecretStrInput\nfrom langflow.schema import Data\nfrom langflow.field_typing import Tool\nfrom langchain.tools import StructuredTool\n\n\nclass NotionUserList(LCToolComponent):\n display_name = \"List Users \"\n description = \"Retrieve users from Notion.\"\n documentation = \"https://docs.langflow.org/integrations/notion/list-users\"\n icon = \"NotionDirectoryLoader\"\n\n inputs = [\n SecretStrInput(\n name=\"notion_secret\",\n display_name=\"Notion Secret\",\n info=\"The Notion integration token.\",\n required=True,\n ),\n ]\n outputs = [\n Output(name=\"example_output\", display_name=\"Data\", method=\"run_model\"),\n Output(name=\"example_tool_output\", display_name=\"Tool\", method=\"build_tool\"),\n ]\n\n class NotionUserListSchema(BaseModel):\n pass\n\n def run_model(self) -> List[Data]:\n users = self._list_users()\n records = []\n combined_text = \"\"\n\n for user in users:\n output = \"User:\\n\"\n for key, value in user.items():\n output += f\"{key.replace('_', ' ').title()}: {value}\\n\"\n output += \"________________________\\n\"\n\n combined_text += output\n records.append(Data(text=output, data=user))\n\n self.status = records\n return records\n\n def build_tool(self) -> Tool:\n return StructuredTool.from_function(\n name=\"notion_list_users\",\n description=\"Retrieve users from Notion.\",\n func=self._list_users,\n args_schema=self.NotionUserListSchema,\n )\n\n def _list_users(self) -> List[Dict]:\n url = \"https://api.notion.com/v1/users\"\n headers = {\n \"Authorization\": f\"Bearer {self.notion_secret}\",\n \"Notion-Version\": \"2022-06-28\",\n }\n\n response = requests.get(url, headers=headers)\n response.raise_for_status()\n\n data = response.json()\n results = data[\"results\"]\n\n users = []\n for user in results:\n user_data = {\n \"id\": user[\"id\"],\n \"type\": user[\"type\"],\n \"name\": user.get(\"name\", \"\"),\n \"avatar_url\": user.get(\"avatar_url\", \"\"),\n }\n users.append(user_data)\n\n return users\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"notion_secret":{"load_from_db":true,"required":true,"placeholder":"","show":true,"name":"notion_secret","value":"","display_name":"Notion Secret","advanced":false,"input_types":["Message"],"dynamic":false,"info":"The Notion integration token.","title_case":false,"password":true,"type":"str","_input_type":"SecretStrInput"}},"description":"Retrieve users from Notion.","icon":"NotionDirectoryLoader","base_classes":["Data","Tool"],"display_name":"List Users 
","documentation":"https://docs.langflow.org/integrations/notion/list-users","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Data"],"selected":"Data","name":"example_output","display_name":"Data","method":"run_model","value":"__UNDEFINED__","cache":true,"hidden":true},{"types":["Tool"],"selected":"Tool","name":"example_tool_output","display_name":"Tool","method":"build_tool","value":"__UNDEFINED__","cache":true}],"field_order":["notion_secret"],"beta":false,"edited":true,"lf_version":"1.0.17"},"id":"NotionUserList-C3eGn","description":"Retrieve users from Notion.","display_name":"List Users "},"selected":true,"width":384,"height":302,"dragging":false,"positionAbsolute":{"x":2260.15497405973,"y":1717.4551881467207}},{"id":"NotionPageContent-SlL21","type":"genericNode","position":{"x":1826.4242329724448,"y":1715.6365113286927},"data":{"type":"NotionPageContent","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"import requests\nfrom pydantic import BaseModel, Field\nfrom langflow.base.langchain_utilities.model import LCToolComponent\nfrom langflow.inputs import SecretStrInput, StrInput\nfrom langflow.schema import Data\nfrom langflow.field_typing import Tool\nfrom langchain.tools import StructuredTool\nfrom langflow.io import Output\n\nclass NotionPageContent(LCToolComponent):\n display_name = \"Page Content Viewer \"\n description = \"Retrieve the content of a Notion page as plain text.\"\n documentation = \"https://docs.langflow.org/integrations/notion/page-content-viewer\"\n icon = \"NotionDirectoryLoader\"\n\n inputs = [\n StrInput(\n name=\"page_id\",\n display_name=\"Page ID\",\n info=\"The ID of the Notion page to retrieve.\",\n ),\n SecretStrInput(\n name=\"notion_secret\",\n display_name=\"Notion Secret\",\n info=\"The Notion integration token.\",\n required=True,\n ),\n ]\n outputs = [\n Output(name=\"example_output\", display_name=\"Data\", method=\"run_model\"),\n Output(name=\"example_tool_output\", display_name=\"Tool\", method=\"build_tool\"),\n ]\n\n class NotionPageContentSchema(BaseModel):\n page_id: str = Field(..., description=\"The ID of the Notion page to retrieve.\")\n\n def run_model(self) -> Data:\n result = self._retrieve_page_content(self.page_id)\n if isinstance(result, str) and result.startswith(\"Error:\"):\n # An error occurred, return it as text\n return Data(text=result)\n else:\n # Success, return the content\n return Data(text=result, data={\"content\": result})\n\n def build_tool(self) -> Tool:\n return StructuredTool.from_function(\n name=\"notion_page_content\",\n description=\"Retrieve the content of a Notion page as plain text.\",\n func=self._retrieve_page_content,\n args_schema=self.NotionPageContentSchema,\n )\n\n def _retrieve_page_content(self, page_id: str) -> str:\n blocks_url = f\"https://api.notion.com/v1/blocks/{page_id}/children?page_size=100\"\n headers = {\n \"Authorization\": f\"Bearer {self.notion_secret}\",\n \"Notion-Version\": \"2022-06-28\",\n }\n try:\n blocks_response = requests.get(blocks_url, headers=headers)\n blocks_response.raise_for_status()\n blocks_data = blocks_response.json()\n return self.parse_blocks(blocks_data.get(\"results\", []))\n except requests.exceptions.RequestException as e:\n error_message = f\"Error: Failed to retrieve Notion page content. 
{str(e)}\"\n if hasattr(e, \"response\") and e.response is not None:\n error_message += f\" Status code: {e.response.status_code}, Response: {e.response.text}\"\n return error_message\n except Exception as e:\n return f\"Error: An unexpected error occurred while retrieving Notion page content. {str(e)}\"\n\n def parse_blocks(self, blocks: list) -> str:\n content = \"\"\n for block in blocks:\n block_type = block.get(\"type\")\n if block_type in [\"paragraph\", \"heading_1\", \"heading_2\", \"heading_3\", \"quote\"]:\n content += self.parse_rich_text(block[block_type].get(\"rich_text\", [])) + \"\\n\\n\"\n elif block_type in [\"bulleted_list_item\", \"numbered_list_item\"]:\n content += self.parse_rich_text(block[block_type].get(\"rich_text\", [])) + \"\\n\"\n elif block_type == \"to_do\":\n content += self.parse_rich_text(block[\"to_do\"].get(\"rich_text\", [])) + \"\\n\"\n elif block_type == \"code\":\n content += self.parse_rich_text(block[\"code\"].get(\"rich_text\", [])) + \"\\n\\n\"\n elif block_type == \"image\":\n content += f\"[Image: {block['image'].get('external', {}).get('url', 'No URL')}]\\n\\n\"\n elif block_type == \"divider\":\n content += \"---\\n\\n\"\n return content.strip()\n\n def parse_rich_text(self, rich_text: list) -> str:\n return \"\".join(segment.get(\"plain_text\", \"\") for segment in rich_text)\n\n def __call__(self, *args, **kwargs):\n return self._retrieve_page_content(*args, **kwargs)\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"notion_secret":{"load_from_db":true,"required":true,"placeholder":"","show":true,"name":"notion_secret","value":"","display_name":"Notion Secret","advanced":false,"input_types":["Message"],"dynamic":false,"info":"The Notion integration token.","title_case":false,"password":true,"type":"str","_input_type":"SecretStrInput"},"page_id":{"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"page_id","value":"","display_name":"Page ID","advanced":true,"dynamic":false,"info":"The ID of the Notion page to retrieve.","title_case":false,"type":"str","_input_type":"StrInput"}},"description":"Retrieve the content of a Notion page as plain text.","icon":"NotionDirectoryLoader","base_classes":["Data","Tool"],"display_name":"Page Content Viewer ","documentation":"https://docs.langflow.org/integrations/notion/page-content-viewer","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Data"],"selected":"Data","name":"example_output","display_name":"Data","method":"run_model","value":"__UNDEFINED__","cache":true,"hidden":true},{"types":["Tool"],"selected":"Tool","name":"example_tool_output","display_name":"Tool","method":"build_tool","value":"__UNDEFINED__","cache":true}],"field_order":["page_id","notion_secret"],"beta":false,"edited":true,"lf_version":"1.0.17"},"id":"NotionPageContent-SlL21","description":"Retrieve the content of a Notion page as plain text.","display_name":"Page Content Viewer "},"selected":false,"width":384,"height":330,"dragging":false,"positionAbsolute":{"x":1826.4242329724448,"y":1715.6365113286927}},{"id":"NotionSearch-VS2mI","type":"genericNode","position":{"x":2258.1166047519732,"y":2034.3959294952945},"data":{"type":"NotionSearch","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"import requests\nfrom typing import 
Dict, Any, List\nfrom pydantic import BaseModel, Field\nfrom langflow.io import Output\nfrom langflow.base.langchain_utilities.model import LCToolComponent\nfrom langflow.inputs import SecretStrInput, StrInput, DropdownInput\nfrom langflow.schema import Data\nfrom langflow.field_typing import Tool\nfrom langchain.tools import StructuredTool\n\n\nclass NotionSearch(LCToolComponent):\n display_name: str = \"Search \"\n description: str = \"Searches all pages and databases that have been shared with an integration. The search field can be an empty value to show all values from that search\"\n documentation: str = \"https://docs.langflow.org/integrations/notion/search\"\n icon = \"NotionDirectoryLoader\"\n\n inputs = [\n SecretStrInput(\n name=\"notion_secret\",\n display_name=\"Notion Secret\",\n info=\"The Notion integration token.\",\n required=True,\n ),\n StrInput(\n name=\"query\",\n display_name=\"Search Query\",\n info=\"The text that the API compares page and database titles against.\",\n ),\n DropdownInput(\n name=\"filter_value\",\n display_name=\"Filter Type\",\n info=\"Limits the results to either only pages or only databases.\",\n options=[\"page\", \"database\"],\n value=\"page\",\n ),\n DropdownInput(\n name=\"sort_direction\",\n display_name=\"Sort Direction\",\n info=\"The direction to sort the results.\",\n options=[\"ascending\", \"descending\"],\n value=\"descending\",\n ),\n ]\n outputs = [\n Output(name=\"example_output\", display_name=\"Data\", method=\"run_model\"),\n Output(name=\"example_tool_output\", display_name=\"Tool\", method=\"build_tool\"),\n ]\n\n class NotionSearchSchema(BaseModel):\n query: str = Field(..., description=\"The search query text.\")\n filter_value: str = Field(default=\"page\", description=\"Filter type: 'page' or 'database'.\")\n sort_direction: str = Field(default=\"descending\", description=\"Sort direction: 'ascending' or 'descending'.\")\n\n def run_model(self) -> List[Data]:\n results = self._search_notion(self.query, self.filter_value, self.sort_direction)\n records = []\n combined_text = f\"Results found: {len(results)}\\n\\n\"\n\n for result in results:\n result_data = {\n \"id\": result[\"id\"],\n \"type\": result[\"object\"],\n \"last_edited_time\": result[\"last_edited_time\"],\n }\n\n if result[\"object\"] == \"page\":\n result_data[\"title_or_url\"] = result[\"url\"]\n text = f\"id: {result['id']}\\ntitle_or_url: {result['url']}\\n\"\n elif result[\"object\"] == \"database\":\n if \"title\" in result and isinstance(result[\"title\"], list) and len(result[\"title\"]) > 0:\n result_data[\"title_or_url\"] = result[\"title\"][0][\"plain_text\"]\n text = f\"id: {result['id']}\\ntitle_or_url: {result['title'][0]['plain_text']}\\n\"\n else:\n result_data[\"title_or_url\"] = \"N/A\"\n text = f\"id: {result['id']}\\ntitle_or_url: N/A\\n\"\n\n text += f\"type: {result['object']}\\nlast_edited_time: {result['last_edited_time']}\\n\\n\"\n combined_text += text\n records.append(Data(text=text, data=result_data))\n\n self.status = records\n return records\n\n def build_tool(self) -> Tool:\n return StructuredTool.from_function(\n name=\"notion_search\",\n description=\"Search Notion pages and databases. 
Input should include the search query and optionally filter type and sort direction.\",\n func=self._search_notion,\n args_schema=self.NotionSearchSchema,\n )\n\n def _search_notion(\n self, query: str, filter_value: str = \"page\", sort_direction: str = \"descending\"\n ) -> List[Dict[str, Any]]:\n url = \"https://api.notion.com/v1/search\"\n headers = {\n \"Authorization\": f\"Bearer {self.notion_secret}\",\n \"Content-Type\": \"application/json\",\n \"Notion-Version\": \"2022-06-28\",\n }\n\n data = {\n \"query\": query,\n \"filter\": {\"value\": filter_value, \"property\": \"object\"},\n \"sort\": {\"direction\": sort_direction, \"timestamp\": \"last_edited_time\"},\n }\n\n response = requests.post(url, headers=headers, json=data)\n response.raise_for_status()\n\n results = response.json()\n return results[\"results\"]\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"filter_value":{"trace_as_metadata":true,"options":["page","database"],"combobox":false,"required":false,"placeholder":"","show":true,"name":"filter_value","value":"page","display_name":"Filter Type","advanced":true,"dynamic":false,"info":"Limits the results to either only pages or only databases.","title_case":false,"type":"str","_input_type":"DropdownInput"},"notion_secret":{"load_from_db":true,"required":true,"placeholder":"","show":true,"name":"notion_secret","value":"","display_name":"Notion Secret","advanced":false,"input_types":["Message"],"dynamic":false,"info":"The Notion integration token.","title_case":false,"password":true,"type":"str","_input_type":"SecretStrInput"},"query":{"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"query","value":"","display_name":"Search Query","advanced":true,"dynamic":false,"info":"The text that the API compares page and database titles against.","title_case":false,"type":"str","_input_type":"StrInput"},"sort_direction":{"trace_as_metadata":true,"options":["ascending","descending"],"combobox":false,"required":false,"placeholder":"","show":true,"name":"sort_direction","value":"descending","display_name":"Sort Direction","advanced":true,"dynamic":false,"info":"The direction to sort the results.","title_case":false,"type":"str","_input_type":"DropdownInput"}},"description":"Searches all pages and databases that have been shared with an integration. 
The search field can be an empty value to show all values from that search","icon":"NotionDirectoryLoader","base_classes":["Data","Tool"],"display_name":"Search ","documentation":"https://docs.langflow.org/integrations/notion/search","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Data"],"selected":"Data","name":"example_output","display_name":"Data","method":"run_model","value":"__UNDEFINED__","cache":true,"hidden":true},{"types":["Tool"],"selected":"Tool","name":"example_tool_output","display_name":"Tool","method":"build_tool","value":"__UNDEFINED__","cache":true}],"field_order":["notion_secret","query","filter_value","sort_direction"],"beta":false,"edited":true,"lf_version":"1.0.17"},"id":"NotionSearch-VS2mI","description":"Searches all pages and databases that have been shared with an integration.","display_name":"Search "},"selected":false,"width":384,"height":386,"dragging":false,"positionAbsolute":{"x":2258.1166047519732,"y":2034.3959294952945}},{"id":"NotionPageUpdate-6FyYd","type":"genericNode","position":{"x":1827.0574354713603,"y":2055.9948126656136},"data":{"type":"NotionPageUpdate","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"import json\nimport requests\nfrom typing import Dict, Any, Union\nfrom pydantic import BaseModel, Field\nfrom langflow.base.langchain_utilities.model import LCToolComponent\nfrom langflow.inputs import SecretStrInput, StrInput, MultilineInput\nfrom langflow.schema import Data\nfrom langflow.field_typing import Tool\nfrom langchain.tools import StructuredTool\nfrom loguru import logger\nfrom langflow.io import Output\n\nclass NotionPageUpdate(LCToolComponent):\n display_name: str = \"Update Page Property \"\n description: str = \"Update the properties of a Notion page.\"\n documentation: str = \"https://docs.langflow.org/integrations/notion/page-update\"\n icon = \"NotionDirectoryLoader\"\n\n inputs = [\n StrInput(\n name=\"page_id\",\n display_name=\"Page ID\",\n info=\"The ID of the Notion page to update.\",\n ),\n MultilineInput(\n name=\"properties\",\n display_name=\"Properties\",\n info=\"The properties to update on the page (as a JSON string or a dictionary).\",\n ),\n SecretStrInput(\n name=\"notion_secret\",\n display_name=\"Notion Secret\",\n info=\"The Notion integration token.\",\n required=True,\n ),\n ]\n outputs = [\n Output(name=\"example_output\", display_name=\"Data\", method=\"run_model\"),\n Output(name=\"example_tool_output\", display_name=\"Tool\", method=\"build_tool\"),\n ]\n\n class NotionPageUpdateSchema(BaseModel):\n page_id: str = Field(..., description=\"The ID of the Notion page to update.\")\n properties: Union[str, Dict[str, Any]] = Field(\n ..., description=\"The properties to update on the page (as a JSON string or a dictionary).\"\n )\n\n def run_model(self) -> Data:\n result = self._update_notion_page(self.page_id, self.properties)\n if isinstance(result, str):\n # An error occurred, return it as text\n return Data(text=result)\n else:\n # Success, return the updated page data\n output = \"Updated page properties:\\n\"\n for prop_name, prop_value in result.get(\"properties\", {}).items():\n output += f\"{prop_name}: {prop_value}\\n\"\n return Data(text=output, data=result)\n\n def build_tool(self) -> Tool:\n return StructuredTool.from_function(\n name=\"update_notion_page\",\n description=\"Update the properties of a Notion page. 
IMPORTANT: Use the tool to check the Database properties for more details before using this tool.\",\n func=self._update_notion_page,\n args_schema=self.NotionPageUpdateSchema,\n )\n\n def _update_notion_page(self, page_id: str, properties: Union[str, Dict[str, Any]]) -> Union[Dict[str, Any], str]:\n url = f\"https://api.notion.com/v1/pages/{page_id}\"\n headers = {\n \"Authorization\": f\"Bearer {self.notion_secret}\",\n \"Content-Type\": \"application/json\",\n \"Notion-Version\": \"2022-06-28\", # Use the latest supported version\n }\n\n # Parse properties if it's a string\n if isinstance(properties, str):\n try:\n parsed_properties = json.loads(properties)\n except json.JSONDecodeError as e:\n error_message = f\"Invalid JSON format for properties: {str(e)}\"\n logger.error(error_message)\n return error_message\n\n else:\n parsed_properties = properties\n\n data = {\"properties\": parsed_properties}\n\n try:\n logger.info(f\"Sending request to Notion API: URL: {url}, Data: {json.dumps(data)}\")\n response = requests.patch(url, headers=headers, json=data)\n response.raise_for_status()\n updated_page = response.json()\n\n logger.info(f\"Successfully updated Notion page. Response: {json.dumps(updated_page)}\")\n return updated_page\n except requests.exceptions.HTTPError as e:\n error_message = f\"HTTP Error occurred: {str(e)}\"\n if e.response is not None:\n error_message += f\"\\nStatus code: {e.response.status_code}\"\n error_message += f\"\\nResponse body: {e.response.text}\"\n logger.error(error_message)\n return error_message\n except requests.exceptions.RequestException as e:\n error_message = f\"An error occurred while making the request: {str(e)}\"\n logger.error(error_message)\n return error_message\n except Exception as e:\n error_message = f\"An unexpected error occurred: {str(e)}\"\n logger.error(error_message)\n return error_message\n\n def __call__(self, *args, **kwargs):\n return self._update_notion_page(*args, **kwargs)\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"notion_secret":{"load_from_db":true,"required":true,"placeholder":"","show":true,"name":"notion_secret","value":"","display_name":"Notion Secret","advanced":false,"input_types":["Message"],"dynamic":false,"info":"The Notion integration token.","title_case":false,"password":true,"type":"str","_input_type":"SecretStrInput"},"page_id":{"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"page_id","value":"","display_name":"Page ID","advanced":true,"dynamic":false,"info":"The ID of the Notion page to update.","title_case":false,"type":"str","_input_type":"StrInput"},"properties":{"trace_as_input":true,"multiline":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"properties","value":"","display_name":"Properties","advanced":true,"input_types":["Message"],"dynamic":false,"info":"The properties to update on the page (as a JSON string or a dictionary).","title_case":false,"type":"str","_input_type":"MultilineInput"}},"description":"Update the properties of a Notion page.","icon":"NotionDirectoryLoader","base_classes":["Data","Tool"],"display_name":"Update Page Property 
","documentation":"https://docs.langflow.org/integrations/notion/page-update","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Data"],"selected":"Data","name":"example_output","display_name":"Data","method":"run_model","value":"__UNDEFINED__","cache":true,"hidden":true},{"types":["Tool"],"selected":"Tool","name":"example_tool_output","display_name":"Tool","method":"build_tool","value":"__UNDEFINED__","cache":true}],"field_order":["page_id","properties","notion_secret"],"beta":false,"edited":true,"lf_version":"1.0.17"},"id":"NotionPageUpdate-6FyYd","description":"Update the properties of a Notion page.","display_name":"Update Page Property "},"selected":false,"width":384,"height":302,"dragging":false,"positionAbsolute":{"x":1827.0574354713603,"y":2055.9948126656136}},{"id":"ToolCallingAgent-50Gcd","type":"genericNode","position":{"x":2186.0530739759893,"y":612.1744804997304},"data":{"type":"ToolCallingAgent","node":{"template":{"_type":"Component","chat_history":{"trace_as_metadata":true,"list":true,"trace_as_input":true,"required":false,"placeholder":"","show":true,"name":"chat_history","value":"","display_name":"Chat History","advanced":false,"input_types":["Data"],"dynamic":false,"info":"","title_case":false,"type":"other","_input_type":"DataInput"},"llm":{"trace_as_metadata":true,"list":false,"required":true,"placeholder":"","show":true,"name":"llm","value":"","display_name":"Language Model","advanced":false,"input_types":["LanguageModel"],"dynamic":false,"info":"","title_case":false,"type":"other","_input_type":"HandleInput"},"tools":{"trace_as_metadata":true,"list":true,"required":false,"placeholder":"","show":true,"name":"tools","value":"","display_name":"Tools","advanced":false,"input_types":["Tool","BaseTool"],"dynamic":false,"info":"","title_case":false,"type":"other","_input_type":"HandleInput"},"code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from typing import Optional, List\n\nfrom langchain.agents import create_tool_calling_agent\nfrom langchain_core.prompts import ChatPromptTemplate, PromptTemplate, HumanMessagePromptTemplate\nfrom langflow.base.agents.agent import LCToolsAgentComponent\nfrom langflow.inputs import MultilineInput\nfrom langflow.inputs.inputs import HandleInput, DataInput\nfrom langflow.schema import Data\n\n\nclass ToolCallingAgentComponent(LCToolsAgentComponent):\n display_name: str = \"Tool Calling Agent\"\n description: str = \"Agent that uses tools\"\n icon = \"LangChain\"\n beta = True\n name = \"ToolCallingAgent\"\n\n inputs = LCToolsAgentComponent._base_inputs + [\n HandleInput(name=\"llm\", display_name=\"Language Model\", input_types=[\"LanguageModel\"], required=True),\n MultilineInput(\n name=\"system_prompt\",\n display_name=\"System Prompt\",\n info=\"System prompt for the agent.\",\n value=\"You are a helpful assistant\",\n ),\n MultilineInput(\n name=\"user_prompt\", display_name=\"Prompt\", info=\"This prompt must contain 'input' key.\", value=\"{input}\"\n ),\n DataInput(name=\"chat_history\", display_name=\"Chat History\", is_list=True, advanced=True),\n ]\n\n def get_chat_history_data(self) -> Optional[List[Data]]:\n return self.chat_history\n\n def create_agent_runnable(self):\n if \"input\" not in self.user_prompt:\n raise ValueError(\"Prompt must contain 'input' key.\")\n messages = [\n (\"system\", self.system_prompt),\n (\"placeholder\", \"{chat_history}\"),\n 
HumanMessagePromptTemplate(prompt=PromptTemplate(input_variables=[\"input\"], template=self.user_prompt)),\n (\"placeholder\", \"{agent_scratchpad}\"),\n ]\n prompt = ChatPromptTemplate.from_messages(messages)\n return create_tool_calling_agent(self.llm, self.tools, prompt)\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"handle_parsing_errors":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"name":"handle_parsing_errors","value":true,"display_name":"Handle Parse Errors","advanced":true,"dynamic":false,"info":"","title_case":false,"type":"bool","_input_type":"BoolInput"},"input_value":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"input_value","value":"","display_name":"Input","advanced":false,"input_types":["Message"],"dynamic":false,"info":"","title_case":false,"type":"str","_input_type":"MessageTextInput"},"max_iterations":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"name":"max_iterations","value":15,"display_name":"Max Iterations","advanced":true,"dynamic":false,"info":"","title_case":false,"type":"int","_input_type":"IntInput"},"system_prompt":{"trace_as_input":true,"multiline":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"system_prompt","value":"","display_name":"System Prompt","advanced":false,"input_types":["Message"],"dynamic":false,"info":"System prompt for the agent.","title_case":false,"type":"str","_input_type":"MultilineInput"},"user_prompt":{"trace_as_input":true,"multiline":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"user_prompt","value":"{input}","display_name":"Prompt","advanced":true,"input_types":["Message"],"dynamic":false,"info":"This prompt must contain 'input' key.","title_case":false,"type":"str","_input_type":"MultilineInput"},"verbose":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"name":"verbose","value":true,"display_name":"Verbose","advanced":true,"dynamic":false,"info":"","title_case":false,"type":"bool","_input_type":"BoolInput"}},"description":"Agent that uses tools","icon":"LangChain","base_classes":["AgentExecutor","Message"],"display_name":"Tool Calling Agent","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["AgentExecutor"],"selected":"AgentExecutor","name":"agent","display_name":"Agent","method":"build_agent","value":"__UNDEFINED__","cache":true,"hidden":true},{"types":["Message"],"selected":"Message","name":"response","display_name":"Response","method":"message_response","value":"__UNDEFINED__","cache":true}],"field_order":["input_value","handle_parsing_errors","verbose","max_iterations","tools","llm","system_prompt","user_prompt","chat_history"],"beta":true,"edited":false,"lf_version":"1.0.17"},"id":"ToolCallingAgent-50Gcd"},"selected":false,"width":384,"height":532,"dragging":false,"positionAbsolute":{"x":2186.0530739759893,"y":612.1744804997304}},{"id":"ChatOutput-TSCup","type":"genericNode","position":{"x":2649.190603849412,"y":841.0466487848925},"data":{"type":"ChatOutput","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from 
langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"data_template":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"data_template","value":"{text}","display_name":"Data Template","advanced":true,"input_types":["Message"],"dynamic":false,"info":"Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"input_value":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"input_value","value":"","display_name":"Text","advanced":false,"input_types":["Message"],"dynamic":false,"info":"Message to be passed as output.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"sender":{"trace_as_metadata":true,"options":["Machine","User"],"combobox":false,"required":false,"placeholder":"","show":true,"name":"sender","value":"Machine","display_name":"Sender Type","advanced":true,"dynamic":false,"info":"Type of sender.","title_case":false,"type":"str","_input_type":"DropdownInput"},"sender_name":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"sender_name","value":"AI","display_name":"Sender Name","advanced":true,"input_types":["Message"],"dynamic":false,"info":"Name of the sender.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"session_id":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"session_id","value":"","display_name":"Session ID","advanced":true,"input_types":["Message"],"dynamic":false,"info":"The session ID of the chat. If empty, the current session ID parameter will be used.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"should_store_message":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"name":"should_store_message","value":true,"display_name":"Store Messages","advanced":true,"dynamic":false,"info":"Store the message in the history.","title_case":false,"type":"bool","_input_type":"BoolInput"}},"description":"Display a chat message in the Playground.","icon":"ChatOutput","base_classes":["Message"],"display_name":"Chat Output","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"message","display_name":"Message","method":"message_response","value":"__UNDEFINED__","cache":true}],"field_order":["input_value","should_store_message","sender","sender_name","session_id","data_template"],"beta":false,"edited":false,"lf_version":"1.0.17"},"id":"ChatOutput-TSCup"},"selected":false,"width":384,"height":302,"positionAbsolute":{"x":2649.190603849412,"y":841.0466487848925},"dragging":false},{"id":"ChatInput-bcq6D","type":"genericNode","position":{"x":557.6262725075026,"y":724.8518930903978},"data":{"type":"ChatInput","node":{"template":{"_type":"Component","files":{"trace_as_metadata":true,"file_path":"","fileTypes":["txt","md","mdx","csv","json","yaml","yml","xml","html","htm","pdf","docx","py","sh","sql","js","ts","tsx","jpg","jpeg","png","bmp","image"],"list":true,"required":false,"placeholder":"","show":true,"name":"files","value":"","display_name":"Files","advanced":true,"dynamic":false,"info":"Files to be sent with the message.","title_case":false,"type":"file","_input_type":"FileInput"},"code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, FileInput, 
MessageTextInput, MultilineInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_USER, MESSAGE_SENDER_NAME_USER\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n )\n\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"input_value":{"trace_as_input":true,"multiline":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"input_value","value":"list users","display_name":"Text","advanced":false,"input_types":["Message"],"dynamic":false,"info":"Message to be passed as input.","title_case":false,"type":"str","_input_type":"MultilineInput"},"sender":{"trace_as_metadata":true,"options":["Machine","User"],"combobox":false,"required":false,"placeholder":"","show":true,"name":"sender","value":"User","display_name":"Sender Type","advanced":true,"dynamic":false,"info":"Type of sender.","title_case":false,"type":"str","_input_type":"DropdownInput"},"sender_name":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"sender_name","value":"User","display_name":"Sender Name","advanced":true,"input_types":["Message"],"dynamic":false,"info":"Name of the sender.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"session_id":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"session_id","value":"","display_name":"Session ID","advanced":true,"input_types":["Message"],"dynamic":false,"info":"The session ID of the chat. 
If empty, the current session ID parameter will be used.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"should_store_message":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"name":"should_store_message","value":true,"display_name":"Store Messages","advanced":true,"dynamic":false,"info":"Store the message in the history.","title_case":false,"type":"bool","_input_type":"BoolInput"}},"description":"Get chat inputs from the Playground.","icon":"ChatInput","base_classes":["Message"],"display_name":"Chat Input","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"message","display_name":"Message","method":"message_response","value":"__UNDEFINED__","cache":true}],"field_order":["input_value","should_store_message","sender","sender_name","session_id","files"],"beta":false,"edited":false,"lf_version":"1.0.17"},"id":"ChatInput-bcq6D"},"selected":false,"width":384,"height":302,"positionAbsolute":{"x":557.6262725075026,"y":724.8518930903978},"dragging":false},{"id":"ToolkitComponent-2lNG0","type":"genericNode","position":{"x":1731.8884789245508,"y":1378.7846304343796},"data":{"type":"ToolkitComponent","node":{"template":{"_type":"Component","tools":{"trace_as_metadata":true,"list":true,"required":false,"placeholder":"","show":true,"name":"tools","value":"","display_name":"Tools","advanced":false,"input_types":["Tool"],"dynamic":false,"info":"List of tools to combine.","title_case":false,"type":"other","_input_type":"HandleInput"},"code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from typing import List\r\nfrom langflow.custom import Component\r\nfrom langflow.inputs import HandleInput, MessageTextInput\r\nfrom langflow.template import Output\r\nfrom langflow.field_typing import Tool, Embeddings\r\nfrom langchain.tools.base import BaseTool, StructuredTool\r\nfrom langflow.schema import Data\r\n\r\nclass ToolkitComponent(Component):\r\n display_name = \"Toolkit\"\r\n description = \"Combines multiple tools into a single list of tools.\"\r\n icon = \"pocket-knife\"\r\n\r\n inputs = [\r\n HandleInput(\r\n name=\"tools\",\r\n display_name=\"Tools\",\r\n input_types=[\"Tool\"],\r\n info=\"List of tools to combine.\",\r\n is_list=True,\r\n ),\r\n ]\r\n\r\n outputs = [\r\n Output(display_name=\"Tools\", name=\"generated_tools\", method=\"generate_toolkit\"),\r\n Output(display_name=\"Tool Data\", name=\"tool_data\", method=\"generate_tool_data\"),\r\n ]\r\n\r\n def generate_toolkit(self) -> List[BaseTool]:\r\n combined_tools = []\r\n name_count = {}\r\n for index, tool in enumerate(self.tools):\r\n self.log(f\"Processing tool {index}: {type(tool)}\")\r\n if isinstance(tool, (BaseTool, StructuredTool)):\r\n processed_tool = tool\r\n elif hasattr(tool, 'build_tool'):\r\n processed_tool = tool.build_tool()\r\n else:\r\n self.log(f\"Unsupported tool type: {type(tool)}. 
Attempting to process anyway.\")\r\n processed_tool = tool\r\n\r\n original_name = getattr(processed_tool, 'name', f\"UnnamedTool_{index}\")\r\n self.log(f\"Original tool name: {original_name}\")\r\n\r\n if original_name not in name_count:\r\n name_count[original_name] = 0\r\n final_name = original_name\r\n else:\r\n name_count[original_name] += 1\r\n final_name = f\"{original_name}_{name_count[original_name]}\"\r\n\r\n if hasattr(processed_tool, 'name'):\r\n processed_tool.name = final_name\r\n\r\n self.log(f\"Final tool name: {final_name}\")\r\n\r\n if isinstance(processed_tool, StructuredTool) and hasattr(processed_tool, 'args_schema'):\r\n processed_tool.args_schema.name = f\"{final_name}_Schema\"\r\n\r\n combined_tools.append(processed_tool)\r\n\r\n debug_info = \"\\n\".join([f\"Tool {i}: {getattr(tool, 'name', f'UnnamedTool_{i}')} (Original: {getattr(tool, '_original_name', 'N/A')}) - Type: {type(tool)}\" for i, tool in enumerate(combined_tools)])\r\n self.log(\"Final toolkit composition:\")\r\n self.log(debug_info)\r\n\r\n\r\n self.status = combined_tools\r\n return combined_tools\r\n\r\n def generate_tool_data(self) -> List[Data]:\r\n tool_data = []\r\n for tool in self.generate_toolkit():\r\n tool_data.append(Data(\r\n data={\r\n \"name\": getattr(tool, 'name', 'Unnamed Tool'),\r\n \"description\": getattr(tool, 'description', 'No description available')\r\n }\r\n ))\r\n return tool_data","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false}},"description":"Combines multiple tools into a single list of tools.","icon":"pocket-knife","base_classes":["BaseTool","Data"],"display_name":"Toolkit","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["BaseTool"],"selected":"BaseTool","name":"generated_tools","display_name":"Tools","method":"generate_toolkit","value":"__UNDEFINED__","cache":true},{"types":["Data"],"selected":"Data","name":"tool_data","display_name":"Tool Data","method":"generate_tool_data","value":"__UNDEFINED__","cache":true,"hidden":true}],"field_order":["tools"],"beta":false,"edited":true,"lf_version":"1.0.17"},"id":"ToolkitComponent-2lNG0"},"selected":false,"width":384,"height":292,"dragging":false,"positionAbsolute":{"x":1731.8884789245508,"y":1378.7846304343796}},{"id":"OpenAIModel-BJWIg","type":"genericNode","position":{"x":1718.9773974162958,"y":603.4642741725065},"data":{"type":"OpenAIModel","node":{"template":{"_type":"Component","api_key":{"load_from_db":true,"required":false,"placeholder":"","show":true,"name":"api_key","value":"","display_name":"OpenAI API Key","advanced":false,"input_types":["Message"],"dynamic":false,"info":"The OpenAI API Key to use for the OpenAI model.","title_case":false,"password":true,"type":"str","_input_type":"SecretStrInput"},"code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"import operator\nfrom functools import reduce\n\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n 
display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature if temperature is not None else 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output # type: ignore\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n 
return\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"input_value":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"input_value","value":"","display_name":"Input","advanced":true,"input_types":["Message"],"dynamic":false,"info":"","title_case":false,"type":"str","_input_type":"MessageInput"},"json_mode":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"name":"json_mode","value":false,"display_name":"JSON Mode","advanced":true,"dynamic":false,"info":"If True, it will output JSON regardless of passing a schema.","title_case":false,"type":"bool","_input_type":"BoolInput"},"max_tokens":{"trace_as_metadata":true,"range_spec":{"step_type":"float","min":0,"max":128000,"step":0.1},"list":false,"required":false,"placeholder":"","show":true,"name":"max_tokens","value":"","display_name":"Max Tokens","advanced":true,"dynamic":false,"info":"The maximum number of tokens to generate. Set to 0 for unlimited tokens.","title_case":false,"type":"int","_input_type":"IntInput"},"model_kwargs":{"trace_as_input":true,"list":false,"required":false,"placeholder":"","show":true,"name":"model_kwargs","value":{},"display_name":"Model Kwargs","advanced":true,"dynamic":false,"info":"","title_case":false,"type":"dict","_input_type":"DictInput"},"model_name":{"trace_as_metadata":true,"options":["gpt-4o-mini","gpt-4o","gpt-4-turbo","gpt-4-turbo-preview","gpt-4","gpt-3.5-turbo","gpt-3.5-turbo-0125"],"combobox":false,"required":false,"placeholder":"","show":true,"name":"model_name","value":"gpt-4o","display_name":"Model Name","advanced":false,"dynamic":false,"info":"","title_case":false,"type":"str","_input_type":"DropdownInput"},"openai_api_base":{"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"openai_api_base","value":"","display_name":"OpenAI API Base","advanced":true,"dynamic":false,"info":"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.","title_case":false,"type":"str","_input_type":"StrInput"},"output_schema":{"trace_as_input":true,"list":true,"required":false,"placeholder":"","show":true,"name":"output_schema","value":{},"display_name":"Schema","advanced":true,"dynamic":false,"info":"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.","title_case":false,"type":"dict","_input_type":"DictInput"},"seed":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"name":"seed","value":1,"display_name":"Seed","advanced":true,"dynamic":false,"info":"The seed controls the reproducibility of the job.","title_case":false,"type":"int","_input_type":"IntInput"},"stream":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"name":"stream","value":false,"display_name":"Stream","advanced":true,"dynamic":false,"info":"Stream the response from the model. 
Streaming works only in Chat.","title_case":false,"type":"bool","_input_type":"BoolInput"},"system_message":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"system_message","value":"","display_name":"System Message","advanced":true,"input_types":["Message"],"dynamic":false,"info":"System message to pass to the model.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"temperature":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"name":"temperature","value":"0.2","display_name":"Temperature","advanced":true,"dynamic":false,"info":"","title_case":false,"type":"float","_input_type":"FloatInput"}},"description":"Generates text using OpenAI LLMs.","icon":"OpenAI","base_classes":["LanguageModel","Message"],"display_name":"OpenAI","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"text_output","display_name":"Text","method":"text_response","value":"__UNDEFINED__","cache":true},{"types":["LanguageModel"],"selected":"LanguageModel","name":"model_output","display_name":"Language Model","method":"build_model","value":"__UNDEFINED__","cache":true}],"field_order":["input_value","system_message","stream","max_tokens","model_kwargs","json_mode","output_schema","model_name","openai_api_base","api_key","temperature","seed"],"beta":false,"edited":false,"lf_version":"1.0.17"},"id":"OpenAIModel-BJWIg"},"selected":false,"width":384,"height":433,"positionAbsolute":{"x":1718.9773974162958,"y":603.4642741725065},"dragging":false},{"id":"Memory-CTQWu","type":"genericNode","position":{"x":1240.7186213296432,"y":1059.5754404393747},"data":{"type":"Memory","node":{"template":{"_type":"Component","memory":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"name":"memory","value":"","display_name":"External Memory","advanced":true,"input_types":["BaseChatMessageHistory"],"dynamic":false,"info":"Retrieve messages from an external memory. If empty, it will use the Langflow tables.","title_case":false,"type":"other","_input_type":"HandleInput"},"code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langchain.memory import ConversationBufferMemory\n\nfrom langflow.custom import Component\nfrom langflow.field_typing import BaseChatMemory\nfrom langflow.helpers.data import data_to_text\nfrom langflow.inputs import HandleInput\nfrom langflow.io import DropdownInput, IntInput, MessageTextInput, MultilineInput, Output\nfrom langflow.memory import LCBuiltinChatMemory, get_messages\nfrom langflow.schema import Data\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_USER\n\n\nclass MemoryComponent(Component):\n display_name = \"Chat Memory\"\n description = \"Retrieves stored chat messages from Langflow tables or an external memory.\"\n icon = \"message-square-more\"\n name = \"Memory\"\n\n inputs = [\n HandleInput(\n name=\"memory\",\n display_name=\"External Memory\",\n input_types=[\"BaseChatMessageHistory\"],\n info=\"Retrieve messages from an external memory. 
If empty, it will use the Langflow tables.\",\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER, \"Machine and User\"],\n value=\"Machine and User\",\n info=\"Filter by sender type.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Filter by sender name.\",\n advanced=True,\n ),\n IntInput(\n name=\"n_messages\",\n display_name=\"Number of Messages\",\n value=100,\n info=\"Number of messages to retrieve.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n DropdownInput(\n name=\"order\",\n display_name=\"Order\",\n options=[\"Ascending\", \"Descending\"],\n value=\"Ascending\",\n info=\"Order of the messages.\",\n advanced=True,\n ),\n MultilineInput(\n name=\"template\",\n display_name=\"Template\",\n info=\"The template to use for formatting the data. It can contain the keys {text}, {sender} or any other key in the message data.\",\n value=\"{sender_name}: {text}\",\n advanced=True,\n ),\n ]\n\n outputs = [\n Output(display_name=\"Messages (Data)\", name=\"messages\", method=\"retrieve_messages\"),\n Output(display_name=\"Messages (Text)\", name=\"messages_text\", method=\"retrieve_messages_as_text\"),\n Output(display_name=\"Memory\", name=\"lc_memory\", method=\"build_lc_memory\"),\n ]\n\n def retrieve_messages(self) -> Data:\n sender = self.sender\n sender_name = self.sender_name\n session_id = self.session_id\n n_messages = self.n_messages\n order = \"DESC\" if self.order == \"Descending\" else \"ASC\"\n\n if sender == \"Machine and User\":\n sender = None\n\n if self.memory:\n # override session_id\n self.memory.session_id = session_id\n\n stored = self.memory.messages\n # langchain memories are supposed to return messages in ascending order\n if order == \"DESC\":\n stored = stored[::-1]\n if n_messages:\n stored = stored[:n_messages]\n stored = [Message.from_lc_message(m) for m in stored]\n if sender:\n expected_type = MESSAGE_SENDER_AI if sender == MESSAGE_SENDER_AI else MESSAGE_SENDER_USER\n stored = [m for m in stored if m.type == expected_type]\n else:\n stored = get_messages(\n sender=sender,\n sender_name=sender_name,\n session_id=session_id,\n limit=n_messages,\n order=order,\n )\n self.status = stored\n return stored\n\n def retrieve_messages_as_text(self) -> Message:\n stored_text = data_to_text(self.template, self.retrieve_messages())\n self.status = stored_text\n return Message(text=stored_text)\n\n def build_lc_memory(self) -> BaseChatMemory:\n if self.memory:\n chat_memory = self.memory\n else:\n chat_memory = LCBuiltinChatMemory(flow_id=self.flow_id, session_id=self.session_id)\n return ConversationBufferMemory(chat_memory=chat_memory)\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"n_messages":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"name":"n_messages","value":100,"display_name":"Number of Messages","advanced":true,"dynamic":false,"info":"Number of messages to 
retrieve.","title_case":false,"type":"int","_input_type":"IntInput"},"order":{"trace_as_metadata":true,"options":["Ascending","Descending"],"combobox":false,"required":false,"placeholder":"","show":true,"name":"order","value":"Ascending","display_name":"Order","advanced":true,"dynamic":false,"info":"Order of the messages.","title_case":false,"type":"str","_input_type":"DropdownInput"},"sender":{"trace_as_metadata":true,"options":["Machine","User","Machine and User"],"combobox":false,"required":false,"placeholder":"","show":true,"name":"sender","value":"Machine and User","display_name":"Sender Type","advanced":true,"dynamic":false,"info":"Filter by sender type.","title_case":false,"type":"str","_input_type":"DropdownInput"},"sender_name":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"sender_name","value":"","display_name":"Sender Name","advanced":true,"input_types":["Message"],"dynamic":false,"info":"Filter by sender name.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"session_id":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"session_id","value":"","display_name":"Session ID","advanced":true,"input_types":["Message"],"dynamic":false,"info":"The session ID of the chat. If empty, the current session ID parameter will be used.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"template":{"trace_as_input":true,"multiline":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"template","value":"{sender_name}: {text}","display_name":"Template","advanced":true,"input_types":["Message"],"dynamic":false,"info":"The template to use for formatting the data. 
It can contain the keys {text}, {sender} or any other key in the message data.","title_case":false,"type":"str","_input_type":"MultilineInput"}},"description":"Retrieves stored chat messages from Langflow tables or an external memory.","icon":"message-square-more","base_classes":["BaseChatMemory","Data","Message"],"display_name":"Chat Memory","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Data"],"selected":"Data","name":"messages","display_name":"Messages (Data)","method":"retrieve_messages","value":"__UNDEFINED__","cache":true},{"types":["Message"],"selected":"Message","name":"messages_text","display_name":"Messages (Text)","method":"retrieve_messages_as_text","value":"__UNDEFINED__","cache":true,"hidden":true},{"types":["BaseChatMemory"],"selected":"BaseChatMemory","name":"lc_memory","display_name":"Memory","method":"build_lc_memory","value":"__UNDEFINED__","cache":true,"hidden":true}],"field_order":["memory","sender","sender_name","n_messages","session_id","order","template"],"beta":false,"edited":false,"lf_version":"1.0.17"},"id":"Memory-CTQWu"},"selected":false,"width":384,"height":244,"dragging":false,"positionAbsolute":{"x":1240.7186213296432,"y":1059.5754404393747}},{"id":"Prompt-0dWZu","type":"genericNode","position":{"x":1227.4862876736101,"y":616.3826667128244},"data":{"type":"Prompt","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.inputs.inputs import DefaultPromptField\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(\n self,\n ) -> Message:\n prompt = Message.from_template_and_variables(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def _update_template(self, frontend_node: dict):\n prompt_template = frontend_node[\"template\"][\"template\"][\"value\"]\n custom_fields = frontend_node[\"custom_fields\"]\n frontend_node_template = frontend_node[\"template\"]\n _ = process_prompt_template(\n template=prompt_template,\n name=\"template\",\n custom_fields=custom_fields,\n frontend_node_template=frontend_node_template,\n )\n return frontend_node\n\n def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"\n This function is called after the code validation is done.\n \"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n # Kept it duplicated for backwards compatibility\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n 
update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n\n def _get_fallback_input(self, **kwargs):\n return DefaultPromptField(**kwargs)\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"template":{"trace_as_input":true,"list":false,"required":false,"placeholder":"","show":true,"name":"template","value":"\nYou are a Notion Agent, an AI assistant designed to help users interact with their Notion workspace. Your role is to understand user requests, utilize the appropriate Notion tools to fulfill these requests, and communicate clearly with the user throughout the process.\n\nGeneral Guidelines:\n\n1. Carefully analyze each user request to determine which tool(s) you need to use.\n\n2. Before using any tool, ensure you have all the necessary information. If you need more details, ask the user clear and concise questions.\n\n3. When using a tool, provide a brief explanation to the user about what you're doing and why.\n\n4. After using a tool, interpret the results for the user in a clear, concise manner.\n\n5. If a task requires multiple steps, outline your plan to the user before proceeding.\n\n6. If you encounter an error or limitation, explain it to the user and suggest possible solutions or alternative approaches.\n\n7. Always maintain a helpful and professional tone in your interactions.\n\n8. Be proactive in offering suggestions or alternatives if the user's initial request can't be fulfilled exactly as stated.\n\n9. When providing information or results, focus on relevance and clarity. Summarize when necessary, but provide details when they're important.\n\n10. If a user's request is unclear or could be interpreted in multiple ways, ask for clarification before proceeding.\n\n11. After completing a task, summarize what was accomplished and suggest any relevant next steps or additional actions the user might want to take.\n\n12. If a user asks about capabilities you don't have or tools you can't access, clearly explain your limitations and suggest alternative ways to assist if possible.\n\nRemember, your primary goal is to assist the user effectively with their Notion-related tasks using the provided tools. Always strive for clarity, accuracy, and helpfulness in your interactions. 
Adapt your communication style to the user's level of technical understanding and familiarity with Notion.\n\nNow, you're ready to assist the user\n\nToday is: {CURRENT_DATE}\n","display_name":"Template","advanced":false,"dynamic":false,"info":"","title_case":false,"type":"prompt","_input_type":"PromptInput"},"CURRENT_DATE":{"field_type":"str","required":false,"placeholder":"","list":false,"show":true,"multiline":true,"value":"","fileTypes":[],"file_path":"","password":false,"name":"CURRENT_DATE","display_name":"CURRENT_DATE","advanced":false,"input_types":["Message","Text"],"dynamic":false,"info":"","load_from_db":false,"title_case":false,"type":"str"}},"description":"Create a prompt template with dynamic variables.","icon":"prompts","is_input":null,"is_output":null,"is_composition":null,"base_classes":["Message"],"name":"","display_name":"Prompt","documentation":"","custom_fields":{"template":["CURRENT_DATE"]},"output_types":[],"full_path":null,"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"prompt","hidden":null,"display_name":"Prompt Message","method":"build_prompt","value":"__UNDEFINED__","cache":true}],"field_order":["template"],"beta":false,"error":null,"edited":false,"lf_version":"1.0.17"},"id":"Prompt-0dWZu"},"selected":false,"width":384,"height":416,"positionAbsolute":{"x":1227.4862876736101,"y":616.3826667128244},"dragging":false},{"id":"CurrentDateComponent-NSNQ8","type":"genericNode","position":{"x":1092.5108512311297,"y":868.3249850335523},"data":{"type":"CurrentDateComponent","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from datetime import datetime\r\nfrom zoneinfo import ZoneInfo\r\nfrom typing import List\r\n\r\nfrom langflow.custom import Component\r\nfrom langflow.io import DropdownInput, Output\r\nfrom langflow.schema.message import Message\r\n\r\nclass CurrentDateComponent(Component):\r\n display_name = \"Current Date 🕰️\"\r\n description = \"Returns the current date and time in the selected timezone.\"\r\n icon = \"clock\"\r\n\r\n inputs = [\r\n DropdownInput(\r\n name=\"timezone\",\r\n display_name=\"Timezone\",\r\n options=[\r\n \"UTC\",\r\n \"US/Eastern\",\r\n \"US/Central\",\r\n \"US/Mountain\",\r\n \"US/Pacific\",\r\n \"Europe/London\",\r\n \"Europe/Paris\",\r\n \"Asia/Tokyo\",\r\n \"Australia/Sydney\",\r\n \"America/Sao_Paulo\",\r\n \"America/Cuiaba\",\r\n ],\r\n value=\"UTC\",\r\n info=\"Select the timezone for the current date and time.\",\r\n ),\r\n ]\r\n\r\n outputs = [\r\n Output(display_name=\"Current Date\", name=\"current_date\", method=\"get_current_date\"),\r\n ]\r\n\r\n def get_current_date(self) -> Message:\r\n try:\r\n tz = ZoneInfo(self.timezone)\r\n current_date = datetime.now(tz).strftime(\"%Y-%m-%d %H:%M:%S %Z\")\r\n result = f\"Current date and time in {self.timezone}: {current_date}\"\r\n self.status = result\r\n return Message(text=result)\r\n except Exception as e:\r\n error_message = f\"Error: {str(e)}\"\r\n self.status = error_message\r\n return 
Message(text=error_message)","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"timezone":{"trace_as_metadata":true,"options":["UTC","US/Eastern","US/Central","US/Mountain","US/Pacific","Europe/London","Europe/Paris","Asia/Tokyo","Australia/Sydney","America/Sao_Paulo","America/Cuiaba"],"combobox":false,"required":false,"placeholder":"","show":true,"name":"timezone","value":"UTC","display_name":"Timezone","advanced":false,"dynamic":false,"info":"Select the timezone for the current date and time.","title_case":false,"type":"str","_input_type":"DropdownInput"}},"description":"Returns the current date and time in the selected timezone.","icon":"clock","base_classes":["Message"],"display_name":"Current Date","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"current_date","display_name":"Current Date","method":"get_current_date","value":"__UNDEFINED__","cache":true}],"field_order":["timezone"],"beta":false,"edited":true,"official":false,"lf_version":"1.0.17"},"id":"CurrentDateComponent-NSNQ8","showNode":false},"selected":false,"width":96,"height":96,"dragging":false,"positionAbsolute":{"x":1092.5108512311297,"y":868.3249850335523}}],"edges":[{"source":"ChatInput-bcq6D","target":"ToolCallingAgent-50Gcd","sourceHandle":"{œdataTypeœ:œChatInputœ,œidœ:œChatInput-bcq6Dœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}","targetHandle":"{œfieldNameœ:œinput_valueœ,œidœ:œToolCallingAgent-50Gcdœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","id":"reactflow__edge-ChatInput-bcq6D{œdataTypeœ:œChatInputœ,œidœ:œChatInput-bcq6Dœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-ToolCallingAgent-50Gcd{œfieldNameœ:œinput_valueœ,œidœ:œToolCallingAgent-50Gcdœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","data":{"targetHandle":{"fieldName":"input_value","id":"ToolCallingAgent-50Gcd","inputTypes":["Message"],"type":"str"},"sourceHandle":{"dataType":"ChatInput","id":"ChatInput-bcq6D","name":"message","output_types":["Message"]}},"selected":false,"className":""},{"source":"ToolCallingAgent-50Gcd","target":"ChatOutput-TSCup","sourceHandle":"{œdataTypeœ:œToolCallingAgentœ,œidœ:œToolCallingAgent-50Gcdœ,œnameœ:œresponseœ,œoutput_typesœ:[œMessageœ]}","targetHandle":"{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-TSCupœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","id":"reactflow__edge-ToolCallingAgent-50Gcd{œdataTypeœ:œToolCallingAgentœ,œidœ:œToolCallingAgent-50Gcdœ,œnameœ:œresponseœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-TSCup{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-TSCupœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","data":{"targetHandle":{"fieldName":"input_value","id":"ChatOutput-TSCup","inputTypes":["Message"],"type":"str"},"sourceHandle":{"dataType":"ToolCallingAgent","id":"ToolCallingAgent-50Gcd","name":"response","output_types":["Message"]}},"selected":false,"className":""},{"source":"ToolkitComponent-2lNG0","target":"ToolCallingAgent-50Gcd","sourceHandle":"{œdataTypeœ:œToolkitComponentœ,œidœ:œToolkitComponent-2lNG0œ,œnameœ:œgenerated_toolsœ,œoutput_typesœ:[œBaseToolœ]}","targetHandle":"{œfieldNameœ:œtoolsœ,œidœ:œToolCallingAgent-50Gcdœ,œinputTypesœ:[œToolœ,œBaseToolœ],œtypeœ:œotherœ}","id":"reactflow__edge-ToolkitComponent-2lNG0{œdataTypeœ:œToolkitComponentœ,œidœ:œToolkitComponent-2lNG0œ,œnameœ:œgenerated_toolsœ,œoutput_typesœ:[œBaseToolœ]}-ToolCallingAgent-50Gcd{œfieldNameœ:œtoolsœ,œidœ:œToolCallingAgent-50Gcdœ,œinputTypesœ:[œToolœ,œBas
eToolœ],œtypeœ:œotherœ}","data":{"targetHandle":{"fieldName":"tools","id":"ToolCallingAgent-50Gcd","inputTypes":["Tool","BaseTool"],"type":"other"},"sourceHandle":{"dataType":"ToolkitComponent","id":"ToolkitComponent-2lNG0","name":"generated_tools","output_types":["BaseTool"]}},"selected":false,"className":""},{"source":"NotionPageUpdate-6FyYd","sourceHandle":"{œdataTypeœ:œNotionPageUpdateœ,œidœ:œNotionPageUpdate-6FyYdœ,œnameœ:œexample_tool_outputœ,œoutput_typesœ:[œToolœ]}","target":"ToolkitComponent-2lNG0","targetHandle":"{œfieldNameœ:œtoolsœ,œidœ:œToolkitComponent-2lNG0œ,œinputTypesœ:[œToolœ],œtypeœ:œotherœ}","data":{"targetHandle":{"fieldName":"tools","id":"ToolkitComponent-2lNG0","inputTypes":["Tool"],"type":"other"},"sourceHandle":{"dataType":"NotionPageUpdate","id":"NotionPageUpdate-6FyYd","name":"example_tool_output","output_types":["Tool"]}},"id":"reactflow__edge-NotionPageUpdate-6FyYd{œdataTypeœ:œNotionPageUpdateœ,œidœ:œNotionPageUpdate-6FyYdœ,œnameœ:œexample_tool_outputœ,œoutput_typesœ:[œToolœ]}-ToolkitComponent-2lNG0{œfieldNameœ:œtoolsœ,œidœ:œToolkitComponent-2lNG0œ,œinputTypesœ:[œToolœ],œtypeœ:œotherœ}","className":"","selected":false},{"source":"NotionPageCreator-6SCB5","sourceHandle":"{œdataTypeœ:œNotionPageCreatorœ,œidœ:œNotionPageCreator-6SCB5œ,œnameœ:œexample_tool_outputœ,œoutput_typesœ:[œToolœ]}","target":"ToolkitComponent-2lNG0","targetHandle":"{œfieldNameœ:œtoolsœ,œidœ:œToolkitComponent-2lNG0œ,œinputTypesœ:[œToolœ],œtypeœ:œotherœ}","data":{"targetHandle":{"fieldName":"tools","id":"ToolkitComponent-2lNG0","inputTypes":["Tool"],"type":"other"},"sourceHandle":{"dataType":"NotionPageCreator","id":"NotionPageCreator-6SCB5","name":"example_tool_output","output_types":["Tool"]}},"id":"reactflow__edge-NotionPageCreator-6SCB5{œdataTypeœ:œNotionPageCreatorœ,œidœ:œNotionPageCreator-6SCB5œ,œnameœ:œexample_tool_outputœ,œoutput_typesœ:[œToolœ]}-ToolkitComponent-2lNG0{œfieldNameœ:œtoolsœ,œidœ:œToolkitComponent-2lNG0œ,œinputTypesœ:[œToolœ],œtypeœ:œotherœ}","className":"","selected":false},{"source":"AddContentToPage-ZezUn","sourceHandle":"{œdataTypeœ:œAddContentToPageœ,œidœ:œAddContentToPage-ZezUnœ,œnameœ:œexample_tool_outputœ,œoutput_typesœ:[œToolœ]}","target":"ToolkitComponent-2lNG0","targetHandle":"{œfieldNameœ:œtoolsœ,œidœ:œToolkitComponent-2lNG0œ,œinputTypesœ:[œToolœ],œtypeœ:œotherœ}","data":{"targetHandle":{"fieldName":"tools","id":"ToolkitComponent-2lNG0","inputTypes":["Tool"],"type":"other"},"sourceHandle":{"dataType":"AddContentToPage","id":"AddContentToPage-ZezUn","name":"example_tool_output","output_types":["Tool"]}},"id":"reactflow__edge-AddContentToPage-ZezUn{œdataTypeœ:œAddContentToPageœ,œidœ:œAddContentToPage-ZezUnœ,œnameœ:œexample_tool_outputœ,œoutput_typesœ:[œToolœ]}-ToolkitComponent-2lNG0{œfieldNameœ:œtoolsœ,œidœ:œToolkitComponent-2lNG0œ,œinputTypesœ:[œToolœ],œtypeœ:œotherœ}","className":"","selected":false},{"source":"NotionDatabaseProperties-aeWil","sourceHandle":"{œdataTypeœ:œNotionDatabasePropertiesœ,œidœ:œNotionDatabaseProperties-aeWilœ,œnameœ:œexample_tool_outputœ,œoutput_typesœ:[œToolœ]}","target":"ToolkitComponent-2lNG0","targetHandle":"{œfieldNameœ:œtoolsœ,œidœ:œToolkitComponent-2lNG0œ,œinputTypesœ:[œToolœ],œtypeœ:œotherœ}","data":{"targetHandle":{"fieldName":"tools","id":"ToolkitComponent-2lNG0","inputTypes":["Tool"],"type":"other"},"sourceHandle":{"dataType":"NotionDatabaseProperties","id":"NotionDatabaseProperties-aeWil","name":"example_tool_output","output_types":["Tool"]}},"id":"reactflow__edge-NotionDatabaseProperties-aeWil{œdataTypeœ:œNotionDatabaseProp
ertiesœ,œidœ:œNotionDatabaseProperties-aeWilœ,œnameœ:œexample_tool_outputœ,œoutput_typesœ:[œToolœ]}-ToolkitComponent-2lNG0{œfieldNameœ:œtoolsœ,œidœ:œToolkitComponent-2lNG0œ,œinputTypesœ:[œToolœ],œtypeœ:œotherœ}","className":"","selected":false},{"source":"NotionListPages-znA3w","sourceHandle":"{œdataTypeœ:œNotionListPagesœ,œidœ:œNotionListPages-znA3wœ,œnameœ:œexample_tool_outputœ,œoutput_typesœ:[œToolœ]}","target":"ToolkitComponent-2lNG0","targetHandle":"{œfieldNameœ:œtoolsœ,œidœ:œToolkitComponent-2lNG0œ,œinputTypesœ:[œToolœ],œtypeœ:œotherœ}","data":{"targetHandle":{"fieldName":"tools","id":"ToolkitComponent-2lNG0","inputTypes":["Tool"],"type":"other"},"sourceHandle":{"dataType":"NotionListPages","id":"NotionListPages-znA3w","name":"example_tool_output","output_types":["Tool"]}},"id":"reactflow__edge-NotionListPages-znA3w{œdataTypeœ:œNotionListPagesœ,œidœ:œNotionListPages-znA3wœ,œnameœ:œexample_tool_outputœ,œoutput_typesœ:[œToolœ]}-ToolkitComponent-2lNG0{œfieldNameœ:œtoolsœ,œidœ:œToolkitComponent-2lNG0œ,œinputTypesœ:[œToolœ],œtypeœ:œotherœ}","className":"","selected":false},{"source":"NotionPageContent-SlL21","sourceHandle":"{œdataTypeœ:œNotionPageContentœ,œidœ:œNotionPageContent-SlL21œ,œnameœ:œexample_tool_outputœ,œoutput_typesœ:[œToolœ]}","target":"ToolkitComponent-2lNG0","targetHandle":"{œfieldNameœ:œtoolsœ,œidœ:œToolkitComponent-2lNG0œ,œinputTypesœ:[œToolœ],œtypeœ:œotherœ}","data":{"targetHandle":{"fieldName":"tools","id":"ToolkitComponent-2lNG0","inputTypes":["Tool"],"type":"other"},"sourceHandle":{"dataType":"NotionPageContent","id":"NotionPageContent-SlL21","name":"example_tool_output","output_types":["Tool"]}},"id":"reactflow__edge-NotionPageContent-SlL21{œdataTypeœ:œNotionPageContentœ,œidœ:œNotionPageContent-SlL21œ,œnameœ:œexample_tool_outputœ,œoutput_typesœ:[œToolœ]}-ToolkitComponent-2lNG0{œfieldNameœ:œtoolsœ,œidœ:œToolkitComponent-2lNG0œ,œinputTypesœ:[œToolœ],œtypeœ:œotherœ}","className":"","selected":false},{"source":"NotionUserList-C3eGn","sourceHandle":"{œdataTypeœ:œNotionUserListœ,œidœ:œNotionUserList-C3eGnœ,œnameœ:œexample_tool_outputœ,œoutput_typesœ:[œToolœ]}","target":"ToolkitComponent-2lNG0","targetHandle":"{œfieldNameœ:œtoolsœ,œidœ:œToolkitComponent-2lNG0œ,œinputTypesœ:[œToolœ],œtypeœ:œotherœ}","data":{"targetHandle":{"fieldName":"tools","id":"ToolkitComponent-2lNG0","inputTypes":["Tool"],"type":"other"},"sourceHandle":{"dataType":"NotionUserList","id":"NotionUserList-C3eGn","name":"example_tool_output","output_types":["Tool"]}},"id":"reactflow__edge-NotionUserList-C3eGn{œdataTypeœ:œNotionUserListœ,œidœ:œNotionUserList-C3eGnœ,œnameœ:œexample_tool_outputœ,œoutput_typesœ:[œToolœ]}-ToolkitComponent-2lNG0{œfieldNameœ:œtoolsœ,œidœ:œToolkitComponent-2lNG0œ,œinputTypesœ:[œToolœ],œtypeœ:œotherœ}","className":"","selected":false},{"source":"NotionSearch-VS2mI","sourceHandle":"{œdataTypeœ:œNotionSearchœ,œidœ:œNotionSearch-VS2mIœ,œnameœ:œexample_tool_outputœ,œoutput_typesœ:[œToolœ]}","target":"ToolkitComponent-2lNG0","targetHandle":"{œfieldNameœ:œtoolsœ,œidœ:œToolkitComponent-2lNG0œ,œinputTypesœ:[œToolœ],œtypeœ:œotherœ}","data":{"targetHandle":{"fieldName":"tools","id":"ToolkitComponent-2lNG0","inputTypes":["Tool"],"type":"other"},"sourceHandle":{"dataType":"NotionSearch","id":"NotionSearch-VS2mI","name":"example_tool_output","output_types":["Tool"]}},"id":"reactflow__edge-NotionSearch-VS2mI{œdataTypeœ:œNotionSearchœ,œidœ:œNotionSearch-VS2mIœ,œnameœ:œexample_tool_outputœ,œoutput_typesœ:[œToolœ]}-ToolkitComponent-2lNG0{œfieldNameœ:œtoolsœ,œidœ:œToolkitComponent-2lNG0œ,œinputTypesœ:[
œToolœ],œtypeœ:œotherœ}","className":"","selected":false},{"source":"OpenAIModel-BJWIg","sourceHandle":"{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-BJWIgœ,œnameœ:œmodel_outputœ,œoutput_typesœ:[œLanguageModelœ]}","target":"ToolCallingAgent-50Gcd","targetHandle":"{œfieldNameœ:œllmœ,œidœ:œToolCallingAgent-50Gcdœ,œinputTypesœ:[œLanguageModelœ],œtypeœ:œotherœ}","data":{"targetHandle":{"fieldName":"llm","id":"ToolCallingAgent-50Gcd","inputTypes":["LanguageModel"],"type":"other"},"sourceHandle":{"dataType":"OpenAIModel","id":"OpenAIModel-BJWIg","name":"model_output","output_types":["LanguageModel"]}},"id":"reactflow__edge-OpenAIModel-BJWIg{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-BJWIgœ,œnameœ:œmodel_outputœ,œoutput_typesœ:[œLanguageModelœ]}-ToolCallingAgent-50Gcd{œfieldNameœ:œllmœ,œidœ:œToolCallingAgent-50Gcdœ,œinputTypesœ:[œLanguageModelœ],œtypeœ:œotherœ}","className":"","selected":false},{"source":"Memory-CTQWu","sourceHandle":"{œdataTypeœ:œMemoryœ,œidœ:œMemory-CTQWuœ,œnameœ:œmessagesœ,œoutput_typesœ:[œDataœ]}","target":"ToolCallingAgent-50Gcd","targetHandle":"{œfieldNameœ:œchat_historyœ,œidœ:œToolCallingAgent-50Gcdœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}","data":{"targetHandle":{"fieldName":"chat_history","id":"ToolCallingAgent-50Gcd","inputTypes":["Data"],"type":"other"},"sourceHandle":{"dataType":"Memory","id":"Memory-CTQWu","name":"messages","output_types":["Data"]}},"id":"reactflow__edge-Memory-CTQWu{œdataTypeœ:œMemoryœ,œidœ:œMemory-CTQWuœ,œnameœ:œmessagesœ,œoutput_typesœ:[œDataœ]}-ToolCallingAgent-50Gcd{œfieldNameœ:œchat_historyœ,œidœ:œToolCallingAgent-50Gcdœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}","className":"","selected":false},{"source":"Prompt-0dWZu","sourceHandle":"{œdataTypeœ:œPromptœ,œidœ:œPrompt-0dWZuœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}","target":"ToolCallingAgent-50Gcd","targetHandle":"{œfieldNameœ:œsystem_promptœ,œidœ:œToolCallingAgent-50Gcdœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","data":{"targetHandle":{"fieldName":"system_prompt","id":"ToolCallingAgent-50Gcd","inputTypes":["Message"],"type":"str"},"sourceHandle":{"dataType":"Prompt","id":"Prompt-0dWZu","name":"prompt","output_types":["Message"]}},"id":"reactflow__edge-Prompt-0dWZu{œdataTypeœ:œPromptœ,œidœ:œPrompt-0dWZuœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-ToolCallingAgent-50Gcd{œfieldNameœ:œsystem_promptœ,œidœ:œToolCallingAgent-50Gcdœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","className":"","selected":false},{"source":"CurrentDateComponent-NSNQ8","sourceHandle":"{œdataTypeœ:œCurrentDateComponentœ,œidœ:œCurrentDateComponent-NSNQ8œ,œnameœ:œcurrent_dateœ,œoutput_typesœ:[œMessageœ]}","target":"Prompt-0dWZu","targetHandle":"{œfieldNameœ:œCURRENT_DATEœ,œidœ:œPrompt-0dWZuœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}","data":{"targetHandle":{"fieldName":"CURRENT_DATE","id":"Prompt-0dWZu","inputTypes":["Message","Text"],"type":"str"},"sourceHandle":{"dataType":"CurrentDateComponent","id":"CurrentDateComponent-NSNQ8","name":"current_date","output_types":["Message"]}},"id":"reactflow__edge-CurrentDateComponent-NSNQ8{œdataTypeœ:œCurrentDateComponentœ,œidœ:œCurrentDateComponent-NSNQ8œ,œnameœ:œcurrent_dateœ,œoutput_typesœ:[œMessageœ]}-Prompt-0dWZu{œfieldNameœ:œCURRENT_DATEœ,œidœ:œPrompt-0dWZuœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}","className":"","selected":false}],"viewport":{"x":97.72528949998423,"y":-211.85229348429561,"zoom":0.41621432461249197}},"description":"This flow creates an AI assistant that interacts with your Notion workspace. 
It understands natural language requests, performs actions in Notion (like creating pages or searching for information), and provides helpful responses. To use it, simply start a conversation by asking the agent to perform a Notion-related task, and it will guide you through the process, making it easy to manage your Notion workspace through chat.","name":"Conversational Notion Agent","last_tested_version":"1.0.17","endpoint_name":null,"is_component":false} \ No newline at end of file diff --git a/docs/docs/Integrations/Notion/Meeting_Notes_Agent.json b/docs/docs/Integrations/Notion/Meeting_Notes_Agent.json new file mode 100644 index 000000000000..56d8e74e7a30 --- /dev/null +++ b/docs/docs/Integrations/Notion/Meeting_Notes_Agent.json @@ -0,0 +1 @@ +{"id":"b6de0fdb-31a2-40bf-b921-719bc0890a0e","data":{"nodes":[{"id":"TextInput-iJPEJ","type":"genericNode","position":{"x":94.43614181571661,"y":387.24602783243165},"data":{"type":"TextInput","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.base.io.text import TextComponent\nfrom langflow.io import MultilineInput, Output\nfrom langflow.schema.message import Message\n\n\nclass TextInputComponent(TextComponent):\n display_name = \"Text Input\"\n description = \"Get text inputs from the Playground.\"\n icon = \"type\"\n name = \"TextInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Text to be passed as input.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Text\", name=\"text\", method=\"text_response\"),\n ]\n\n def text_response(self) -> Message:\n message = Message(\n text=self.input_value,\n )\n return message\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"input_value":{"trace_as_input":true,"multiline":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"input_value","value":"Good morning. Thanks for joining this project review meeting. We've got quite a few tasks to discuss, especially some Notion-related ones. Shall we get started?\n\nMorning, Felipe. Absolutely, let's dive in. I see we have several projects and tasks on our plate.\n\nGreat. Let's begin with the AI Content Gen project. I'm currently working on \"Montar base agente seletor de cortes.\" It's in progress, and I'm aiming to complete it by June 14th. Have you had a chance to look at this task, Cezar?\n\nI haven't been directly involved with that one. Can you give me an overview of what it entails?\n\nOf course. Essentially, we're building a base agent that can intelligently select and edit content. It's part of our larger AI-driven content generation initiative. The challenge is creating an algorithm that can understand context and make smart editing decisions.\n\nInteresting. How's the progress so far?\n\nIt's coming along. I've set up the basic framework, but fine-tuning the selection criteria is proving to be more complex than initially anticipated. I might need an extra day or two beyond the June 14th deadline.\n\nUnderstood, Felipe. Keep me posted if you need any resources or if the deadline needs to be adjusted. By the way, I've been meaning to ask - have you had a chance to look into that new NLP library I mentioned last week? I think it could be useful for this project.\n\nActually, Cezar, I haven't gotten to that yet. Should we add it as a new task? 
Maybe \"Evaluate NLP library for content selection\"?\n\nGood idea. Let's add that to our task list with a due date of next Friday. Now, moving on to the next task in this project - \"Create Notion Task Automation.\" It's assigned to you and set for June 19th, but you haven't started it yet, right? This is where I'd like to focus our discussion today.\n\nThat's correct. So, the goal is to streamline our workflow by automating certain tasks within Notion. I'm thinking we could create scripts or use Notion's API to automatically create, assign, and update tasks based on certain triggers or schedules.\n\nThat sounds like it could save us a lot of time. What specific automations are you considering?\n\nI'm glad you asked, Cezar. I'm thinking of a few key areas:\n1. Automatic task creation based on project milestones\n2. Assigning tasks to team members based on their expertise and current workload\n3. Updating task statuses based on linked database entries\n4. Generating weekly progress reports\n5. Setting up reminders for overdue tasks\n\nThose all sound valuable. Have you looked into the technical requirements for implementing these?\n\nI've done some initial research. Notion's API seems robust enough to handle these automations. We'll likely need to use a combination of Notion's API and a server to run our scripts. I'm thinking of using Node.js for this.\n\nGood thinking. Do you foresee any challenges?\n\nThe main challenge will be ensuring our automations are flexible enough to handle different project structures and team dynamics. We'll need to build in some configurability.\n\nAgreed. Let's make sure we involve the team in defining these automations. Their input will be crucial for making this truly useful. Oh, and speaking of team input, I think we should add a task for \"Conduct team survey on Notion pain points.\" This could help us prioritize which automations to tackle first.\n\nThat's an excellent idea, Cezar. I'll create that task and aim to complete the survey by next Wednesday. Now, I see we have another Notion-related task: \"Subir Notion Agent no Langflow Prod.\" Can you remind me what this entails?\n\nYes, this task is about deploying our Notion integration agent to the Langflow production environment. It's not started yet, but it's a crucial step in making our Notion automations available to the whole team.\n\nI see. What's the timeline for this?\n\nWe haven't set a specific deadline yet, but I think we should aim to complete this shortly after the automation task. Let's tentatively say by the end of June?\n\nSounds reasonable. Make sure to coordinate with the DevOps team for a smooth deployment. And while we're on the topic of deployment, we should probably add a task for \"Create documentation for Notion Agent usage.\" We want to make sure the team knows how to use these new tools once they're available.\n\nYou're right, Felipe. I'll add that to our task list. Now, switching gears a bit, let's talk about the Internal Projects. I see you're working on \"Crypto Links\" - it's in progress.\n\nAh yes, our blockchain initiative. It's moving forward. I'm researching various blockchain platforms and their potential applications for our projects. I'm particularly interested in smart contract capabilities.\n\nInteresting. Keep me updated on any promising findings. By the way, have you considered reaching out to any blockchain experts for consultation? It might be worth adding a task for \"Schedule blockchain expert consultation.\"\n\nThat's a great suggestion, Cezar. 
I'll add it to my to-do list. Now, for the Internal Tasks, I see you're assigned to \"Revisar modos do Charlinho, preparar para open source.\" What's the status on that?\n\nI haven't started yet, but it's on my radar. The deadline is June 7th, so I'll be diving into it this week. Essentially, we need to review and refine Charlinho's modes before we open-source the project.\n\nSounds good. Let me know if you need any assistance with that. Oh, and don't forget we need to add a task for \"Prepare Charlinho documentation for open source.\" We want to make sure our project is well-documented when we release it.\n\nYou're right, Felipe. I'll make sure to include that in our task list. Now, I see you have several tasks assigned to you in the Internal Tasks section. Can you give me a quick rundown?\n\nOf course. I'm working on finding a freelancer to create flows in ComfyUI - that's in progress and due May 28th. I'm also handling the conception of the Agent UI, due May 30th. Both are moving along well.\n\nThere's also a task to \"Check, install and test Gladia to use a bot in Google Meet.\" That's in progress, and I'm collaborating with C on it.\n\nThat's quite a workload. How are you managing all these tasks?\n\nIt's challenging, but I'm prioritizing based on deadlines and dependencies. The Notion automation project is a high priority because it'll help us manage tasks more efficiently in the long run.\n\nGood strategy, Felipe. Is there anything you need from me or the team to help move these tasks forward?\n\nActually, yes. For the \"pegar os arquivos necessários para tentarmos montar um stinger com ffmpeg\" task, I could use some input on which files are critical for this. It's a low-priority task due June 2nd, but any insights would be helpful.\n\nI'll review our asset library and send you a list of potential files by tomorrow. Oh, and let's add a task for \"Create ffmpeg stinger tutorial\" once we figure out the process. It could be useful for the team in the future.\n\nGreat idea, Cezar. I'll add that to our backlog. Anything else we should discuss?\n\nI think we've covered the major points. Oh, one last thing - for the \"Create Notion Task Automation\" project, I was thinking of setting up a series of short daily meetings next week to keep everyone aligned. What do you think?\n\nThat's a good idea. Maybe 15-minute stand-ups? We can use those to address any roadblocks quickly. And let's add a task for \"Set up Notion Automation progress tracking board\" to help visualize our progress during these stand-ups.\n\nPerfect. I'll send out calendar invites this afternoon and create that tracking board task. Any final thoughts or concerns, Cezar?\n\nNot from my side. I think we have a clear path forward, especially with the Notion-related tasks and the new items we've added to our list.\n\nAgreed. Let's plan to reconvene next week to check on progress, particularly for the Notion automation project and these new tasks we've discussed. Thanks for the comprehensive update, Felipe.\n\nThank you, Cezar. 
I'll send out a summary of our discussion and action items shortly, including all the new tasks we've identified during this meeting.\n","display_name":"Text","advanced":false,"input_types":["Message"],"dynamic":false,"info":"Text to be passed as input.","title_case":false,"type":"str","_input_type":"MultilineInput"}},"description":"Get text inputs from the Playground.","icon":"type","base_classes":["Message"],"display_name":"Meeting Transcript","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"text","display_name":"Text","method":"text_response","value":"__UNDEFINED__","cache":true}],"field_order":["input_value"],"beta":false,"edited":false,"lf_version":"1.0.17"},"id":"TextInput-iJPEJ"},"selected":false,"width":384,"height":302,"dragging":false,"positionAbsolute":{"x":94.43614181571661,"y":387.24602783243165}},{"id":"NotionUserList-TvIKS","type":"genericNode","position":{"x":80.49204196902156,"y":741.0568511678105},"data":{"type":"NotionUserList","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"import requests\nfrom typing import List, Dict\nfrom pydantic import BaseModel\nfrom langflow.io import Output\nfrom langflow.base.langchain_utilities.model import LCToolComponent\nfrom langflow.inputs import SecretStrInput\nfrom langflow.schema import Data\nfrom langflow.field_typing import Tool\nfrom langchain.tools import StructuredTool\n\n\nclass NotionUserList(LCToolComponent):\n display_name = \"List Users \"\n description = \"Retrieve users from Notion.\"\n documentation = \"https://docs.langflow.org/integrations/notion/list-users\"\n icon = \"NotionDirectoryLoader\"\n\n inputs = [\n SecretStrInput(\n name=\"notion_secret\",\n display_name=\"Notion Secret\",\n info=\"The Notion integration token.\",\n required=True,\n ),\n ]\n outputs = [\n Output(name=\"example_output\", display_name=\"Data\", method=\"run_model\"),\n Output(name=\"example_tool_output\", display_name=\"Tool\", method=\"build_tool\"),\n ]\n\n class NotionUserListSchema(BaseModel):\n pass\n\n def run_model(self) -> List[Data]:\n users = self._list_users()\n records = []\n combined_text = \"\"\n\n for user in users:\n output = \"User:\\n\"\n for key, value in user.items():\n output += f\"{key.replace('_', ' ').title()}: {value}\\n\"\n output += \"________________________\\n\"\n\n combined_text += output\n records.append(Data(text=output, data=user))\n\n self.status = records\n return records\n\n def build_tool(self) -> Tool:\n return StructuredTool.from_function(\n name=\"notion_list_users\",\n description=\"Retrieve users from Notion.\",\n func=self._list_users,\n args_schema=self.NotionUserListSchema,\n )\n\n def _list_users(self) -> List[Dict]:\n url = \"https://api.notion.com/v1/users\"\n headers = {\n \"Authorization\": f\"Bearer {self.notion_secret}\",\n \"Notion-Version\": \"2022-06-28\",\n }\n\n response = requests.get(url, headers=headers)\n response.raise_for_status()\n\n data = response.json()\n results = data[\"results\"]\n\n users = []\n for user in results:\n user_data = {\n \"id\": user[\"id\"],\n \"type\": user[\"type\"],\n \"name\": user.get(\"name\", \"\"),\n \"avatar_url\": user.get(\"avatar_url\", \"\"),\n }\n users.append(user_data)\n\n return 
users\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"notion_secret":{"load_from_db":false,"required":true,"placeholder":"","show":true,"name":"notion_secret","value":"","display_name":"Notion Secret","advanced":false,"input_types":["Message"],"dynamic":false,"info":"The Notion integration token.","title_case":false,"password":true,"type":"str","_input_type":"SecretStrInput"}},"description":"Retrieve users from Notion.","icon":"NotionDirectoryLoader","base_classes":["Data","Tool"],"display_name":"List Users ","documentation":"https://docs.langflow.org/integrations/notion/list-users","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Data"],"selected":"Data","name":"example_output","display_name":"Data","method":"run_model","value":"__UNDEFINED__","cache":true},{"types":["Tool"],"selected":"Tool","name":"example_tool_output","display_name":"Tool","method":"build_tool","value":"__UNDEFINED__","cache":true,"hidden":true}],"field_order":["notion_secret"],"beta":false,"edited":true,"lf_version":"1.0.17"},"id":"NotionUserList-TvIKS","description":"Retrieve users from Notion.","display_name":"List Users "},"selected":false,"width":384,"height":302,"positionAbsolute":{"x":80.49204196902156,"y":741.0568511678105},"dragging":false},{"id":"NotionSearch-M66HF","type":"genericNode","position":{"x":1095.6934863134345,"y":407.8718765800806},"data":{"type":"NotionSearch","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"import requests\nfrom typing import Dict, Any, List\nfrom pydantic import BaseModel, Field\nfrom langflow.io import Output\nfrom langflow.base.langchain_utilities.model import LCToolComponent\nfrom langflow.inputs import SecretStrInput, StrInput, DropdownInput\nfrom langflow.schema import Data\nfrom langflow.field_typing import Tool\nfrom langchain.tools import StructuredTool\n\n\nclass NotionSearch(LCToolComponent):\n display_name: str = \"Search \"\n description: str = \"Searches all pages and databases that have been shared with an integration. 
The search field can be an empty value to show all values from that search\"\n documentation: str = \"https://docs.langflow.org/integrations/notion/search\"\n icon = \"NotionDirectoryLoader\"\n\n inputs = [\n SecretStrInput(\n name=\"notion_secret\",\n display_name=\"Notion Secret\",\n info=\"The Notion integration token.\",\n required=True,\n ),\n StrInput(\n name=\"query\",\n display_name=\"Search Query\",\n info=\"The text that the API compares page and database titles against.\",\n ),\n DropdownInput(\n name=\"filter_value\",\n display_name=\"Filter Type\",\n info=\"Limits the results to either only pages or only databases.\",\n options=[\"page\", \"database\"],\n value=\"page\",\n ),\n DropdownInput(\n name=\"sort_direction\",\n display_name=\"Sort Direction\",\n info=\"The direction to sort the results.\",\n options=[\"ascending\", \"descending\"],\n value=\"descending\",\n ),\n ]\n outputs = [\n Output(name=\"example_output\", display_name=\"Data\", method=\"run_model\"),\n Output(name=\"example_tool_output\", display_name=\"Tool\", method=\"build_tool\"),\n ]\n\n class NotionSearchSchema(BaseModel):\n query: str = Field(..., description=\"The search query text.\")\n filter_value: str = Field(default=\"page\", description=\"Filter type: 'page' or 'database'.\")\n sort_direction: str = Field(default=\"descending\", description=\"Sort direction: 'ascending' or 'descending'.\")\n\n def run_model(self) -> List[Data]:\n results = self._search_notion(self.query, self.filter_value, self.sort_direction)\n records = []\n combined_text = f\"Results found: {len(results)}\\n\\n\"\n\n for result in results:\n result_data = {\n \"id\": result[\"id\"],\n \"type\": result[\"object\"],\n \"last_edited_time\": result[\"last_edited_time\"],\n }\n\n if result[\"object\"] == \"page\":\n result_data[\"title_or_url\"] = result[\"url\"]\n text = f\"id: {result['id']}\\ntitle_or_url: {result['url']}\\n\"\n elif result[\"object\"] == \"database\":\n if \"title\" in result and isinstance(result[\"title\"], list) and len(result[\"title\"]) > 0:\n result_data[\"title_or_url\"] = result[\"title\"][0][\"plain_text\"]\n text = f\"id: {result['id']}\\ntitle_or_url: {result['title'][0]['plain_text']}\\n\"\n else:\n result_data[\"title_or_url\"] = \"N/A\"\n text = f\"id: {result['id']}\\ntitle_or_url: N/A\\n\"\n\n text += f\"type: {result['object']}\\nlast_edited_time: {result['last_edited_time']}\\n\\n\"\n combined_text += text\n records.append(Data(text=text, data=result_data))\n\n self.status = records\n return records\n\n def build_tool(self) -> Tool:\n return StructuredTool.from_function(\n name=\"notion_search\",\n description=\"Search Notion pages and databases. 
Input should include the search query and optionally filter type and sort direction.\",\n func=self._search_notion,\n args_schema=self.NotionSearchSchema,\n )\n\n def _search_notion(\n self, query: str, filter_value: str = \"page\", sort_direction: str = \"descending\"\n ) -> List[Dict[str, Any]]:\n url = \"https://api.notion.com/v1/search\"\n headers = {\n \"Authorization\": f\"Bearer {self.notion_secret}\",\n \"Content-Type\": \"application/json\",\n \"Notion-Version\": \"2022-06-28\",\n }\n\n data = {\n \"query\": query,\n \"filter\": {\"value\": filter_value, \"property\": \"object\"},\n \"sort\": {\"direction\": sort_direction, \"timestamp\": \"last_edited_time\"},\n }\n\n response = requests.post(url, headers=headers, json=data)\n response.raise_for_status()\n\n results = response.json()\n return results[\"results\"]\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"filter_value":{"trace_as_metadata":true,"options":["page","database"],"combobox":false,"required":false,"placeholder":"","show":true,"name":"filter_value","value":"database","display_name":"Filter Type","advanced":true,"dynamic":false,"info":"Limits the results to either only pages or only databases.","title_case":false,"type":"str","_input_type":"DropdownInput"},"notion_secret":{"load_from_db":false,"required":true,"placeholder":"","show":true,"name":"notion_secret","value":"","display_name":"Notion Secret","advanced":false,"input_types":["Message"],"dynamic":false,"info":"The Notion integration token.","title_case":false,"password":true,"type":"str","_input_type":"SecretStrInput"},"query":{"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"query","value":"","display_name":"Search Query","advanced":true,"dynamic":false,"info":"The text that the API compares page and database titles against.","title_case":false,"type":"str","_input_type":"StrInput"},"sort_direction":{"trace_as_metadata":true,"options":["ascending","descending"],"combobox":false,"required":false,"placeholder":"","show":true,"name":"sort_direction","value":"descending","display_name":"Sort Direction","advanced":true,"dynamic":false,"info":"The direction to sort the results.","title_case":false,"type":"str","_input_type":"DropdownInput"}},"description":"Searches all pages and databases that have been shared with an integration. 
The search field can be an empty value to show all values from that search","icon":"NotionDirectoryLoader","base_classes":["Data","Tool"],"display_name":"Search ","documentation":"https://docs.langflow.org/integrations/notion/search","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Data"],"selected":"Data","name":"example_output","display_name":"Data","method":"run_model","value":"__UNDEFINED__","cache":true,"hidden":true},{"types":["Tool"],"selected":"Tool","name":"example_tool_output","display_name":"Tool","method":"build_tool","value":"__UNDEFINED__","cache":true,"hidden":false}],"field_order":["notion_secret","query","filter_value","sort_direction"],"beta":false,"edited":true,"lf_version":"1.0.17"},"id":"NotionSearch-M66HF","description":"Searches all pages and databases that have been shared with an integration.","display_name":"Search "},"selected":false,"width":384,"height":386,"positionAbsolute":{"x":1095.6934863134345,"y":407.8718765800806},"dragging":false},{"id":"Prompt-19rub","type":"genericNode","position":{"x":688.7954025956392,"y":456.4686463487848},"data":{"type":"Prompt","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.inputs.inputs import DefaultPromptField\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(\n self,\n ) -> Message:\n prompt = Message.from_template_and_variables(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def _update_template(self, frontend_node: dict):\n prompt_template = frontend_node[\"template\"][\"template\"][\"value\"]\n custom_fields = frontend_node[\"custom_fields\"]\n frontend_node_template = frontend_node[\"template\"]\n _ = process_prompt_template(\n template=prompt_template,\n name=\"template\",\n custom_fields=custom_fields,\n frontend_node_template=frontend_node_template,\n )\n return frontend_node\n\n def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"\n This function is called after the code validation is done.\n \"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n # Kept it duplicated for backwards compatibility\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n\n def _get_fallback_input(self, **kwargs):\n return 
DefaultPromptField(**kwargs)\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"template":{"trace_as_input":true,"list":false,"required":false,"placeholder":"","show":true,"name":"template","value":"\nYou are an AI assistant specialized in analyzing meeting transcripts and identifying tasks. Your goal is to extract relevant tasks from the given transcript, search for related existing tasks in Notion, and provide a comprehensive list of tasks with their current status and any needed updates.\n\nYou have access to the following input:\n\n\n{TRANSCRIPT}\n\n\n\n{USERS}\n\n\nFollow these steps to complete your task:\n\n1. Carefully read through the transcript and identify any mentioned tasks, action items, or follow-ups.\n\n2. For each identified task:\n a. Use the notion_search tool to find if there's an existing related task in Notion.\n b. If a related task is found, note its ID and current status.\n c. If no related task is found, mark it as a new task.\n\n3. For each task (existing or new), determine:\n a. The task name or description\n b. The assigned person (if mentioned)\n c. The current status (for existing tasks) or suggested status (for new tasks)\n d. Any updates or changes mentioned in the transcript\n\n4. Compile your findings into a list of tasks using the following format:\n\n\n\n[Notion page ID if existing, or \"NEW\" if new task]\n[Task name or description]\n[Assigned person, if mentioned]\n[Current status for existing tasks, or suggested status for new tasks]\n[Any updates or changes mentioned in the transcript]\n\n\n\nRemember to focus on tasks that are directly related to the meeting discussion. Do not include general conversation topics or unrelated mentions as tasks.\n\nProvide your final output in the format specified above, with each task enclosed in its own tags within the overall structure.\n\nToday is: {CURRENT_DATE}\n\n\n\n","display_name":"Template","advanced":false,"dynamic":false,"info":"","title_case":false,"type":"prompt","_input_type":"PromptInput"},"TRANSCRIPT":{"field_type":"str","required":false,"placeholder":"","list":false,"show":true,"multiline":true,"value":"","fileTypes":[],"file_path":"","password":false,"name":"TRANSCRIPT","display_name":"TRANSCRIPT","advanced":false,"input_types":["Message","Text"],"dynamic":false,"info":"","load_from_db":false,"title_case":false,"type":"str"},"USERS":{"field_type":"str","required":false,"placeholder":"","list":false,"show":true,"multiline":true,"value":"","fileTypes":[],"file_path":"","password":false,"name":"USERS","display_name":"USERS","advanced":false,"input_types":["Message","Text"],"dynamic":false,"info":"","load_from_db":false,"title_case":false,"type":"str"},"CURRENT_DATE":{"field_type":"str","required":false,"placeholder":"","list":false,"show":true,"multiline":true,"value":"","fileTypes":[],"file_path":"","password":false,"name":"CURRENT_DATE","display_name":"CURRENT_DATE","advanced":false,"input_types":["Message","Text"],"dynamic":false,"info":"","load_from_db":false,"title_case":false,"type":"str"}},"description":"Create a prompt template with dynamic 
variables.","icon":"prompts","is_input":null,"is_output":null,"is_composition":null,"base_classes":["Message"],"name":"","display_name":"Prompt","documentation":"","custom_fields":{"template":["TRANSCRIPT","USERS","CURRENT_DATE"]},"output_types":[],"full_path":null,"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"prompt","hidden":null,"display_name":"Prompt Message","method":"build_prompt","value":"__UNDEFINED__","cache":true}],"field_order":["template"],"beta":false,"error":null,"edited":false,"lf_version":"1.0.17"},"id":"Prompt-19rub"},"selected":false,"width":384,"height":588,"positionAbsolute":{"x":688.7954025956392,"y":456.4686463487848},"dragging":false},{"id":"ParseData-aNk1v","type":"genericNode","position":{"x":540.4151030255898,"y":834.2819856588019},"data":{"type":"ParseData","node":{"template":{"_type":"Component","data":{"trace_as_metadata":true,"list":false,"trace_as_input":true,"required":false,"placeholder":"","show":true,"name":"data","value":"","display_name":"Data","advanced":false,"input_types":["Data"],"dynamic":false,"info":"The data to convert to text.","title_case":false,"type":"other","_input_type":"DataInput"},"code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.custom import Component\nfrom langflow.helpers.data import data_to_text\nfrom langflow.io import DataInput, MultilineInput, Output, StrInput\nfrom langflow.schema.message import Message\n\n\nclass ParseDataComponent(Component):\n display_name = \"Parse Data\"\n description = \"Convert Data into plain text following a specified template.\"\n icon = \"braces\"\n name = \"ParseData\"\n\n inputs = [\n DataInput(name=\"data\", display_name=\"Data\", info=\"The data to convert to text.\"),\n MultilineInput(\n name=\"template\",\n display_name=\"Template\",\n info=\"The template to use for formatting the data. It can contain the keys {text}, {data} or any other key in the Data.\",\n value=\"{text}\",\n ),\n StrInput(name=\"sep\", display_name=\"Separator\", advanced=True, value=\"\\n\"),\n ]\n\n outputs = [\n Output(display_name=\"Text\", name=\"text\", method=\"parse_data\"),\n ]\n\n def parse_data(self) -> Message:\n data = self.data if isinstance(self.data, list) else [self.data]\n template = self.template\n\n result_string = data_to_text(template, data, sep=self.sep)\n self.status = result_string\n return Message(text=result_string)\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"sep":{"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"sep","value":"\n","display_name":"Separator","advanced":true,"dynamic":false,"info":"","title_case":false,"type":"str","_input_type":"StrInput"},"template":{"trace_as_input":true,"multiline":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"template","value":"{text}","display_name":"Template","advanced":true,"input_types":["Message"],"dynamic":false,"info":"The template to use for formatting the data. 
It can contain the keys {text}, {data} or any other key in the Data.","title_case":false,"type":"str","_input_type":"MultilineInput"}},"description":"Convert Data into plain text following a specified template.","icon":"braces","base_classes":["Message"],"display_name":"Parse Data","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"text","display_name":"Text","method":"parse_data","value":"__UNDEFINED__","cache":true}],"field_order":["data","template","sep"],"beta":false,"edited":false,"lf_version":"1.0.17"},"id":"ParseData-aNk1v","showNode":false},"selected":false,"width":96,"height":96,"dragging":false,"positionAbsolute":{"x":540.4151030255898,"y":834.2819856588019}},{"id":"ToolCallingAgent-rVWeq","type":"genericNode","position":{"x":1566.291217492157,"y":583.6687094567968},"data":{"type":"ToolCallingAgent","node":{"template":{"_type":"Component","chat_history":{"trace_as_metadata":true,"list":true,"trace_as_input":true,"required":false,"placeholder":"","show":true,"name":"chat_history","value":"","display_name":"Chat History","advanced":true,"input_types":["Data"],"dynamic":false,"info":"","title_case":false,"type":"other","_input_type":"DataInput"},"llm":{"trace_as_metadata":true,"list":false,"required":true,"placeholder":"","show":true,"name":"llm","value":"","display_name":"Language Model","advanced":false,"input_types":["LanguageModel"],"dynamic":false,"info":"","title_case":false,"type":"other","_input_type":"HandleInput"},"tools":{"trace_as_metadata":true,"list":true,"required":false,"placeholder":"","show":true,"name":"tools","value":"","display_name":"Tools","advanced":false,"input_types":["Tool","BaseTool"],"dynamic":false,"info":"","title_case":false,"type":"other","_input_type":"HandleInput"},"code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from typing import Optional, List\n\nfrom langchain.agents import create_tool_calling_agent\nfrom langchain_core.prompts import ChatPromptTemplate, PromptTemplate, HumanMessagePromptTemplate\nfrom langflow.base.agents.agent import LCToolsAgentComponent\nfrom langflow.inputs import MultilineInput\nfrom langflow.inputs.inputs import HandleInput, DataInput\nfrom langflow.schema import Data\n\n\nclass ToolCallingAgentComponent(LCToolsAgentComponent):\n display_name: str = \"Tool Calling Agent\"\n description: str = \"Agent that uses tools\"\n icon = \"LangChain\"\n beta = True\n name = \"ToolCallingAgent\"\n\n inputs = LCToolsAgentComponent._base_inputs + [\n HandleInput(name=\"llm\", display_name=\"Language Model\", input_types=[\"LanguageModel\"], required=True),\n MultilineInput(\n name=\"system_prompt\",\n display_name=\"System Prompt\",\n info=\"System prompt for the agent.\",\n value=\"You are a helpful assistant\",\n ),\n MultilineInput(\n name=\"user_prompt\", display_name=\"Prompt\", info=\"This prompt must contain 'input' key.\", value=\"{input}\"\n ),\n DataInput(name=\"chat_history\", display_name=\"Chat History\", is_list=True, advanced=True),\n ]\n\n def get_chat_history_data(self) -> Optional[List[Data]]:\n return self.chat_history\n\n def create_agent_runnable(self):\n if \"input\" not in self.user_prompt:\n raise ValueError(\"Prompt must contain 'input' key.\")\n messages = [\n (\"system\", self.system_prompt),\n (\"placeholder\", \"{chat_history}\"),\n HumanMessagePromptTemplate(prompt=PromptTemplate(input_variables=[\"input\"], 
template=self.user_prompt)),\n (\"placeholder\", \"{agent_scratchpad}\"),\n ]\n prompt = ChatPromptTemplate.from_messages(messages)\n return create_tool_calling_agent(self.llm, self.tools, prompt)\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"handle_parsing_errors":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"name":"handle_parsing_errors","value":true,"display_name":"Handle Parse Errors","advanced":true,"dynamic":false,"info":"","title_case":false,"type":"bool","_input_type":"BoolInput"},"input_value":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"input_value","value":"Analyze this meeting","display_name":"Input","advanced":true,"input_types":["Message"],"dynamic":false,"info":"","title_case":false,"type":"str","_input_type":"MessageTextInput"},"max_iterations":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"name":"max_iterations","value":15,"display_name":"Max Iterations","advanced":true,"dynamic":false,"info":"","title_case":false,"type":"int","_input_type":"IntInput"},"system_prompt":{"trace_as_input":true,"multiline":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"system_prompt","value":"","display_name":"System Prompt","advanced":false,"input_types":["Message"],"dynamic":false,"info":"System prompt for the agent.","title_case":false,"type":"str","_input_type":"MultilineInput"},"user_prompt":{"trace_as_input":true,"multiline":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"user_prompt","value":"{input}","display_name":"Prompt","advanced":true,"input_types":["Message"],"dynamic":false,"info":"This prompt must contain 'input' key.","title_case":false,"type":"str","_input_type":"MultilineInput"},"verbose":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"name":"verbose","value":true,"display_name":"Verbose","advanced":true,"dynamic":false,"info":"","title_case":false,"type":"bool","_input_type":"BoolInput"}},"description":"Agent that uses tools","icon":"LangChain","base_classes":["AgentExecutor","Message"],"display_name":"Tool Calling Agent","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["AgentExecutor"],"selected":"AgentExecutor","name":"agent","display_name":"Agent","method":"build_agent","value":"__UNDEFINED__","cache":true,"hidden":true},{"types":["Message"],"selected":"Message","name":"response","display_name":"Response","method":"message_response","value":"__UNDEFINED__","cache":true}],"field_order":["input_value","handle_parsing_errors","verbose","max_iterations","tools","llm","system_prompt","user_prompt","chat_history"],"beta":true,"edited":false,"lf_version":"1.0.17"},"id":"ToolCallingAgent-rVWeq"},"selected":false,"width":384,"height":398,"positionAbsolute":{"x":1566.291217492157,"y":583.6687094567968},"dragging":false},{"id":"OpenAIModel-Ht8xI","type":"genericNode","position":{"x":1097.0545781920632,"y":805.60631548423},"data":{"type":"OpenAIModel","node":{"template":{"_type":"Component","api_key":{"load_from_db":false,"required":false,"placeholder":"","show":true,"name":"api_key","value":"","display_name":"OpenAI API 
Key","advanced":false,"input_types":["Message"],"dynamic":false,"info":"The OpenAI API Key to use for the OpenAI model.","title_case":false,"password":true,"type":"str","_input_type":"SecretStrInput"},"code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"import operator\nfrom functools import reduce\n\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. 
You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature if temperature is not None else 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output # type: ignore\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"input_value":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"input_value","value":"","display_name":"Input","advanced":true,"input_types":["Message"],"dynamic":false,"info":"","title_case":false,"type":"str","_input_type":"MessageInput"},"json_mode":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"name":"json_mode","value":false,"display_name":"JSON Mode","advanced":true,"dynamic":false,"info":"If True, it will output JSON regardless of passing a schema.","title_case":false,"type":"bool","_input_type":"BoolInput"},"max_tokens":{"trace_as_metadata":true,"range_spec":{"step_type":"float","min":0,"max":128000,"step":0.1},"list":false,"required":false,"placeholder":"","show":true,"name":"max_tokens","value":"","display_name":"Max Tokens","advanced":true,"dynamic":false,"info":"The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.","title_case":false,"type":"int","_input_type":"IntInput"},"model_kwargs":{"trace_as_input":true,"list":false,"required":false,"placeholder":"","show":true,"name":"model_kwargs","value":{},"display_name":"Model Kwargs","advanced":true,"dynamic":false,"info":"","title_case":false,"type":"dict","_input_type":"DictInput"},"model_name":{"trace_as_metadata":true,"options":["gpt-4o-mini","gpt-4o","gpt-4-turbo","gpt-4-turbo-preview","gpt-4","gpt-3.5-turbo","gpt-3.5-turbo-0125"],"combobox":false,"required":false,"placeholder":"","show":true,"name":"model_name","value":"gpt-4o","display_name":"Model Name","advanced":true,"dynamic":false,"info":"","title_case":false,"type":"str","_input_type":"DropdownInput"},"openai_api_base":{"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"openai_api_base","value":"","display_name":"OpenAI API Base","advanced":true,"dynamic":false,"info":"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.","title_case":false,"type":"str","_input_type":"StrInput"},"output_schema":{"trace_as_input":true,"list":true,"required":false,"placeholder":"","show":true,"name":"output_schema","value":{},"display_name":"Schema","advanced":true,"dynamic":false,"info":"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.","title_case":false,"type":"dict","_input_type":"DictInput"},"seed":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"name":"seed","value":1,"display_name":"Seed","advanced":true,"dynamic":false,"info":"The seed controls the reproducibility of the job.","title_case":false,"type":"int","_input_type":"IntInput"},"stream":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"name":"stream","value":false,"display_name":"Stream","advanced":true,"dynamic":false,"info":"Stream the response from the model. 
Streaming works only in Chat.","title_case":false,"type":"bool","_input_type":"BoolInput"},"system_message":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"system_message","value":"","display_name":"System Message","advanced":true,"input_types":["Message"],"dynamic":false,"info":"System message to pass to the model.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"temperature":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"name":"temperature","value":0.1,"display_name":"Temperature","advanced":true,"dynamic":false,"info":"","title_case":false,"type":"float","_input_type":"FloatInput"}},"description":"Generates text using OpenAI LLMs.","icon":"OpenAI","base_classes":["LanguageModel","Message"],"display_name":"OpenAI","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"text_output","display_name":"Text","method":"text_response","value":"__UNDEFINED__","cache":true,"hidden":true},{"types":["LanguageModel"],"selected":"LanguageModel","name":"model_output","display_name":"Language Model","method":"build_model","value":"__UNDEFINED__","cache":true}],"field_order":["input_value","system_message","stream","max_tokens","model_kwargs","json_mode","output_schema","model_name","openai_api_base","api_key","temperature","seed"],"beta":false,"edited":false,"lf_version":"1.0.17"},"id":"OpenAIModel-Ht8xI"},"selected":false,"width":384,"height":302,"dragging":false,"positionAbsolute":{"x":1097.0545781920632,"y":805.60631548423}},{"id":"Prompt-Lbxk6","type":"genericNode","position":{"x":3042.6844997246735,"y":416.83992118486856},"data":{"type":"Prompt","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.inputs.inputs import DefaultPromptField\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(\n self,\n ) -> Message:\n prompt = Message.from_template_and_variables(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def _update_template(self, frontend_node: dict):\n prompt_template = frontend_node[\"template\"][\"template\"][\"value\"]\n custom_fields = frontend_node[\"custom_fields\"]\n frontend_node_template = frontend_node[\"template\"]\n _ = process_prompt_template(\n template=prompt_template,\n name=\"template\",\n custom_fields=custom_fields,\n frontend_node_template=frontend_node_template,\n )\n return frontend_node\n\n def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"\n This function is called after the code validation is done.\n \"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n template = 
frontend_node[\"template\"][\"template\"][\"value\"]\n # Kept it duplicated for backwards compatibility\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n\n def _get_fallback_input(self, **kwargs):\n return DefaultPromptField(**kwargs)\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"template":{"trace_as_input":true,"list":false,"required":false,"placeholder":"","show":true,"name":"template","value":"\nYou are an AI assistant responsible for updating tasks in Notion based on the information provided from a meeting analysis. Your goal is to create new tasks and update existing ones using the Notion API tools available to you, and then provide a summary in a simple markdown format suitable for a chat interface.\n\nYou have access to the following inputs:\n\n\n{TASK_LIST}\n\n\n\n{DATABASES}\n\n\n\n{USERS}\n\n\nFollow these steps to update the tasks in Notion and generate a markdown summary:\n\n1. Identify the Task database ID from the provided list.\n\n2. Before processing any tasks, retrieve the database properties for the Task database:\n a. Use the notion_database_properties and carefully review the properties, their types, and any options for select or multi-select properties.\n b. Pay attention to the properties format for further usage.\n\n3. For each task in the task list:\n a. If the task ID is \"NEW\", create a new task using the create_notion_page tool.\n b. If the task has an existing ID, update the task using the update_notion_page tool.\n c. Remember to use the properties from the DB retrieved from the notion_database_properties tool\n\n4. When creating a new task:\n a. Use the create_notion_page tool.\n b. Include the task name, assignee (if available), status, and any other relevant properties based on the database structure.\n c. Ensure that the property names and types match exactly with what you retrieved from the notion_database_properties call.\n\n5. When updating an existing task:\n a. Use the update_notion_page tool.\n b. Update the status, assignee, or any other relevant properties mentioned in the field.\n c. Ensure that the property names and types match exactly with what you retrieved from the notion_database_properties call.\n\n6. After each function call, wait for the before proceeding to the next task.\n\n7. If you encounter any errors during the process, note them and continue with the next task.\n\n8. Provide a summary of your actions for each task in a simple markdown format. Use the following structure:\n # Task Update Summary\n\n ## Created Tasks\n - **[Task Name]**: Assigned to [Assignee], Status: [Status]\n - Details: [Brief description of the new task]\n\n ## Updated Tasks\n - **[Task Name]** (ID: [Notion Page ID])\n - Changes: [Brief description of changes]\n - Status: [Success/Error]\n\n ## Errors\n - **[Task Name or ID]**: [Description of the error encountered]\n\n\nRemember to use the exact property names, types, and options as specified in the Notion database properties you retrieved at the beginning. 
This is crucial for ensuring that all updates and creations are done correctly.\n\nIf you encounter any errors or uncertainties, include them in the Errors section of the markdown summary, with enough detail for the user to understand the issues.\n\nProvide your final output as a complete markdown document containing all the tasks you've processed, whether they were created, updated, or encountered errors. Use only basic markdown formatting (headers, bold, lists) to ensure compatibility with chat interfaces. Do not include any XML tags or complex formatting in your final output.\n\nToday is: {CURRENT_DATE}\n\n","display_name":"Template","advanced":false,"dynamic":false,"info":"","title_case":false,"type":"prompt","_input_type":"PromptInput"},"TASK_LIST":{"field_type":"str","required":false,"placeholder":"","list":false,"show":true,"multiline":true,"value":"","fileTypes":[],"file_path":"","password":false,"name":"TASK_LIST","display_name":"TASK_LIST","advanced":false,"input_types":["Message","Text"],"dynamic":false,"info":"","load_from_db":false,"title_case":false,"type":"str"},"DATABASES":{"field_type":"str","required":false,"placeholder":"","list":false,"show":true,"multiline":true,"value":"","fileTypes":[],"file_path":"","password":false,"name":"DATABASES","display_name":"DATABASES","advanced":false,"input_types":["Message","Text"],"dynamic":false,"info":"","load_from_db":false,"title_case":false,"type":"str"},"USERS":{"field_type":"str","required":false,"placeholder":"","list":false,"show":true,"multiline":true,"value":"","fileTypes":[],"file_path":"","password":false,"name":"USERS","display_name":"USERS","advanced":false,"input_types":["Message","Text"],"dynamic":false,"info":"","load_from_db":false,"title_case":false,"type":"str"},"CURRENT_DATE":{"field_type":"str","required":false,"placeholder":"","list":false,"show":true,"multiline":true,"value":"","fileTypes":[],"file_path":"","password":false,"name":"CURRENT_DATE","display_name":"CURRENT_DATE","advanced":false,"input_types":["Message","Text"],"dynamic":false,"info":"","load_from_db":false,"title_case":false,"type":"str"}},"description":"Create a prompt template with dynamic variables.","icon":"prompts","is_input":null,"is_output":null,"is_composition":null,"base_classes":["Message"],"name":"","display_name":"Prompt","documentation":"","custom_fields":{"template":["TASK_LIST","DATABASES","USERS","CURRENT_DATE"]},"output_types":[],"full_path":null,"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"prompt","hidden":null,"display_name":"Prompt Message","method":"build_prompt","value":"__UNDEFINED__","cache":true}],"field_order":["template"],"beta":false,"error":null,"edited":false},"id":"Prompt-Lbxk6"},"selected":false,"width":384,"height":674,"positionAbsolute":{"x":3042.6844997246735,"y":416.83992118486856},"dragging":false},{"id":"ToolCallingAgent-GurdE","type":"genericNode","position":{"x":3974.1377259893243,"y":867.4647271037014},"data":{"type":"ToolCallingAgent","node":{"template":{"_type":"Component","chat_history":{"trace_as_metadata":true,"list":true,"trace_as_input":true,"required":false,"placeholder":"","show":true,"name":"chat_history","value":"","display_name":"Chat History","advanced":true,"input_types":["Data"],"dynamic":false,"info":"","title_case":false,"type":"other","_input_type":"DataInput"},"llm":{"trace_as_metadata":true,"list":false,"required":true,"placeholder":"","show":true,"name":"llm","value":"","display_name":"Language 
Model","advanced":false,"input_types":["LanguageModel"],"dynamic":false,"info":"","title_case":false,"type":"other","_input_type":"HandleInput"},"tools":{"trace_as_metadata":true,"list":true,"required":false,"placeholder":"","show":true,"name":"tools","value":"","display_name":"Tools","advanced":false,"input_types":["Tool","BaseTool"],"dynamic":false,"info":"","title_case":false,"type":"other","_input_type":"HandleInput"},"code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from typing import Optional, List\n\nfrom langchain.agents import create_tool_calling_agent\nfrom langchain_core.prompts import ChatPromptTemplate, PromptTemplate, HumanMessagePromptTemplate\nfrom langflow.base.agents.agent import LCToolsAgentComponent\nfrom langflow.inputs import MultilineInput\nfrom langflow.inputs.inputs import HandleInput, DataInput\nfrom langflow.schema import Data\n\n\nclass ToolCallingAgentComponent(LCToolsAgentComponent):\n display_name: str = \"Tool Calling Agent\"\n description: str = \"Agent that uses tools\"\n icon = \"LangChain\"\n beta = True\n name = \"ToolCallingAgent\"\n\n inputs = LCToolsAgentComponent._base_inputs + [\n HandleInput(name=\"llm\", display_name=\"Language Model\", input_types=[\"LanguageModel\"], required=True),\n MultilineInput(\n name=\"system_prompt\",\n display_name=\"System Prompt\",\n info=\"System prompt for the agent.\",\n value=\"You are a helpful assistant\",\n ),\n MultilineInput(\n name=\"user_prompt\", display_name=\"Prompt\", info=\"This prompt must contain 'input' key.\", value=\"{input}\"\n ),\n DataInput(name=\"chat_history\", display_name=\"Chat History\", is_list=True, advanced=True),\n ]\n\n def get_chat_history_data(self) -> Optional[List[Data]]:\n return self.chat_history\n\n def create_agent_runnable(self):\n if \"input\" not in self.user_prompt:\n raise ValueError(\"Prompt must contain 'input' key.\")\n messages = [\n (\"system\", self.system_prompt),\n (\"placeholder\", \"{chat_history}\"),\n HumanMessagePromptTemplate(prompt=PromptTemplate(input_variables=[\"input\"], template=self.user_prompt)),\n (\"placeholder\", \"{agent_scratchpad}\"),\n ]\n prompt = ChatPromptTemplate.from_messages(messages)\n return create_tool_calling_agent(self.llm, self.tools, prompt)\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"handle_parsing_errors":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"name":"handle_parsing_errors","value":true,"display_name":"Handle Parse Errors","advanced":true,"dynamic":false,"info":"","title_case":false,"type":"bool","_input_type":"BoolInput"},"input_value":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"input_value","value":"Do your task.","display_name":"Input","advanced":true,"input_types":["Message"],"dynamic":false,"info":"","title_case":false,"type":"str","_input_type":"MessageTextInput"},"max_iterations":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"name":"max_iterations","value":15,"display_name":"Max 
Iterations","advanced":true,"dynamic":false,"info":"","title_case":false,"type":"int","_input_type":"IntInput"},"system_prompt":{"trace_as_input":true,"multiline":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"system_prompt","value":"","display_name":"System Prompt","advanced":false,"input_types":["Message"],"dynamic":false,"info":"System prompt for the agent.","title_case":false,"type":"str","_input_type":"MultilineInput"},"user_prompt":{"trace_as_input":true,"multiline":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"user_prompt","value":"{input}","display_name":"Prompt","advanced":true,"input_types":["Message"],"dynamic":false,"info":"This prompt must contain 'input' key.","title_case":false,"type":"str","_input_type":"MultilineInput"},"verbose":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"name":"verbose","value":true,"display_name":"Verbose","advanced":true,"dynamic":false,"info":"","title_case":false,"type":"bool","_input_type":"BoolInput"}},"description":"Agent that uses tools","icon":"LangChain","base_classes":["AgentExecutor","Message"],"display_name":"Tool Calling Agent","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["AgentExecutor"],"selected":"AgentExecutor","name":"agent","display_name":"Agent","method":"build_agent","value":"__UNDEFINED__","cache":true,"hidden":true},{"types":["Message"],"selected":"Message","name":"response","display_name":"Response","method":"message_response","value":"__UNDEFINED__","cache":true}],"field_order":["input_value","handle_parsing_errors","verbose","max_iterations","tools","llm","system_prompt","user_prompt","chat_history"],"beta":true,"edited":false,"lf_version":"1.0.17"},"id":"ToolCallingAgent-GurdE"},"selected":false,"width":384,"height":398,"positionAbsolute":{"x":3974.1377259893243,"y":867.4647271037014},"dragging":false},{"id":"OpenAIModel-OTfnt","type":"genericNode","position":{"x":3513.5648778762093,"y":710.2099422974287},"data":{"type":"OpenAIModel","node":{"template":{"_type":"Component","api_key":{"load_from_db":false,"required":false,"placeholder":"","show":true,"name":"api_key","value":"","display_name":"OpenAI API Key","advanced":false,"input_types":["Message"],"dynamic":false,"info":"The OpenAI API Key to use for the OpenAI model.","title_case":false,"password":true,"type":"str","_input_type":"SecretStrInput"},"code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"import operator\nfrom functools import reduce\n\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature if temperature is not None else 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output # type: ignore\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n 
return\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"input_value":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"input_value","value":"","display_name":"Input","advanced":true,"input_types":["Message"],"dynamic":false,"info":"","title_case":false,"type":"str","_input_type":"MessageInput"},"json_mode":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"name":"json_mode","value":false,"display_name":"JSON Mode","advanced":true,"dynamic":false,"info":"If True, it will output JSON regardless of passing a schema.","title_case":false,"type":"bool","_input_type":"BoolInput"},"max_tokens":{"trace_as_metadata":true,"range_spec":{"step_type":"float","min":0,"max":128000,"step":0.1},"list":false,"required":false,"placeholder":"","show":true,"name":"max_tokens","value":"","display_name":"Max Tokens","advanced":true,"dynamic":false,"info":"The maximum number of tokens to generate. Set to 0 for unlimited tokens.","title_case":false,"type":"int","_input_type":"IntInput"},"model_kwargs":{"trace_as_input":true,"list":false,"required":false,"placeholder":"","show":true,"name":"model_kwargs","value":{},"display_name":"Model Kwargs","advanced":true,"dynamic":false,"info":"","title_case":false,"type":"dict","_input_type":"DictInput"},"model_name":{"trace_as_metadata":true,"options":["gpt-4o-mini","gpt-4o","gpt-4-turbo","gpt-4-turbo-preview","gpt-4","gpt-3.5-turbo","gpt-3.5-turbo-0125"],"combobox":false,"required":false,"placeholder":"","show":true,"name":"model_name","value":"gpt-4o","display_name":"Model Name","advanced":true,"dynamic":false,"info":"","title_case":false,"type":"str","_input_type":"DropdownInput"},"openai_api_base":{"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"openai_api_base","value":"","display_name":"OpenAI API Base","advanced":true,"dynamic":false,"info":"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.","title_case":false,"type":"str","_input_type":"StrInput"},"output_schema":{"trace_as_input":true,"list":true,"required":false,"placeholder":"","show":true,"name":"output_schema","value":{},"display_name":"Schema","advanced":true,"dynamic":false,"info":"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.","title_case":false,"type":"dict","_input_type":"DictInput"},"seed":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"name":"seed","value":1,"display_name":"Seed","advanced":true,"dynamic":false,"info":"The seed controls the reproducibility of the job.","title_case":false,"type":"int","_input_type":"IntInput"},"stream":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"name":"stream","value":false,"display_name":"Stream","advanced":true,"dynamic":false,"info":"Stream the response from the model. 
Streaming works only in Chat.","title_case":false,"type":"bool","_input_type":"BoolInput"},"system_message":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"system_message","value":"","display_name":"System Message","advanced":true,"input_types":["Message"],"dynamic":false,"info":"System message to pass to the model.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"temperature":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"name":"temperature","value":0.1,"display_name":"Temperature","advanced":true,"dynamic":false,"info":"","title_case":false,"type":"float","_input_type":"FloatInput"}},"description":"Generates text using OpenAI LLMs.","icon":"OpenAI","base_classes":["LanguageModel","Message"],"display_name":"OpenAI","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"text_output","display_name":"Text","method":"text_response","value":"__UNDEFINED__","cache":true,"hidden":true},{"types":["LanguageModel"],"selected":"LanguageModel","name":"model_output","display_name":"Language Model","method":"build_model","value":"__UNDEFINED__","cache":true}],"field_order":["input_value","system_message","stream","max_tokens","model_kwargs","json_mode","output_schema","model_name","openai_api_base","api_key","temperature","seed"],"beta":false,"edited":false,"lf_version":"1.0.17"},"id":"OpenAIModel-OTfnt"},"selected":false,"width":384,"height":302,"positionAbsolute":{"x":3513.5648778762093,"y":710.2099422974287},"dragging":false},{"id":"AddContentToPage-vrAvx","type":"genericNode","position":{"x":2649.2991466550634,"y":1050.6250104897197},"data":{"type":"AddContentToPage","node":{"template":{"_type":"Component","block_id":{"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"block_id","value":"","display_name":"Page/Block ID","advanced":true,"dynamic":false,"info":"The ID of the page/block to add the content.","title_case":false,"type":"str","_input_type":"StrInput"},"code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"import json\nfrom typing import Dict, Any, Union\nfrom markdown import markdown\nfrom bs4 import BeautifulSoup\nimport requests\nfrom langflow.io import Output\nfrom langflow.base.langchain_utilities.model import LCToolComponent\nfrom langflow.inputs import SecretStrInput, StrInput, MultilineInput\nfrom langflow.schema import Data\nfrom langflow.field_typing import Tool\nfrom langchain.tools import StructuredTool\nfrom pydantic import BaseModel, Field\n\n\nclass AddContentToPage(LCToolComponent):\n display_name: str = \"Add Content to Page \"\n description: str = \"Convert markdown text to Notion blocks and append them to a Notion page.\"\n documentation: str = \"https://developers.notion.com/reference/patch-block-children\"\n icon = \"NotionDirectoryLoader\"\n\n inputs = [\n MultilineInput(\n name=\"markdown_text\",\n display_name=\"Markdown Text\",\n info=\"The markdown text to convert to Notion blocks.\",\n ),\n StrInput(\n name=\"block_id\",\n display_name=\"Page/Block ID\",\n info=\"The ID of the page/block to add the content.\",\n ),\n SecretStrInput(\n name=\"notion_secret\",\n display_name=\"Notion Secret\",\n info=\"The Notion integration token.\",\n required=True,\n ),\n ]\n outputs = [\n 
Output(name=\"example_output\", display_name=\"Data\", method=\"run_model\"),\n Output(name=\"example_tool_output\", display_name=\"Tool\", method=\"build_tool\"),\n ]\n\n class AddContentToPageSchema(BaseModel):\n markdown_text: str = Field(..., description=\"The markdown text to convert to Notion blocks.\")\n block_id: str = Field(..., description=\"The ID of the page/block to add the content.\")\n\n def run_model(self) -> Data:\n result = self._add_content_to_page(self.markdown_text, self.block_id)\n return Data(data=result, text=json.dumps(result))\n\n def build_tool(self) -> Tool:\n return StructuredTool.from_function(\n name=\"add_content_to_notion_page\",\n description=\"Convert markdown text to Notion blocks and append them to a Notion page.\",\n func=self._add_content_to_page,\n args_schema=self.AddContentToPageSchema,\n )\n\n def _add_content_to_page(self, markdown_text: str, block_id: str) -> Union[Dict[str, Any], str]:\n try:\n html_text = markdown(markdown_text)\n soup = BeautifulSoup(html_text, \"html.parser\")\n blocks = self.process_node(soup)\n\n url = f\"https://api.notion.com/v1/blocks/{block_id}/children\"\n headers = {\n \"Authorization\": f\"Bearer {self.notion_secret}\",\n \"Content-Type\": \"application/json\",\n \"Notion-Version\": \"2022-06-28\",\n }\n\n data = {\n \"children\": blocks,\n }\n\n response = requests.patch(url, headers=headers, json=data)\n response.raise_for_status()\n\n return response.json()\n except requests.exceptions.RequestException as e:\n error_message = f\"Error: Failed to add content to Notion page. {str(e)}\"\n if hasattr(e, \"response\") and e.response is not None:\n error_message += f\" Status code: {e.response.status_code}, Response: {e.response.text}\"\n return error_message\n except Exception as e:\n return f\"Error: An unexpected error occurred while adding content to Notion page. 
{str(e)}\"\n\n def process_node(self, node):\n blocks = []\n if isinstance(node, str):\n text = node.strip()\n if text:\n if text.startswith(\"#\"):\n heading_level = text.count(\"#\", 0, 6)\n heading_text = text[heading_level:].strip()\n if heading_level == 1:\n blocks.append(self.create_block(\"heading_1\", heading_text))\n elif heading_level == 2:\n blocks.append(self.create_block(\"heading_2\", heading_text))\n elif heading_level == 3:\n blocks.append(self.create_block(\"heading_3\", heading_text))\n else:\n blocks.append(self.create_block(\"paragraph\", text))\n elif node.name == \"h1\":\n blocks.append(self.create_block(\"heading_1\", node.get_text(strip=True)))\n elif node.name == \"h2\":\n blocks.append(self.create_block(\"heading_2\", node.get_text(strip=True)))\n elif node.name == \"h3\":\n blocks.append(self.create_block(\"heading_3\", node.get_text(strip=True)))\n elif node.name == \"p\":\n code_node = node.find(\"code\")\n if code_node:\n code_text = code_node.get_text()\n language, code = self.extract_language_and_code(code_text)\n blocks.append(self.create_block(\"code\", code, language=language))\n elif self.is_table(str(node)):\n blocks.extend(self.process_table(node))\n else:\n blocks.append(self.create_block(\"paragraph\", node.get_text(strip=True)))\n elif node.name == \"ul\":\n blocks.extend(self.process_list(node, \"bulleted_list_item\"))\n elif node.name == \"ol\":\n blocks.extend(self.process_list(node, \"numbered_list_item\"))\n elif node.name == \"blockquote\":\n blocks.append(self.create_block(\"quote\", node.get_text(strip=True)))\n elif node.name == \"hr\":\n blocks.append(self.create_block(\"divider\", \"\"))\n elif node.name == \"img\":\n blocks.append(self.create_block(\"image\", \"\", image_url=node.get(\"src\")))\n elif node.name == \"a\":\n blocks.append(self.create_block(\"bookmark\", node.get_text(strip=True), link_url=node.get(\"href\")))\n elif node.name == \"table\":\n blocks.extend(self.process_table(node))\n\n for child in node.children:\n if isinstance(child, str):\n continue\n blocks.extend(self.process_node(child))\n\n return blocks\n\n def extract_language_and_code(self, code_text):\n lines = code_text.split(\"\\n\")\n language = lines[0].strip()\n code = \"\\n\".join(lines[1:]).strip()\n return language, code\n\n def is_code_block(self, text):\n return text.startswith(\"```\")\n\n def extract_code_block(self, text):\n lines = text.split(\"\\n\")\n language = lines[0].strip(\"`\").strip()\n code = \"\\n\".join(lines[1:]).strip(\"`\").strip()\n return language, code\n\n def is_table(self, text):\n rows = text.split(\"\\n\")\n if len(rows) < 2:\n return False\n\n has_separator = False\n for i, row in enumerate(rows):\n if \"|\" in row:\n cells = [cell.strip() for cell in row.split(\"|\")]\n cells = [cell for cell in cells if cell] # Remove empty cells\n if i == 1 and all(set(cell) <= set(\"-|\") for cell in cells):\n has_separator = True\n elif not cells:\n return False\n\n return has_separator and len(rows) >= 3\n\n def process_list(self, node, list_type):\n blocks = []\n for item in node.find_all(\"li\"):\n item_text = item.get_text(strip=True)\n checked = item_text.startswith(\"[x]\")\n is_checklist = item_text.startswith(\"[ ]\") or checked\n\n if is_checklist:\n item_text = item_text.replace(\"[x]\", \"\").replace(\"[ ]\", \"\").strip()\n blocks.append(self.create_block(\"to_do\", item_text, checked=checked))\n else:\n blocks.append(self.create_block(list_type, item_text))\n return blocks\n\n def process_table(self, node):\n blocks = []\n 
header_row = node.find(\"thead\").find(\"tr\") if node.find(\"thead\") else None\n body_rows = node.find(\"tbody\").find_all(\"tr\") if node.find(\"tbody\") else []\n\n if header_row or body_rows:\n table_width = max(\n len(header_row.find_all([\"th\", \"td\"])) if header_row else 0,\n max(len(row.find_all([\"th\", \"td\"])) for row in body_rows),\n )\n\n table_block = self.create_block(\"table\", \"\", table_width=table_width, has_column_header=bool(header_row))\n blocks.append(table_block)\n\n if header_row:\n header_cells = [cell.get_text(strip=True) for cell in header_row.find_all([\"th\", \"td\"])]\n header_row_block = self.create_block(\"table_row\", header_cells)\n blocks.append(header_row_block)\n\n for row in body_rows:\n cells = [cell.get_text(strip=True) for cell in row.find_all([\"th\", \"td\"])]\n row_block = self.create_block(\"table_row\", cells)\n blocks.append(row_block)\n\n return blocks\n\n def create_block(self, block_type: str, content: str, **kwargs) -> Dict[str, Any]:\n block: dict[str, Any] = {\n \"object\": \"block\",\n \"type\": block_type,\n block_type: {},\n }\n\n if block_type in [\n \"paragraph\",\n \"heading_1\",\n \"heading_2\",\n \"heading_3\",\n \"bulleted_list_item\",\n \"numbered_list_item\",\n \"quote\",\n ]:\n block[block_type][\"rich_text\"] = [\n {\n \"type\": \"text\",\n \"text\": {\n \"content\": content,\n },\n }\n ]\n elif block_type == \"to_do\":\n block[block_type][\"rich_text\"] = [\n {\n \"type\": \"text\",\n \"text\": {\n \"content\": content,\n },\n }\n ]\n block[block_type][\"checked\"] = kwargs.get(\"checked\", False)\n elif block_type == \"code\":\n block[block_type][\"rich_text\"] = [\n {\n \"type\": \"text\",\n \"text\": {\n \"content\": content,\n },\n }\n ]\n block[block_type][\"language\"] = kwargs.get(\"language\", \"plain text\")\n elif block_type == \"image\":\n block[block_type] = {\"type\": \"external\", \"external\": {\"url\": kwargs.get(\"image_url\", \"\")}}\n elif block_type == \"divider\":\n pass\n elif block_type == \"bookmark\":\n block[block_type][\"url\"] = kwargs.get(\"link_url\", \"\")\n elif block_type == \"table\":\n block[block_type][\"table_width\"] = kwargs.get(\"table_width\", 0)\n block[block_type][\"has_column_header\"] = kwargs.get(\"has_column_header\", False)\n block[block_type][\"has_row_header\"] = kwargs.get(\"has_row_header\", False)\n elif block_type == \"table_row\":\n block[block_type][\"cells\"] = [[{\"type\": \"text\", \"text\": {\"content\": cell}} for cell in content]]\n\n return block\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"markdown_text":{"trace_as_input":true,"multiline":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"markdown_text","value":"","display_name":"Markdown Text","advanced":true,"input_types":["Message"],"dynamic":false,"info":"The markdown text to convert to Notion blocks.","title_case":false,"type":"str","_input_type":"MultilineInput"},"notion_secret":{"load_from_db":false,"required":true,"placeholder":"","show":true,"name":"notion_secret","value":"","display_name":"Notion Secret","advanced":false,"input_types":["Message"],"dynamic":false,"info":"The Notion integration token.","title_case":false,"password":true,"type":"str","_input_type":"SecretStrInput"}},"description":"Convert markdown text to Notion blocks and append them to a Notion 
page.","icon":"NotionDirectoryLoader","base_classes":["Data","Tool"],"display_name":"Add Content to Page ","documentation":"https://developers.notion.com/reference/patch-block-children","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Data"],"selected":"Data","name":"example_output","display_name":"Data","method":"run_model","value":"__UNDEFINED__","cache":true,"hidden":true},{"types":["Tool"],"selected":"Tool","name":"example_tool_output","display_name":"Tool","method":"build_tool","value":"__UNDEFINED__","cache":true}],"field_order":["markdown_text","block_id","notion_secret"],"beta":false,"edited":true,"lf_version":"1.0.17"},"id":"AddContentToPage-vrAvx","description":"Convert markdown text to Notion blocks and append them to a Notion page.","display_name":"Add Content to Page "},"selected":false,"width":384,"height":330,"positionAbsolute":{"x":2649.2991466550634,"y":1050.6250104897197},"dragging":false},{"id":"NotionPageCreator-Exc7f","type":"genericNode","position":{"x":3050.8201437255634,"y":1391.0449862668834},"data":{"type":"NotionPageCreator","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"import json\nfrom typing import Dict, Any, Union\nimport requests\nfrom pydantic import BaseModel, Field\nfrom langflow.base.langchain_utilities.model import LCToolComponent\nfrom langflow.inputs import SecretStrInput, StrInput, MultilineInput\nfrom langflow.schema import Data\nfrom langflow.field_typing import Tool\nfrom langchain.tools import StructuredTool\nfrom langflow.io import Output\n\nclass NotionPageCreator(LCToolComponent):\n display_name: str = \"Create Page \"\n description: str = \"A component for creating Notion pages.\"\n documentation: str = \"https://docs.langflow.org/integrations/notion/page-create\"\n icon = \"NotionDirectoryLoader\"\n\n inputs = [\n StrInput(\n name=\"database_id\",\n display_name=\"Database ID\",\n info=\"The ID of the Notion database.\",\n ),\n SecretStrInput(\n name=\"notion_secret\",\n display_name=\"Notion Secret\",\n info=\"The Notion integration token.\",\n required=True,\n ),\n MultilineInput(\n name=\"properties_json\",\n display_name=\"Properties (JSON)\",\n info=\"The properties of the new page as a JSON string.\",\n ),\n ]\n outputs = [\n Output(name=\"example_output\", display_name=\"Data\", method=\"run_model\"),\n Output(name=\"example_tool_output\", display_name=\"Tool\", method=\"build_tool\"),\n ]\n\n class NotionPageCreatorSchema(BaseModel):\n database_id: str = Field(..., description=\"The ID of the Notion database.\")\n properties_json: str = Field(..., description=\"The properties of the new page as a JSON string.\")\n\n def run_model(self) -> Data:\n result = self._create_notion_page(self.database_id, self.properties_json)\n if isinstance(result, str):\n # An error occurred, return it as text\n return Data(text=result)\n else:\n # Success, return the created page data\n output = \"Created page properties:\\n\"\n for prop_name, prop_value in result.get(\"properties\", {}).items():\n output += f\"{prop_name}: {prop_value}\\n\"\n return Data(text=output, data=result)\n\n def build_tool(self) -> Tool:\n return StructuredTool.from_function(\n name=\"create_notion_page\",\n description=\"Create a new page in a Notion database. 
IMPORTANT: Use the tool to check the Database properties for more details before using this tool.\",\n func=self._create_notion_page,\n args_schema=self.NotionPageCreatorSchema,\n )\n\n def _create_notion_page(self, database_id: str, properties_json: str) -> Union[Dict[str, Any], str]:\n if not database_id or not properties_json:\n return \"Invalid input. Please provide 'database_id' and 'properties_json'.\"\n\n try:\n properties = json.loads(properties_json)\n except json.JSONDecodeError as e:\n return f\"Invalid properties format. Please provide a valid JSON string. Error: {str(e)}\"\n\n headers = {\n \"Authorization\": f\"Bearer {self.notion_secret}\",\n \"Content-Type\": \"application/json\",\n \"Notion-Version\": \"2022-06-28\",\n }\n\n data = {\n \"parent\": {\"database_id\": database_id},\n \"properties\": properties,\n }\n\n try:\n response = requests.post(\"https://api.notion.com/v1/pages\", headers=headers, json=data)\n response.raise_for_status()\n result = response.json()\n return result\n except requests.exceptions.RequestException as e:\n error_message = f\"Failed to create Notion page. Error: {str(e)}\"\n if hasattr(e, \"response\") and e.response is not None:\n error_message += f\" Status code: {e.response.status_code}, Response: {e.response.text}\"\n return error_message\n\n def __call__(self, *args, **kwargs):\n return self._create_notion_page(*args, **kwargs)\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"database_id":{"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"database_id","value":"","display_name":"Database ID","advanced":true,"dynamic":false,"info":"The ID of the Notion database.","title_case":false,"type":"str","_input_type":"StrInput"},"notion_secret":{"load_from_db":false,"required":true,"placeholder":"","show":true,"name":"notion_secret","value":"","display_name":"Notion Secret","advanced":false,"input_types":["Message"],"dynamic":false,"info":"The Notion integration token.","title_case":false,"password":true,"type":"str","_input_type":"SecretStrInput"},"properties_json":{"trace_as_input":true,"multiline":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"properties_json","value":"","display_name":"Properties (JSON)","advanced":true,"input_types":["Message"],"dynamic":false,"info":"The properties of the new page as a JSON string.","title_case":false,"type":"str","_input_type":"MultilineInput"}},"description":"A component for creating Notion pages.","icon":"NotionDirectoryLoader","base_classes":["Data","Tool"],"display_name":"Create Page ","documentation":"https://docs.langflow.org/integrations/notion/page-create","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Data"],"selected":"Data","name":"example_output","display_name":"Data","method":"run_model","value":"__UNDEFINED__","cache":true,"hidden":true},{"types":["Tool"],"selected":"Tool","name":"example_tool_output","display_name":"Tool","method":"build_tool","value":"__UNDEFINED__","cache":true}],"field_order":["database_id","notion_secret","properties_json"],"beta":false,"edited":true,"lf_version":"1.0.17"},"id":"NotionPageCreator-Exc7f","description":"A component for creating Notion pages.","display_name":"Create Page 
"},"selected":false,"width":384,"height":302,"positionAbsolute":{"x":3050.8201437255634,"y":1391.0449862668834},"dragging":false},{"id":"NotionDatabaseProperties-IjzLV","type":"genericNode","position":{"x":3053.0023230574693,"y":1061.535907149244},"data":{"type":"NotionDatabaseProperties","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"import requests\nfrom typing import Dict, Union\nfrom pydantic import BaseModel, Field\nfrom langflow.base.langchain_utilities.model import LCToolComponent\nfrom langflow.inputs import SecretStrInput, StrInput\nfrom langflow.schema import Data\nfrom langflow.field_typing import Tool\nfrom langchain.tools import StructuredTool\nfrom langflow.io import Output\n\nclass NotionDatabaseProperties(LCToolComponent):\n display_name: str = \"List Database Properties \"\n description: str = \"Retrieve properties of a Notion database.\"\n documentation: str = \"https://docs.langflow.org/integrations/notion/list-database-properties\"\n icon = \"NotionDirectoryLoader\"\n\n inputs = [\n StrInput(\n name=\"database_id\",\n display_name=\"Database ID\",\n info=\"The ID of the Notion database.\",\n ),\n SecretStrInput(\n name=\"notion_secret\",\n display_name=\"Notion Secret\",\n info=\"The Notion integration token.\",\n required=True,\n ),\n ]\n outputs = [\n Output(name=\"example_output\", display_name=\"Data\", method=\"run_model\"),\n Output(name=\"example_tool_output\", display_name=\"Tool\", method=\"build_tool\"),\n ]\n\n class NotionDatabasePropertiesSchema(BaseModel):\n database_id: str = Field(..., description=\"The ID of the Notion database.\")\n\n def run_model(self) -> Data:\n result = self._fetch_database_properties(self.database_id)\n if isinstance(result, str):\n # An error occurred, return it as text\n return Data(text=result)\n else:\n # Success, return the properties\n return Data(text=str(result), data=result)\n\n def build_tool(self) -> Tool:\n return StructuredTool.from_function(\n name=\"notion_database_properties\",\n description=\"Retrieve properties of a Notion database. 
Input should include the database ID.\",\n func=self._fetch_database_properties,\n args_schema=self.NotionDatabasePropertiesSchema,\n )\n\n def _fetch_database_properties(self, database_id: str) -> Union[Dict, str]:\n url = f\"https://api.notion.com/v1/databases/{database_id}\"\n headers = {\n \"Authorization\": f\"Bearer {self.notion_secret}\",\n \"Notion-Version\": \"2022-06-28\", # Use the latest supported version\n }\n try:\n response = requests.get(url, headers=headers)\n response.raise_for_status()\n data = response.json()\n properties = data.get(\"properties\", {})\n return properties\n except requests.exceptions.RequestException as e:\n return f\"Error fetching Notion database properties: {str(e)}\"\n except ValueError as e:\n return f\"Error parsing Notion API response: {str(e)}\"\n except Exception as e:\n return f\"An unexpected error occurred: {str(e)}\"\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"database_id":{"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"database_id","value":"","display_name":"Database ID","advanced":true,"dynamic":false,"info":"The ID of the Notion database.","title_case":false,"type":"str","_input_type":"StrInput"},"notion_secret":{"load_from_db":false,"required":true,"placeholder":"","show":true,"name":"notion_secret","value":"","display_name":"Notion Secret","advanced":false,"input_types":["Message"],"dynamic":false,"info":"The Notion integration token.","title_case":false,"password":true,"type":"str","_input_type":"SecretStrInput"}},"description":"Retrieve properties of a Notion database.","icon":"NotionDirectoryLoader","base_classes":["Data","Tool"],"display_name":"List Database Properties ","documentation":"https://docs.langflow.org/integrations/notion/list-database-properties","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Data"],"selected":"Data","name":"example_output","display_name":"Data","method":"run_model","value":"__UNDEFINED__","cache":true,"hidden":true},{"types":["Tool"],"selected":"Tool","name":"example_tool_output","display_name":"Tool","method":"build_tool","value":"__UNDEFINED__","cache":true}],"field_order":["database_id","notion_secret"],"beta":false,"edited":true,"lf_version":"1.0.17"},"id":"NotionDatabaseProperties-IjzLV","description":"Retrieve properties of a Notion database.","display_name":"List Database Properties "},"selected":false,"width":384,"height":302,"positionAbsolute":{"x":3053.0023230574693,"y":1061.535907149244},"dragging":false},{"id":"NotionPageUpdate-bexvy","type":"genericNode","position":{"x":2649.2991466550625,"y":1385.262204377853},"data":{"type":"NotionPageUpdate","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"import json\nimport requests\nfrom typing import Dict, Any, Union\nfrom pydantic import BaseModel, Field\nfrom langflow.base.langchain_utilities.model import LCToolComponent\nfrom langflow.inputs import SecretStrInput, StrInput, MultilineInput\nfrom langflow.schema import Data\nfrom langflow.field_typing import Tool\nfrom langchain.tools import StructuredTool\nfrom loguru import logger\nfrom langflow.io import Output\n\nclass NotionPageUpdate(LCToolComponent):\n display_name: str = \"Update Page Property \"\n description: str = \"Update the properties of a Notion page.\"\n 
documentation: str = \"https://docs.langflow.org/integrations/notion/page-update\"\n icon = \"NotionDirectoryLoader\"\n\n inputs = [\n StrInput(\n name=\"page_id\",\n display_name=\"Page ID\",\n info=\"The ID of the Notion page to update.\",\n ),\n MultilineInput(\n name=\"properties\",\n display_name=\"Properties\",\n info=\"The properties to update on the page (as a JSON string or a dictionary).\",\n ),\n SecretStrInput(\n name=\"notion_secret\",\n display_name=\"Notion Secret\",\n info=\"The Notion integration token.\",\n required=True,\n ),\n ]\n outputs = [\n Output(name=\"example_output\", display_name=\"Data\", method=\"run_model\"),\n Output(name=\"example_tool_output\", display_name=\"Tool\", method=\"build_tool\"),\n ]\n\n class NotionPageUpdateSchema(BaseModel):\n page_id: str = Field(..., description=\"The ID of the Notion page to update.\")\n properties: Union[str, Dict[str, Any]] = Field(\n ..., description=\"The properties to update on the page (as a JSON string or a dictionary).\"\n )\n\n def run_model(self) -> Data:\n result = self._update_notion_page(self.page_id, self.properties)\n if isinstance(result, str):\n # An error occurred, return it as text\n return Data(text=result)\n else:\n # Success, return the updated page data\n output = \"Updated page properties:\\n\"\n for prop_name, prop_value in result.get(\"properties\", {}).items():\n output += f\"{prop_name}: {prop_value}\\n\"\n return Data(text=output, data=result)\n\n def build_tool(self) -> Tool:\n return StructuredTool.from_function(\n name=\"update_notion_page\",\n description=\"Update the properties of a Notion page. IMPORTANT: Use the tool to check the Database properties for more details before using this tool.\",\n func=self._update_notion_page,\n args_schema=self.NotionPageUpdateSchema,\n )\n\n def _update_notion_page(self, page_id: str, properties: Union[str, Dict[str, Any]]) -> Union[Dict[str, Any], str]:\n url = f\"https://api.notion.com/v1/pages/{page_id}\"\n headers = {\n \"Authorization\": f\"Bearer {self.notion_secret}\",\n \"Content-Type\": \"application/json\",\n \"Notion-Version\": \"2022-06-28\", # Use the latest supported version\n }\n\n # Parse properties if it's a string\n if isinstance(properties, str):\n try:\n parsed_properties = json.loads(properties)\n except json.JSONDecodeError as e:\n error_message = f\"Invalid JSON format for properties: {str(e)}\"\n logger.error(error_message)\n return error_message\n\n else:\n parsed_properties = properties\n\n data = {\"properties\": parsed_properties}\n\n try:\n logger.info(f\"Sending request to Notion API: URL: {url}, Data: {json.dumps(data)}\")\n response = requests.patch(url, headers=headers, json=data)\n response.raise_for_status()\n updated_page = response.json()\n\n logger.info(f\"Successfully updated Notion page. 
Response: {json.dumps(updated_page)}\")\n return updated_page\n except requests.exceptions.HTTPError as e:\n error_message = f\"HTTP Error occurred: {str(e)}\"\n if e.response is not None:\n error_message += f\"\\nStatus code: {e.response.status_code}\"\n error_message += f\"\\nResponse body: {e.response.text}\"\n logger.error(error_message)\n return error_message\n except requests.exceptions.RequestException as e:\n error_message = f\"An error occurred while making the request: {str(e)}\"\n logger.error(error_message)\n return error_message\n except Exception as e:\n error_message = f\"An unexpected error occurred: {str(e)}\"\n logger.error(error_message)\n return error_message\n\n def __call__(self, *args, **kwargs):\n return self._update_notion_page(*args, **kwargs)\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"notion_secret":{"load_from_db":false,"required":true,"placeholder":"","show":true,"name":"notion_secret","value":"","display_name":"Notion Secret","advanced":false,"input_types":["Message"],"dynamic":false,"info":"The Notion integration token.","title_case":false,"password":true,"type":"str","_input_type":"SecretStrInput"},"page_id":{"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"page_id","value":"","display_name":"Page ID","advanced":true,"dynamic":false,"info":"The ID of the Notion page to update.","title_case":false,"type":"str","_input_type":"StrInput"},"properties":{"trace_as_input":true,"multiline":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"properties","value":"","display_name":"Properties","advanced":true,"input_types":["Message"],"dynamic":false,"info":"The properties to update on the page (as a JSON string or a dictionary).","title_case":false,"type":"str","_input_type":"MultilineInput"}},"description":"Update the properties of a Notion page.","icon":"NotionDirectoryLoader","base_classes":["Data","Tool"],"display_name":"Update Page Property ","documentation":"https://docs.langflow.org/integrations/notion/page-update","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Data"],"selected":"Data","name":"example_output","display_name":"Data","method":"run_model","value":"__UNDEFINED__","cache":true,"hidden":true},{"types":["Tool"],"selected":"Tool","name":"example_tool_output","display_name":"Tool","method":"build_tool","value":"__UNDEFINED__","cache":true}],"field_order":["page_id","properties","notion_secret"],"beta":false,"edited":true,"lf_version":"1.0.17"},"id":"NotionPageUpdate-bexvy","description":"Update the properties of a Notion page.","display_name":"Update Page Property "},"selected":false,"width":384,"height":302,"positionAbsolute":{"x":2649.2991466550625,"y":1385.262204377853},"dragging":false},{"id":"NotionSearch-EdSJb","type":"genericNode","position":{"x":2435.4455721283834,"y":357.45573905064634},"data":{"type":"NotionSearch","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"import requests\nfrom typing import Dict, Any, List\nfrom pydantic import BaseModel, Field\nfrom langflow.io import Output\nfrom langflow.base.langchain_utilities.model import LCToolComponent\nfrom langflow.inputs import SecretStrInput, StrInput, DropdownInput\nfrom langflow.schema import 
Data\nfrom langflow.field_typing import Tool\nfrom langchain.tools import StructuredTool\n\n\nclass NotionSearch(LCToolComponent):\n display_name: str = \"Search \"\n description: str = \"Searches all pages and databases that have been shared with an integration. The search field can be an empty value to show all values from that search\"\n documentation: str = \"https://docs.langflow.org/integrations/notion/search\"\n icon = \"NotionDirectoryLoader\"\n\n inputs = [\n SecretStrInput(\n name=\"notion_secret\",\n display_name=\"Notion Secret\",\n info=\"The Notion integration token.\",\n required=True,\n ),\n StrInput(\n name=\"query\",\n display_name=\"Search Query\",\n info=\"The text that the API compares page and database titles against.\",\n ),\n DropdownInput(\n name=\"filter_value\",\n display_name=\"Filter Type\",\n info=\"Limits the results to either only pages or only databases.\",\n options=[\"page\", \"database\"],\n value=\"page\",\n ),\n DropdownInput(\n name=\"sort_direction\",\n display_name=\"Sort Direction\",\n info=\"The direction to sort the results.\",\n options=[\"ascending\", \"descending\"],\n value=\"descending\",\n ),\n ]\n outputs = [\n Output(name=\"example_output\", display_name=\"Data\", method=\"run_model\"),\n Output(name=\"example_tool_output\", display_name=\"Tool\", method=\"build_tool\"),\n ]\n\n class NotionSearchSchema(BaseModel):\n query: str = Field(..., description=\"The search query text.\")\n filter_value: str = Field(default=\"page\", description=\"Filter type: 'page' or 'database'.\")\n sort_direction: str = Field(default=\"descending\", description=\"Sort direction: 'ascending' or 'descending'.\")\n\n def run_model(self) -> List[Data]:\n results = self._search_notion(self.query, self.filter_value, self.sort_direction)\n records = []\n combined_text = f\"Results found: {len(results)}\\n\\n\"\n\n for result in results:\n result_data = {\n \"id\": result[\"id\"],\n \"type\": result[\"object\"],\n \"last_edited_time\": result[\"last_edited_time\"],\n }\n\n if result[\"object\"] == \"page\":\n result_data[\"title_or_url\"] = result[\"url\"]\n text = f\"id: {result['id']}\\ntitle_or_url: {result['url']}\\n\"\n elif result[\"object\"] == \"database\":\n if \"title\" in result and isinstance(result[\"title\"], list) and len(result[\"title\"]) > 0:\n result_data[\"title_or_url\"] = result[\"title\"][0][\"plain_text\"]\n text = f\"id: {result['id']}\\ntitle_or_url: {result['title'][0]['plain_text']}\\n\"\n else:\n result_data[\"title_or_url\"] = \"N/A\"\n text = f\"id: {result['id']}\\ntitle_or_url: N/A\\n\"\n\n text += f\"type: {result['object']}\\nlast_edited_time: {result['last_edited_time']}\\n\\n\"\n combined_text += text\n records.append(Data(text=text, data=result_data))\n\n self.status = records\n return records\n\n def build_tool(self) -> Tool:\n return StructuredTool.from_function(\n name=\"notion_search\",\n description=\"Search Notion pages and databases. 
Input should include the search query and optionally filter type and sort direction.\",\n func=self._search_notion,\n args_schema=self.NotionSearchSchema,\n )\n\n def _search_notion(\n self, query: str, filter_value: str = \"page\", sort_direction: str = \"descending\"\n ) -> List[Dict[str, Any]]:\n url = \"https://api.notion.com/v1/search\"\n headers = {\n \"Authorization\": f\"Bearer {self.notion_secret}\",\n \"Content-Type\": \"application/json\",\n \"Notion-Version\": \"2022-06-28\",\n }\n\n data = {\n \"query\": query,\n \"filter\": {\"value\": filter_value, \"property\": \"object\"},\n \"sort\": {\"direction\": sort_direction, \"timestamp\": \"last_edited_time\"},\n }\n\n response = requests.post(url, headers=headers, json=data)\n response.raise_for_status()\n\n results = response.json()\n return results[\"results\"]\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"filter_value":{"trace_as_metadata":true,"options":["page","database"],"combobox":false,"required":false,"placeholder":"","show":true,"name":"filter_value","value":"database","display_name":"Filter Type","advanced":true,"dynamic":false,"info":"Limits the results to either only pages or only databases.","title_case":false,"type":"str","_input_type":"DropdownInput"},"notion_secret":{"load_from_db":false,"required":true,"placeholder":"","show":true,"name":"notion_secret","value":"","display_name":"Notion Secret","advanced":false,"input_types":["Message"],"dynamic":false,"info":"The Notion integration token.","title_case":false,"password":true,"type":"str","_input_type":"SecretStrInput"},"query":{"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"query","value":"","display_name":"Search Query","advanced":true,"dynamic":false,"info":"The text that the API compares page and database titles against.","title_case":false,"type":"str","_input_type":"StrInput"},"sort_direction":{"trace_as_metadata":true,"options":["ascending","descending"],"combobox":false,"required":false,"placeholder":"","show":true,"name":"sort_direction","value":"descending","display_name":"Sort Direction","advanced":true,"dynamic":false,"info":"The direction to sort the results.","title_case":false,"type":"str","_input_type":"DropdownInput"}},"description":"List All Databases","icon":"NotionDirectoryLoader","base_classes":["Data","Tool"],"display_name":"List Databases","documentation":"https://docs.langflow.org/integrations/notion/search","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Data"],"selected":"Data","name":"example_output","display_name":"Data","method":"run_model","value":"__UNDEFINED__","cache":true,"hidden":false},{"types":["Tool"],"selected":"Tool","name":"example_tool_output","display_name":"Tool","method":"build_tool","value":"__UNDEFINED__","cache":true,"hidden":true}],"field_order":["notion_secret","query","filter_value","sort_direction"],"beta":false,"edited":true,"lf_version":"1.0.17"},"id":"NotionSearch-EdSJb","description":"Searches all pages and databases that have been shared with an integration.","display_name":"Search 
"},"selected":false,"width":384,"height":302,"positionAbsolute":{"x":2435.4455721283834,"y":357.45573905064634},"dragging":false},{"id":"ParseData-vYVwu","type":"genericNode","position":{"x":2871.5903532688335,"y":563.1965154816405},"data":{"type":"ParseData","node":{"template":{"_type":"Component","data":{"trace_as_metadata":true,"list":false,"trace_as_input":true,"required":false,"placeholder":"","show":true,"name":"data","value":"","display_name":"Data","advanced":false,"input_types":["Data"],"dynamic":false,"info":"The data to convert to text.","title_case":false,"type":"other","_input_type":"DataInput"},"code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.custom import Component\nfrom langflow.helpers.data import data_to_text\nfrom langflow.io import DataInput, MultilineInput, Output, StrInput\nfrom langflow.schema.message import Message\n\n\nclass ParseDataComponent(Component):\n display_name = \"Parse Data\"\n description = \"Convert Data into plain text following a specified template.\"\n icon = \"braces\"\n name = \"ParseData\"\n\n inputs = [\n DataInput(name=\"data\", display_name=\"Data\", info=\"The data to convert to text.\"),\n MultilineInput(\n name=\"template\",\n display_name=\"Template\",\n info=\"The template to use for formatting the data. It can contain the keys {text}, {data} or any other key in the Data.\",\n value=\"{text}\",\n ),\n StrInput(name=\"sep\", display_name=\"Separator\", advanced=True, value=\"\\n\"),\n ]\n\n outputs = [\n Output(display_name=\"Text\", name=\"text\", method=\"parse_data\"),\n ]\n\n def parse_data(self) -> Message:\n data = self.data if isinstance(self.data, list) else [self.data]\n template = self.template\n\n result_string = data_to_text(template, data, sep=self.sep)\n self.status = result_string\n return Message(text=result_string)\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"sep":{"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"sep","value":"\n","display_name":"Separator","advanced":true,"dynamic":false,"info":"","title_case":false,"type":"str","_input_type":"StrInput"},"template":{"trace_as_input":true,"multiline":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"template","value":"{text}","display_name":"Template","advanced":true,"input_types":["Message"],"dynamic":false,"info":"The template to use for formatting the data. 
It can contain the keys {text}, {data} or any other key in the Data.","title_case":false,"type":"str","_input_type":"MultilineInput"}},"description":"Convert Data into plain text following a specified template.","icon":"braces","base_classes":["Message"],"display_name":"Parse Data","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"text","display_name":"Text","method":"parse_data","value":"__UNDEFINED__","cache":true}],"field_order":["data","template","sep"],"beta":false,"edited":false,"lf_version":"1.0.17"},"id":"ParseData-vYVwu","showNode":false},"selected":false,"width":96,"height":96,"positionAbsolute":{"x":2871.5903532688335,"y":563.1965154816405},"dragging":false},{"id":"ChatOutput-zBv53","type":"genericNode","position":{"x":4429.812566227955,"y":940.6072472757681},"data":{"type":"ChatOutput","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"data_template":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"data_template","value":"{text}","display_name":"Data Template","advanced":true,"input_types":["Message"],"dynamic":false,"info":"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"input_value":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"input_value","value":"","display_name":"Text","advanced":false,"input_types":["Message"],"dynamic":false,"info":"Message to be passed as output.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"sender":{"trace_as_metadata":true,"options":["Machine","User"],"combobox":false,"required":false,"placeholder":"","show":true,"name":"sender","value":"Machine","display_name":"Sender Type","advanced":true,"dynamic":false,"info":"Type of sender.","title_case":false,"type":"str","_input_type":"DropdownInput"},"sender_name":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"sender_name","value":"AI","display_name":"Sender Name","advanced":true,"input_types":["Message"],"dynamic":false,"info":"Name of the sender.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"session_id":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"session_id","value":"","display_name":"Session ID","advanced":true,"input_types":["Message"],"dynamic":false,"info":"The session ID of the chat. 
If empty, the current session ID parameter will be used.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"should_store_message":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"name":"should_store_message","value":true,"display_name":"Store Messages","advanced":true,"dynamic":false,"info":"Store the message in the history.","title_case":false,"type":"bool","_input_type":"BoolInput"}},"description":"Display a chat message in the Playground.","icon":"ChatOutput","base_classes":["Message"],"display_name":"Chat Output","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"message","display_name":"Message","method":"message_response","value":"__UNDEFINED__","cache":true}],"field_order":["input_value","should_store_message","sender","sender_name","session_id","data_template"],"beta":false,"edited":false,"lf_version":"1.0.17"},"id":"ChatOutput-zBv53"},"selected":false,"width":384,"height":302,"positionAbsolute":{"x":4429.812566227955,"y":940.6072472757681},"dragging":false},{"id":"NotionUserList-wFEb1","type":"genericNode","position":{"x":2390.6365450681037,"y":694.4867003504073},"data":{"type":"NotionUserList","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"import requests\nfrom typing import List, Dict\nfrom pydantic import BaseModel\nfrom langflow.io import Output\nfrom langflow.base.langchain_utilities.model import LCToolComponent\nfrom langflow.inputs import SecretStrInput\nfrom langflow.schema import Data\nfrom langflow.field_typing import Tool\nfrom langchain.tools import StructuredTool\n\n\nclass NotionUserList(LCToolComponent):\n display_name = \"List Users \"\n description = \"Retrieve users from Notion.\"\n documentation = \"https://docs.langflow.org/integrations/notion/list-users\"\n icon = \"NotionDirectoryLoader\"\n\n inputs = [\n SecretStrInput(\n name=\"notion_secret\",\n display_name=\"Notion Secret\",\n info=\"The Notion integration token.\",\n required=True,\n ),\n ]\n outputs = [\n Output(name=\"example_output\", display_name=\"Data\", method=\"run_model\"),\n Output(name=\"example_tool_output\", display_name=\"Tool\", method=\"build_tool\"),\n ]\n\n class NotionUserListSchema(BaseModel):\n pass\n\n def run_model(self) -> List[Data]:\n users = self._list_users()\n records = []\n combined_text = \"\"\n\n for user in users:\n output = \"User:\\n\"\n for key, value in user.items():\n output += f\"{key.replace('_', ' ').title()}: {value}\\n\"\n output += \"________________________\\n\"\n\n combined_text += output\n records.append(Data(text=output, data=user))\n\n self.status = records\n return records\n\n def build_tool(self) -> Tool:\n return StructuredTool.from_function(\n name=\"notion_list_users\",\n description=\"Retrieve users from Notion.\",\n func=self._list_users,\n args_schema=self.NotionUserListSchema,\n )\n\n def _list_users(self) -> List[Dict]:\n url = \"https://api.notion.com/v1/users\"\n headers = {\n \"Authorization\": f\"Bearer {self.notion_secret}\",\n \"Notion-Version\": \"2022-06-28\",\n }\n\n response = requests.get(url, headers=headers)\n response.raise_for_status()\n\n data = response.json()\n results = data[\"results\"]\n\n users = []\n for user in results:\n user_data = {\n \"id\": user[\"id\"],\n \"type\": user[\"type\"],\n \"name\": user.get(\"name\", \"\"),\n \"avatar_url\": 
user.get(\"avatar_url\", \"\"),\n }\n users.append(user_data)\n\n return users\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"notion_secret":{"load_from_db":false,"required":true,"placeholder":"","show":true,"name":"notion_secret","value":"","display_name":"Notion Secret","advanced":false,"input_types":["Message"],"dynamic":false,"info":"The Notion integration token.","title_case":false,"password":true,"type":"str","_input_type":"SecretStrInput"}},"description":"Retrieve users from Notion.","icon":"NotionDirectoryLoader","base_classes":["Data","Tool"],"display_name":"List Users ","documentation":"https://docs.langflow.org/integrations/notion/list-users","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Data"],"selected":"Data","name":"example_output","display_name":"Data","method":"run_model","value":"__UNDEFINED__","cache":true},{"types":["Tool"],"selected":"Tool","name":"example_tool_output","display_name":"Tool","method":"build_tool","value":"__UNDEFINED__","cache":true,"hidden":true}],"field_order":["notion_secret"],"beta":false,"edited":true,"lf_version":"1.0.17"},"id":"NotionUserList-wFEb1","description":"Retrieve users from Notion.","display_name":"List Users "},"selected":false,"width":384,"height":302,"positionAbsolute":{"x":2390.6365450681037,"y":694.4867003504073},"dragging":false},{"id":"ParseData-WKjW6","type":"genericNode","position":{"x":2877.571533084884,"y":856.8480898893301},"data":{"type":"ParseData","node":{"template":{"_type":"Component","data":{"trace_as_metadata":true,"list":false,"trace_as_input":true,"required":false,"placeholder":"","show":true,"name":"data","value":"","display_name":"Data","advanced":false,"input_types":["Data"],"dynamic":false,"info":"The data to convert to text.","title_case":false,"type":"other","_input_type":"DataInput"},"code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.custom import Component\nfrom langflow.helpers.data import data_to_text\nfrom langflow.io import DataInput, MultilineInput, Output, StrInput\nfrom langflow.schema.message import Message\n\n\nclass ParseDataComponent(Component):\n display_name = \"Parse Data\"\n description = \"Convert Data into plain text following a specified template.\"\n icon = \"braces\"\n name = \"ParseData\"\n\n inputs = [\n DataInput(name=\"data\", display_name=\"Data\", info=\"The data to convert to text.\"),\n MultilineInput(\n name=\"template\",\n display_name=\"Template\",\n info=\"The template to use for formatting the data. 
It can contain the keys {text}, {data} or any other key in the Data.\",\n value=\"{text}\",\n ),\n StrInput(name=\"sep\", display_name=\"Separator\", advanced=True, value=\"\\n\"),\n ]\n\n outputs = [\n Output(display_name=\"Text\", name=\"text\", method=\"parse_data\"),\n ]\n\n def parse_data(self) -> Message:\n data = self.data if isinstance(self.data, list) else [self.data]\n template = self.template\n\n result_string = data_to_text(template, data, sep=self.sep)\n self.status = result_string\n return Message(text=result_string)\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"sep":{"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"sep","value":"\n","display_name":"Separator","advanced":true,"dynamic":false,"info":"","title_case":false,"type":"str","_input_type":"StrInput"},"template":{"trace_as_input":true,"multiline":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"template","value":"{text}","display_name":"Template","advanced":true,"input_types":["Message"],"dynamic":false,"info":"The template to use for formatting the data. It can contain the keys {text}, {data} or any other key in the Data.","title_case":false,"type":"str","_input_type":"MultilineInput"}},"description":"Convert Data into plain text following a specified template.","icon":"braces","base_classes":["Message"],"display_name":"Parse Data","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"text","display_name":"Text","method":"parse_data","value":"__UNDEFINED__","cache":true}],"field_order":["data","template","sep"],"beta":false,"edited":false,"lf_version":"1.0.17"},"id":"ParseData-WKjW6","showNode":false},"selected":false,"width":96,"height":96,"positionAbsolute":{"x":2877.571533084884,"y":856.8480898893301},"dragging":false},{"id":"CurrentDateComponent-WOwNq","type":"genericNode","position":{"x":536.7929500860405,"y":617.6055631700241},"data":{"type":"CurrentDateComponent","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from datetime import datetime\r\nfrom zoneinfo import ZoneInfo\r\nfrom typing import List\r\n\r\nfrom langflow.custom import Component\r\nfrom langflow.io import DropdownInput, Output\r\nfrom langflow.schema.message import Message\r\n\r\nclass CurrentDateComponent(Component):\r\n display_name = \"Current Date 🕰️\"\r\n description = \"Returns the current date and time in the selected timezone.\"\r\n icon = \"clock\"\r\n\r\n inputs = [\r\n DropdownInput(\r\n name=\"timezone\",\r\n display_name=\"Timezone\",\r\n options=[\r\n \"UTC\",\r\n \"US/Eastern\",\r\n \"US/Central\",\r\n \"US/Mountain\",\r\n \"US/Pacific\",\r\n \"Europe/London\",\r\n \"Europe/Paris\",\r\n \"Asia/Tokyo\",\r\n \"Australia/Sydney\",\r\n \"America/Sao_Paulo\",\r\n \"America/Cuiaba\",\r\n ],\r\n value=\"UTC\",\r\n info=\"Select the timezone for the current date and time.\",\r\n ),\r\n ]\r\n\r\n outputs = [\r\n Output(display_name=\"Current Date\", name=\"current_date\", method=\"get_current_date\"),\r\n ]\r\n\r\n def get_current_date(self) -> Message:\r\n try:\r\n tz = ZoneInfo(self.timezone)\r\n current_date = datetime.now(tz).strftime(\"%Y-%m-%d %H:%M:%S %Z\")\r\n result = f\"Current date and time 
in {self.timezone}: {current_date}\"\r\n self.status = result\r\n return Message(text=result)\r\n except Exception as e:\r\n error_message = f\"Error: {str(e)}\"\r\n self.status = error_message\r\n return Message(text=error_message)","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"timezone":{"trace_as_metadata":true,"options":["UTC","US/Eastern","US/Central","US/Mountain","US/Pacific","Europe/London","Europe/Paris","Asia/Tokyo","Australia/Sydney","America/Sao_Paulo","America/Cuiaba"],"combobox":false,"required":false,"placeholder":"","show":true,"name":"timezone","value":"UTC","display_name":"Timezone","advanced":false,"dynamic":false,"info":"Select the timezone for the current date and time.","title_case":false,"type":"str","_input_type":"DropdownInput"}},"description":"Returns the current date and time in the selected timezone.","icon":"clock","base_classes":["Message"],"display_name":"Current Date","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"current_date","display_name":"Current Date","method":"get_current_date","value":"__UNDEFINED__","cache":true}],"field_order":["timezone"],"beta":false,"edited":true},"id":"CurrentDateComponent-WOwNq","showNode":false},"selected":false,"width":96,"height":96,"dragging":false,"positionAbsolute":{"x":536.7929500860405,"y":617.6055631700241}},{"id":"CurrentDateComponent-PZ8xJ","type":"genericNode","position":{"x":2871.6341688682833,"y":453.3374434097356},"data":{"type":"CurrentDateComponent","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from datetime import datetime\r\nfrom zoneinfo import ZoneInfo\r\nfrom typing import List\r\n\r\nfrom langflow.custom import Component\r\nfrom langflow.io import DropdownInput, Output\r\nfrom langflow.schema.message import Message\r\n\r\nclass CurrentDateComponent(Component):\r\n display_name = \"Current Date 🕰️\"\r\n description = \"Returns the current date and time in the selected timezone.\"\r\n icon = \"clock\"\r\n\r\n inputs = [\r\n DropdownInput(\r\n name=\"timezone\",\r\n display_name=\"Timezone\",\r\n options=[\r\n \"UTC\",\r\n \"US/Eastern\",\r\n \"US/Central\",\r\n \"US/Mountain\",\r\n \"US/Pacific\",\r\n \"Europe/London\",\r\n \"Europe/Paris\",\r\n \"Asia/Tokyo\",\r\n \"Australia/Sydney\",\r\n \"America/Sao_Paulo\",\r\n \"America/Cuiaba\",\r\n ],\r\n value=\"UTC\",\r\n info=\"Select the timezone for the current date and time.\",\r\n ),\r\n ]\r\n\r\n outputs = [\r\n Output(display_name=\"Current Date\", name=\"current_date\", method=\"get_current_date\"),\r\n ]\r\n\r\n def get_current_date(self) -> Message:\r\n try:\r\n tz = ZoneInfo(self.timezone)\r\n current_date = datetime.now(tz).strftime(\"%Y-%m-%d %H:%M:%S %Z\")\r\n result = f\"Current date and time in {self.timezone}: {current_date}\"\r\n self.status = result\r\n return Message(text=result)\r\n except Exception as e:\r\n error_message = f\"Error: {str(e)}\"\r\n self.status = error_message\r\n return 
Message(text=error_message)","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"timezone":{"trace_as_metadata":true,"options":["UTC","US/Eastern","US/Central","US/Mountain","US/Pacific","Europe/London","Europe/Paris","Asia/Tokyo","Australia/Sydney","America/Sao_Paulo","America/Cuiaba"],"combobox":false,"required":false,"placeholder":"","show":true,"name":"timezone","value":"UTC","display_name":"Timezone","advanced":false,"dynamic":false,"info":"Select the timezone for the current date and time.","title_case":false,"type":"str","_input_type":"DropdownInput"}},"description":"Returns the current date and time in the selected timezone.","icon":"clock","base_classes":["Message"],"display_name":"Current Date","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"current_date","display_name":"Current Date","method":"get_current_date","value":"__UNDEFINED__","cache":true}],"field_order":["timezone"],"beta":false,"edited":true,"official":false},"id":"CurrentDateComponent-PZ8xJ","showNode":false},"selected":false,"width":96,"height":96,"dragging":false,"positionAbsolute":{"x":2871.6341688682833,"y":453.3374434097356}}],"edges":[{"source":"TextInput-iJPEJ","sourceHandle":"{œdataTypeœ:œTextInputœ,œidœ:œTextInput-iJPEJœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}","target":"Prompt-19rub","targetHandle":"{œfieldNameœ:œTRANSCRIPTœ,œidœ:œPrompt-19rubœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}","data":{"targetHandle":{"fieldName":"TRANSCRIPT","id":"Prompt-19rub","inputTypes":["Message","Text"],"type":"str"},"sourceHandle":{"dataType":"TextInput","id":"TextInput-iJPEJ","name":"text","output_types":["Message"]}},"id":"reactflow__edge-TextInput-iJPEJ{œdataTypeœ:œTextInputœ,œidœ:œTextInput-iJPEJœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-Prompt-19rub{œfieldNameœ:œTRANSCRIPTœ,œidœ:œPrompt-19rubœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}","selected":false,"className":""},{"source":"NotionUserList-TvIKS","sourceHandle":"{œdataTypeœ:œNotionUserListœ,œidœ:œNotionUserList-TvIKSœ,œnameœ:œexample_outputœ,œoutput_typesœ:[œDataœ]}","target":"ParseData-aNk1v","targetHandle":"{œfieldNameœ:œdataœ,œidœ:œParseData-aNk1vœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}","data":{"targetHandle":{"fieldName":"data","id":"ParseData-aNk1v","inputTypes":["Data"],"type":"other"},"sourceHandle":{"dataType":"NotionUserList","id":"NotionUserList-TvIKS","name":"example_output","output_types":["Data"]}},"id":"reactflow__edge-NotionUserList-TvIKS{œdataTypeœ:œNotionUserListœ,œidœ:œNotionUserList-TvIKSœ,œnameœ:œexample_outputœ,œoutput_typesœ:[œDataœ]}-ParseData-aNk1v{œfieldNameœ:œdataœ,œidœ:œParseData-aNk1vœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}","selected":false,"className":""},{"source":"ParseData-aNk1v","sourceHandle":"{œdataTypeœ:œParseDataœ,œidœ:œParseData-aNk1vœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}","target":"Prompt-19rub","targetHandle":"{œfieldNameœ:œUSERSœ,œidœ:œPrompt-19rubœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}","data":{"targetHandle":{"fieldName":"USERS","id":"Prompt-19rub","inputTypes":["Message","Text"],"type":"str"},"sourceHandle":{"dataType":"ParseData","id":"ParseData-aNk1v","name":"text","output_types":["Message"]}},"id":"reactflow__edge-ParseData-aNk1v{œdataTypeœ:œParseDataœ,œidœ:œParseData-aNk1vœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-Prompt-19rub{œfieldNameœ:œUSERSœ,œidœ:œPrompt-19rubœ,œinputTypesœ:[œMes
sageœ,œTextœ],œtypeœ:œstrœ}","selected":false,"className":""},{"source":"Prompt-19rub","sourceHandle":"{œdataTypeœ:œPromptœ,œidœ:œPrompt-19rubœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}","target":"ToolCallingAgent-rVWeq","targetHandle":"{œfieldNameœ:œsystem_promptœ,œidœ:œToolCallingAgent-rVWeqœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","data":{"targetHandle":{"fieldName":"system_prompt","id":"ToolCallingAgent-rVWeq","inputTypes":["Message"],"type":"str"},"sourceHandle":{"dataType":"Prompt","id":"Prompt-19rub","name":"prompt","output_types":["Message"]}},"id":"reactflow__edge-Prompt-19rub{œdataTypeœ:œPromptœ,œidœ:œPrompt-19rubœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-ToolCallingAgent-rVWeq{œfieldNameœ:œsystem_promptœ,œidœ:œToolCallingAgent-rVWeqœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","selected":false,"className":""},{"source":"NotionSearch-M66HF","sourceHandle":"{œdataTypeœ:œNotionSearchœ,œidœ:œNotionSearch-M66HFœ,œnameœ:œexample_tool_outputœ,œoutput_typesœ:[œToolœ]}","target":"ToolCallingAgent-rVWeq","targetHandle":"{œfieldNameœ:œtoolsœ,œidœ:œToolCallingAgent-rVWeqœ,œinputTypesœ:[œToolœ,œBaseToolœ],œtypeœ:œotherœ}","data":{"targetHandle":{"fieldName":"tools","id":"ToolCallingAgent-rVWeq","inputTypes":["Tool","BaseTool"],"type":"other"},"sourceHandle":{"dataType":"NotionSearch","id":"NotionSearch-M66HF","name":"example_tool_output","output_types":["Tool"]}},"id":"reactflow__edge-NotionSearch-M66HF{œdataTypeœ:œNotionSearchœ,œidœ:œNotionSearch-M66HFœ,œnameœ:œexample_tool_outputœ,œoutput_typesœ:[œToolœ]}-ToolCallingAgent-rVWeq{œfieldNameœ:œtoolsœ,œidœ:œToolCallingAgent-rVWeqœ,œinputTypesœ:[œToolœ,œBaseToolœ],œtypeœ:œotherœ}","selected":false,"className":""},{"source":"OpenAIModel-Ht8xI","sourceHandle":"{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-Ht8xIœ,œnameœ:œmodel_outputœ,œoutput_typesœ:[œLanguageModelœ]}","target":"ToolCallingAgent-rVWeq","targetHandle":"{œfieldNameœ:œllmœ,œidœ:œToolCallingAgent-rVWeqœ,œinputTypesœ:[œLanguageModelœ],œtypeœ:œotherœ}","data":{"targetHandle":{"fieldName":"llm","id":"ToolCallingAgent-rVWeq","inputTypes":["LanguageModel"],"type":"other"},"sourceHandle":{"dataType":"OpenAIModel","id":"OpenAIModel-Ht8xI","name":"model_output","output_types":["LanguageModel"]}},"id":"reactflow__edge-OpenAIModel-Ht8xI{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-Ht8xIœ,œnameœ:œmodel_outputœ,œoutput_typesœ:[œLanguageModelœ]}-ToolCallingAgent-rVWeq{œfieldNameœ:œllmœ,œidœ:œToolCallingAgent-rVWeqœ,œinputTypesœ:[œLanguageModelœ],œtypeœ:œotherœ}","selected":false,"className":""},{"source":"ToolCallingAgent-rVWeq","sourceHandle":"{œdataTypeœ:œToolCallingAgentœ,œidœ:œToolCallingAgent-rVWeqœ,œnameœ:œresponseœ,œoutput_typesœ:[œMessageœ]}","target":"Prompt-Lbxk6","targetHandle":"{œfieldNameœ:œTASK_LISTœ,œidœ:œPrompt-Lbxk6œ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}","data":{"targetHandle":{"fieldName":"TASK_LIST","id":"Prompt-Lbxk6","inputTypes":["Message","Text"],"type":"str"},"sourceHandle":{"dataType":"ToolCallingAgent","id":"ToolCallingAgent-rVWeq","name":"response","output_types":["Message"]}},"id":"reactflow__edge-ToolCallingAgent-rVWeq{œdataTypeœ:œToolCallingAgentœ,œidœ:œToolCallingAgent-rVWeqœ,œnameœ:œresponseœ,œoutput_typesœ:[œMessageœ]}-Prompt-Lbxk6{œfieldNameœ:œTASK_LISTœ,œidœ:œPrompt-Lbxk6œ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}","selected":false,"className":""},{"source":"OpenAIModel-OTfnt","sourceHandle":"{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-OTfntœ,œnameœ:œmodel_outputœ,œoutput_typesœ:[œLanguageModelœ]}","target":"ToolCallingAgent-GurdE","targetHandle":"{œfiel
dNameœ:œllmœ,œidœ:œToolCallingAgent-GurdEœ,œinputTypesœ:[œLanguageModelœ],œtypeœ:œotherœ}","data":{"targetHandle":{"fieldName":"llm","id":"ToolCallingAgent-GurdE","inputTypes":["LanguageModel"],"type":"other"},"sourceHandle":{"dataType":"OpenAIModel","id":"OpenAIModel-OTfnt","name":"model_output","output_types":["LanguageModel"]}},"id":"reactflow__edge-OpenAIModel-OTfnt{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-OTfntœ,œnameœ:œmodel_outputœ,œoutput_typesœ:[œLanguageModelœ]}-ToolCallingAgent-GurdE{œfieldNameœ:œllmœ,œidœ:œToolCallingAgent-GurdEœ,œinputTypesœ:[œLanguageModelœ],œtypeœ:œotherœ}","selected":false,"className":""},{"source":"Prompt-Lbxk6","sourceHandle":"{œdataTypeœ:œPromptœ,œidœ:œPrompt-Lbxk6œ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}","target":"ToolCallingAgent-GurdE","targetHandle":"{œfieldNameœ:œsystem_promptœ,œidœ:œToolCallingAgent-GurdEœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","data":{"targetHandle":{"fieldName":"system_prompt","id":"ToolCallingAgent-GurdE","inputTypes":["Message"],"type":"str"},"sourceHandle":{"dataType":"Prompt","id":"Prompt-Lbxk6","name":"prompt","output_types":["Message"]}},"id":"reactflow__edge-Prompt-Lbxk6{œdataTypeœ:œPromptœ,œidœ:œPrompt-Lbxk6œ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-ToolCallingAgent-GurdE{œfieldNameœ:œsystem_promptœ,œidœ:œToolCallingAgent-GurdEœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","selected":false,"className":""},{"source":"AddContentToPage-vrAvx","sourceHandle":"{œdataTypeœ:œAddContentToPageœ,œidœ:œAddContentToPage-vrAvxœ,œnameœ:œexample_tool_outputœ,œoutput_typesœ:[œToolœ]}","target":"ToolCallingAgent-GurdE","targetHandle":"{œfieldNameœ:œtoolsœ,œidœ:œToolCallingAgent-GurdEœ,œinputTypesœ:[œToolœ,œBaseToolœ],œtypeœ:œotherœ}","data":{"targetHandle":{"fieldName":"tools","id":"ToolCallingAgent-GurdE","inputTypes":["Tool","BaseTool"],"type":"other"},"sourceHandle":{"dataType":"AddContentToPage","id":"AddContentToPage-vrAvx","name":"example_tool_output","output_types":["Tool"]}},"id":"reactflow__edge-AddContentToPage-vrAvx{œdataTypeœ:œAddContentToPageœ,œidœ:œAddContentToPage-vrAvxœ,œnameœ:œexample_tool_outputœ,œoutput_typesœ:[œToolœ]}-ToolCallingAgent-GurdE{œfieldNameœ:œtoolsœ,œidœ:œToolCallingAgent-GurdEœ,œinputTypesœ:[œToolœ,œBaseToolœ],œtypeœ:œotherœ}","selected":false,"className":""},{"source":"NotionPageCreator-Exc7f","sourceHandle":"{œdataTypeœ:œNotionPageCreatorœ,œidœ:œNotionPageCreator-Exc7fœ,œnameœ:œexample_tool_outputœ,œoutput_typesœ:[œToolœ]}","target":"ToolCallingAgent-GurdE","targetHandle":"{œfieldNameœ:œtoolsœ,œidœ:œToolCallingAgent-GurdEœ,œinputTypesœ:[œToolœ,œBaseToolœ],œtypeœ:œotherœ}","data":{"targetHandle":{"fieldName":"tools","id":"ToolCallingAgent-GurdE","inputTypes":["Tool","BaseTool"],"type":"other"},"sourceHandle":{"dataType":"NotionPageCreator","id":"NotionPageCreator-Exc7f","name":"example_tool_output","output_types":["Tool"]}},"id":"reactflow__edge-NotionPageCreator-Exc7f{œdataTypeœ:œNotionPageCreatorœ,œidœ:œNotionPageCreator-Exc7fœ,œnameœ:œexample_tool_outputœ,œoutput_typesœ:[œToolœ]}-ToolCallingAgent-GurdE{œfieldNameœ:œtoolsœ,œidœ:œToolCallingAgent-GurdEœ,œinputTypesœ:[œToolœ,œBaseToolœ],œtypeœ:œotherœ}","selected":false,"className":""},{"source":"NotionDatabaseProperties-IjzLV","sourceHandle":"{œdataTypeœ:œNotionDatabasePropertiesœ,œidœ:œNotionDatabaseProperties-IjzLVœ,œnameœ:œexample_tool_outputœ,œoutput_typesœ:[œToolœ]}","target":"ToolCallingAgent-GurdE","targetHandle":"{œfieldNameœ:œtoolsœ,œidœ:œToolCallingAgent-GurdEœ,œinputTypesœ:[œToolœ,œBaseToolœ],œtypeœ:œotherœ}","data":{"targetHandle":{"fiel
dName":"tools","id":"ToolCallingAgent-GurdE","inputTypes":["Tool","BaseTool"],"type":"other"},"sourceHandle":{"dataType":"NotionDatabaseProperties","id":"NotionDatabaseProperties-IjzLV","name":"example_tool_output","output_types":["Tool"]}},"id":"reactflow__edge-NotionDatabaseProperties-IjzLV{œdataTypeœ:œNotionDatabasePropertiesœ,œidœ:œNotionDatabaseProperties-IjzLVœ,œnameœ:œexample_tool_outputœ,œoutput_typesœ:[œToolœ]}-ToolCallingAgent-GurdE{œfieldNameœ:œtoolsœ,œidœ:œToolCallingAgent-GurdEœ,œinputTypesœ:[œToolœ,œBaseToolœ],œtypeœ:œotherœ}","selected":false,"className":""},{"source":"NotionPageUpdate-bexvy","sourceHandle":"{œdataTypeœ:œNotionPageUpdateœ,œidœ:œNotionPageUpdate-bexvyœ,œnameœ:œexample_tool_outputœ,œoutput_typesœ:[œToolœ]}","target":"ToolCallingAgent-GurdE","targetHandle":"{œfieldNameœ:œtoolsœ,œidœ:œToolCallingAgent-GurdEœ,œinputTypesœ:[œToolœ,œBaseToolœ],œtypeœ:œotherœ}","data":{"targetHandle":{"fieldName":"tools","id":"ToolCallingAgent-GurdE","inputTypes":["Tool","BaseTool"],"type":"other"},"sourceHandle":{"dataType":"NotionPageUpdate","id":"NotionPageUpdate-bexvy","name":"example_tool_output","output_types":["Tool"]}},"id":"reactflow__edge-NotionPageUpdate-bexvy{œdataTypeœ:œNotionPageUpdateœ,œidœ:œNotionPageUpdate-bexvyœ,œnameœ:œexample_tool_outputœ,œoutput_typesœ:[œToolœ]}-ToolCallingAgent-GurdE{œfieldNameœ:œtoolsœ,œidœ:œToolCallingAgent-GurdEœ,œinputTypesœ:[œToolœ,œBaseToolœ],œtypeœ:œotherœ}","selected":false,"className":""},{"source":"NotionSearch-EdSJb","sourceHandle":"{œdataTypeœ:œNotionSearchœ,œidœ:œNotionSearch-EdSJbœ,œnameœ:œexample_outputœ,œoutput_typesœ:[œDataœ]}","target":"ParseData-vYVwu","targetHandle":"{œfieldNameœ:œdataœ,œidœ:œParseData-vYVwuœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}","data":{"targetHandle":{"fieldName":"data","id":"ParseData-vYVwu","inputTypes":["Data"],"type":"other"},"sourceHandle":{"dataType":"NotionSearch","id":"NotionSearch-EdSJb","name":"example_output","output_types":["Data"]}},"id":"reactflow__edge-NotionSearch-EdSJb{œdataTypeœ:œNotionSearchœ,œidœ:œNotionSearch-EdSJbœ,œnameœ:œexample_outputœ,œoutput_typesœ:[œDataœ]}-ParseData-vYVwu{œfieldNameœ:œdataœ,œidœ:œParseData-vYVwuœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}","selected":false,"className":""},{"source":"ParseData-vYVwu","sourceHandle":"{œdataTypeœ:œParseDataœ,œidœ:œParseData-vYVwuœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}","target":"Prompt-Lbxk6","targetHandle":"{œfieldNameœ:œDATABASESœ,œidœ:œPrompt-Lbxk6œ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}","data":{"targetHandle":{"fieldName":"DATABASES","id":"Prompt-Lbxk6","inputTypes":["Message","Text"],"type":"str"},"sourceHandle":{"dataType":"ParseData","id":"ParseData-vYVwu","name":"text","output_types":["Message"]}},"id":"reactflow__edge-ParseData-vYVwu{œdataTypeœ:œParseDataœ,œidœ:œParseData-vYVwuœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-Prompt-Lbxk6{œfieldNameœ:œDATABASESœ,œidœ:œPrompt-Lbxk6œ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}","selected":false,"className":""},{"source":"ToolCallingAgent-GurdE","sourceHandle":"{œdataTypeœ:œToolCallingAgentœ,œidœ:œToolCallingAgent-GurdEœ,œnameœ:œresponseœ,œoutput_typesœ:[œMessageœ]}","target":"ChatOutput-zBv53","targetHandle":"{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-zBv53œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","data":{"targetHandle":{"fieldName":"input_value","id":"ChatOutput-zBv53","inputTypes":["Message"],"type":"str"},"sourceHandle":{"dataType":"ToolCallingAgent","id":"ToolCallingAgent-GurdE","name":"response","output_types":["Message"]}},"id":"reactflow__edge-ToolCallingAgent-Gu
rdE{œdataTypeœ:œToolCallingAgentœ,œidœ:œToolCallingAgent-GurdEœ,œnameœ:œresponseœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-zBv53{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-zBv53œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","selected":false,"className":""},{"source":"NotionUserList-wFEb1","sourceHandle":"{œdataTypeœ:œNotionUserListœ,œidœ:œNotionUserList-wFEb1œ,œnameœ:œexample_outputœ,œoutput_typesœ:[œDataœ]}","target":"ParseData-WKjW6","targetHandle":"{œfieldNameœ:œdataœ,œidœ:œParseData-WKjW6œ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}","data":{"targetHandle":{"fieldName":"data","id":"ParseData-WKjW6","inputTypes":["Data"],"type":"other"},"sourceHandle":{"dataType":"NotionUserList","id":"NotionUserList-wFEb1","name":"example_output","output_types":["Data"]}},"id":"reactflow__edge-NotionUserList-wFEb1{œdataTypeœ:œNotionUserListœ,œidœ:œNotionUserList-wFEb1œ,œnameœ:œexample_outputœ,œoutput_typesœ:[œDataœ]}-ParseData-WKjW6{œfieldNameœ:œdataœ,œidœ:œParseData-WKjW6œ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}","className":""},{"source":"ParseData-WKjW6","sourceHandle":"{œdataTypeœ:œParseDataœ,œidœ:œParseData-WKjW6œ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}","target":"Prompt-Lbxk6","targetHandle":"{œfieldNameœ:œUSERSœ,œidœ:œPrompt-Lbxk6œ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}","data":{"targetHandle":{"fieldName":"USERS","id":"Prompt-Lbxk6","inputTypes":["Message","Text"],"type":"str"},"sourceHandle":{"dataType":"ParseData","id":"ParseData-WKjW6","name":"text","output_types":["Message"]}},"id":"reactflow__edge-ParseData-WKjW6{œdataTypeœ:œParseDataœ,œidœ:œParseData-WKjW6œ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-Prompt-Lbxk6{œfieldNameœ:œUSERSœ,œidœ:œPrompt-Lbxk6œ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}","className":""},{"source":"CurrentDateComponent-WOwNq","sourceHandle":"{œdataTypeœ:œCurrentDateComponentœ,œidœ:œCurrentDateComponent-WOwNqœ,œnameœ:œcurrent_dateœ,œoutput_typesœ:[œMessageœ]}","target":"Prompt-19rub","targetHandle":"{œfieldNameœ:œCURRENT_DATEœ,œidœ:œPrompt-19rubœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}","data":{"targetHandle":{"fieldName":"CURRENT_DATE","id":"Prompt-19rub","inputTypes":["Message","Text"],"type":"str"},"sourceHandle":{"dataType":"CurrentDateComponent","id":"CurrentDateComponent-WOwNq","name":"current_date","output_types":["Message"]}},"id":"reactflow__edge-CurrentDateComponent-WOwNq{œdataTypeœ:œCurrentDateComponentœ,œidœ:œCurrentDateComponent-WOwNqœ,œnameœ:œcurrent_dateœ,œoutput_typesœ:[œMessageœ]}-Prompt-19rub{œfieldNameœ:œCURRENT_DATEœ,œidœ:œPrompt-19rubœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}","className":""},{"source":"CurrentDateComponent-PZ8xJ","sourceHandle":"{œdataTypeœ:œCurrentDateComponentœ,œidœ:œCurrentDateComponent-PZ8xJœ,œnameœ:œcurrent_dateœ,œoutput_typesœ:[œMessageœ]}","target":"Prompt-Lbxk6","targetHandle":"{œfieldNameœ:œCURRENT_DATEœ,œidœ:œPrompt-Lbxk6œ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}","data":{"targetHandle":{"fieldName":"CURRENT_DATE","id":"Prompt-Lbxk6","inputTypes":["Message","Text"],"type":"str"},"sourceHandle":{"dataType":"CurrentDateComponent","id":"CurrentDateComponent-PZ8xJ","name":"current_date","output_types":["Message"]}},"id":"reactflow__edge-CurrentDateComponent-PZ8xJ{œdataTypeœ:œCurrentDateComponentœ,œidœ:œCurrentDateComponent-PZ8xJœ,œnameœ:œcurrent_dateœ,œoutput_typesœ:[œMessageœ]}-Prompt-Lbxk6{œfieldNameœ:œCURRENT_DATEœ,œidœ:œPrompt-Lbxk6œ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}","className":""}],"viewport":{"x":-65.48833753518215,"y":119.49034539812101,"zoom":0.5588906662759379}},"description":"The Notion 
Agent for Meeting Notes is an AI-powered tool that automatically processes meeting transcripts and updates your Notion workspace accordingly. It identifies tasks, action items, and key points from your meetings, then creates new tasks or updates existing ones in Notion without manual input.\n\nTo use it, simply add your API Keys and provide a meeting transcript. The agent will analyze it, interact with your Notion workspace to make necessary updates, and give you a summary of actions taken. This streamlines your workflow, ensuring important meeting outcomes are captured and organized in Notion effortlessly.","name":"Notion Agent - Meeting Notes ","last_tested_version":"1.0.17.dev8","endpoint_name":null,"is_component":false} \ No newline at end of file diff --git a/docs/docs/Integrations/Notion/_category_.json b/docs/docs/Integrations/Notion/_category_.json new file mode 100644 index 000000000000..c245462efa38 --- /dev/null +++ b/docs/docs/Integrations/Notion/_category_.json @@ -0,0 +1 @@ +{"position":5, "label":"Notion"} \ No newline at end of file diff --git a/docs/docs/Integrations/Notion/integrations-notion.md b/docs/docs/Integrations/Notion/integrations-notion.md new file mode 100644 index 000000000000..6664991a0717 --- /dev/null +++ b/docs/docs/Integrations/Notion/integrations-notion.md @@ -0,0 +1,89 @@ +--- +title: Setup +sidebar_position: 0 +slug: /integrations/notion/setup +--- + +# Set up a Notion App + +To use Notion components in Langflow, you first need to create a Notion integration and configure it with the necessary capabilities. This guide will walk you through the process of setting up a Notion integration and granting it access to your Notion databases. + +## Prerequisites + +- A Notion account with access to the workspace where you want to use the integration. +- Admin permissions in the Notion workspace to create and manage integrations. + +## Create a Notion Integration + +1. Go to the [Notion Integrations](https://www.notion.com/my-integrations) page. +2. Click on the "New integration" button. +3. Give your integration a name and select the workspace where you want to use it. +4. Click "Submit" to create the integration. + +:::info +When creating the integration, make sure to enable the necessary capabilities based on your requirements. Refer to the [Notion Integration Capabilities](https://developers.notion.com/reference/capabilities) documentation for more information on each capability. +::: + + +## Configure Integration Capabilities + +After creating the integration, you need to configure its capabilities to define what actions it can perform and what data it can access. + +1. In the integration settings page, go to the **Capabilities** tab. +2. Enable the required capabilities for your integration. For example: + - If your integration needs to read data from Notion, enable the "Read content" capability. + - If your integration needs to create new content in Notion, enable the "Insert content" capability. + - If your integration needs to update existing content in Notion, enable the "Update content" capability. +3. Configure the user information access level based on your integration's requirements. +4. Save the changes. + +## Obtain Integration Token + +To authenticate your integration with Notion, you need to obtain an integration token. + +1. In the integration settings page, go to the "Secrets" tab. +2. Copy the "Internal Integration Token" value. This token will be used to authenticate your integration with Notion. 
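+
+To sanity-check the token, you can call the Notion API directly. The helper below is our own illustration, but the endpoint, headers, and API version mirror what Langflow's **List Users** component sends under the hood:
+
+```python
+import requests
+
+
+def check_notion_token(notion_secret: str) -> list[dict]:
+    """Return the workspace's users; a 401 response means the token is invalid."""
+    response = requests.get(
+        "https://api.notion.com/v1/users",
+        headers={
+            "Authorization": f"Bearer {notion_secret}",
+            "Notion-Version": "2022-06-28",
+        },
+        timeout=30,
+    )
+    response.raise_for_status()
+    return response.json()["results"]
+```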
+ +:::warning +Your integration token is a sensitive piece of information. Make sure to keep it secure and never share it publicly. Store it safely in your Langflow configuration or environment variables. +::: + +## Grant Integration Access to Notion Databases + +For your integration to interact with Notion databases, you need to grant it access to the specific databases it will be working with. + +1. Open the Notion database that you want your integration to access. +2. Click on the "Share" button in the top-right corner of the page. +3. In the "Invite" section, select your integration from the list. +4. Click "Invite" to grant the integration access to the database. + +:::info +If your database contains references to other databases, you need to grant the integration access to those referenced databases as well. Repeat steps 1 through 4 for each referenced database to ensure your integration has the necessary access. +::: + +## Build with Notion Components in Langflow + +Once you have set up your Notion integration and granted it access to the required databases, you can start using the Notion components in Langflow. + +Langflow provides the following Notion components: + +- **Search**: Searches all pages and databases that have been shared with the integration. You can filter results to either pages or databases and specify the sort direction. +- **List Users**: Retrieves a list of users from the Notion workspace. +- **List Database Properties**: Retrieves the properties of a specified Notion database. +- **Create Page**: Creates a new page in a specified Notion database with the provided properties. +- **Update Page Property**: Updates the properties of an existing Notion page. +- **Add Content to Page**: Converts markdown text to Notion blocks and appends them to a specified Notion page. +- **List Pages**: Queries a Notion database with filtering and sorting options. +- **Page Content Viewer**: Retrieves the content of a Notion page as plain text. + +Each of these components outputs both "Data" and "Tool": +- The "Data" output can be used directly in your Langflow flow for further processing or display. +- The "Tool" output can be utilized by Langflow Agents, allowing them to interact with Notion programmatically. + + +## Additional Resources + +- [Notion API Documentation](https://developers.notion.com/docs/getting-started) +- [Notion Integration Capabilities](https://developers.notion.com/reference/capabilities) + +If you encounter any issues or have questions, please reach out to our support team or consult the Langflow community forums. diff --git a/docs/docs/Integrations/Notion/notion-agent-conversational.md b/docs/docs/Integrations/Notion/notion-agent-conversational.md new file mode 100644 index 000000000000..150c6e53c6e5 --- /dev/null +++ b/docs/docs/Integrations/Notion/notion-agent-conversational.md @@ -0,0 +1,145 @@ +--- +title: Notion Conversational Agent +sidebar_position: 2 +slug: /integrations/notion/notion-agent-conversational +--- + +The Notion Conversational Agent is an AI-powered assistant that interacts with your Notion workspace through natural language conversations. This flow performs Notion-related tasks like creating pages, searching for information, and managing content, all through a chat interface.
+ +## Prerequisites + +--- + +- [Notion App](/integrations/notion/setup) +- [Notion account and API key](https://www.notion.so/my-integrations) +- [OpenAI API key](https://platform.openai.com/account/api-keys) +- [Download the Conversational Agent flow](./Conversational_Notion_Agent.json) (download link) + +![Notion Components Toolkit](./notion_conversational_agent_tools.png) + +## Flow Components + +--- + + +### Input and Output +- **Chat Input**: Accepts user queries and commands +- **Chat Output**: Displays the agent's responses + +### Language Model +- **OpenAI Model**: Processes user input and generates responses + +### Agent and Tools +- **Tool Calling Agent**: Coordinates the use of various Notion tools based on user input +- **Toolkit**: Combines multiple Notion-specific tools into a single toolkit +- **Notion Tools**: Various components for interacting with Notion, including: + - List Users + - List Database Properties + - List Pages + - Page Content Viewer + - Create Page + - Update Page Property + - Add Content to Page + - Search + +### Memory and Prompt +- **Chat Memory**: Stores conversation history +- **Prompt**: Provides system instructions and context for the agent +- **Current Date**: Supplies the current date and time for context + + +## Run the Conversational Notion Agent + +--- + +1. Open Langflow and create a new project. +2. Add the components listed above to your flow canvas, or download the [Conversational Agent flow](./Conversational_Notion_Agent.json) (download link) and **Import** the JSON file into Langflow. +3. Connect the components as shown in the flow diagram. +4. Input the Notion and OpenAI API keys in their respective components. +5. To start the flow, click **Play** in Langflow. +6. In the chat interface, enter a Notion-related request or question. For example: + - "Create a new page in my task database" + - "Search for pages about project management" + - "List all users in my Notion workspace" +7. The agent will process your request, use the appropriate Notion tools, and provide a response. +8. Continue the conversation, asking follow-up questions or giving new commands as needed. + +## Example Interactions + +--- + +``` +User: List all the users in my Notion workspace. + +Agent: Certainly! I'll use the Notion User List tool to retrieve that information for you. One moment please. + +[Agent uses the Notion User List tool] + +Here's a list of users in your Notion workspace: + +1. John Doe (john@example.com) +2. Jane Smith (jane@example.com) +3. Alex Johnson (alex@example.com) + +Is there anything specific you'd like to know about these users or any other Notion-related task I can help you with? + +User: Create a new page in my Projects database with the title "Website Redesign". + +Agent: Certainly! I'd be happy to create a new page in your Projects database with the title "Website Redesign". I'll use the Notion Page Creator tool to do this. First, let me check the properties of your Projects database to ensure we include all necessary information. + +[Agent uses the Notion Database Properties tool] + +I've retrieved the properties for your Projects database. Now, I'll create the new page with the title "Website Redesign". One moment please. + +[Agent uses the Notion Page Creator tool] + +Great news! I've successfully created a new page in your Projects database with the title "Website Redesign".
Here are the details: + +- Title: Website Redesign +- Page ID: [page_id_here] +- URL: [page_url_here] + +Is there any additional information you'd like me to add to this new project page, such as a description, due date, or assigned team members? + +User: Yes, please add a description: "Redesign company website to improve user experience and modernize the look." + +Agent: Certainly! I'd be happy to add that description to the "Website Redesign" project page. I'll use the Add Content to Page tool to update the page with this information. + +[Agent uses the Add Content to Page tool] + +I've successfully added the description to your "Website Redesign" project page. Here's a summary of what I've done: + +1. Located the "Website Redesign" page using the page ID from our previous interaction. +2. Added the following content to the page: + + Description: Redesign company website to improve user experience and modernize the look. + +The description has been added as a new text block on the page. Is there anything else you'd like me to add or modify on this project page? + +``` + + +## Customization + +--- + +Customize this flow to meet your team's specific needs by: + +1. Adjusting the system prompt to change the agent's behavior or knowledge base. +2. Adding or removing Notion tools based on your specific needs. +3. Modifying the OpenAI model parameters (e.g., temperature) to adjust the agent's response style. + +## Troubleshooting + +--- + +If you encounter issues: + +1. Ensure all API keys are correctly set and have the necessary permissions. +2. Check that your Notion integration has access to the relevant pages and databases. +3. Verify that all components are properly connected in the flow. +4. Review the Langflow logs for any error messages. + +For more advanced usage and integration options, refer to the [Notion API documentation](https://developers.notion.com/) and [Langflow documentation](/). \ No newline at end of file diff --git a/docs/docs/Integrations/Notion/notion-agent-meeting-notes.md b/docs/docs/Integrations/Notion/notion-agent-meeting-notes.md new file mode 100644 index 000000000000..7980bc15fd6c --- /dev/null +++ b/docs/docs/Integrations/Notion/notion-agent-meeting-notes.md @@ -0,0 +1,176 @@ +--- +title: Notion Meeting Notes Agent +sidebar_position: 1 +slug: /integrations/notion/notion-agent-meeting-notes +--- + +The Notion Agent for Meeting Notes is an AI-powered tool that automatically processes meeting transcripts and updates your Notion workspace. It identifies tasks, action items, and key points from your meetings, then creates new tasks or updates existing ones in Notion without manual input. + +## Prerequisites +--- + +- [Notion App](/integrations/notion/setup) +- [Notion API key](https://www.notion.so/my-integrations) +- [OpenAI API key](https://platform.openai.com/account/api-keys) +- [Download the Meeting Notes Agent flow](./Meeting_Notes_Agent.json) (download link) + +:::warning + +Before using this flow, ensure you have obtained the necessary API keys from Notion and OpenAI. These keys are essential for the flow to function properly. Keep them secure and do not share them publicly. + +::: + +## Components + +--- + +![Notion Meeting Agent Part 1](./notion_meeting_agent_part_1.png) + + + +### Meeting Transcript (Text Input) + +This component allows users to input the meeting transcript directly into the flow. + +### List Users (Notion Component) + +- **Purpose**: Retrieves a list of users from the Notion workspace.
+- **Input**: Notion Secret (API key) +- **Output**: List of user data + +### List Databases (Notion Component) + +- **Purpose**: Searches and lists all databases in the Notion workspace. +- **Input**: + - Notion Secret (API key) + - Query (optional) + - Filter Type (default: database) + - Sort Direction +- **Output**: List of database data + +### Prompt + +This component creates a dynamic prompt template using the following inputs: +- Meeting Transcript +- List of Users +- List of Databases +- Current Date + +### Meeting Summarizer (Tool Calling Agent) + +- **Purpose**: Analyzes the meeting transcript and identifies tasks and action items. +- **Inputs**: + - System Prompt (from the Prompt component) + - Language Model (OpenAI) + - Tools: + - Notion Search + - List Database Properties + - Create Page + - Update Page Property + - Add Content to Page + +![Notion Meeting Agent Part 2](./notion_meeting_agent_part_2.png) + +### Notion Agent (Tool Calling Agent) + +- **Purpose**: Executes actions in Notion based on the meeting summary. +- **Inputs**: + - System Prompt (from the second Prompt component) + - Language Model (OpenAI) + - Tools: + - List Database Properties + - Create Page + - Update Page Property + - Add Content to Page + +### Notion Components (Tools) + +#### List Database Properties + +- **Purpose**: Retrieves the properties of a specified Notion database. +- **Input**: + - Database ID + - Notion Secret (API key) + +#### Create Page + +- **Purpose**: Creates a new page in a Notion database. A standalone API sketch follows the run instructions below. +- **Inputs**: + - Database ID + - Notion Secret (API key) + - Properties (JSON) + +#### Update Page Property + +- **Purpose**: Updates the properties of an existing Notion page. +- **Inputs**: + - Page ID + - Notion Secret (API key) + - Properties to update + +#### Add Content to Page + +- **Purpose**: Converts markdown text to Notion blocks and appends them to a specified Notion page. +- **Inputs**: + - Page/Block ID + - Notion Secret (API key) + - Markdown text + +### Chat Output + +Displays the final output of the Notion Agent in the Playground. + +## Flow Process + +--- + +1. The user inputs a meeting transcript. +2. The flow retrieves the list of Notion users and databases. +3. A prompt is generated using the transcript, user list, database list, and current date. +4. The Meeting Summarizer analyzes the transcript and identifies tasks and action items. +5. The Notion Agent uses the meeting summary to: + - Create new pages for new tasks + - Update existing pages for existing tasks + - Add content to pages with meeting notes +6. The Chat Output displays a summary of actions taken in Notion. + +## Run the Notion Meeting Notes flow + +--- + +To run the Notion Agent for Meeting Notes: + +1. Open Langflow and create a new project. +2. Add the components listed above to your flow canvas, or download the [Meeting Notes Agent flow](./Meeting_Notes_Agent.json) (download link) and **Import** the JSON file into Langflow. +3. Connect the components as shown in the flow diagram. +4. Input the Notion and OpenAI API keys in their respective components. +5. Paste your meeting transcript into the Meeting Transcript component. +6. Run the flow by clicking **Play** on the **Chat Output** component. +7. Review the output in the Chat Output component, which will summarize the actions taken in your Notion workspace. + +For optimal results, use detailed meeting transcripts. The quality of the output depends on the comprehensiveness of the input provided.
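+
+For reference, the **Create Page** tool wraps a single Notion API request. The sketch below is a minimal standalone version of that call; it assumes a hypothetical database whose title property is named `Name`, so adjust the payload to match the schema reported by **List Database Properties**:
+
+```python
+import requests
+
+
+def create_page(notion_secret: str, database_id: str, title: str) -> dict:
+    """Create a page as a new row of the given database (assumed title property: "Name")."""
+    response = requests.post(
+        "https://api.notion.com/v1/pages",
+        headers={
+            "Authorization": f"Bearer {notion_secret}",
+            "Notion-Version": "2022-06-28",
+            "Content-Type": "application/json",
+        },
+        json={
+            "parent": {"database_id": database_id},
+            # Assumed schema: a title property called "Name".
+            "properties": {"Name": {"title": [{"text": {"content": title}}]}},
+        },
+        timeout=30,
+    )
+    response.raise_for_status()
+    return response.json()
+```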
+ +## Customization + +--- + +Customize this flow to meet your team's specific needs by: + +1. Adjusting the system prompt to change the agent's behavior or knowledge base. +2. Adding or removing Notion tools based on your specific needs. +3. Modifying the OpenAI model parameters (e.g., temperature) to adjust the agent's response style. + +## Troubleshooting + +--- + +If you encounter issues: + +1. Ensure all API keys are correctly set and have the necessary permissions. +2. Check that your Notion integration has access to the relevant pages and databases. +3. Verify that all components are properly connected in the flow. +4. Review the Langflow logs for any error messages. + +For more advanced usage and integration options, refer to the [Notion API documentation](https://developers.notion.com/) and [Langflow documentation](/). \ No newline at end of file diff --git a/docs/docs/Integrations/Notion/notion_conversational_agent_tools.png b/docs/docs/Integrations/Notion/notion_conversational_agent_tools.png new file mode 100644 index 000000000000..c981e6c6645c Binary files /dev/null and b/docs/docs/Integrations/Notion/notion_conversational_agent_tools.png differ diff --git a/docs/docs/Integrations/Notion/notion_meeting_agent_part_1.png b/docs/docs/Integrations/Notion/notion_meeting_agent_part_1.png new file mode 100644 index 000000000000..db32f5b9a506 Binary files /dev/null and b/docs/docs/Integrations/Notion/notion_meeting_agent_part_1.png differ diff --git a/docs/docs/Integrations/Notion/notion_meeting_agent_part_2.png b/docs/docs/Integrations/Notion/notion_meeting_agent_part_2.png new file mode 100644 index 000000000000..387d6f8f4525 Binary files /dev/null and b/docs/docs/Integrations/Notion/notion_meeting_agent_part_2.png differ diff --git a/docs/docs/Integrations/assemblyai-components.png b/docs/docs/Integrations/assemblyai-components.png new file mode 100644 index 000000000000..788feb0aa256 Binary files /dev/null and b/docs/docs/Integrations/assemblyai-components.png differ diff --git a/docs/docs/Integrations/integrations-assemblyai.md b/docs/docs/Integrations/integrations-assemblyai.md new file mode 100644 index 000000000000..3c9ec98b4015 --- /dev/null +++ b/docs/docs/Integrations/integrations-assemblyai.md @@ -0,0 +1,165 @@ +--- +title: AssemblyAI +sidebar_position: 1 +slug: /integrations-assemblyai +--- + + + +# AssemblyAI + +The AssemblyAI components allow you to apply powerful Speech AI models to your app for tasks like: + +- Transcribing audio and video files +- Formatting transcripts +- Generating subtitles +- Applying LLMs to audio files + +More info about AssemblyAI: + +- [Website](https://www.assemblyai.com/) +- [AssemblyAI API Docs](https://www.assemblyai.com/docs) +- [Get a Free API key](https://www.assemblyai.com/dashboard/signup) + + +## Prerequisites + +You need an **AssemblyAI API key**. After creating a free account, you'll find the API key in your dashboard. [Get a Free API key here](https://www.assemblyai.com/dashboard/signup). + +Enter the key in the *AssemblyAI API Key* field in all components that require the key. + +Optional: To use LeMUR, you need to upgrade your AssemblyAI account, since LeMUR is not included in the free account. + +## Components + +![AssemblyAI Components](./assemblyai-components.png) + +### AssemblyAI Start Transcript + +This component allows you to submit an audio or video file for transcription. + +**Tip**: You can freeze the path of this component to only submit the file once.
+ +- **Input**: + - AssemblyAI API Key: Your API key. + - Audio File: The audio or video file to transcribe. + - Speech Model (Optional): Select the class of models. Default is *Best*. See [speech models](https://www.assemblyai.com/docs/speech-to-text/speech-recognition#select-the-speech-model-with-best-and-nano) for more info. + - Automatic Language Detection (Optional): Enable automatic language detection. + - Language (Optional): The language of the audio file. Can be set manually if automatic language detection is disabled. + See [supported languages](https://www.assemblyai.com/docs/getting-started/supported-languages) for a list of supported language codes. + - Enable Speaker Labels (Optional): Detect speakers in an audio file and what each speaker said. + - Expected Number of Speakers (Optional): Set the expected number of speakers, if Speaker Labels is enabled. + - Audio File URL (Optional): The URL of the audio or video file to transcribe. Can be used instead of *Audio File*. + - Punctuate (Optional): Apply punctuation. Default is true. + - Format Text (Optional): Apply casing and text formatting. Default is true. + +- **Output**: + - Transcript ID: The ID of the transcript. + + +### AssemblyAI Poll Transcript + +This component allows you to poll a transcript. It checks the status of the transcript every few seconds until the transcription is completed. + +- **Input**: + - AssemblyAI API Key: Your API key. + - Polling Interval (Optional): The polling interval in seconds. Default is 3. + +- **Output**: + - Transcription Result: The AssemblyAI JSON response of a completed transcript. Contains the text and other info. + + +### AssemblyAI Get Subtitles + +This component allows you to generate subtitles in SRT or VTT format. + +- **Input**: + - AssemblyAI API Key: Your API key. + - Transcription Result: The output of the *Poll Transcript* component. + - Subtitle Format: The format of the captions (SRT or VTT). + - Character per Caption (Optional): The maximum number of characters per caption (0 for no limit). + +- **Output**: + - Subtitles: A JSON response with the `subtitles` field containing the captions in SRT or VTT format. + + +### AssemblyAI LeMUR + +This component allows you to apply Large Language Models to spoken data using the [AssemblyAI LeMUR framework](https://www.assemblyai.com/docs/lemur). + +LeMUR automatically ingests the transcript as additional context, making it easy to apply LLMs to audio data. You can use it for tasks like summarizing audio, extracting insights, or asking questions. + +- **Input**: + - AssemblyAI API Key: Your API key. + - Transcription Result: The output of the *Poll Transcript* component. + - Input Prompt: The text to prompt the model. You can type your prompt in this field or connect it to a *Prompt* component. + - Final Model: The model that is used for the final prompt after compression is performed. Default is Claude 3.5 Sonnet. + - Temperature (Optional): The temperature to use for the model. Default is 0.0. + - Max Output Size (Optional): Max output size in tokens, up to 4000. Default is 2000. + - Endpoint (Optional): The LeMUR endpoint to use. Default is "task". For "summary" and "question-answer", no prompt input is needed. See [LeMUR API docs](https://www.assemblyai.com/docs/api-reference/lemur/) for more info. + - Questions (Optional): Comma-separated list of your questions. Only used if *Endpoint* is "question-answer". + - Transcript IDs (Optional): Comma-separated list of transcript IDs.
+
+- **Output**:
+  - LeMUR Response: The generated LLM response.
+
+### AssemblyAI List Transcripts
+
+This component can be used as a standalone component to list all previously generated transcripts.
+
+- **Input**:
+  - AssemblyAI API Key: Your API key.
+  - Limit (Optional): Maximum number of transcripts to retrieve. Default is 20, use 0 for all.
+  - Filter (Optional): Filter by transcript status.
+  - Created On (Optional): Only get transcripts created on this date (YYYY-MM-DD).
+  - Throttled Only (Optional): Only get throttled transcripts; overrides the status filter.
+
+- **Output**:
+  - Transcript List: A list of all transcripts with info such as the transcript ID, the status, and the date.
+
+
+## Flow Process
+
+1. The user inputs an audio or video file.
+2. The user can also input an LLM prompt. In this example, we want to generate a summary of the transcript.
+3. The flow submits the audio file for transcription.
+4. The flow checks the status of the transcript every few seconds until transcription is completed.
+5. The flow parses the transcription result and outputs the transcribed text.
+6. The flow also generates subtitles.
+7. The flow applies the LLM prompt to generate a summary.
+8. As a standalone component, all transcripts can be listed.
+
+## Run the Transcription and Speech AI Flow
+
+To run the Transcription and Speech AI Flow:
+
+1. Open Langflow and create a new project.
+2. Add the components listed above to your flow canvas, or download the [AssemblyAI Transcription and Speech AI Flow](./AssemblyAI_Flow.json) (download link) and **Import** the JSON file into Langflow.
+3. Connect the components as shown in the flow diagram. **Tip**: Freeze the path of the *Start Transcript* component to only submit the file once.
+4. Input the AssemblyAI API key in all components that require the key (Start Transcript, Poll Transcript, Get Subtitles, LeMUR, List Transcripts).
+5. Select an audio or video file in the *Start Transcript* component.
+6. Run the flow by clicking **Play** on the *Parse Data* component. Make sure that the specified template is `{text}`.
+7. To generate subtitles, click **Play** on the *Get Subtitles* component.
+8. To apply an LLM to your audio file, click **Play** on the *LeMUR* component. Note that you need an upgraded AssemblyAI account to use LeMUR.
+9. To list all transcripts, click **Play** on the *List Transcripts* component.
+
+
+## Customization
+
+The flow can be customized by:
+
+1. Modifying the parameters in the *Start Transcript* component.
+2. Modifying the subtitle format in the *Get Subtitles* component.
+3. Modifying the LLM prompt used as input to the *LeMUR* component.
+4. Modifying the LLM parameters (e.g., temperature) in the *LeMUR* component.
+
+## Troubleshooting
+
+If you encounter issues:
+
+1. Ensure the API key is correctly set in all components that require the key.
+2. To use LeMUR, you need to upgrade your AssemblyAI account, since this is not included in the free account.
+3. Verify that all components are properly connected in the flow.
+4. Review the Langflow logs for any error messages.
+
+For more advanced usage, refer to the [AssemblyAI API documentation](https://www.assemblyai.com/docs/). If you need more help, you can reach out to [AssemblyAI support](https://www.assemblyai.com/contact/support).
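+
+For reference, these components broadly wrap AssemblyAI's transcript REST API. The following is a minimal sketch of the same submit, poll, and subtitles sequence outside of Langflow, assuming the Python `requests` package is installed, an `ASSEMBLYAI_API_KEY` environment variable is set, and a placeholder audio URL:
+
+```python
+import os
+import time
+
+import requests
+
+API_KEY = os.environ["ASSEMBLYAI_API_KEY"]  # assumed to be set
+BASE_URL = "https://api.assemblyai.com/v2"
+HEADERS = {"authorization": API_KEY}
+
+# Submit an audio file for transcription (Start Transcript).
+response = requests.post(
+    f"{BASE_URL}/transcript",
+    headers=HEADERS,
+    json={"audio_url": "https://example.com/audio.mp3"},  # placeholder URL
+)
+transcript_id = response.json()["id"]
+
+# Check the status every few seconds until done (Poll Transcript).
+while True:
+    transcript = requests.get(
+        f"{BASE_URL}/transcript/{transcript_id}", headers=HEADERS
+    ).json()
+    if transcript["status"] in ("completed", "error"):
+        break
+    time.sleep(3)  # the Poll Transcript component's default interval
+
+print(transcript.get("text"))
+
+# Generate SRT subtitles for the completed transcript (Get Subtitles).
+srt = requests.get(f"{BASE_URL}/transcript/{transcript_id}/srt", headers=HEADERS)
+print(srt.text)
+```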
diff --git a/docs/docs/Integrations/integrations-langfuse.md b/docs/docs/Integrations/integrations-langfuse.md
new file mode 100644
index 000000000000..2e64536f05da
--- /dev/null
+++ b/docs/docs/Integrations/integrations-langfuse.md
@@ -0,0 +1,78 @@
+---
+title: Langfuse
+sidebar_position: 2
+slug: /integrations-langfuse
+---
+
+import Tabs from '@theme/Tabs';
+import TabItem from '@theme/TabItem';
+
+# Integrate Langfuse with Langflow
+
+[Langfuse](https://langfuse.com/) is an observability and analytics platform specifically designed for language models and AI applications.
+
+This guide walks you through how to configure Langflow to collect [tracing](https://langfuse.com/docs/tracing) data about your flow executions and automatically send the data to Langfuse.
+
+## Prerequisites
+
+- A project in Langflow with a runnable flow
+- A Langfuse Cloud account in any [data region](https://langfuse.com/faq/all/cloud-data-regions)
+- A Langfuse organization and project
+
+## Create Langfuse project credentials
+
+1. In Langfuse, go to your project settings, and then create a new set of API keys.
+
+2. Copy the following API key information:
+
+   - Secret Key
+   - Public Key
+   - Host URL
+
+## Set your Langfuse credentials as environment variables
+
+Set your Langfuse project credentials as environment variables in the same environment where you run Langflow.
+
+You can use any method you prefer to set environment variables.
+The following examples show how to set environment variables in a terminal session (Linux or macOS) and in a command prompt session (Windows):
+
+<Tabs>
+<TabItem value="linux-macos" label="Linux or macOS">
+
+```
+export LANGFUSE_SECRET_KEY=SECRET_KEY
+export LANGFUSE_PUBLIC_KEY=PUBLIC_KEY
+export LANGFUSE_HOST=HOST_URL
+```
+
+</TabItem>
+<TabItem value="windows" label="Windows">
+
+```
+set LANGFUSE_SECRET_KEY=SECRET_KEY
+set LANGFUSE_PUBLIC_KEY=PUBLIC_KEY
+set LANGFUSE_HOST=HOST_URL
+```
+
+</TabItem>
+</Tabs>
+
+Replace `SECRET_KEY`, `PUBLIC_KEY`, and `HOST_URL` with the API key information you copied from Langfuse.
+
+## Start Langflow and run a flow
+
+1. Start Langflow in the same terminal or environment where you set the environment variables:
+
+   ```bash
+   python -m langflow run
+   ```
+
+2. In Langflow, open an existing project, and then run a flow.
+
+## View tracing data in Langfuse
+
+Langflow automatically collects and sends tracing data about the flow execution to Langfuse.
+You can view the collected data in your Langfuse project dashboard.
+
+## Disable the Langfuse integration
+
+To disable the Langfuse integration, remove the environment variables you set in the previous steps and restart Langflow.
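+
+## Verify your Langfuse credentials
+
+If traces don't appear in Langfuse, you can check your credentials outside of Langflow with the Langfuse Python SDK. This is a minimal sketch, assuming the `langfuse` package is installed and the environment variables above are set in the same session:
+
+```python
+from langfuse import Langfuse
+
+# The client reads LANGFUSE_SECRET_KEY, LANGFUSE_PUBLIC_KEY, and
+# LANGFUSE_HOST from the environment.
+client = Langfuse()
+
+# auth_check() returns True when the keys and host are valid.
+print(client.auth_check())
+```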
diff --git a/docs/docs/Integrations/integrations-langsmith.md b/docs/docs/Integrations/integrations-langsmith.md index d26b707b0a65..87afcf5d3d42 100644 --- a/docs/docs/Integrations/integrations-langsmith.md +++ b/docs/docs/Integrations/integrations-langsmith.md @@ -1,6 +1,6 @@ --- title: LangSmith -sidebar_position: 0 +sidebar_position: 3 slug: /integrations-langsmith --- diff --git a/docs/docs/Integrations/integrations-langwatch.md b/docs/docs/Integrations/integrations-langwatch.md index 53440e7165d5..e6d3b1ec76fe 100644 --- a/docs/docs/Integrations/integrations-langwatch.md +++ b/docs/docs/Integrations/integrations-langwatch.md @@ -1,6 +1,6 @@ --- title: LangWatch -sidebar_position: 1 +sidebar_position: 4 slug: /integrations-langwatch --- diff --git a/docs/docs/Settings/418277339.png b/docs/docs/Settings/418277339.png deleted file mode 100644 index 9909932f9f79..000000000000 Binary files a/docs/docs/Settings/418277339.png and /dev/null differ diff --git a/docs/docs/Settings/_category_.json b/docs/docs/Settings/_category_.json deleted file mode 100644 index 42ef607366bf..000000000000 --- a/docs/docs/Settings/_category_.json +++ /dev/null @@ -1 +0,0 @@ -{"position":7, "label":"Settings"} \ No newline at end of file diff --git a/docs/docs/Settings/settings-global-variables.md b/docs/docs/Settings/settings-global-variables.md deleted file mode 100644 index 1e7d2f52c7d5..000000000000 --- a/docs/docs/Settings/settings-global-variables.md +++ /dev/null @@ -1,163 +0,0 @@ ---- -title: Global Variables -sidebar_position: 0 -slug: /settings-global-variables ---- - -import ReactPlayer from "react-player"; - -:::info - -This page may contain outdated information. It will be updated as soon as possible. - -::: - - - - -Global Variables are a useful feature of Langflow, allowing you to define reusable variables accessed from any Text field in your project. - - -**TL;DR** - -- Global Variables are reusable variables accessible from any Text field in your project. -- To create one, click the 🌐 button in a Text field and then **+ Add New Variable**. -- Define the **Name**, **Type**, and **Value** of the variable. -- Click **Save Variable** to create it. -- All Credential Global Variables are encrypted and accessible only by you. -- Set _`LANGFLOW_STORE_ENVIRONMENT_VARIABLES`_ to _`true`_ in your `.env` file to add all variables in _`LANGFLOW_VARIABLES_TO_GET_FROM_ENVIRONMENT`_ to your user's Global Variables. - -### Create and Add a Global Variable {#3543d5ef00eb453aa459b97ba85501e5} - - -To create and add a global variable, click the 🌐 button in a Text field, and then click **+ Add New Variable**. - - -Text fields are where you write text without opening a Text area, and are identified with the 🌐 icon. - - -For example, to create an environment variable for the **OpenAI** component: - -1. In the **OpenAI API Key** text field, click the 🌐 button, then **Add New Variable**. -2. Enter `openai_api_key` in the **Variable Name** field. -3. Paste your OpenAI API Key (`sk-...`) in the **Value** field. -4. Select **Credential** for the **Type**. -5. Choose **OpenAI API Key** in the **Apply to Fields** field to apply this variable to all fields named **OpenAI API Key**. -6. Click **Save Variable**. - -You now have a `openai_api_key` global environment variable for your Langflow project. -Subsequently, clicking the 🌐 button in a Text field will display the new variable in the dropdown. - - -:::tip - -You can also create global variables in Settings > Global Variables. 
- -::: - - - - -![](./418277339.png) - - -To view and manage your project's global environment variables, visit **Settings** > **Global Variables**. - - -### Configure Environment Variables in your .env file {#76844a93dbbc4d1ba551ea1a4a89ccdd} - - -Setting `LANGFLOW_STORE_ENVIRONMENT_VARIABLES` to `true` in your `.env` file (default) adds all variables in `LANGFLOW_VARIABLES_TO_GET_FROM_ENVIRONMENT` to your user's Global Variables. - - -These variables are accessible like any other Global Variable. - - -:::info - -To prevent this behavior, set `LANGFLOW_STORE_ENVIRONMENT_VARIABLES` to `false` in your `.env` file. - -::: - - - - -You can specify variables to get from the environment by listing them in `LANGFLOW_VARIABLES_TO_GET_FROM_ENVIRONMENT`, as a comma-separated list (e.g., _`VARIABLE1, VARIABLE2`_). - - -The default list of variables includes the ones below and more: - -- ANTHROPIC_API_KEY -- ASTRA_DB_API_ENDPOINT -- ASTRA_DB_APPLICATION_TOKEN -- AZURE_OPENAI_API_KEY -- AZURE_OPENAI_API_DEPLOYMENT_NAME -- AZURE_OPENAI_API_EMBEDDINGS_DEPLOYMENT_NAME -- AZURE_OPENAI_API_INSTANCE_NAME -- AZURE_OPENAI_API_VERSION -- COHERE_API_KEY -- GOOGLE_API_KEY -- GROQ_API_KEY -- HUGGINGFACEHUB_API_TOKEN -- OPENAI_API_KEY -- PINECONE_API_KEY -- SEARCHAPI_API_KEY -- SERPAPI_API_KEY -- UPSTASH_VECTOR_REST_URL -- UPSTASH_VECTOR_REST_TOKEN -- VECTARA_CUSTOMER_ID -- VECTARA_CORPUS_ID -- VECTARA_API_KEY - - - - -### Precautions - -Global variables are stored in the database, and their values are protected by encryption using a secret -key. To preserve access to your global variables and avoid losing them, you should take a few precautions: - -1. Keep your secret key safe: Even if your database is secure, it won’t be of much use if you can't decrypt -the values. Ideally, you can set your own secret key using the `LANGFLOW_SECRET_KEY` environment variable. If -you don't provide a custom value for the secret key, one will be generated randomly and saved in the Langflow -installation directory. - -2. We use SQLite as the default database, and Langflow saves the database file in the installation directory. -To ensure the security of your data, it’s a good practice to regularly back up this file. If needed, you can -also change the database location by setting the `LANGFLOW_SAVE_DB_IN_CONFIG_DIR` environment variable to true -and configuring `LANGFLOW_CONFIG_DIR` to point to a directory of your choice. Alternatively, you can opt to use -an external database such as PostgreSQL, in which case these configurations are no longer necessary. 
- -For your convenience, if you’re running Langflow directly on your system or in a virtual environment -via a pip installation, you can set these values by providing Langflow with a .env file containing these -environment variables, using the following command: - -```bash -langflow run --env-file .env -``` - -If you’re running Langflow in a Docker container, you can set these values by providing Langflow with: - -```bash -docker run \ - --privileged \ - --user 1000:0 \ - -p 7860:7860 \ - -e LANGFLOW_SECRET_KEY= \ - -e LANGFLOW_SAVE_DB_IN_CONFIG_DIR=true \ - -e LANGFLOW_CONFIG_DIR=/app/container_path \ - -v $(PWD)/your_path:/app/container_path \ - langflowai/langflow:latest -``` - -or - -```bash -docker run \ - --privileged \ - --user 1000:0 \ - -p 7860:7860 \ - --env-file .env \ - -v $(PWD)/your_path:/app/container_path \ - langflowai/langflow:latest -``` diff --git a/docs/docs/Settings/settings-project-general-settings.md b/docs/docs/Settings/settings-project-general-settings.md deleted file mode 100644 index 089c29d60685..000000000000 --- a/docs/docs/Settings/settings-project-general-settings.md +++ /dev/null @@ -1,93 +0,0 @@ ---- -title: Project & General Settings -sidebar_position: 1 -slug: /settings-project-general-settings ---- - - - -:::info - -This page may contain outdated information. It will be updated as soon as possible. - -::: - - - - -Change the **Project Settings** or **General Settings** for Langflow. - - -## Project Settings {#71e61e6544c94f808cd74b8cc012363d} - - ---- - - -Click **Project Name** > **Settings** to view your **Project Settings**. - -- **Name** - the name of your project. -- **Description** - the description for your project. -Visible on the Langflow Store. -- **Endpoint name** - the custom endpoint name for your project's API endpoint. -To use the default value, leave this field blank. - -## General Settings {#1a0c451fd5e84feeb1d18c2886d642eb} - - ---- - - -Select your **Profile Picture** > **Settings** to view your **General Settings**. - - -### Profile Picture {#8abfa80ed6c448b6977467679d43c275} - - -Select a profile picture. - - -### Store API Key {#6a12756beb0b42fd84bdf5ab5f10fffb} - - -Add your **Langflow Store** API key. To get a Store key, go to the [Langflow store](https://www.langflow.store/). - - -### Global Variables {#12aa7f28060447babc987bdf57fc065e} - - -Select **Add New** to add a key to Langflow. - - -Select the **trash icon** to delete a key. - - -For more information, see Global Variables. - - -### Langflow API {#0a08ffc3dd9042da9f9d2d49c9df0b6a} - - -Create a Langflow API key. - - -Click **Add New** > **Create Secret Key** and copy the key somewhere safe and accessible. - - -For more information, see Langflow API. - - -### Shortcuts {#6c9d705c9e7f466db496dbc6571c81d2} - - -A list of keyboard shortcuts for Langflow. - - -### Messages {#898425cf88b74c048c23e8e1e1d7c2bd} - - -Inspect, edit, and remove messages in your flow for testing and debugging purposes. - - -For more information, see the Playground. 
-
diff --git a/docs/docs/Starter-Projects/starter-flow-simple-agent.png b/docs/docs/Starter-Projects/starter-flow-simple-agent.png
new file mode 100644
index 000000000000..fc14c0f2bfd4
Binary files /dev/null and b/docs/docs/Starter-Projects/starter-flow-simple-agent.png differ
diff --git a/docs/docs/Starter-Projects/starter-flow-travel-planning-agent.png b/docs/docs/Starter-Projects/starter-flow-travel-planning-agent.png
new file mode 100644
index 000000000000..64a6cd9bfde0
Binary files /dev/null and b/docs/docs/Starter-Projects/starter-flow-travel-planning-agent.png differ
diff --git a/docs/docs/Starter-Projects/starter-projects-dynamic-agent.md b/docs/docs/Starter-Projects/starter-projects-dynamic-agent.md
new file mode 100644
index 000000000000..50df4aa76e00
--- /dev/null
+++ b/docs/docs/Starter-Projects/starter-projects-dynamic-agent.md
@@ -0,0 +1,53 @@
+---
+title: Dynamic agent
+sidebar_position: 7
+slug: /starter-projects-dynamic-agent
+---
+
+Build a **Dynamic Agent** flow for an agentic application using CrewAI.
+
+An **agent** uses an LLM as its "brain" to reason through tasks and select among the connected tools to complete them.
+
+This flow uses [CrewAI](https://docs.crewai.com/) to manage a [Hierarchical crew](https://docs.crewai.com/how-to/Hierarchical/) of **Agents** as they perform a sequence of **Tasks**.
+
+CrewAI agents have **Roles**, **Goals**, and **Backstories** that define their behavior and interactions with other agents. Agents in a Hierarchical Crew are managed by a single agent with a **Manager** role, which is connected to an **OpenAI** LLM component to reason through the tasks and select the appropriate tools to complete them.
+
+This flow is "dynamic" because it uses the **Chat input** component's text to define a CrewAI agent's Role, Goal, and Backstory. The created agent then uses the connected tools to research and complete the **Task** created from the **Chat input** component.
+
+## Prerequisites
+
+To use this flow, you need an [OpenAI API key](https://platform.openai.com/) and a [Search API key](https://www.searchapi.io/).
+
+## Open Langflow and start a new project
+
+Click **New Project**, and then select the **Dynamic Agent** project.
+
+This opens a starter project with the necessary components to run an agentic application using CrewAI.
+
+The **Dynamic Agent** flow consists of these components:
+
+* The **Chat Input** component accepts user input to the chat.
+* The **Prompt** component combines the user input with a user-defined prompt.
+* The **OpenAI** model component sends the user input and prompt to the OpenAI API and receives a response.
+* The **Chat Output** component prints the flow's output to the chat.
+* The **CrewAI Agent** component is an autonomous unit programmed to perform tasks, make decisions, and communicate with other agents.
+* The **CrewAI Crew** component represents a collaborative group of agents working together to achieve a set of tasks. This Crew can manage work **sequentially** or **hierarchically**.
+* The **CrewAI Task** component is a specific assignment to be completed by agents.
+This task can be **sequential** or **hierarchical** depending on the Crew's configuration.
+* The **SearchAPI** tool performs web searches using the **SearchAPI.io** API.
+* The **Yahoo Finance News Tool** component creates a tool for retrieving news from Yahoo Finance.
+
+## Run the Dynamic Agent flow
+
+1. Add your credentials to the OpenAI and SearchAPI components using Langflow's Global Variables:
+   - Click **Settings**, then **Global Variables**.
+   - Click **Add New**.
+   - Name your variable and paste your API key in the **Value** field.
+   - In the **Apply To Fields** field, select the field to apply this variable to.
+   - Click **Save Variable**.
+2. In the **Chat output** component, click ▶️ Play to start the end-to-end application flow.
+   A **Chat output built successfully** message and a ✅ Check on all components indicate that the flow ran successfully.
+3. Click **Playground** to start a chat session.
+   You should receive a detailed, helpful answer to the question defined in the **Chat input** component.
+
+Now that your query has completed the journey from **Chat input** to **Chat output**, you have completed the **Dynamic Agent** flow.
\ No newline at end of file
diff --git a/docs/docs/Starter-Projects/starter-projects-simple-agent.md b/docs/docs/Starter-Projects/starter-projects-simple-agent.md
new file mode 100644
index 000000000000..d01817adabe4
--- /dev/null
+++ b/docs/docs/Starter-Projects/starter-projects-simple-agent.md
@@ -0,0 +1,58 @@
+---
+title: Simple agent
+sidebar_position: 6
+slug: /starter-projects-simple-agent
+---
+
+Build a **Simple Agent** flow for an agentic application using the Tool-calling agent.
+
+An **agent** uses an LLM as its "brain" to select among the connected tools and complete its tasks.
+
+In this flow, the **Tool-calling agent** reasons using an **OpenAI** LLM to solve math problems. It will select the **Calculator** tool for simpler math and the **Python REPL** tool (with the Python `math` library) for more complex problems.
+
+## Prerequisites
+
+To use this flow, you need an OpenAI API key.
+
+## Open Langflow and start a new project
+
+Click **New Project**, and then select the **Simple Agent** project.
+
+This opens a starter project with the necessary components to run an agentic application using the Tool-calling agent.
+
+## Simple Agent flow
+
+![](./starter-flow-simple-agent.png)
+
+The **Simple Agent** flow consists of these components:
+
+* The **Tool calling agent** component uses the connected LLM to reason through the user's input and select among the connected tools to complete its task.
+* The **Python REPL tool** component executes Python code in a REPL (Read-Evaluate-Print Loop) interpreter.
+* The **Calculator** component performs basic arithmetic operations.
+* The **Chat Input** component accepts user input to the chat.
+* The **Prompt** component combines the user input with a user-defined prompt.
+* The **Chat Output** component prints the flow's output to the chat.
+* The **OpenAI** model component sends the user input and prompt to the OpenAI API and receives a response.
+
+## Run the Simple Agent flow
+
+1. Add your credentials to the OpenAI component.
+2. In the **Chat output** component, click ▶️ Play to start the end-to-end application flow.
+   A **Chat output built successfully** message and a ✅ Check on all components indicate that the flow ran successfully.
+3. Click **Playground** to start a chat session.
+4. Enter a simple math problem, like `2 + 2`, and then make sure the bot responds with the correct answer.
+5. To confirm the REPL interpreter is working, prompt the bot with `math.sqrt(4)` directly and check that it responds with `2` (the square root of 4).
+6. The agent will also reason through more complex word problems.
For example, prompt the agent with the following math problem:
+
+```plain
+The equation (24x^2 + 25x - 47) / (ax - 2) = -8x - 3 - 53 / (ax - 2) is true
+for all values of x ≠ 2/a, where a is a constant.
+What is the value of a?
+A) -16
+B) -3
+C) 3
+D) 16
+```
+
+The agent should respond with `B`. (Multiplying both sides by `ax - 2` and matching the x^2 coefficients gives 24 = -8a, so a = -3.)
+
+Now that your query has completed the journey from **Chat input** to **Chat output**, you have completed the **Simple Agent** flow.
diff --git a/docs/docs/Starter-Projects/starter-projects-travel-planning-agent.md b/docs/docs/Starter-Projects/starter-projects-travel-planning-agent.md
new file mode 100644
index 000000000000..1bd2b53ef402
--- /dev/null
+++ b/docs/docs/Starter-Projects/starter-projects-travel-planning-agent.md
@@ -0,0 +1,48 @@
+---
+title: Travel planning agent
+sidebar_position: 8
+slug: /starter-projects-travel-planning-agent
+---
+
+Build a **Travel Planning Agent** flow for an agentic application using multiple Tool-calling agents.
+
+An **agent** uses an LLM as its "brain" to select among the connected tools and complete its tasks.
+
+In this flow, multiple **Tool-calling agents** reason using an **OpenAI** LLM to plan a travel journey. Each agent is given a different responsibility defined by its **System Prompt** field.
+
+The **Chat input** defines where the user wants to go, and passes the result to the **City Selection** agent. The **Local Expert** agent then adds information based on the selected cities, and the **Travel Concierge** assembles a seven-day travel plan in Markdown.
+
+All agents have access to the **Search API** and **URL Content Fetcher** components, while only the Travel Concierge can use the **Calculator** for computing the trip costs.
+
+## Prerequisites
+
+To use this flow, you need an [OpenAI API key](https://platform.openai.com/) and a [Search API key](https://www.searchapi.io/).
+
+## Open Langflow and start a new project
+
+Click **New Project**, and then select the **Travel Planning Agent** project.
+
+This opens a starter project with the necessary components to run an agentic application using multiple Tool-calling agents.
+
+## Travel Planning Agent flow
+
+![](./starter-flow-travel-planning-agent.png)
+
+The **Travel Planning Agent** flow consists of these components:
+
+* Multiple **Tool calling agent** components that use the connected LLM to reason through the user's input and select among the connected tools to complete their tasks.
+* The **Calculator** component performs basic arithmetic operations.
+* The **URL Content Fetcher** component scrapes content from a given URL.
+* The **Chat Input** component accepts user input to the chat.
+* The **Chat Output** component prints the flow's output to the chat.
+* The **OpenAI** model component sends the user input and prompt to the OpenAI API and receives a response.
+
+## Run the Travel Planning Agent flow
+
+1. Add your credentials to the OpenAI and Search API components.
+2. In the **Chat output** component, click ▶️ Play to start the end-to-end application flow.
+   A **Chat output built successfully** message and a ✅ Check on all components indicate that the flow ran successfully.
+3. Click **Playground** to start a chat session.
+   You should receive a detailed, helpful answer for the journey defined in the **Chat input** component.
+
+Now that your query has completed the journey from **Chat input** to **Chat output**, you have completed the **Travel Planning Agent** flow.
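+
+You can also trigger the flow programmatically through the Langflow run API. This is a minimal sketch using the Python `requests` package, assuming Langflow is running locally on port 7860 and `YOUR_FLOW_ID` is replaced with your flow's ID:
+
+```python
+import requests
+
+# Placeholder flow ID; replace with the ID of your saved flow.
+url = "http://127.0.0.1:7860/api/v1/run/YOUR_FLOW_ID?stream=false"
+
+payload = {
+    "input_value": "Plan a trip to Lisbon",  # example destination
+    "output_type": "chat",
+    "input_type": "chat",
+}
+
+# Post the request and print the flow's response.
+response = requests.post(url, json=payload)
+print(response.json())
+```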
diff --git a/docs/docs/Starter-Projects/starter-projects-vector-store-rag.md b/docs/docs/Starter-Projects/starter-projects-vector-store-rag.md index a95aa46d5efb..c9a34e5f81de 100644 --- a/docs/docs/Starter-Projects/starter-projects-vector-store-rag.md +++ b/docs/docs/Starter-Projects/starter-projects-vector-store-rag.md @@ -23,7 +23,7 @@ We've chosen [Astra DB](https://astra.datastax.com/signup?utm_source=langflow-p --- -- [Langflow installed and running](http://localhost:3000/getting-started/install-langflow) +- [Langflow installed and running](https://docs.langflow.org/getting-started-installation) - [OpenAI API key](https://platform.openai.com/) - [An Astra DB vector database created](https://docs.datastax.com/en/astra-db-serverless/get-started/quickstart.html) with: - Application Token diff --git "a/docs/docs/What\342\200\231s-New/1143907392.png" "b/docs/docs/What\342\200\231s-New/1143907392.png" deleted file mode 100644 index 54779b8537ca..000000000000 Binary files "a/docs/docs/What\342\200\231s-New/1143907392.png" and /dev/null differ diff --git "a/docs/docs/What\342\200\231s-New/_category_.json" "b/docs/docs/What\342\200\231s-New/_category_.json" deleted file mode 100644 index d014c875afec..000000000000 --- "a/docs/docs/What\342\200\231s-New/_category_.json" +++ /dev/null @@ -1 +0,0 @@ -{"position":0, "label":"What’s New?"} \ No newline at end of file diff --git "a/docs/docs/What\342\200\231s-New/whats-new-a-new-chapter-langflow.md" "b/docs/docs/What\342\200\231s-New/whats-new-a-new-chapter-langflow.md" deleted file mode 100644 index 1c69dd8a7aae..000000000000 --- "a/docs/docs/What\342\200\231s-New/whats-new-a-new-chapter-langflow.md" +++ /dev/null @@ -1,175 +0,0 @@ ---- -title: 1.0 - A new chapter for Langflow -sidebar_position: 0 -slug: /whats-new-a-new-chapter-langflow ---- - - - -![](./1143907392.png) - - -## First things first {#5add758564574950862f689198a4e085} - - ---- - - -**Thank you all for being part of the Langflow community**. The journey so far has been amazing, and we are thrilled to have you with us. - - -We have some exciting news to share with you. Langflow is evolving, and we want to tell you all about it! - - -## What's new? {#ecad059e8f8249c598404c0b2bab36fa} - - ---- - - -In the past year, we learned a lot from the community and our users. We saw the potential of Langflow and the need for a visual, interactive platform for building conversational AI applications (and beyond). You thought us the importance of a platform that is easy to use, but also powerful and controllable, and that made clear to us how Langflow's transparency could be improved. - - -Below are some of the new features we included to make that happen! - - -### Same Component, Multiple Outputs {#85986f3ae303444fb69d9ea1baa2eca8} - - -Components can now have more than a single output, allowing for unique flexibility in creating complex flows. The game-changer is output routing — it allows for so many new capabilities it’s almost silly to describe! - -1. Branch to one or more subsequent components; -2. Apply logic operations like if/else and exploit decision-making; -3. Create classification models that choose between paths; -4. Enable the development of agent architectures from scratch; -5. Build an orchestrator that routes between agents. - -### Flow + Composition {#12d86f91b5af4abc9f8e885c5f828df4} - - -One key change you'll notice is that projects now require you to define **Inputs** and **Outputs**. They allow you to define the structure of your conversation and how data flows through it. 
This change comes with a new way of visualizing your projects. - - -Before 1.0 you would connect components to ultimately build one final component that was processed behind the scenes. Now, each step of the process is defined by you, is visible on the workspace, and can be monitored and controlled. - - -This makes it so that composition is now just **another way** of building in Langflow and **data flows through your project more transparently**. This means that the easy stuff is _really_ easy and the complex parts are still possible! - -- **Flow:** Data is processed by one component and then passed to the next component in line for immediate execution. -- **Composition**: Allows components to not only forward data but also share states for modular building. - -For example, a flow can sequentially process text, and after a few steps, trigger an agent. It can access functions that wait to be called or to respond. This blend of flow and composition brings an unprecedented level of flexibility and control to data workflows in LLM-based apps and agents that use multiple models and APIs working together to achieve tasks. - - -### Memory Management {#a9e352272f4a44499f52fae65b663118} - - -Langflow 1.0 natively allows every chat message to be stored, and a single flow can have multiple memory sessions. This enables you to create multiple “memories” for agents to store and recall specific information as needed. - - -You can edit and remove previous messages to inspect and validate a model’s response behavior. Control, explore, and manage conversation histories to get your models acting just right. - - -### Component Freeze 🥶 {#4912d08da5464ff2aff595d6b26fd809} - - -Component output freezing is back in Langflow, and it’s cooler than ever! - - -Once a component runs, you can now lock its previous output state to prevent it from re-running. - - -Avoid spending extra tokens and remove repetition when output should be constant — plus it's great for debugging and prototyping! - - -### Output Preview {#f56d4400b0214ef2ab1206add068dd99} - - -Each component now includes an output visualizer that opens a pop-up screen, allowing you to easily inspect and monitor transmissions between components. It provides instant feedback on your workflows, letting you see results as they are processed. 🔍 - - -### Inputs and Outputs Handling {#2112c11dc496480c9bd681f04f8533ea} - - -Inputs and outputs finally make more sense to us, and hopefully to you too. - - -We’re proposing Langflow-native types to keep things consistent, but not limited to use any Python type. For instance, a Chat Input component sends out what we call a Message object (text + metadata like date, time, and sender), but maybe you want to introduce external types from your favorite Python package? Go wild. Each native type will have their own visualization modes and will evolve according to new integrations added. - - -### Custom Endpoint Name {#bfeb7018e8bd46bf9dbef43ce2d3692b} - - -Now you can pick a custom name for your endpoint used to call your flow from the API. - - -### Logs & Monitoring {#c902ee43b4b24711b526352ae2451c48} - - -A new logs page has been added! Now, both component executions and message history from the chat can be inspected in an interactive table. This will make it easier to debug, inspect, and manage messages passing through components. - - -### Folders 📁 {#d01237fd83e4467994fed6fa7a8ee4f4} - - -We introduced folders on the home page to help categorize flows and components. 
Create, remove and edit them to keep your work organized. - - -### Playground {#27649918dcb64a0ebae83db767bfe2eb} - - -By having a clear definition of Inputs and Outputs, we could build the experience around that, which led us to create the Playground. - - -When building a project, testing and debugging are crucial. The Playground is an interface that changes dynamically based on the Inputs and Outputs you defined in your project. - - -For example, let's say you are building a simple RAG application. Generally, you have an Input, some references that come from a Vector Store Search, a Prompt, and the answer. Now, you could plug the output of your Prompt into a Text Output, rename that to "Prompt Result," and see the output of your Prompt in the Playground. We have many planned features for the Playground, and we're excited to see how you'll explore it! - - -### Multi-Modal {#b7753bc9d72f40c49c4074a8d0e51344} - - -Langflow is now multi-modal! It can now handle images and more soon! - - -We’ve also improved project organization, global variables and overall settings, added Python 3.12 compatibility, keyboard shortcuts and a lot of new and fun experimental components! - - -## An Easier Start {#c6bf434c0d35482bbf2ef0bceff902f0} - - ---- - - -The experience for first-time users is something we wanted to improve. For that we created a couple of Starter Projects. It's now much easier to start a new project, and you can choose from a list of starter projects to get you going. - - -For now, we have: - -- **Basic Prompting (Hello, World)**: Learn the basics of a Prompt Component. -- **Vector Store RAG**: Ingest data into a Vector Store and then use it to run a RAG application. -- **Memory Chatbot**: Create a simple chatbot that can remember things about the user. -- **Document QA**: Build a simple flow that helps you get answers about a document. -- **Blog Writer**: Expand on the Prompt variables and be creative about what inputs you add to it. - -Please let us know what other starter projects you would like to see in the future! - - -## What's Next? {#21004578890d4397bc291b43eb140640} - - ---- - - -Langflow has gone through a big change, and we are excited to see how you use it and what you think of it. We plan to add more types of Input and Output like Image and Audio, and we also plan to add more Components to help you build more complex projects. - - -We are excited to see the community embracing Langflow as their number one AI builder and eagerly wait to see what new inspiring projects will come out of this release! A big thanks to everyone who's supporting or being part of this community in any way. ✨ - - -Sincerely, - - -**The Langflow Team 🚀** - diff --git a/docs/docs/Workspace/workspace-logs.md b/docs/docs/Workspace/workspace-logs.md index a29d76fa3876..e1c3b504de00 100644 --- a/docs/docs/Workspace/workspace-logs.md +++ b/docs/docs/Workspace/workspace-logs.md @@ -1,6 +1,6 @@ --- title: Logs -sidebar_position: 3 +sidebar_position: 4 slug: /workspace-logs --- diff --git a/docs/docs/Workspace/workspace-overview.md b/docs/docs/Workspace/workspace-overview.md new file mode 100644 index 000000000000..ab3e1e64df7c --- /dev/null +++ b/docs/docs/Workspace/workspace-overview.md @@ -0,0 +1,77 @@ +--- +title: Workspace concepts +sidebar_position: 1 +slug: /workspace-overview +--- + +The **workspace** is where you create AI applications by connecting and running components in flows. + +The workspace controls allow you to adjust your view and lock your flows in place. 
+
+## Components
+
+A **component** is a single building block within a flow and consists of inputs, outputs, and parameters that define its functionality.
+
+To add a component to your flow, drag it from the sidebar onto the workspace.
+
+To connect components, drag a line from the output handle (⚪) of one component to the input handle of another.
+
+For more information, see [How to build flows with components](/components-overview).
+
+## Playground
+
+The **Playground** executes the current flow in the workspace.
+
+Chat with your flow, view inputs and outputs, and modify your AI's memories to tune your responses in real time.
+
+Any input or output component can be opened in the **Playground** and tested in real time.
+
+For more information, see the [Playground documentation](/workspace-playground).
+
+## API
+
+The **API** pane provides code templates to integrate your flows into external applications.
+
+For more information, see the [API documentation](/workspace-api).
+
+## Collections, folders, and projects
+
+The **My Collection** page displays all the flows and components you've created in the Langflow workspace.
+
+Your **collection** is the top-level categorization for all of your projects.
+
+### My projects
+
+**My Projects** is the default folder where all new projects and components are initially stored.
+
+Collections, folders, projects, and flows are exchanged as JSON objects.
+
+* To create a new folder, click 📁 **New Folder**.
+
+* To rename a folder, double-click the folder name.
+
+* To download a folder, click 📥 **Download**.
+
+* To upload a folder, click 📤 **Upload**. The default maximum file upload size is 100 MB.
+
+* To move a flow or component, drag and drop it into the desired folder.
+
+## Options menu
+
+The dropdown menu labeled with the project name offers several management and customization options for the current flow in the Langflow workspace.
+
+* **New**: Create a new flow from scratch.
+* **Settings**: Adjust settings specific to the current flow, such as its name, description, and endpoint name.
+* **Logs**: View logs for the current project, including execution history, errors, and other runtime events.
+* **Import**: Import a flow or component from a JSON file into the workspace.
+* **Export**: Export the current flow as a JSON file.
+* **Undo (⌘Z)**: Revert the last action taken in the project.
+* **Redo (⌘Y)**: Reapply a previously undone action.
+* **Refresh All**: Refresh all components and clear the cache.
+
+## Settings
+
+Click ⚙️ **Settings** to access **Global variables**, **Langflow API**, **Shortcuts**, and **Messages**.
+
+
+
diff --git a/docs/docs/Workspace/workspace-playground.md b/docs/docs/Workspace/workspace-playground.md
index bca87781095f..e1d47f355c2b 100644
--- a/docs/docs/Workspace/workspace-playground.md
+++ b/docs/docs/Workspace/workspace-playground.md
@@ -1,6 +1,6 @@
 ---
 title: Playground
-sidebar_position: 1
+sidebar_position: 2
 slug: /workspace-playground
 ---
 
@@ -35,9 +35,53 @@ You can also open a flow's **Playground** without entering its workspace. From *
 
 ---
 
+Click the **Memories** tab to open a table of previous interactions for a specific flow.
 
-Whenever you send a message from the **Playground** interface, under the **Memories** **Tab** you'll see a table of previous interactions for that session.
+Whenever you send a message from the **Playground** interface, under the **Memories** tab you'll see a table of previous interactions for that session.
+Langflow allows every chat message to be stored, and a single flow can have multiple memory sessions.
-Langflow allows every chat message to be stored, and a single flow can have multiple memory sessions. To learn more about how to use memories in Langflow, see [Chat Memory](/guides-chat-memory).
+
+Chat conversations store messages categorized by a Session ID. A single flow can host multiple Session IDs, and different flows can share the same Session ID.
+
+Individual messages in chat memory can be edited or deleted. Modifying these memories will influence the behavior of the chatbot responses.
+
+To learn more about memories in Langflow, see [Chat Memory](/guides-chat-memory).
+
+## Use custom Session IDs for multiple user interactions
+
+Session ID values are used to track user interactions in a flow. They can be configured in the Advanced Settings of the Chat Input and Chat Output components.
+
+By default, if the Session ID value is empty, it is set to the same value as the Flow ID. This means every API call will use the same Session ID, and you’ll effectively have one session.
+
+To have more than one session in a single flow, pass a specific Session ID to a flow with the `session_id` parameter in the request payload. All the components in the flow will automatically use this `session_id` value.
+
+For example, post a message to a flow with a specific Session ID with curl:
+
+```bash
+curl -X POST \
+  "http://127.0.0.1:7860/api/v1/run/4017e9f2-1fec-4643-bb05-165a8b50c4b3?stream=false" \
+  -H 'Content-Type: application/json' \
+  -d '{"input_value": "message",
+  "output_type": "chat",
+  "input_type": "chat",
+  "session_id": "YOUR_SESSION_ID"
+}'
+```
+
+Check your flow’s **Memories** tab. In addition to the messages stored for the Default Session, a new chat session is started with your new Session ID.
+
+**Chat Input** and **Chat Output** components can also store a `session_id` parameter as a **Tweak** for specific sessions. The Playground will still display all available sessions, but the flow will use the value stored in the `session_id` tweak.
+
+```bash
+curl -X POST \
+  "http://127.0.0.1:7860/api/v1/run/4017e9f2-1fec-4643-bb05-165a8b50c4b3?stream=false" \
+  -H 'Content-Type: application/json' \
+  -d '{"input_value": "message",
+  "output_type": "chat",
+  "input_type": "chat",
+  "tweaks": {
+    "session_id": "YOUR_SESSION_ID"
+  }
+}'
+```
diff --git a/docs/docs/Workspace/workspace.md b/docs/docs/Workspace/workspace.md
deleted file mode 100644
index 53a94b67bb7f..000000000000
--- a/docs/docs/Workspace/workspace.md
+++ /dev/null
@@ -1,69 +0,0 @@
----
-title: Workspace Overview
-sidebar_position: 0
-slug: /workspace
----
-
-
-
-## The Langflow Workspace {#5c8161f9bcc14bfca766685d15251d0c}
-
-
----
-
-
-The **Langflow Workspace** is where you assemble new flows and create AIs by connecting and running components.
-
-
-![](./310382168.png)
-
-
-### Sidebar {#9d571b1d46804a01bcd8fbb9a4677af4}
-
-
-Located on the left, this the sidebar includes several collapsible sections that categorize the different types of pre-built components available in Langflow. Use the search bar to locate components by name.
-
-
-![](./1769489314.png)
-
-
-### Canvas {#5b050d0f8406436a9201f4711f0063ae}
-
-
-The canvas is the main area in the center where you can drag and drop components to create workflows.
-
-
-![](./1224026366.png)
-
-
-Use canvas controls in the bottom left side for zooming in and out, resetting the view, and locking or unlocking the canvas.
- - -### Top Navigation Bar {#603c9941be154e1eba4e7c9ec9554b92} - - -In the top navigation bar, the dropdown menu labeled with the project name offers several management and customization options for the current flow in the Langflow Workspace. - - -![](./1801518600.png) - -- **New**: Create a new flow from scratch. -- **Settings**: Adjust settings specific to the current flow, such as its Name, Description, and Endpoint Name. -- **Logs**: View logs for the current project, including execution history, errors, and other runtime events. -- **Import**: Import a flow or component from a JSON file into the Workspace. -- **Export**: Export the current flow as a JSON file. -- **Undo (⌘Z)**: Revert the last action taken in the project. -- **Redo (⌘Y)**: Reapply a previously undone action. -- **Refresh All**: Refresh all components and delete cache. - -### Toolbar {#8038bcc315c34350a1f587e1ad0de59f} - - -The toolbar at the bottom-right corner that provides options for executing, accessing the API, and sharing workflows. - - -![](./563382604.png) - -- **Playground**: Button that executes the current flow in the workspace. -- **API**: Provides API access details and integration options for the current flow. -- **Share**: Allows users to share their AI with others. diff --git a/docs/docusaurus.config.js b/docs/docusaurus.config.js index 44822952a6ad..977b856ed4f9 100644 --- a/docs/docusaurus.config.js +++ b/docs/docusaurus.config.js @@ -8,9 +8,9 @@ const { remarkCodeHike } = require("@code-hike/mdx"); /** @type {import('@docusaurus/types').Config} */ const config = { title: "Langflow Documentation", - tagline: "Langflow is a GUI for LangChain, designed with react-flow", + tagline: "Langflow is a low-code app builder for RAG and multi-agent AI applications.", favicon: "img/favicon.ico", - url: "https://langflow-ai.github.io", + url: "https://docs.langflow.org", baseUrl: "/", onBrokenLinks: "throw", onBrokenMarkdownLinks: "warn", @@ -19,7 +19,7 @@ const config = { trailingSlash: false, staticDirectories: ["static"], customFields: { - mendableAnonKey: process.env.MENDABLE_ANON_KEY, + mendableAnonKey: "b7f52734-297c-41dc-8737-edbd13196394", // Mendable Anon Client-side key, safe to expose to the public }, i18n: { defaultLocale: "en", @@ -28,7 +28,7 @@ const config = { presets: [ [ - "classic", + "@docusaurus/preset-classic", /** @type {import('@docusaurus/preset-classic').Options} */ ({ docs: { @@ -46,10 +46,20 @@ const config = { ], ], }, + sitemap: { + // https://docusaurus.io/docs/api/plugins/@docusaurus/plugin-sitemap + // https://developers.google.com/search/docs/crawling-indexing/sitemaps/build-sitemap + lastmod: 'datetime', + changefreq: null, + priority: null, + }, gtag: { trackingID: "G-XHC7G628ZP", anonymizeIP: true, }, + googleTagManager: { + containerId: "GTM-NK5M4ZT8", + }, blog: false, theme: { customCss: [ @@ -68,6 +78,38 @@ const config = { plugins: [ ["docusaurus-node-polyfills", { excludeAliases: ["console"] }], "docusaurus-plugin-image-zoom", + [ + '@docusaurus/plugin-client-redirects', + { + redirects: [ + { + to: '/', + from: ['/whats-new-a-new-chapter-langflow', '/👋 Welcome-to-Langflow'], + }, + { + to: '/getting-started-installation', + from: '/getting-started-common-installation-issues', + }, + { + to: '/workspace-overview', + from: ['/365085a8-a90a-43f9-a779-f8769ec7eca1', '/My-Collection', '/workspace', '/settings-project-general-settings'], + }, + { + to: '/components-overview', + from: '/components', + }, + { + to: '/configuration-global-variables', + from: 
'/settings-global-variables', + }, + // add more redirects like this + // { + // to: '/docs/anotherpage', + // from: ['/docs/legacypage1', '/docs/legacypage2'], + // }, + ], + }, + ], // .... async function myPlugin(context, options) { return { @@ -84,12 +126,6 @@ const config = { themeConfig: /** @type {import('@docusaurus/preset-classic').ThemeConfig} */ ({ - sitemap: { - // https://www.sitemaps.org/protocol.html#xmlTagDefinitions - changefreq: "weekly", - priority: 0.5, - ignorePatterns: [], - }, navbar: { hideOnScroll: true, title: "Langflow", diff --git a/docs/package-lock.json b/docs/package-lock.json index 937d1f6f8ce9..753a91d57701 100644 --- a/docs/package-lock.json +++ b/docs/package-lock.json @@ -10,6 +10,8 @@ "dependencies": { "@code-hike/mdx": "^0.9.0", "@docusaurus/core": "^3.2.0", + "@docusaurus/plugin-client-redirects": "^3.4.0", + "@docusaurus/plugin-google-tag-manager": "^3.2.0", "@docusaurus/preset-classic": "^3.2.0", "@easyops-cn/docusaurus-search-local": "^0.44.3", "@mdx-js/react": "^3.0.1", @@ -258,12 +260,12 @@ } }, "node_modules/@babel/code-frame": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.24.7.tgz", - "integrity": "sha512-BcYH1CVJBO9tvyIZ2jVeXgSIMvGZ2FDRvDdOIVQyuklNKSsx+eppDEBq/g47Ayw+RqNFE+URvOShmf+f/qwAlA==", - "license": "MIT", + "version": "7.26.2", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.26.2.tgz", + "integrity": "sha512-RJlIHRueQgwWitWgF8OdFYGZX328Ax5BCemNGlqHfplnRT9ESi8JkFlvaVYbS+UubVY6dpv87Fs2u5M29iNFVQ==", "dependencies": { - "@babel/highlight": "^7.24.7", + "@babel/helper-validator-identifier": "^7.25.9", + "js-tokens": "^4.0.0", "picocolors": "^1.0.0" }, "engines": { @@ -271,30 +273,28 @@ } }, "node_modules/@babel/compat-data": { - "version": "7.24.8", - "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.24.8.tgz", - "integrity": "sha512-c4IM7OTg6k1Q+AJ153e2mc2QVTezTwnb4VzquwcyiEzGnW0Kedv4do/TrkU98qPeC5LNiMt/QXwIjzYXLBpyZg==", - "license": "MIT", + "version": "7.26.2", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.26.2.tgz", + "integrity": "sha512-Z0WgzSEa+aUcdiJuCIqgujCshpMWgUpgOxXotrYPSA53hA3qopNaqcJpyr0hVb1FeWdnqFA35/fUtXgBK8srQg==", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/core": { - "version": "7.24.8", - "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.24.8.tgz", - "integrity": "sha512-6AWcmZC/MZCO0yKys4uhg5NlxL0ESF3K6IAaoQ+xSXvPyPyxNWRafP+GDbI88Oh68O7QkJgmEtedWPM9U0pZNg==", - "license": "MIT", + "version": "7.26.0", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.26.0.tgz", + "integrity": "sha512-i1SLeK+DzNnQ3LL/CswPCa/E5u4lh1k6IAEphON8F+cXt0t9euTshDru0q7/IqMa1PMPz5RnHuHscF8/ZJsStg==", "dependencies": { "@ampproject/remapping": "^2.2.0", - "@babel/code-frame": "^7.24.7", - "@babel/generator": "^7.24.8", - "@babel/helper-compilation-targets": "^7.24.8", - "@babel/helper-module-transforms": "^7.24.8", - "@babel/helpers": "^7.24.8", - "@babel/parser": "^7.24.8", - "@babel/template": "^7.24.7", - "@babel/traverse": "^7.24.8", - "@babel/types": "^7.24.8", + "@babel/code-frame": "^7.26.0", + "@babel/generator": "^7.26.0", + "@babel/helper-compilation-targets": "^7.25.9", + "@babel/helper-module-transforms": "^7.26.0", + "@babel/helpers": "^7.26.0", + "@babel/parser": "^7.26.0", + "@babel/template": "^7.25.9", + "@babel/traverse": "^7.25.9", + "@babel/types": "^7.26.0", "convert-source-map": "^2.0.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.2", @@ -319,54 +319,51 
@@ } }, "node_modules/@babel/generator": { - "version": "7.24.8", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.24.8.tgz", - "integrity": "sha512-47DG+6F5SzOi0uEvK4wMShmn5yY0mVjVJoWTphdY2B4Rx9wHgjK7Yhtr0ru6nE+sn0v38mzrWOlah0p/YlHHOQ==", - "license": "MIT", + "version": "7.26.2", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.26.2.tgz", + "integrity": "sha512-zevQbhbau95nkoxSq3f/DC/SC+EEOUZd3DYqfSkMhY2/wfSeaHV1Ew4vk8e+x8lja31IbyuUa2uQ3JONqKbysw==", "dependencies": { - "@babel/types": "^7.24.8", + "@babel/parser": "^7.26.2", + "@babel/types": "^7.26.0", "@jridgewell/gen-mapping": "^0.3.5", "@jridgewell/trace-mapping": "^0.3.25", - "jsesc": "^2.5.1" + "jsesc": "^3.0.2" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-annotate-as-pure": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.24.7.tgz", - "integrity": "sha512-BaDeOonYvhdKw+JoMVkAixAAJzG2jVPIwWoKBPdYuY9b452e2rPuI9QPYh3KpofZ3pW2akOmwZLOiOsHMiqRAg==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.25.9.tgz", + "integrity": "sha512-gv7320KBUFJz1RnylIg5WWYPRXKZ884AGkYpgpWW02TH66Dl+HaC1t1CKd0z3R4b6hdYEcmrNZHUmfCP+1u3/g==", "dependencies": { - "@babel/types": "^7.24.7" + "@babel/types": "^7.25.9" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-builder-binary-assignment-operator-visitor": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.24.7.tgz", - "integrity": "sha512-xZeCVVdwb4MsDBkkyZ64tReWYrLRHlMN72vP7Bdm3OUOuyFZExhsHUUnuWnm2/XOlAJzR0LfPpB56WXZn0X/lA==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.25.9.tgz", + "integrity": "sha512-C47lC7LIDCnz0h4vai/tpNOI95tCd5ZT3iBt/DBH5lXKHZsyNQv18yf1wIIg2ntiQNgmAvA+DgZ82iW8Qdym8g==", "dependencies": { - "@babel/traverse": "^7.24.7", - "@babel/types": "^7.24.7" + "@babel/traverse": "^7.25.9", + "@babel/types": "^7.25.9" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-compilation-targets": { - "version": "7.24.8", - "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.24.8.tgz", - "integrity": "sha512-oU+UoqCHdp+nWVDkpldqIQL/i/bvAv53tRqLG/s+cOXxe66zOYLU7ar/Xs3LdmBihrUMEUhwu6dMZwbNOYDwvw==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.25.9.tgz", + "integrity": "sha512-j9Db8Suy6yV/VHa4qzrj9yZfZxhLWQdVnRlXxmKLYlhWUVB1sB2G5sxuWYXk/whHD9iW76PmNzxZ4UCnTQTVEQ==", "dependencies": { - "@babel/compat-data": "^7.24.8", - "@babel/helper-validator-option": "^7.24.8", - "browserslist": "^4.23.1", + "@babel/compat-data": "^7.25.9", + "@babel/helper-validator-option": "^7.25.9", + "browserslist": "^4.24.0", "lru-cache": "^5.1.1", "semver": "^6.3.1" }, @@ -384,19 +381,16 @@ } }, "node_modules/@babel/helper-create-class-features-plugin": { - "version": "7.24.8", - "resolved": "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.24.8.tgz", - "integrity": "sha512-4f6Oqnmyp2PP3olgUMmOwC3akxSm5aBYraQ6YDdKy7NcAMkDECHWG0DEnV6M2UAkERgIBhYt8S27rURPg7SxWA==", - 
"license": "MIT", - "dependencies": { - "@babel/helper-annotate-as-pure": "^7.24.7", - "@babel/helper-environment-visitor": "^7.24.7", - "@babel/helper-function-name": "^7.24.7", - "@babel/helper-member-expression-to-functions": "^7.24.8", - "@babel/helper-optimise-call-expression": "^7.24.7", - "@babel/helper-replace-supers": "^7.24.7", - "@babel/helper-skip-transparent-expression-wrappers": "^7.24.7", - "@babel/helper-split-export-declaration": "^7.24.7", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.25.9.tgz", + "integrity": "sha512-UTZQMvt0d/rSz6KI+qdu7GQze5TIajwTS++GUozlw8VBJDEOAqSXwm1WvmYEZwqdqSGQshRocPDqrt4HBZB3fQ==", + "dependencies": { + "@babel/helper-annotate-as-pure": "^7.25.9", + "@babel/helper-member-expression-to-functions": "^7.25.9", + "@babel/helper-optimise-call-expression": "^7.25.9", + "@babel/helper-replace-supers": "^7.25.9", + "@babel/helper-skip-transparent-expression-wrappers": "^7.25.9", + "@babel/traverse": "^7.25.9", "semver": "^6.3.1" }, "engines": { @@ -410,19 +404,17 @@ "version": "6.3.1", "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", - "license": "ISC", "bin": { "semver": "bin/semver.js" } }, "node_modules/@babel/helper-create-regexp-features-plugin": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.24.7.tgz", - "integrity": "sha512-03TCmXy2FtXJEZfbXDTSqq1fRJArk7lX9DOFC/47VthYcxyIOx+eXQmdo6DOQvrbpIix+KfXwvuXdFDZHxt+rA==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.25.9.tgz", + "integrity": "sha512-ORPNZ3h6ZRkOyAa/SaHU+XsLZr0UQzRwuDQ0cczIA17nAzZ+85G5cVkOJIj7QavLZGSe8QXUmNFxSZzjcZF9bw==", "dependencies": { - "@babel/helper-annotate-as-pure": "^7.24.7", - "regexpu-core": "^5.3.1", + "@babel/helper-annotate-as-pure": "^7.25.9", + "regexpu-core": "^6.1.1", "semver": "^6.3.1" }, "engines": { @@ -457,80 +449,38 @@ "@babel/core": "^7.4.0 || ^8.0.0-0 <8.0.0" } }, - "node_modules/@babel/helper-environment-visitor": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.24.7.tgz", - "integrity": "sha512-DoiN84+4Gnd0ncbBOM9AZENV4a5ZiL39HYMyZJGZ/AZEykHYdJw0wW3kdcsh9/Kn+BRXHLkkklZ51ecPKmI1CQ==", - "license": "MIT", - "dependencies": { - "@babel/types": "^7.24.7" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-function-name": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.24.7.tgz", - "integrity": "sha512-FyoJTsj/PEUWu1/TYRiXTIHc8lbw+TDYkZuoE43opPS5TrI7MyONBE1oNvfguEXAD9yhQRrVBnXdXzSLQl9XnA==", - "license": "MIT", - "dependencies": { - "@babel/template": "^7.24.7", - "@babel/types": "^7.24.7" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-hoist-variables": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.24.7.tgz", - "integrity": "sha512-MJJwhkoGy5c4ehfoRyrJ/owKeMl19U54h27YYftT0o2teQ3FJ3nQUf/I3LlJsX4l3qlw7WRXUmiyajvHXoTubQ==", - "license": "MIT", - "dependencies": { - "@babel/types": "^7.24.7" - }, - "engines": { - "node": ">=6.9.0" 
- } - }, "node_modules/@babel/helper-member-expression-to-functions": { - "version": "7.24.8", - "resolved": "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.24.8.tgz", - "integrity": "sha512-LABppdt+Lp/RlBxqrh4qgf1oEH/WxdzQNDJIu5gC/W1GyvPVrOBiItmmM8wan2fm4oYqFuFfkXmlGpLQhPY8CA==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.25.9.tgz", + "integrity": "sha512-wbfdZ9w5vk0C0oyHqAJbc62+vet5prjj01jjJ8sKn3j9h3MQQlflEdXYvuqRWjHnM12coDEqiC1IRCi0U/EKwQ==", "dependencies": { - "@babel/traverse": "^7.24.8", - "@babel/types": "^7.24.8" + "@babel/traverse": "^7.25.9", + "@babel/types": "^7.25.9" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-module-imports": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.24.7.tgz", - "integrity": "sha512-8AyH3C+74cgCVVXow/myrynrAGv+nTVg5vKu2nZph9x7RcRwzmh0VFallJuFTZ9mx6u4eSdXZfcOzSqTUm0HCA==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.25.9.tgz", + "integrity": "sha512-tnUA4RsrmflIM6W6RFTLFSXITtl0wKjgpnLgXyowocVPrbYrLUXSBXDgTs8BlbmIzIdlBySRQjINYs2BAkiLtw==", "dependencies": { - "@babel/traverse": "^7.24.7", - "@babel/types": "^7.24.7" + "@babel/traverse": "^7.25.9", + "@babel/types": "^7.25.9" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-module-transforms": { - "version": "7.24.8", - "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.24.8.tgz", - "integrity": "sha512-m4vWKVqvkVAWLXfHCCfff2luJj86U+J0/x+0N3ArG/tP0Fq7zky2dYwMbtPmkc/oulkkbjdL3uWzuoBwQ8R00Q==", - "license": "MIT", + "version": "7.26.0", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.26.0.tgz", + "integrity": "sha512-xO+xu6B5K2czEnQye6BHA7DolFFmS3LB7stHZFaOLb1pAwO1HWLS8fXA+eh0A2yIvltPVmx3eNNDBJA2SLHXFw==", "dependencies": { - "@babel/helper-environment-visitor": "^7.24.7", - "@babel/helper-module-imports": "^7.24.7", - "@babel/helper-simple-access": "^7.24.7", - "@babel/helper-split-export-declaration": "^7.24.7", - "@babel/helper-validator-identifier": "^7.24.7" + "@babel/helper-module-imports": "^7.25.9", + "@babel/helper-validator-identifier": "^7.25.9", + "@babel/traverse": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -540,35 +490,32 @@ } }, "node_modules/@babel/helper-optimise-call-expression": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.24.7.tgz", - "integrity": "sha512-jKiTsW2xmWwxT1ixIdfXUZp+P5yURx2suzLZr5Hi64rURpDYdMW0pv+Uf17EYk2Rd428Lx4tLsnjGJzYKDM/6A==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.25.9.tgz", + "integrity": "sha512-FIpuNaz5ow8VyrYcnXQTDRGvV6tTjkNtCK/RYNDXGSLlUD6cBuQTSw43CShGxjvfBTfcUA/r6UhUCbtYqkhcuQ==", "dependencies": { - "@babel/types": "^7.24.7" + "@babel/types": "^7.25.9" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-plugin-utils": { - "version": "7.24.8", - "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.24.8.tgz", - "integrity": 
"sha512-FFWx5142D8h2Mgr/iPVGH5G7w6jDn4jUSpZTyDnQO0Yn7Ks2Kuz6Pci8H6MPCoUJegd/UZQ3tAvfLCxQSnWWwg==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.25.9.tgz", + "integrity": "sha512-kSMlyUVdWe25rEsRGviIgOWnoT/nfABVWlqt9N19/dIPWViAOW2s9wznP5tURbs/IDuNk4gPy3YdYRgH3uxhBw==", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-remap-async-to-generator": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.24.7.tgz", - "integrity": "sha512-9pKLcTlZ92hNZMQfGCHImUpDOlAgkkpqalWEeftW5FBya75k8Li2ilerxkM/uBEj01iBZXcCIB/bwvDYgWyibA==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.25.9.tgz", + "integrity": "sha512-IZtukuUeBbhgOcaW2s06OXTzVNJR0ybm4W5xC1opWFFJMZbwRj5LCk+ByYH7WdZPZTt8KnFwA8pvjN2yqcPlgw==", "dependencies": { - "@babel/helper-annotate-as-pure": "^7.24.7", - "@babel/helper-environment-visitor": "^7.24.7", - "@babel/helper-wrap-function": "^7.24.7" + "@babel/helper-annotate-as-pure": "^7.25.9", + "@babel/helper-wrap-function": "^7.25.9", + "@babel/traverse": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -578,14 +525,13 @@ } }, "node_modules/@babel/helper-replace-supers": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.24.7.tgz", - "integrity": "sha512-qTAxxBM81VEyoAY0TtLrx1oAEJc09ZK67Q9ljQToqCnA+55eNwCORaxlKyu+rNfX86o8OXRUSNUnrtsAZXM9sg==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.25.9.tgz", + "integrity": "sha512-IiDqTOTBQy0sWyeXyGSC5TBJpGFXBkRynjBeXsvbhQFKj2viwJC76Epz35YLU1fpe/Am6Vppb7W7zM4fPQzLsQ==", "dependencies": { - "@babel/helper-environment-visitor": "^7.24.7", - "@babel/helper-member-expression-to-functions": "^7.24.7", - "@babel/helper-optimise-call-expression": "^7.24.7" + "@babel/helper-member-expression-to-functions": "^7.25.9", + "@babel/helper-optimise-call-expression": "^7.25.9", + "@babel/traverse": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -595,204 +541,113 @@ } }, "node_modules/@babel/helper-simple-access": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.24.7.tgz", - "integrity": "sha512-zBAIvbCMh5Ts+b86r/CjU+4XGYIs+R1j951gxI3KmmxBMhCg4oQMsv6ZXQ64XOm/cvzfU1FmoCyt6+owc5QMYg==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.25.9.tgz", + "integrity": "sha512-c6WHXuiaRsJTyHYLJV75t9IqsmTbItYfdj99PnzYGQZkYKvan5/2jKJ7gu31J3/BJ/A18grImSPModuyG/Eo0Q==", "dependencies": { - "@babel/traverse": "^7.24.7", - "@babel/types": "^7.24.7" + "@babel/traverse": "^7.25.9", + "@babel/types": "^7.25.9" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-skip-transparent-expression-wrappers": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.24.7.tgz", - "integrity": "sha512-IO+DLT3LQUElMbpzlatRASEyQtfhSE0+m465v++3jyyXeBTBUjtVZg28/gHeV5mrTJqvEKhKroBGAvhW+qPHiQ==", - "license": "MIT", - "dependencies": { - "@babel/traverse": "^7.24.7", - "@babel/types": "^7.24.7" - }, - "engines": { - "node": ">=6.9.0" - } - }, - 
"node_modules/@babel/helper-split-export-declaration": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.24.7.tgz", - "integrity": "sha512-oy5V7pD+UvfkEATUKvIjvIAH/xCzfsFVw7ygW2SI6NClZzquT+mwdTfgfdbUiceh6iQO0CHtCPsyze/MZ2YbAA==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.25.9.tgz", + "integrity": "sha512-K4Du3BFa3gvyhzgPcntrkDgZzQaq6uozzcpGbOO1OEJaI+EJdqWIMTLgFgQf6lrfiDFo5FU+BxKepI9RmZqahA==", "dependencies": { - "@babel/types": "^7.24.7" + "@babel/traverse": "^7.25.9", + "@babel/types": "^7.25.9" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-string-parser": { - "version": "7.24.8", - "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.24.8.tgz", - "integrity": "sha512-pO9KhhRcuUyGnJWwyEgnRJTSIZHiT+vMD0kPeD+so0l7mxkMT19g3pjY9GTnHySck/hDzq+dtW/4VgnMkippsQ==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.25.9.tgz", + "integrity": "sha512-4A/SCr/2KLd5jrtOMFzaKjVtAei3+2r/NChoBNoZ3EyP/+GlhoaEGoWOZUmFmoITP7zOJyHIMm+DYRd8o3PvHA==", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-validator-identifier": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.24.7.tgz", - "integrity": "sha512-rR+PBcQ1SMQDDyF6X0wxtG8QyLCgUB0eRAGguqRLfkCA87l7yAP7ehq8SNj96OOGTO8OBV70KhuFYcIkHXOg0w==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.25.9.tgz", + "integrity": "sha512-Ed61U6XJc3CVRfkERJWDz4dJwKe7iLmmJsbOGu9wSloNSFttHV0I8g6UAgb7qnK5ly5bGLPd4oXZlxCdANBOWQ==", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-validator-option": { - "version": "7.24.8", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.24.8.tgz", - "integrity": "sha512-xb8t9tD1MHLungh/AIoWYN+gVHaB9kwlu8gffXGSt3FFEIT7RjS+xWbc2vUD1UTZdIpKj/ab3rdqJ7ufngyi2Q==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.25.9.tgz", + "integrity": "sha512-e/zv1co8pp55dNdEcCynfj9X7nyUKUXoUEwfXqaZt0omVOmDe9oOTdKStH4GmAw6zxMFs50ZayuMfHDKlO7Tfw==", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-wrap-function": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helper-wrap-function/-/helper-wrap-function-7.24.7.tgz", - "integrity": "sha512-N9JIYk3TD+1vq/wn77YnJOqMtfWhNewNE+DJV4puD2X7Ew9J4JvrzrFDfTfyv5EgEXVy9/Wt8QiOErzEmv5Ifw==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-wrap-function/-/helper-wrap-function-7.25.9.tgz", + "integrity": "sha512-ETzz9UTjQSTmw39GboatdymDq4XIQbR8ySgVrylRhPOFpsd+JrKHIuF0de7GCWmem+T4uC5z7EZguod7Wj4A4g==", "dependencies": { - "@babel/helper-function-name": "^7.24.7", - "@babel/template": "^7.24.7", - "@babel/traverse": "^7.24.7", - "@babel/types": "^7.24.7" + "@babel/template": "^7.25.9", + "@babel/traverse": "^7.25.9", + "@babel/types": "^7.25.9" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helpers": { - "version": "7.24.8", - "resolved": 
"https://registry.npmjs.org/@babel/helpers/-/helpers-7.24.8.tgz", - "integrity": "sha512-gV2265Nkcz7weJJfvDoAEVzC1e2OTDpkGbEsebse8koXUJUXPsCMi7sRo/+SPMuMZ9MtUPnGwITTnQnU5YjyaQ==", - "license": "MIT", + "version": "7.26.0", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.26.0.tgz", + "integrity": "sha512-tbhNuIxNcVb21pInl3ZSjksLCvgdZy9KwJ8brv993QtIVKJBBkYXz4q4ZbAv31GdnC+R90np23L5FbEBlthAEw==", "dependencies": { - "@babel/template": "^7.24.7", - "@babel/types": "^7.24.8" + "@babel/template": "^7.25.9", + "@babel/types": "^7.26.0" }, "engines": { "node": ">=6.9.0" } }, - "node_modules/@babel/highlight": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.24.7.tgz", - "integrity": "sha512-EStJpq4OuY8xYfhGVXngigBJRWxftKX9ksiGDnmlY3o7B/V7KIAc9X4oiK87uPJSc/vs5L869bem5fhZa8caZw==", - "license": "MIT", - "dependencies": { - "@babel/helper-validator-identifier": "^7.24.7", - "chalk": "^2.4.2", - "js-tokens": "^4.0.0", - "picocolors": "^1.0.0" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/highlight/node_modules/ansi-styles": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", - "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", - "license": "MIT", + "node_modules/@babel/parser": { + "version": "7.26.2", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.26.2.tgz", + "integrity": "sha512-DWMCZH9WA4Maitz2q21SRKHo9QXZxkDsbNZoVD62gusNtNBBqDg9i7uOhASfTfIGNzW+O+r7+jAlM8dwphcJKQ==", "dependencies": { - "color-convert": "^1.9.0" + "@babel/types": "^7.26.0" }, - "engines": { - "node": ">=4" - } - }, - "node_modules/@babel/highlight/node_modules/chalk": { - "version": "2.4.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", - "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", - "license": "MIT", - "dependencies": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" + "bin": { + "parser": "bin/babel-parser.js" }, "engines": { - "node": ">=4" - } - }, - "node_modules/@babel/highlight/node_modules/color-convert": { - "version": "1.9.3", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", - "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", - "license": "MIT", - "dependencies": { - "color-name": "1.1.3" - } - }, - "node_modules/@babel/highlight/node_modules/color-name": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==", - "license": "MIT" - }, - "node_modules/@babel/highlight/node_modules/escape-string-regexp": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", - "license": "MIT", - "engines": { - "node": ">=0.8.0" - } - }, - "node_modules/@babel/highlight/node_modules/has-flag": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", - "license": "MIT", - "engines": { - "node": ">=4" + 
"node": ">=6.0.0" } }, - "node_modules/@babel/highlight/node_modules/supports-color": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", - "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", - "license": "MIT", + "node_modules/@babel/plugin-bugfix-firefox-class-in-computed-class-key": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-firefox-class-in-computed-class-key/-/plugin-bugfix-firefox-class-in-computed-class-key-7.25.9.tgz", + "integrity": "sha512-ZkRyVkThtxQ/J6nv3JFYv1RYY+JT5BvU0y3k5bWrmuG4woXypRa4PXmm9RhOwodRkYFWqC0C0cqcJ4OqR7kW+g==", "dependencies": { - "has-flag": "^3.0.0" + "@babel/helper-plugin-utils": "^7.25.9", + "@babel/traverse": "^7.25.9" }, "engines": { - "node": ">=4" - } - }, - "node_modules/@babel/parser": { - "version": "7.24.8", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.24.8.tgz", - "integrity": "sha512-WzfbgXOkGzZiXXCqk43kKwZjzwx4oulxZi3nq2TYL9mOjQv6kYwul9mz6ID36njuL7Xkp6nJEfok848Zj10j/w==", - "license": "MIT", - "bin": { - "parser": "bin/babel-parser.js" + "node": ">=6.9.0" }, - "engines": { - "node": ">=6.0.0" + "peerDependencies": { + "@babel/core": "^7.0.0" } }, - "node_modules/@babel/plugin-bugfix-firefox-class-in-computed-class-key": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-firefox-class-in-computed-class-key/-/plugin-bugfix-firefox-class-in-computed-class-key-7.24.7.tgz", - "integrity": "sha512-TiT1ss81W80eQsN+722OaeQMY/G4yTb4G9JrqeiDADs3N8lbPMGldWi9x8tyqCW5NLx1Jh2AvkE6r6QvEltMMQ==", - "license": "MIT", + "node_modules/@babel/plugin-bugfix-safari-class-field-initializer-scope": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-safari-class-field-initializer-scope/-/plugin-bugfix-safari-class-field-initializer-scope-7.25.9.tgz", + "integrity": "sha512-MrGRLZxLD/Zjj0gdU15dfs+HH/OXvnw/U4jJD8vpcP2CJQapPEv1IWwjc/qMg7ItBlPwSv1hRBbb7LeuANdcnw==", "dependencies": { - "@babel/helper-environment-visitor": "^7.24.7", - "@babel/helper-plugin-utils": "^7.24.7" + "@babel/helper-plugin-utils": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -802,12 +657,11 @@ } }, "node_modules/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.24.7.tgz", - "integrity": "sha512-unaQgZ/iRu/By6tsjMZzpeBZjChYfLYry6HrEXPoz3KmfF0sVBQ1l8zKMQ4xRGLWVsjuvB8nQfjNP/DcfEOCsg==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.25.9.tgz", + "integrity": "sha512-2qUwwfAFpJLZqxd02YW9btUCZHl+RFvdDkNfZwaIJrvB8Tesjsk8pEQkTvGwZXLqXUx/2oyY3ySRhm6HOXuCug==", "dependencies": { - "@babel/helper-plugin-utils": "^7.24.7" + "@babel/helper-plugin-utils": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -817,14 +671,13 @@ } }, "node_modules/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining/-/plugin-bugfix-v8-spread-parameters-in-optional-chaining-7.24.7.tgz", - "integrity": 
"sha512-+izXIbke1T33mY4MSNnrqhPXDz01WYhEf3yF5NbnUtkiNnm+XBZJl3kNfoK6NKmYlz/D07+l2GWVK/QfDkNCuQ==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining/-/plugin-bugfix-v8-spread-parameters-in-optional-chaining-7.25.9.tgz", + "integrity": "sha512-6xWgLZTJXwilVjlnV7ospI3xi+sl8lN8rXXbBD6vYn3UYDlGsag8wrZkKcSI8G6KgqKP7vNFaDgeDnfAABq61g==", "dependencies": { - "@babel/helper-plugin-utils": "^7.24.7", - "@babel/helper-skip-transparent-expression-wrappers": "^7.24.7", - "@babel/plugin-transform-optional-chaining": "^7.24.7" + "@babel/helper-plugin-utils": "^7.25.9", + "@babel/helper-skip-transparent-expression-wrappers": "^7.25.9", + "@babel/plugin-transform-optional-chaining": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -834,13 +687,12 @@ } }, "node_modules/@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly/-/plugin-bugfix-v8-static-class-fields-redefine-readonly-7.24.7.tgz", - "integrity": "sha512-utA4HuR6F4Vvcr+o4DnjL8fCOlgRFGbeeBEGNg3ZTrLFw6VWG5XmUrvcQ0FjIYMU2ST4XcR2Wsp7t9qOAPnxMg==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly/-/plugin-bugfix-v8-static-class-fields-redefine-readonly-7.25.9.tgz", + "integrity": "sha512-aLnMXYPnzwwqhYSCyXfKkIkYgJ8zv9RK+roo9DkTXz38ynIhd9XCbN08s3MGvqL2MYGVUGdRQLL/JqBIeJhJBg==", "dependencies": { - "@babel/helper-environment-visitor": "^7.24.7", - "@babel/helper-plugin-utils": "^7.24.7" + "@babel/helper-plugin-utils": "^7.25.9", + "@babel/traverse": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -861,45 +713,6 @@ "@babel/core": "^7.0.0-0" } }, - "node_modules/@babel/plugin-syntax-async-generators": { - "version": "7.8.4", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz", - "integrity": "sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.8.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-class-properties": { - "version": "7.12.13", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz", - "integrity": "sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.12.13" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-class-static-block": { - "version": "7.14.5", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-class-static-block/-/plugin-syntax-class-static-block-7.14.5.tgz", - "integrity": "sha512-b+YyPmr6ldyNnM6sqYeMWE+bgJcJpO6yS4QD7ymxgH34GBPNDM/THBh8iunyvKIZztiwLH4CJZ0RxTk9emgpjw==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.14.5" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, "node_modules/@babel/plugin-syntax-dynamic-import": { "version": "7.8.3", "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.8.3.tgz", @@ -912,25 +725,12 @@ "@babel/core": "^7.0.0-0" } }, - 
"node_modules/@babel/plugin-syntax-export-namespace-from": { - "version": "7.8.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-export-namespace-from/-/plugin-syntax-export-namespace-from-7.8.3.tgz", - "integrity": "sha512-MXf5laXo6c1IbEbegDmzGPwGNTsHZmEy6QGznu5Sh2UCWvueywb2ee+CCE4zQiZstxU9BMoQO9i6zUFSY0Kj0Q==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.8.3" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, "node_modules/@babel/plugin-syntax-import-assertions": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-assertions/-/plugin-syntax-import-assertions-7.24.7.tgz", - "integrity": "sha512-Ec3NRUMoi8gskrkBe3fNmEQfxDvY8bgfQpz6jlk/41kX9eUjvpyqWU7PBP/pLAvMaSQjbMNKJmvX57jP+M6bPg==", - "license": "MIT", + "version": "7.26.0", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-assertions/-/plugin-syntax-import-assertions-7.26.0.tgz", + "integrity": "sha512-QCWT5Hh830hK5EQa7XzuqIkQU9tT/whqbDz7kuaZMHFl1inRRg7JnuAEOQ0Ur0QUl0NufCk1msK2BeY79Aj/eg==", "dependencies": { - "@babel/helper-plugin-utils": "^7.24.7" + "@babel/helper-plugin-utils": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -940,12 +740,11 @@ } }, "node_modules/@babel/plugin-syntax-import-attributes": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-attributes/-/plugin-syntax-import-attributes-7.24.7.tgz", - "integrity": "sha512-hbX+lKKeUMGihnK8nvKqmXBInriT3GVjzXKFriV3YC6APGxMbP8RZNFwy91+hocLXq90Mta+HshoB31802bb8A==", - "license": "MIT", + "version": "7.26.0", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-attributes/-/plugin-syntax-import-attributes-7.26.0.tgz", + "integrity": "sha512-e2dttdsJ1ZTpi3B9UYGLw41hifAubg19AtCu/2I/F1QNVclOBr1dYpTdmdyZ84Xiz43BS/tCUkMAZNLv12Pi+A==", "dependencies": { - "@babel/helper-plugin-utils": "^7.24.7" + "@babel/helper-plugin-utils": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -954,139 +753,12 @@ "@babel/core": "^7.0.0-0" } }, - "node_modules/@babel/plugin-syntax-import-meta": { - "version": "7.10.4", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz", - "integrity": "sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.10.4" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-json-strings": { - "version": "7.8.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz", - "integrity": "sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.8.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, "node_modules/@babel/plugin-syntax-jsx": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.24.7.tgz", - "integrity": "sha512-6ddciUPe/mpMnOKv/U+RSd2vvVy+Yw/JfBB0ZHYjEZt9NLHmCUylNYlsbqCCS1Bffjlb0fCwC9Vqz+sBz6PsiQ==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.25.9.tgz", + "integrity": "sha512-ld6oezHQMZsZfp6pWtbjaNDF2tiiCYYDqQszHt5VV437lewP9aSi2Of99CK0D0XB21k7FLgnLcmQKyKzynfeAA==", "dependencies": { - "@babel/helper-plugin-utils": "^7.24.7" - 
}, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-logical-assignment-operators": { - "version": "7.10.4", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz", - "integrity": "sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.10.4" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-nullish-coalescing-operator": { - "version": "7.8.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz", - "integrity": "sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.8.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-numeric-separator": { - "version": "7.10.4", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz", - "integrity": "sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.10.4" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-object-rest-spread": { - "version": "7.8.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz", - "integrity": "sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.8.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-optional-catch-binding": { - "version": "7.8.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz", - "integrity": "sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.8.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-optional-chaining": { - "version": "7.8.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz", - "integrity": "sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.8.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-private-property-in-object": { - "version": "7.14.5", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-private-property-in-object/-/plugin-syntax-private-property-in-object-7.14.5.tgz", - "integrity": "sha512-0wVnp9dxJ72ZUJDV27ZfbSj6iHLoytYZmh3rFcxNnvsJF3ktkzLDZPy/mA17HGsaQT3/DQsWYX1f1QGWkCoVUg==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.14.5" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - 
"node_modules/@babel/plugin-syntax-top-level-await": { - "version": "7.14.5", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz", - "integrity": "sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.14.5" + "@babel/helper-plugin-utils": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -1096,12 +768,11 @@ } }, "node_modules/@babel/plugin-syntax-typescript": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.24.7.tgz", - "integrity": "sha512-c/+fVeJBB0FeKsFvwytYiUD+LBvhHjGSI0g446PRGdSVGZLRNArBUno2PETbAly3tpiNAQR5XaZ+JslxkotsbA==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.25.9.tgz", + "integrity": "sha512-hjMgRy5hb8uJJjUcdWunWVcoi9bGpJp8p5Ol1229PoN6aytsLwNMgmdftO23wnCLMfVmTwZDWMPNq/D1SY60JQ==", "dependencies": { - "@babel/helper-plugin-utils": "^7.24.7" + "@babel/helper-plugin-utils": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -1127,12 +798,11 @@ } }, "node_modules/@babel/plugin-transform-arrow-functions": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.24.7.tgz", - "integrity": "sha512-Dt9LQs6iEY++gXUwY03DNFat5C2NbO48jj+j/bSAz6b3HgPs39qcPiYt77fDObIcFwj3/C2ICX9YMwGflUoSHQ==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.25.9.tgz", + "integrity": "sha512-6jmooXYIwn9ca5/RylZADJ+EnSxVUS5sjeJ9UPk6RWRzXCmOJCy6dqItPJFpw2cuCangPK4OYr5uhGKcmrm5Qg==", "dependencies": { - "@babel/helper-plugin-utils": "^7.24.7" + "@babel/helper-plugin-utils": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -1142,15 +812,13 @@ } }, "node_modules/@babel/plugin-transform-async-generator-functions": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-async-generator-functions/-/plugin-transform-async-generator-functions-7.24.7.tgz", - "integrity": "sha512-o+iF77e3u7ZS4AoAuJvapz9Fm001PuD2V3Lp6OSE4FYQke+cSewYtnek+THqGRWyQloRCyvWL1OkyfNEl9vr/g==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-async-generator-functions/-/plugin-transform-async-generator-functions-7.25.9.tgz", + "integrity": "sha512-RXV6QAzTBbhDMO9fWwOmwwTuYaiPbggWQ9INdZqAYeSHyG7FzQ+nOZaUUjNwKv9pV3aE4WFqFm1Hnbci5tBCAw==", "dependencies": { - "@babel/helper-environment-visitor": "^7.24.7", - "@babel/helper-plugin-utils": "^7.24.7", - "@babel/helper-remap-async-to-generator": "^7.24.7", - "@babel/plugin-syntax-async-generators": "^7.8.4" + "@babel/helper-plugin-utils": "^7.25.9", + "@babel/helper-remap-async-to-generator": "^7.25.9", + "@babel/traverse": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -1160,14 +828,13 @@ } }, "node_modules/@babel/plugin-transform-async-to-generator": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.24.7.tgz", - "integrity": "sha512-SQY01PcJfmQ+4Ash7NE+rpbLFbmqA2GPIgqzxfFTL4t1FKRq4zTms/7htKpoCUI9OcFYgzqfmCdH53s6/jn5fA==", - "license": "MIT", + "version": "7.25.9", + "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.25.9.tgz", + "integrity": "sha512-NT7Ejn7Z/LjUH0Gv5KsBCxh7BH3fbLTV0ptHvpeMvrt3cPThHfJfst9Wrb7S8EvJ7vRTFI7z+VAvFVEQn/m5zQ==", "dependencies": { - "@babel/helper-module-imports": "^7.24.7", - "@babel/helper-plugin-utils": "^7.24.7", - "@babel/helper-remap-async-to-generator": "^7.24.7" + "@babel/helper-module-imports": "^7.25.9", + "@babel/helper-plugin-utils": "^7.25.9", + "@babel/helper-remap-async-to-generator": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -1177,12 +844,11 @@ } }, "node_modules/@babel/plugin-transform-block-scoped-functions": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.24.7.tgz", - "integrity": "sha512-yO7RAz6EsVQDaBH18IDJcMB1HnrUn2FJ/Jslc/WtPPWcjhpUJXU/rjbwmluzp7v/ZzWcEhTMXELnnsz8djWDwQ==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.25.9.tgz", + "integrity": "sha512-toHc9fzab0ZfenFpsyYinOX0J/5dgJVA2fm64xPewu7CoYHWEivIWKxkK2rMi4r3yQqLnVmheMXRdG+k239CgA==", "dependencies": { - "@babel/helper-plugin-utils": "^7.24.7" + "@babel/helper-plugin-utils": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -1192,12 +858,11 @@ } }, "node_modules/@babel/plugin-transform-block-scoping": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.24.7.tgz", - "integrity": "sha512-Nd5CvgMbWc+oWzBsuaMcbwjJWAcp5qzrbg69SZdHSP7AMY0AbWFqFO0WTFCA1jxhMCwodRwvRec8k0QUbZk7RQ==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.25.9.tgz", + "integrity": "sha512-1F05O7AYjymAtqbsFETboN1NvBdcnzMerO+zlMyJBEz6WkMdejvGWw9p05iTSjC85RLlBseHHQpYaM4gzJkBGg==", "dependencies": { - "@babel/helper-plugin-utils": "^7.24.7" + "@babel/helper-plugin-utils": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -1207,13 +872,12 @@ } }, "node_modules/@babel/plugin-transform-class-properties": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-class-properties/-/plugin-transform-class-properties-7.24.7.tgz", - "integrity": "sha512-vKbfawVYayKcSeSR5YYzzyXvsDFWU2mD8U5TFeXtbCPLFUqe7GyCgvO6XDHzje862ODrOwy6WCPmKeWHbCFJ4w==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-class-properties/-/plugin-transform-class-properties-7.25.9.tgz", + "integrity": "sha512-bbMAII8GRSkcd0h0b4X+36GksxuheLFjP65ul9w6C3KgAamI3JqErNgSrosX6ZPj+Mpim5VvEbawXxJCyEUV3Q==", "dependencies": { - "@babel/helper-create-class-features-plugin": "^7.24.7", - "@babel/helper-plugin-utils": "^7.24.7" + "@babel/helper-create-class-features-plugin": "^7.25.9", + "@babel/helper-plugin-utils": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -1223,14 +887,12 @@ } }, "node_modules/@babel/plugin-transform-class-static-block": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-class-static-block/-/plugin-transform-class-static-block-7.24.7.tgz", - "integrity": "sha512-HMXK3WbBPpZQufbMG4B46A90PkuuhN9vBCb5T8+VAHqvAqvcLi+2cKoukcpmUYkszLhScU3l1iudhrks3DggRQ==", - "license": "MIT", + "version": "7.26.0", + "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-class-static-block/-/plugin-transform-class-static-block-7.26.0.tgz", + "integrity": "sha512-6J2APTs7BDDm+UMqP1useWqhcRAXo0WIoVj26N7kPFB6S73Lgvyka4KTZYIxtgYXiN5HTyRObA72N2iu628iTQ==", "dependencies": { - "@babel/helper-create-class-features-plugin": "^7.24.7", - "@babel/helper-plugin-utils": "^7.24.7", - "@babel/plugin-syntax-class-static-block": "^7.14.5" + "@babel/helper-create-class-features-plugin": "^7.25.9", + "@babel/helper-plugin-utils": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -1240,18 +902,15 @@ } }, "node_modules/@babel/plugin-transform-classes": { - "version": "7.24.8", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-classes/-/plugin-transform-classes-7.24.8.tgz", - "integrity": "sha512-VXy91c47uujj758ud9wx+OMgheXm4qJfyhj1P18YvlrQkNOSrwsteHk+EFS3OMGfhMhpZa0A+81eE7G4QC+3CA==", - "license": "MIT", - "dependencies": { - "@babel/helper-annotate-as-pure": "^7.24.7", - "@babel/helper-compilation-targets": "^7.24.8", - "@babel/helper-environment-visitor": "^7.24.7", - "@babel/helper-function-name": "^7.24.7", - "@babel/helper-plugin-utils": "^7.24.8", - "@babel/helper-replace-supers": "^7.24.7", - "@babel/helper-split-export-declaration": "^7.24.7", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-classes/-/plugin-transform-classes-7.25.9.tgz", + "integrity": "sha512-mD8APIXmseE7oZvZgGABDyM34GUmK45Um2TXiBUt7PnuAxrgoSVf123qUzPxEr/+/BHrRn5NMZCdE2m/1F8DGg==", + "dependencies": { + "@babel/helper-annotate-as-pure": "^7.25.9", + "@babel/helper-compilation-targets": "^7.25.9", + "@babel/helper-plugin-utils": "^7.25.9", + "@babel/helper-replace-supers": "^7.25.9", + "@babel/traverse": "^7.25.9", "globals": "^11.1.0" }, "engines": { @@ -1262,13 +921,12 @@ } }, "node_modules/@babel/plugin-transform-computed-properties": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.24.7.tgz", - "integrity": "sha512-25cS7v+707Gu6Ds2oY6tCkUwsJ9YIDbggd9+cu9jzzDgiNq7hR/8dkzxWfKWnTic26vsI3EsCXNd4iEB6e8esQ==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.25.9.tgz", + "integrity": "sha512-HnBegGqXZR12xbcTHlJ9HGxw1OniltT26J5YpfruGqtUHlz/xKf/G2ak9e+t0rVqrjXa9WOhvYPz1ERfMj23AA==", "dependencies": { - "@babel/helper-plugin-utils": "^7.24.7", - "@babel/template": "^7.24.7" + "@babel/helper-plugin-utils": "^7.25.9", + "@babel/template": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -1278,12 +936,11 @@ } }, "node_modules/@babel/plugin-transform-destructuring": { - "version": "7.24.8", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.24.8.tgz", - "integrity": "sha512-36e87mfY8TnRxc7yc6M9g9gOB7rKgSahqkIKwLpz4Ppk2+zC2Cy1is0uwtuSG6AE4zlTOUa+7JGz9jCJGLqQFQ==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.25.9.tgz", + "integrity": "sha512-WkCGb/3ZxXepmMiX101nnGiU+1CAdut8oHyEOHxkKuS1qKpU2SMXE2uSvfz8PBuLd49V6LEsbtyPhWC7fnkgvQ==", "dependencies": { - "@babel/helper-plugin-utils": "^7.24.8" + "@babel/helper-plugin-utils": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -1293,13 +950,12 @@ } }, "node_modules/@babel/plugin-transform-dotall-regex": { - "version": "7.24.7", - "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.24.7.tgz", - "integrity": "sha512-ZOA3W+1RRTSWvyqcMJDLqbchh7U4NRGqwRfFSVbOLS/ePIP4vHB5e8T8eXcuqyN1QkgKyj5wuW0lcS85v4CrSw==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.25.9.tgz", + "integrity": "sha512-t7ZQ7g5trIgSRYhI9pIJtRl64KHotutUJsh4Eze5l7olJv+mRSg4/MmbZ0tv1eeqRbdvo/+trvJD/Oc5DmW2cA==", "dependencies": { - "@babel/helper-create-regexp-features-plugin": "^7.24.7", - "@babel/helper-plugin-utils": "^7.24.7" + "@babel/helper-create-regexp-features-plugin": "^7.25.9", + "@babel/helper-plugin-utils": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -1309,12 +965,11 @@ } }, "node_modules/@babel/plugin-transform-duplicate-keys": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.24.7.tgz", - "integrity": "sha512-JdYfXyCRihAe46jUIliuL2/s0x0wObgwwiGxw/UbgJBr20gQBThrokO4nYKgWkD7uBaqM7+9x5TU7NkExZJyzw==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.25.9.tgz", + "integrity": "sha512-LZxhJ6dvBb/f3x8xwWIuyiAHy56nrRG3PeYTpBkkzkYRRQ6tJLu68lEF5VIqMUZiAV7a8+Tb78nEoMCMcqjXBw==", "dependencies": { - "@babel/helper-plugin-utils": "^7.24.7" + "@babel/helper-plugin-utils": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -1323,14 +978,27 @@ "@babel/core": "^7.0.0-0" } }, + "node_modules/@babel/plugin-transform-duplicate-named-capturing-groups-regex": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-duplicate-named-capturing-groups-regex/-/plugin-transform-duplicate-named-capturing-groups-regex-7.25.9.tgz", + "integrity": "sha512-0UfuJS0EsXbRvKnwcLjFtJy/Sxc5J5jhLHnFhy7u4zih97Hz6tJkLU+O+FMMrNZrosUPxDi6sYxJ/EA8jDiAog==", + "dependencies": { + "@babel/helper-create-regexp-features-plugin": "^7.25.9", + "@babel/helper-plugin-utils": "^7.25.9" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, "node_modules/@babel/plugin-transform-dynamic-import": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-dynamic-import/-/plugin-transform-dynamic-import-7.24.7.tgz", - "integrity": "sha512-sc3X26PhZQDb3JhORmakcbvkeInvxz+A8oda99lj7J60QRuPZvNAk9wQlTBS1ZynelDrDmTU4pw1tyc5d5ZMUg==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-dynamic-import/-/plugin-transform-dynamic-import-7.25.9.tgz", + "integrity": "sha512-GCggjexbmSLaFhqsojeugBpeaRIgWNTcgKVq/0qIteFEqY2A+b9QidYadrWlnbWQUrW5fn+mCvf3tr7OeBFTyg==", "dependencies": { - "@babel/helper-plugin-utils": "^7.24.7", - "@babel/plugin-syntax-dynamic-import": "^7.8.3" + "@babel/helper-plugin-utils": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -1340,13 +1008,12 @@ } }, "node_modules/@babel/plugin-transform-exponentiation-operator": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.24.7.tgz", - "integrity": "sha512-Rqe/vSc9OYgDajNIK35u7ot+KeCoetqQYFXM4Epf7M7ez3lWlOjrDjrwMei6caCVhfdw+mIKD4cgdGNy5JQotQ==", - "license": "MIT", + "version": "7.25.9", + "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.25.9.tgz", + "integrity": "sha512-KRhdhlVk2nObA5AYa7QMgTMTVJdfHprfpAk4DjZVtllqRg9qarilstTKEhpVjyt+Npi8ThRyiV8176Am3CodPA==", "dependencies": { - "@babel/helper-builder-binary-assignment-operator-visitor": "^7.24.7", - "@babel/helper-plugin-utils": "^7.24.7" + "@babel/helper-builder-binary-assignment-operator-visitor": "^7.25.9", + "@babel/helper-plugin-utils": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -1356,13 +1023,11 @@ } }, "node_modules/@babel/plugin-transform-export-namespace-from": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-export-namespace-from/-/plugin-transform-export-namespace-from-7.24.7.tgz", - "integrity": "sha512-v0K9uNYsPL3oXZ/7F9NNIbAj2jv1whUEtyA6aujhekLs56R++JDQuzRcP2/z4WX5Vg/c5lE9uWZA0/iUoFhLTA==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-export-namespace-from/-/plugin-transform-export-namespace-from-7.25.9.tgz", + "integrity": "sha512-2NsEz+CxzJIVOPx2o9UsW1rXLqtChtLoVnwYHHiB04wS5sgn7mrV45fWMBX0Kk+ub9uXytVYfNP2HjbVbCB3Ww==", "dependencies": { - "@babel/helper-plugin-utils": "^7.24.7", - "@babel/plugin-syntax-export-namespace-from": "^7.8.3" + "@babel/helper-plugin-utils": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -1372,13 +1037,12 @@ } }, "node_modules/@babel/plugin-transform-for-of": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.24.7.tgz", - "integrity": "sha512-wo9ogrDG1ITTTBsy46oGiN1dS9A7MROBTcYsfS8DtsImMkHk9JXJ3EWQM6X2SUw4x80uGPlwj0o00Uoc6nEE3g==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.25.9.tgz", + "integrity": "sha512-LqHxduHoaGELJl2uhImHwRQudhCM50pT46rIBNvtT/Oql3nqiS3wOwP+5ten7NpYSXrrVLgtZU3DZmPtWZo16A==", "dependencies": { - "@babel/helper-plugin-utils": "^7.24.7", - "@babel/helper-skip-transparent-expression-wrappers": "^7.24.7" + "@babel/helper-plugin-utils": "^7.25.9", + "@babel/helper-skip-transparent-expression-wrappers": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -1388,14 +1052,13 @@ } }, "node_modules/@babel/plugin-transform-function-name": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.24.7.tgz", - "integrity": "sha512-U9FcnA821YoILngSmYkW6FjyQe2TyZD5pHt4EVIhmcTkrJw/3KqcrRSxuOo5tFZJi7TE19iDyI1u+weTI7bn2w==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.25.9.tgz", + "integrity": "sha512-8lP+Yxjv14Vc5MuWBpJsoUCd3hD6V9DgBon2FVYL4jJgbnVQ9fTgYmonchzZJOVNgzEgbxp4OwAf6xz6M/14XA==", "dependencies": { - "@babel/helper-compilation-targets": "^7.24.7", - "@babel/helper-function-name": "^7.24.7", - "@babel/helper-plugin-utils": "^7.24.7" + "@babel/helper-compilation-targets": "^7.25.9", + "@babel/helper-plugin-utils": "^7.25.9", + "@babel/traverse": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -1405,13 +1068,11 @@ } }, "node_modules/@babel/plugin-transform-json-strings": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-json-strings/-/plugin-transform-json-strings-7.24.7.tgz", - "integrity": "sha512-2yFnBGDvRuxAaE/f0vfBKvtnvvqU8tGpMHqMNpTN2oWMKIR3NqFkjaAgGwawhqK/pIN2T3XdjGPdaG0vDhOBGw==", - 
"license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-json-strings/-/plugin-transform-json-strings-7.25.9.tgz", + "integrity": "sha512-xoTMk0WXceiiIvsaquQQUaLLXSW1KJ159KP87VilruQm0LNNGxWzahxSS6T6i4Zg3ezp4vA4zuwiNUR53qmQAw==", "dependencies": { - "@babel/helper-plugin-utils": "^7.24.7", - "@babel/plugin-syntax-json-strings": "^7.8.3" + "@babel/helper-plugin-utils": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -1421,12 +1082,11 @@ } }, "node_modules/@babel/plugin-transform-literals": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-literals/-/plugin-transform-literals-7.24.7.tgz", - "integrity": "sha512-vcwCbb4HDH+hWi8Pqenwnjy+UiklO4Kt1vfspcQYFhJdpthSnW8XvWGyDZWKNVrVbVViI/S7K9PDJZiUmP2fYQ==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-literals/-/plugin-transform-literals-7.25.9.tgz", + "integrity": "sha512-9N7+2lFziW8W9pBl2TzaNht3+pgMIRP74zizeCSrtnSKVdUl8mAjjOP2OOVQAfZ881P2cNjDj1uAMEdeD50nuQ==", "dependencies": { - "@babel/helper-plugin-utils": "^7.24.7" + "@babel/helper-plugin-utils": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -1436,13 +1096,11 @@ } }, "node_modules/@babel/plugin-transform-logical-assignment-operators": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-logical-assignment-operators/-/plugin-transform-logical-assignment-operators-7.24.7.tgz", - "integrity": "sha512-4D2tpwlQ1odXmTEIFWy9ELJcZHqrStlzK/dAOWYyxX3zT0iXQB6banjgeOJQXzEc4S0E0a5A+hahxPaEFYftsw==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-logical-assignment-operators/-/plugin-transform-logical-assignment-operators-7.25.9.tgz", + "integrity": "sha512-wI4wRAzGko551Y8eVf6iOY9EouIDTtPb0ByZx+ktDGHwv6bHFimrgJM/2T021txPZ2s4c7bqvHbd+vXG6K948Q==", "dependencies": { - "@babel/helper-plugin-utils": "^7.24.7", - "@babel/plugin-syntax-logical-assignment-operators": "^7.10.4" + "@babel/helper-plugin-utils": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -1452,12 +1110,11 @@ } }, "node_modules/@babel/plugin-transform-member-expression-literals": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.24.7.tgz", - "integrity": "sha512-T/hRC1uqrzXMKLQ6UCwMT85S3EvqaBXDGf0FaMf4446Qx9vKwlghvee0+uuZcDUCZU5RuNi4781UQ7R308zzBw==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.25.9.tgz", + "integrity": "sha512-PYazBVfofCQkkMzh2P6IdIUaCEWni3iYEerAsRWuVd8+jlM1S9S9cz1dF9hIzyoZ8IA3+OwVYIp9v9e+GbgZhA==", "dependencies": { - "@babel/helper-plugin-utils": "^7.24.7" + "@babel/helper-plugin-utils": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -1467,13 +1124,12 @@ } }, "node_modules/@babel/plugin-transform-modules-amd": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.24.7.tgz", - "integrity": "sha512-9+pB1qxV3vs/8Hdmz/CulFB8w2tuu6EB94JZFsjdqxQokwGa9Unap7Bo2gGBGIvPmDIVvQrom7r5m/TCDMURhg==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.25.9.tgz", + "integrity": 
"sha512-g5T11tnI36jVClQlMlt4qKDLlWnG5pP9CSM4GhdRciTNMRgkfpo5cR6b4rGIOYPgRRuFAvwjPQ/Yk+ql4dyhbw==", "dependencies": { - "@babel/helper-module-transforms": "^7.24.7", - "@babel/helper-plugin-utils": "^7.24.7" + "@babel/helper-module-transforms": "^7.25.9", + "@babel/helper-plugin-utils": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -1483,14 +1139,13 @@ } }, "node_modules/@babel/plugin-transform-modules-commonjs": { - "version": "7.24.8", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.24.8.tgz", - "integrity": "sha512-WHsk9H8XxRs3JXKWFiqtQebdh9b/pTk4EgueygFzYlTKAg0Ud985mSevdNjdXdFBATSKVJGQXP1tv6aGbssLKA==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.25.9.tgz", + "integrity": "sha512-dwh2Ol1jWwL2MgkCzUSOvfmKElqQcuswAZypBSUsScMXvgdT8Ekq5YA6TtqpTVWH+4903NmboMuH1o9i8Rxlyg==", "dependencies": { - "@babel/helper-module-transforms": "^7.24.8", - "@babel/helper-plugin-utils": "^7.24.8", - "@babel/helper-simple-access": "^7.24.7" + "@babel/helper-module-transforms": "^7.25.9", + "@babel/helper-plugin-utils": "^7.25.9", + "@babel/helper-simple-access": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -1500,15 +1155,14 @@ } }, "node_modules/@babel/plugin-transform-modules-systemjs": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.24.7.tgz", - "integrity": "sha512-GYQE0tW7YoaN13qFh3O1NCY4MPkUiAH3fiF7UcV/I3ajmDKEdG3l+UOcbAm4zUE3gnvUU+Eni7XrVKo9eO9auw==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.25.9.tgz", + "integrity": "sha512-hyss7iIlH/zLHaehT+xwiymtPOpsiwIIRlCAOwBB04ta5Tt+lNItADdlXw3jAWZ96VJ2jlhl/c+PNIQPKNfvcA==", "dependencies": { - "@babel/helper-hoist-variables": "^7.24.7", - "@babel/helper-module-transforms": "^7.24.7", - "@babel/helper-plugin-utils": "^7.24.7", - "@babel/helper-validator-identifier": "^7.24.7" + "@babel/helper-module-transforms": "^7.25.9", + "@babel/helper-plugin-utils": "^7.25.9", + "@babel/helper-validator-identifier": "^7.25.9", + "@babel/traverse": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -1518,13 +1172,12 @@ } }, "node_modules/@babel/plugin-transform-modules-umd": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.24.7.tgz", - "integrity": "sha512-3aytQvqJ/h9z4g8AsKPLvD4Zqi2qT+L3j7XoFFu1XBlZWEl2/1kWnhmAbxpLgPrHSY0M6UA02jyTiwUVtiKR6A==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.25.9.tgz", + "integrity": "sha512-bS9MVObUgE7ww36HEfwe6g9WakQ0KF07mQF74uuXdkoziUPfKyu/nIm663kz//e5O1nPInPFx36z7WJmJ4yNEw==", "dependencies": { - "@babel/helper-module-transforms": "^7.24.7", - "@babel/helper-plugin-utils": "^7.24.7" + "@babel/helper-module-transforms": "^7.25.9", + "@babel/helper-plugin-utils": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -1534,13 +1187,12 @@ } }, "node_modules/@babel/plugin-transform-named-capturing-groups-regex": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.24.7.tgz", - "integrity": 
"sha512-/jr7h/EWeJtk1U/uz2jlsCioHkZk1JJZVcc8oQsJ1dUlaJD83f4/6Zeh2aHt9BIFokHIsSeDfhUmju0+1GPd6g==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.25.9.tgz", + "integrity": "sha512-oqB6WHdKTGl3q/ItQhpLSnWWOpjUJLsOCLVyeFgeTktkBSCiurvPOsyt93gibI9CmuKvTUEtWmG5VhZD+5T/KA==", "dependencies": { - "@babel/helper-create-regexp-features-plugin": "^7.24.7", - "@babel/helper-plugin-utils": "^7.24.7" + "@babel/helper-create-regexp-features-plugin": "^7.25.9", + "@babel/helper-plugin-utils": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -1550,12 +1202,11 @@ } }, "node_modules/@babel/plugin-transform-new-target": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.24.7.tgz", - "integrity": "sha512-RNKwfRIXg4Ls/8mMTza5oPF5RkOW8Wy/WgMAp1/F1yZ8mMbtwXW+HDoJiOsagWrAhI5f57Vncrmr9XeT4CVapA==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.25.9.tgz", + "integrity": "sha512-U/3p8X1yCSoKyUj2eOBIx3FOn6pElFOKvAAGf8HTtItuPyB+ZeOqfn+mvTtg9ZlOAjsPdK3ayQEjqHjU/yLeVQ==", "dependencies": { - "@babel/helper-plugin-utils": "^7.24.7" + "@babel/helper-plugin-utils": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -1565,13 +1216,11 @@ } }, "node_modules/@babel/plugin-transform-nullish-coalescing-operator": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-nullish-coalescing-operator/-/plugin-transform-nullish-coalescing-operator-7.24.7.tgz", - "integrity": "sha512-Ts7xQVk1OEocqzm8rHMXHlxvsfZ0cEF2yomUqpKENHWMF4zKk175Y4q8H5knJes6PgYad50uuRmt3UJuhBw8pQ==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-nullish-coalescing-operator/-/plugin-transform-nullish-coalescing-operator-7.25.9.tgz", + "integrity": "sha512-ENfftpLZw5EItALAD4WsY/KUWvhUlZndm5GC7G3evUsVeSJB6p0pBeLQUnRnBCBx7zV0RKQjR9kCuwrsIrjWog==", "dependencies": { - "@babel/helper-plugin-utils": "^7.24.7", - "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.3" + "@babel/helper-plugin-utils": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -1581,13 +1230,11 @@ } }, "node_modules/@babel/plugin-transform-numeric-separator": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-numeric-separator/-/plugin-transform-numeric-separator-7.24.7.tgz", - "integrity": "sha512-e6q1TiVUzvH9KRvicuxdBTUj4AdKSRwzIyFFnfnezpCfP2/7Qmbb8qbU2j7GODbl4JMkblitCQjKYUaX/qkkwA==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-numeric-separator/-/plugin-transform-numeric-separator-7.25.9.tgz", + "integrity": "sha512-TlprrJ1GBZ3r6s96Yq8gEQv82s8/5HnCVHtEJScUj90thHQbwe+E5MLhi2bbNHBEJuzrvltXSru+BUxHDoog7Q==", "dependencies": { - "@babel/helper-plugin-utils": "^7.24.7", - "@babel/plugin-syntax-numeric-separator": "^7.10.4" + "@babel/helper-plugin-utils": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -1597,15 +1244,13 @@ } }, "node_modules/@babel/plugin-transform-object-rest-spread": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-object-rest-spread/-/plugin-transform-object-rest-spread-7.24.7.tgz", - "integrity": "sha512-4QrHAr0aXQCEFni2q4DqKLD31n2DL+RxcwnNjDFkSG0eNQ/xCavnRkfCUjsyqGC2OviNJvZOF/mQqZBw7i2C5Q==", - "license": "MIT", + 
"version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-object-rest-spread/-/plugin-transform-object-rest-spread-7.25.9.tgz", + "integrity": "sha512-fSaXafEE9CVHPweLYw4J0emp1t8zYTXyzN3UuG+lylqkvYd7RMrsOQ8TYx5RF231be0vqtFC6jnx3UmpJmKBYg==", "dependencies": { - "@babel/helper-compilation-targets": "^7.24.7", - "@babel/helper-plugin-utils": "^7.24.7", - "@babel/plugin-syntax-object-rest-spread": "^7.8.3", - "@babel/plugin-transform-parameters": "^7.24.7" + "@babel/helper-compilation-targets": "^7.25.9", + "@babel/helper-plugin-utils": "^7.25.9", + "@babel/plugin-transform-parameters": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -1615,13 +1260,12 @@ } }, "node_modules/@babel/plugin-transform-object-super": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.24.7.tgz", - "integrity": "sha512-A/vVLwN6lBrMFmMDmPPz0jnE6ZGx7Jq7d6sT/Ev4H65RER6pZ+kczlf1DthF5N0qaPHBsI7UXiE8Zy66nmAovg==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.25.9.tgz", + "integrity": "sha512-Kj/Gh+Rw2RNLbCK1VAWj2U48yxxqL2x0k10nPtSdRa0O2xnHXalD0s+o1A6a0W43gJ00ANo38jxkQreckOzv5A==", "dependencies": { - "@babel/helper-plugin-utils": "^7.24.7", - "@babel/helper-replace-supers": "^7.24.7" + "@babel/helper-plugin-utils": "^7.25.9", + "@babel/helper-replace-supers": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -1631,13 +1275,11 @@ } }, "node_modules/@babel/plugin-transform-optional-catch-binding": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-optional-catch-binding/-/plugin-transform-optional-catch-binding-7.24.7.tgz", - "integrity": "sha512-uLEndKqP5BfBbC/5jTwPxLh9kqPWWgzN/f8w6UwAIirAEqiIVJWWY312X72Eub09g5KF9+Zn7+hT7sDxmhRuKA==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-optional-catch-binding/-/plugin-transform-optional-catch-binding-7.25.9.tgz", + "integrity": "sha512-qM/6m6hQZzDcZF3onzIhZeDHDO43bkNNlOX0i8n3lR6zLbu0GN2d8qfM/IERJZYauhAHSLHy39NF0Ctdvcid7g==", "dependencies": { - "@babel/helper-plugin-utils": "^7.24.7", - "@babel/plugin-syntax-optional-catch-binding": "^7.8.3" + "@babel/helper-plugin-utils": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -1647,14 +1289,12 @@ } }, "node_modules/@babel/plugin-transform-optional-chaining": { - "version": "7.24.8", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-optional-chaining/-/plugin-transform-optional-chaining-7.24.8.tgz", - "integrity": "sha512-5cTOLSMs9eypEy8JUVvIKOu6NgvbJMnpG62VpIHrTmROdQ+L5mDAaI40g25k5vXti55JWNX5jCkq3HZxXBQANw==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-optional-chaining/-/plugin-transform-optional-chaining-7.25.9.tgz", + "integrity": "sha512-6AvV0FsLULbpnXeBjrY4dmWF8F7gf8QnvTEoO/wX/5xm/xE1Xo8oPuD3MPS+KS9f9XBEAWN7X1aWr4z9HdOr7A==", "dependencies": { - "@babel/helper-plugin-utils": "^7.24.8", - "@babel/helper-skip-transparent-expression-wrappers": "^7.24.7", - "@babel/plugin-syntax-optional-chaining": "^7.8.3" + "@babel/helper-plugin-utils": "^7.25.9", + "@babel/helper-skip-transparent-expression-wrappers": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -1664,12 +1304,11 @@ } }, "node_modules/@babel/plugin-transform-parameters": { - "version": "7.24.7", - "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.24.7.tgz", - "integrity": "sha512-yGWW5Rr+sQOhK0Ot8hjDJuxU3XLRQGflvT4lhlSY0DFvdb3TwKaY26CJzHtYllU0vT9j58hc37ndFPsqT1SrzA==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.25.9.tgz", + "integrity": "sha512-wzz6MKwpnshBAiRmn4jR8LYz/g8Ksg0o80XmwZDlordjwEk9SxBzTWC7F5ef1jhbrbOW2DJ5J6ayRukrJmnr0g==", "dependencies": { - "@babel/helper-plugin-utils": "^7.24.7" + "@babel/helper-plugin-utils": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -1679,13 +1318,12 @@ } }, "node_modules/@babel/plugin-transform-private-methods": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-private-methods/-/plugin-transform-private-methods-7.24.7.tgz", - "integrity": "sha512-COTCOkG2hn4JKGEKBADkA8WNb35TGkkRbI5iT845dB+NyqgO8Hn+ajPbSnIQznneJTa3d30scb6iz/DhH8GsJQ==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-private-methods/-/plugin-transform-private-methods-7.25.9.tgz", + "integrity": "sha512-D/JUozNpQLAPUVusvqMxyvjzllRaF8/nSrP1s2YGQT/W4LHK4xxsMcHjhOGTS01mp9Hda8nswb+FblLdJornQw==", "dependencies": { - "@babel/helper-create-class-features-plugin": "^7.24.7", - "@babel/helper-plugin-utils": "^7.24.7" + "@babel/helper-create-class-features-plugin": "^7.25.9", + "@babel/helper-plugin-utils": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -1695,15 +1333,13 @@ } }, "node_modules/@babel/plugin-transform-private-property-in-object": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-private-property-in-object/-/plugin-transform-private-property-in-object-7.24.7.tgz", - "integrity": "sha512-9z76mxwnwFxMyxZWEgdgECQglF2Q7cFLm0kMf8pGwt+GSJsY0cONKj/UuO4bOH0w/uAel3ekS4ra5CEAyJRmDA==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-private-property-in-object/-/plugin-transform-private-property-in-object-7.25.9.tgz", + "integrity": "sha512-Evf3kcMqzXA3xfYJmZ9Pg1OvKdtqsDMSWBDzZOPLvHiTt36E75jLDQo5w1gtRU95Q4E5PDttrTf25Fw8d/uWLw==", "dependencies": { - "@babel/helper-annotate-as-pure": "^7.24.7", - "@babel/helper-create-class-features-plugin": "^7.24.7", - "@babel/helper-plugin-utils": "^7.24.7", - "@babel/plugin-syntax-private-property-in-object": "^7.14.5" + "@babel/helper-annotate-as-pure": "^7.25.9", + "@babel/helper-create-class-features-plugin": "^7.25.9", + "@babel/helper-plugin-utils": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -1713,12 +1349,11 @@ } }, "node_modules/@babel/plugin-transform-property-literals": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.24.7.tgz", - "integrity": "sha512-EMi4MLQSHfd2nrCqQEWxFdha2gBCqU4ZcCng4WBGZ5CJL4bBRW0ptdqqDdeirGZcpALazVVNJqRmsO8/+oNCBA==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.25.9.tgz", + "integrity": "sha512-IvIUeV5KrS/VPavfSM/Iu+RE6llrHrYIKY1yfCzyO/lMXHQ+p7uGhonmGVisv6tSBSVgWzMBohTcvkC9vQcQFA==", "dependencies": { - "@babel/helper-plugin-utils": "^7.24.7" + "@babel/helper-plugin-utils": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -1743,12 +1378,11 @@ } }, "node_modules/@babel/plugin-transform-react-display-name": { - "version": "7.24.7", - "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-react-display-name/-/plugin-transform-react-display-name-7.24.7.tgz", - "integrity": "sha512-H/Snz9PFxKsS1JLI4dJLtnJgCJRoo0AUm3chP6NYr+9En1JMKloheEiLIhlp5MDVznWo+H3AAC1Mc8lmUEpsgg==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-display-name/-/plugin-transform-react-display-name-7.25.9.tgz", + "integrity": "sha512-KJfMlYIUxQB1CJfO3e0+h0ZHWOTLCPP115Awhaz8U0Zpq36Gl/cXlpoyMRnUWlhNUBAzldnCiAZNvCDj7CrKxQ==", "dependencies": { - "@babel/helper-plugin-utils": "^7.24.7" + "@babel/helper-plugin-utils": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -1758,16 +1392,15 @@ } }, "node_modules/@babel/plugin-transform-react-jsx": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx/-/plugin-transform-react-jsx-7.24.7.tgz", - "integrity": "sha512-+Dj06GDZEFRYvclU6k4bme55GKBEWUmByM/eoKuqg4zTNQHiApWRhQph5fxQB2wAEFvRzL1tOEj1RJ19wJrhoA==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx/-/plugin-transform-react-jsx-7.25.9.tgz", + "integrity": "sha512-s5XwpQYCqGerXl+Pu6VDL3x0j2d82eiV77UJ8a2mDHAW7j9SWRqQ2y1fNo1Z74CdcYipl5Z41zvjj4Nfzq36rw==", "dependencies": { - "@babel/helper-annotate-as-pure": "^7.24.7", - "@babel/helper-module-imports": "^7.24.7", - "@babel/helper-plugin-utils": "^7.24.7", - "@babel/plugin-syntax-jsx": "^7.24.7", - "@babel/types": "^7.24.7" + "@babel/helper-annotate-as-pure": "^7.25.9", + "@babel/helper-module-imports": "^7.25.9", + "@babel/helper-plugin-utils": "^7.25.9", + "@babel/plugin-syntax-jsx": "^7.25.9", + "@babel/types": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -1777,12 +1410,11 @@ } }, "node_modules/@babel/plugin-transform-react-jsx-development": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-development/-/plugin-transform-react-jsx-development-7.24.7.tgz", - "integrity": "sha512-QG9EnzoGn+Qar7rxuW+ZOsbWOt56FvvI93xInqsZDC5fsekx1AlIO4KIJ5M+D0p0SqSH156EpmZyXq630B8OlQ==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-development/-/plugin-transform-react-jsx-development-7.25.9.tgz", + "integrity": "sha512-9mj6rm7XVYs4mdLIpbZnHOYdpW42uoiBCTVowg7sP1thUOiANgMb4UtpRivR0pp5iL+ocvUv7X4mZgFRpJEzGw==", "dependencies": { - "@babel/plugin-transform-react-jsx": "^7.24.7" + "@babel/plugin-transform-react-jsx": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -1792,13 +1424,12 @@ } }, "node_modules/@babel/plugin-transform-react-pure-annotations": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-pure-annotations/-/plugin-transform-react-pure-annotations-7.24.7.tgz", - "integrity": "sha512-PLgBVk3fzbmEjBJ/u8kFzOqS9tUeDjiaWud/rRym/yjCo/M9cASPlnrd2ZmmZpQT40fOOrvR8jh+n8jikrOhNA==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-pure-annotations/-/plugin-transform-react-pure-annotations-7.25.9.tgz", + "integrity": "sha512-KQ/Takk3T8Qzj5TppkS1be588lkbTp5uj7w6a0LeQaTMSckU/wK0oJ/pih+T690tkgI5jfmg2TqDJvd41Sj1Cg==", "dependencies": { - "@babel/helper-annotate-as-pure": "^7.24.7", - "@babel/helper-plugin-utils": "^7.24.7" + "@babel/helper-annotate-as-pure": "^7.25.9", + "@babel/helper-plugin-utils": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -1808,12 +1439,11 @@ } }, "node_modules/@babel/plugin-transform-regenerator": { - 
"version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.24.7.tgz", - "integrity": "sha512-lq3fvXPdimDrlg6LWBoqj+r/DEWgONuwjuOuQCSYgRroXDH/IdM1C0IZf59fL5cHLpjEH/O6opIRBbqv7ELnuA==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.25.9.tgz", + "integrity": "sha512-vwDcDNsgMPDGP0nMqzahDWE5/MLcX8sv96+wfX7as7LoF/kr97Bo/7fI00lXY4wUXYfVmwIIyG80fGZ1uvt2qg==", "dependencies": { - "@babel/helper-plugin-utils": "^7.24.7", + "@babel/helper-plugin-utils": "^7.25.9", "regenerator-transform": "^0.15.2" }, "engines": { @@ -1823,13 +1453,27 @@ "@babel/core": "^7.0.0-0" } }, + "node_modules/@babel/plugin-transform-regexp-modifiers": { + "version": "7.26.0", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-regexp-modifiers/-/plugin-transform-regexp-modifiers-7.26.0.tgz", + "integrity": "sha512-vN6saax7lrA2yA/Pak3sCxuD6F5InBjn9IcrIKQPjpsLvuHYLVroTxjdlVRHjjBWxKOqIwpTXDkOssYT4BFdRw==", + "dependencies": { + "@babel/helper-create-regexp-features-plugin": "^7.25.9", + "@babel/helper-plugin-utils": "^7.25.9" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, "node_modules/@babel/plugin-transform-reserved-words": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.24.7.tgz", - "integrity": "sha512-0DUq0pHcPKbjFZCfTss/pGkYMfy3vFWydkUBd9r0GHpIyfs2eCDENvqadMycRS9wZCXR41wucAfJHJmwA0UmoQ==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.25.9.tgz", + "integrity": "sha512-7DL7DKYjn5Su++4RXu8puKZm2XBPHyjWLUidaPEkCUBbE7IPcsrkRHggAOOKydH1dASWdcUBxrkOGNxUv5P3Jg==", "dependencies": { - "@babel/helper-plugin-utils": "^7.24.7" + "@babel/helper-plugin-utils": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -1839,15 +1483,14 @@ } }, "node_modules/@babel/plugin-transform-runtime": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.24.7.tgz", - "integrity": "sha512-YqXjrk4C+a1kZjewqt+Mmu2UuV1s07y8kqcUf4qYLnoqemhR4gRQikhdAhSVJioMjVTu6Mo6pAbaypEA3jY6fw==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.25.9.tgz", + "integrity": "sha512-nZp7GlEl+yULJrClz0SwHPqir3lc0zsPrDHQUcxGspSL7AKrexNSEfTbfqnDNJUO13bgKyfuOLMF8Xqtu8j3YQ==", "dependencies": { - "@babel/helper-module-imports": "^7.24.7", - "@babel/helper-plugin-utils": "^7.24.7", + "@babel/helper-module-imports": "^7.25.9", + "@babel/helper-plugin-utils": "^7.25.9", "babel-plugin-polyfill-corejs2": "^0.4.10", - "babel-plugin-polyfill-corejs3": "^0.10.1", + "babel-plugin-polyfill-corejs3": "^0.10.6", "babel-plugin-polyfill-regenerator": "^0.6.1", "semver": "^6.3.1" }, @@ -1868,12 +1511,11 @@ } }, "node_modules/@babel/plugin-transform-shorthand-properties": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.24.7.tgz", - "integrity": "sha512-KsDsevZMDsigzbA09+vacnLpmPH4aWjcZjXdyFKGzpplxhbeB4wYtury3vglQkg6KM/xEPKt73eCjPPf1PgXBA==", - "license": "MIT", + "version": "7.25.9", + "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.25.9.tgz", + "integrity": "sha512-MUv6t0FhO5qHnS/W8XCbHmiRWOphNufpE1IVxhK5kuN3Td9FT1x4rx4K42s3RYdMXCXpfWkGSbCSd0Z64xA7Ng==", "dependencies": { - "@babel/helper-plugin-utils": "^7.24.7" + "@babel/helper-plugin-utils": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -1883,13 +1525,12 @@ } }, "node_modules/@babel/plugin-transform-spread": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-spread/-/plugin-transform-spread-7.24.7.tgz", - "integrity": "sha512-x96oO0I09dgMDxJaANcRyD4ellXFLLiWhuwDxKZX5g2rWP1bTPkBSwCYv96VDXVT1bD9aPj8tppr5ITIh8hBng==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-spread/-/plugin-transform-spread-7.25.9.tgz", + "integrity": "sha512-oNknIB0TbURU5pqJFVbOOFspVlrpVwo2H1+HUIsVDvp5VauGGDP1ZEvO8Nn5xyMEs3dakajOxlmkNW7kNgSm6A==", "dependencies": { - "@babel/helper-plugin-utils": "^7.24.7", - "@babel/helper-skip-transparent-expression-wrappers": "^7.24.7" + "@babel/helper-plugin-utils": "^7.25.9", + "@babel/helper-skip-transparent-expression-wrappers": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -1899,12 +1540,11 @@ } }, "node_modules/@babel/plugin-transform-sticky-regex": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.24.7.tgz", - "integrity": "sha512-kHPSIJc9v24zEml5geKg9Mjx5ULpfncj0wRpYtxbvKyTtHCYDkVE3aHQ03FrpEo4gEe2vrJJS1Y9CJTaThA52g==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.25.9.tgz", + "integrity": "sha512-WqBUSgeVwucYDP9U/xNRQam7xV8W5Zf+6Eo7T2SRVUFlhRiMNFdFz58u0KZmCVVqs2i7SHgpRnAhzRNmKfi2uA==", "dependencies": { - "@babel/helper-plugin-utils": "^7.24.7" + "@babel/helper-plugin-utils": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -1914,12 +1554,11 @@ } }, "node_modules/@babel/plugin-transform-template-literals": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.24.7.tgz", - "integrity": "sha512-AfDTQmClklHCOLxtGoP7HkeMw56k1/bTQjwsfhL6pppo/M4TOBSq+jjBUBLmV/4oeFg4GWMavIl44ZeCtmmZTw==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.25.9.tgz", + "integrity": "sha512-o97AE4syN71M/lxrCtQByzphAdlYluKPDBzDVzMmfCobUjjhAryZV0AIpRPrxN0eAkxXO6ZLEScmt+PNhj2OTw==", "dependencies": { - "@babel/helper-plugin-utils": "^7.24.7" + "@babel/helper-plugin-utils": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -1929,12 +1568,11 @@ } }, "node_modules/@babel/plugin-transform-typeof-symbol": { - "version": "7.24.8", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.24.8.tgz", - "integrity": "sha512-adNTUpDCVnmAE58VEqKlAA6ZBlNkMnWD0ZcW76lyNFN3MJniyGFZfNwERVk8Ap56MCnXztmDr19T4mPTztcuaw==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.25.9.tgz", + "integrity": "sha512-v61XqUMiueJROUv66BVIOi0Fv/CUuZuZMl5NkRoCVxLAnMexZ0A3kMe7vvZ0nulxMuMp0Mk6S5hNh48yki08ZA==", "dependencies": { - "@babel/helper-plugin-utils": "^7.24.8" + "@babel/helper-plugin-utils": "^7.25.9" }, "engines": { "node": 
">=6.9.0" @@ -1944,15 +1582,15 @@ } }, "node_modules/@babel/plugin-transform-typescript": { - "version": "7.24.8", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-typescript/-/plugin-transform-typescript-7.24.8.tgz", - "integrity": "sha512-CgFgtN61BbdOGCP4fLaAMOPkzWUh6yQZNMr5YSt8uz2cZSSiQONCQFWqsE4NeVfOIhqDOlS9CR3WD91FzMeB2Q==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-typescript/-/plugin-transform-typescript-7.25.9.tgz", + "integrity": "sha512-7PbZQZP50tzv2KGGnhh82GSyMB01yKY9scIjf1a+GfZCtInOWqUH5+1EBU4t9fyR5Oykkkc9vFTs4OHrhHXljQ==", "dependencies": { - "@babel/helper-annotate-as-pure": "^7.24.7", - "@babel/helper-create-class-features-plugin": "^7.24.8", - "@babel/helper-plugin-utils": "^7.24.8", - "@babel/plugin-syntax-typescript": "^7.24.7" + "@babel/helper-annotate-as-pure": "^7.25.9", + "@babel/helper-create-class-features-plugin": "^7.25.9", + "@babel/helper-plugin-utils": "^7.25.9", + "@babel/helper-skip-transparent-expression-wrappers": "^7.25.9", + "@babel/plugin-syntax-typescript": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -1962,12 +1600,11 @@ } }, "node_modules/@babel/plugin-transform-unicode-escapes": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.24.7.tgz", - "integrity": "sha512-U3ap1gm5+4edc2Q/P+9VrBNhGkfnf+8ZqppY71Bo/pzZmXhhLdqgaUl6cuB07O1+AQJtCLfaOmswiNbSQ9ivhw==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.25.9.tgz", + "integrity": "sha512-s5EDrE6bW97LtxOcGj1Khcx5AaXwiMmi4toFWRDP9/y0Woo6pXC+iyPu/KuhKtfSrNFd7jJB+/fkOtZy6aIC6Q==", "dependencies": { - "@babel/helper-plugin-utils": "^7.24.7" + "@babel/helper-plugin-utils": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -1977,13 +1614,12 @@ } }, "node_modules/@babel/plugin-transform-unicode-property-regex": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-property-regex/-/plugin-transform-unicode-property-regex-7.24.7.tgz", - "integrity": "sha512-uH2O4OV5M9FZYQrwc7NdVmMxQJOCCzFeYudlZSzUAHRFeOujQefa92E74TQDVskNHCzOXoigEuoyzHDhaEaK5w==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-property-regex/-/plugin-transform-unicode-property-regex-7.25.9.tgz", + "integrity": "sha512-Jt2d8Ga+QwRluxRQ307Vlxa6dMrYEMZCgGxoPR8V52rxPyldHu3hdlHspxaqYmE7oID5+kB+UKUB/eWS+DkkWg==", "dependencies": { - "@babel/helper-create-regexp-features-plugin": "^7.24.7", - "@babel/helper-plugin-utils": "^7.24.7" + "@babel/helper-create-regexp-features-plugin": "^7.25.9", + "@babel/helper-plugin-utils": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -1993,13 +1629,12 @@ } }, "node_modules/@babel/plugin-transform-unicode-regex": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.24.7.tgz", - "integrity": "sha512-hlQ96MBZSAXUq7ltkjtu3FJCCSMx/j629ns3hA3pXnBXjanNP0LHi+JpPeA81zaWgVK1VGH95Xuy7u0RyQ8kMg==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.25.9.tgz", + "integrity": "sha512-yoxstj7Rg9dlNn9UQxzk4fcNivwv4nUYz7fYXBaKxvw/lnmPuOm/ikoELygbYq68Bls3D/D+NBPHiLwZdZZ4HA==", "dependencies": { - "@babel/helper-create-regexp-features-plugin": "^7.24.7", 
- "@babel/helper-plugin-utils": "^7.24.7" + "@babel/helper-create-regexp-features-plugin": "^7.25.9", + "@babel/helper-plugin-utils": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -2009,13 +1644,12 @@ } }, "node_modules/@babel/plugin-transform-unicode-sets-regex": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-sets-regex/-/plugin-transform-unicode-sets-regex-7.24.7.tgz", - "integrity": "sha512-2G8aAvF4wy1w/AGZkemprdGMRg5o6zPNhbHVImRz3lss55TYCBd6xStN19rt8XJHq20sqV0JbyWjOWwQRwV/wg==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-sets-regex/-/plugin-transform-unicode-sets-regex-7.25.9.tgz", + "integrity": "sha512-8BYqO3GeVNHtx69fdPshN3fnzUNLrWdHhk/icSwigksJGczKSizZ+Z6SBCxTs723Fr5VSNorTIK7a+R2tISvwQ==", "dependencies": { - "@babel/helper-create-regexp-features-plugin": "^7.24.7", - "@babel/helper-plugin-utils": "^7.24.7" + "@babel/helper-create-regexp-features-plugin": "^7.25.9", + "@babel/helper-plugin-utils": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -2025,91 +1659,78 @@ } }, "node_modules/@babel/preset-env": { - "version": "7.24.8", - "resolved": "https://registry.npmjs.org/@babel/preset-env/-/preset-env-7.24.8.tgz", - "integrity": "sha512-vObvMZB6hNWuDxhSaEPTKCwcqkAIuDtE+bQGn4XMXne1DSLzFVY8Vmj1bm+mUQXYNN8NmaQEO+r8MMbzPr1jBQ==", - "license": "MIT", - "dependencies": { - "@babel/compat-data": "^7.24.8", - "@babel/helper-compilation-targets": "^7.24.8", - "@babel/helper-plugin-utils": "^7.24.8", - "@babel/helper-validator-option": "^7.24.8", - "@babel/plugin-bugfix-firefox-class-in-computed-class-key": "^7.24.7", - "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": "^7.24.7", - "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": "^7.24.7", - "@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly": "^7.24.7", + "version": "7.26.0", + "resolved": "https://registry.npmjs.org/@babel/preset-env/-/preset-env-7.26.0.tgz", + "integrity": "sha512-H84Fxq0CQJNdPFT2DrfnylZ3cf5K43rGfWK4LJGPpjKHiZlk0/RzwEus3PDDZZg+/Er7lCA03MVacueUuXdzfw==", + "dependencies": { + "@babel/compat-data": "^7.26.0", + "@babel/helper-compilation-targets": "^7.25.9", + "@babel/helper-plugin-utils": "^7.25.9", + "@babel/helper-validator-option": "^7.25.9", + "@babel/plugin-bugfix-firefox-class-in-computed-class-key": "^7.25.9", + "@babel/plugin-bugfix-safari-class-field-initializer-scope": "^7.25.9", + "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": "^7.25.9", + "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": "^7.25.9", + "@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly": "^7.25.9", "@babel/plugin-proposal-private-property-in-object": "7.21.0-placeholder-for-preset-env.2", - "@babel/plugin-syntax-async-generators": "^7.8.4", - "@babel/plugin-syntax-class-properties": "^7.12.13", - "@babel/plugin-syntax-class-static-block": "^7.14.5", - "@babel/plugin-syntax-dynamic-import": "^7.8.3", - "@babel/plugin-syntax-export-namespace-from": "^7.8.3", - "@babel/plugin-syntax-import-assertions": "^7.24.7", - "@babel/plugin-syntax-import-attributes": "^7.24.7", - "@babel/plugin-syntax-import-meta": "^7.10.4", - "@babel/plugin-syntax-json-strings": "^7.8.3", - "@babel/plugin-syntax-logical-assignment-operators": "^7.10.4", - "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.3", - "@babel/plugin-syntax-numeric-separator": "^7.10.4", - "@babel/plugin-syntax-object-rest-spread": "^7.8.3", - 
"@babel/plugin-syntax-optional-catch-binding": "^7.8.3", - "@babel/plugin-syntax-optional-chaining": "^7.8.3", - "@babel/plugin-syntax-private-property-in-object": "^7.14.5", - "@babel/plugin-syntax-top-level-await": "^7.14.5", + "@babel/plugin-syntax-import-assertions": "^7.26.0", + "@babel/plugin-syntax-import-attributes": "^7.26.0", "@babel/plugin-syntax-unicode-sets-regex": "^7.18.6", - "@babel/plugin-transform-arrow-functions": "^7.24.7", - "@babel/plugin-transform-async-generator-functions": "^7.24.7", - "@babel/plugin-transform-async-to-generator": "^7.24.7", - "@babel/plugin-transform-block-scoped-functions": "^7.24.7", - "@babel/plugin-transform-block-scoping": "^7.24.7", - "@babel/plugin-transform-class-properties": "^7.24.7", - "@babel/plugin-transform-class-static-block": "^7.24.7", - "@babel/plugin-transform-classes": "^7.24.8", - "@babel/plugin-transform-computed-properties": "^7.24.7", - "@babel/plugin-transform-destructuring": "^7.24.8", - "@babel/plugin-transform-dotall-regex": "^7.24.7", - "@babel/plugin-transform-duplicate-keys": "^7.24.7", - "@babel/plugin-transform-dynamic-import": "^7.24.7", - "@babel/plugin-transform-exponentiation-operator": "^7.24.7", - "@babel/plugin-transform-export-namespace-from": "^7.24.7", - "@babel/plugin-transform-for-of": "^7.24.7", - "@babel/plugin-transform-function-name": "^7.24.7", - "@babel/plugin-transform-json-strings": "^7.24.7", - "@babel/plugin-transform-literals": "^7.24.7", - "@babel/plugin-transform-logical-assignment-operators": "^7.24.7", - "@babel/plugin-transform-member-expression-literals": "^7.24.7", - "@babel/plugin-transform-modules-amd": "^7.24.7", - "@babel/plugin-transform-modules-commonjs": "^7.24.8", - "@babel/plugin-transform-modules-systemjs": "^7.24.7", - "@babel/plugin-transform-modules-umd": "^7.24.7", - "@babel/plugin-transform-named-capturing-groups-regex": "^7.24.7", - "@babel/plugin-transform-new-target": "^7.24.7", - "@babel/plugin-transform-nullish-coalescing-operator": "^7.24.7", - "@babel/plugin-transform-numeric-separator": "^7.24.7", - "@babel/plugin-transform-object-rest-spread": "^7.24.7", - "@babel/plugin-transform-object-super": "^7.24.7", - "@babel/plugin-transform-optional-catch-binding": "^7.24.7", - "@babel/plugin-transform-optional-chaining": "^7.24.8", - "@babel/plugin-transform-parameters": "^7.24.7", - "@babel/plugin-transform-private-methods": "^7.24.7", - "@babel/plugin-transform-private-property-in-object": "^7.24.7", - "@babel/plugin-transform-property-literals": "^7.24.7", - "@babel/plugin-transform-regenerator": "^7.24.7", - "@babel/plugin-transform-reserved-words": "^7.24.7", - "@babel/plugin-transform-shorthand-properties": "^7.24.7", - "@babel/plugin-transform-spread": "^7.24.7", - "@babel/plugin-transform-sticky-regex": "^7.24.7", - "@babel/plugin-transform-template-literals": "^7.24.7", - "@babel/plugin-transform-typeof-symbol": "^7.24.8", - "@babel/plugin-transform-unicode-escapes": "^7.24.7", - "@babel/plugin-transform-unicode-property-regex": "^7.24.7", - "@babel/plugin-transform-unicode-regex": "^7.24.7", - "@babel/plugin-transform-unicode-sets-regex": "^7.24.7", + "@babel/plugin-transform-arrow-functions": "^7.25.9", + "@babel/plugin-transform-async-generator-functions": "^7.25.9", + "@babel/plugin-transform-async-to-generator": "^7.25.9", + "@babel/plugin-transform-block-scoped-functions": "^7.25.9", + "@babel/plugin-transform-block-scoping": "^7.25.9", + "@babel/plugin-transform-class-properties": "^7.25.9", + "@babel/plugin-transform-class-static-block": "^7.26.0", + 
"@babel/plugin-transform-classes": "^7.25.9", + "@babel/plugin-transform-computed-properties": "^7.25.9", + "@babel/plugin-transform-destructuring": "^7.25.9", + "@babel/plugin-transform-dotall-regex": "^7.25.9", + "@babel/plugin-transform-duplicate-keys": "^7.25.9", + "@babel/plugin-transform-duplicate-named-capturing-groups-regex": "^7.25.9", + "@babel/plugin-transform-dynamic-import": "^7.25.9", + "@babel/plugin-transform-exponentiation-operator": "^7.25.9", + "@babel/plugin-transform-export-namespace-from": "^7.25.9", + "@babel/plugin-transform-for-of": "^7.25.9", + "@babel/plugin-transform-function-name": "^7.25.9", + "@babel/plugin-transform-json-strings": "^7.25.9", + "@babel/plugin-transform-literals": "^7.25.9", + "@babel/plugin-transform-logical-assignment-operators": "^7.25.9", + "@babel/plugin-transform-member-expression-literals": "^7.25.9", + "@babel/plugin-transform-modules-amd": "^7.25.9", + "@babel/plugin-transform-modules-commonjs": "^7.25.9", + "@babel/plugin-transform-modules-systemjs": "^7.25.9", + "@babel/plugin-transform-modules-umd": "^7.25.9", + "@babel/plugin-transform-named-capturing-groups-regex": "^7.25.9", + "@babel/plugin-transform-new-target": "^7.25.9", + "@babel/plugin-transform-nullish-coalescing-operator": "^7.25.9", + "@babel/plugin-transform-numeric-separator": "^7.25.9", + "@babel/plugin-transform-object-rest-spread": "^7.25.9", + "@babel/plugin-transform-object-super": "^7.25.9", + "@babel/plugin-transform-optional-catch-binding": "^7.25.9", + "@babel/plugin-transform-optional-chaining": "^7.25.9", + "@babel/plugin-transform-parameters": "^7.25.9", + "@babel/plugin-transform-private-methods": "^7.25.9", + "@babel/plugin-transform-private-property-in-object": "^7.25.9", + "@babel/plugin-transform-property-literals": "^7.25.9", + "@babel/plugin-transform-regenerator": "^7.25.9", + "@babel/plugin-transform-regexp-modifiers": "^7.26.0", + "@babel/plugin-transform-reserved-words": "^7.25.9", + "@babel/plugin-transform-shorthand-properties": "^7.25.9", + "@babel/plugin-transform-spread": "^7.25.9", + "@babel/plugin-transform-sticky-regex": "^7.25.9", + "@babel/plugin-transform-template-literals": "^7.25.9", + "@babel/plugin-transform-typeof-symbol": "^7.25.9", + "@babel/plugin-transform-unicode-escapes": "^7.25.9", + "@babel/plugin-transform-unicode-property-regex": "^7.25.9", + "@babel/plugin-transform-unicode-regex": "^7.25.9", + "@babel/plugin-transform-unicode-sets-regex": "^7.25.9", "@babel/preset-modules": "0.1.6-no-external-plugins", "babel-plugin-polyfill-corejs2": "^0.4.10", - "babel-plugin-polyfill-corejs3": "^0.10.4", + "babel-plugin-polyfill-corejs3": "^0.10.6", "babel-plugin-polyfill-regenerator": "^0.6.1", - "core-js-compat": "^3.37.1", + "core-js-compat": "^3.38.1", "semver": "^6.3.1" }, "engines": { @@ -2143,17 +1764,16 @@ } }, "node_modules/@babel/preset-react": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/preset-react/-/preset-react-7.24.7.tgz", - "integrity": "sha512-AAH4lEkpmzFWrGVlHaxJB7RLH21uPQ9+He+eFLWHmF9IuFQVugz8eAsamaW0DXRrTfco5zj1wWtpdcXJUOfsag==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/preset-react/-/preset-react-7.25.9.tgz", + "integrity": "sha512-D3to0uSPiWE7rBrdIICCd0tJSIGpLaaGptna2+w7Pft5xMqLpA1sz99DK5TZ1TjGbdQ/VI1eCSZ06dv3lT4JOw==", "dependencies": { - "@babel/helper-plugin-utils": "^7.24.7", - "@babel/helper-validator-option": "^7.24.7", - "@babel/plugin-transform-react-display-name": "^7.24.7", - "@babel/plugin-transform-react-jsx": 
"^7.24.7", - "@babel/plugin-transform-react-jsx-development": "^7.24.7", - "@babel/plugin-transform-react-pure-annotations": "^7.24.7" + "@babel/helper-plugin-utils": "^7.25.9", + "@babel/helper-validator-option": "^7.25.9", + "@babel/plugin-transform-react-display-name": "^7.25.9", + "@babel/plugin-transform-react-jsx": "^7.25.9", + "@babel/plugin-transform-react-jsx-development": "^7.25.9", + "@babel/plugin-transform-react-pure-annotations": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -2163,16 +1783,15 @@ } }, "node_modules/@babel/preset-typescript": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/preset-typescript/-/preset-typescript-7.24.7.tgz", - "integrity": "sha512-SyXRe3OdWwIwalxDg5UtJnJQO+YPcTfwiIY2B0Xlddh9o7jpWLvv8X1RthIeDOxQ+O1ML5BLPCONToObyVQVuQ==", - "license": "MIT", + "version": "7.26.0", + "resolved": "https://registry.npmjs.org/@babel/preset-typescript/-/preset-typescript-7.26.0.tgz", + "integrity": "sha512-NMk1IGZ5I/oHhoXEElcm+xUnL/szL6xflkFZmoEU9xj1qSJXpiS7rsspYo92B4DRCDvZn2erT5LdsCeXAKNCkg==", "dependencies": { - "@babel/helper-plugin-utils": "^7.24.7", - "@babel/helper-validator-option": "^7.24.7", - "@babel/plugin-syntax-jsx": "^7.24.7", - "@babel/plugin-transform-modules-commonjs": "^7.24.7", - "@babel/plugin-transform-typescript": "^7.24.7" + "@babel/helper-plugin-utils": "^7.25.9", + "@babel/helper-validator-option": "^7.25.9", + "@babel/plugin-syntax-jsx": "^7.25.9", + "@babel/plugin-transform-modules-commonjs": "^7.25.9", + "@babel/plugin-transform-typescript": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -2181,17 +1800,10 @@ "@babel/core": "^7.0.0-0" } }, - "node_modules/@babel/regjsgen": { - "version": "0.8.0", - "resolved": "https://registry.npmjs.org/@babel/regjsgen/-/regjsgen-0.8.0.tgz", - "integrity": "sha512-x/rqGMdzj+fWZvCOYForTghzbtqPDZ5gPwaoNGHdgDfF2QA/XZbCBp4Moo5scrkAMPhB7z26XM/AaHuIJdgauA==", - "license": "MIT" - }, "node_modules/@babel/runtime": { - "version": "7.24.8", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.24.8.tgz", - "integrity": "sha512-5F7SDGs1T72ZczbRwbGO9lQi0NLjQxzl6i4lJxLxfW9U5UluCSyEJeniWvnhl3/euNiqQVbo8zruhsDfid0esA==", - "license": "MIT", + "version": "7.26.0", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.26.0.tgz", + "integrity": "sha512-FDSOghenHTiToteC/QRlv2q3DhPZ/oOXTBoirfWNx1Cx3TMVcGWQtMMmQcSvb/JjpNeGzx8Pq/b4fKEJuWm1sw==", "dependencies": { "regenerator-runtime": "^0.14.0" }, @@ -2200,10 +1812,9 @@ } }, "node_modules/@babel/runtime-corejs3": { - "version": "7.24.8", - "resolved": "https://registry.npmjs.org/@babel/runtime-corejs3/-/runtime-corejs3-7.24.8.tgz", - "integrity": "sha512-DXG/BhegtMHhnN7YPIvxWd303/9aXvYFD1TjNL3CD6tUrhI2LVsg3Lck0aql5TRH29n4sj3emcROypkZVUfSuA==", - "license": "MIT", + "version": "7.26.0", + "resolved": "https://registry.npmjs.org/@babel/runtime-corejs3/-/runtime-corejs3-7.26.0.tgz", + "integrity": "sha512-YXHu5lN8kJCb1LOb9PgV6pvak43X2h4HvRApcN5SdWeaItQOzfn1hgP6jasD6KWQyJDBxrVmA9o9OivlnNJK/w==", "dependencies": { "core-js-pure": "^3.30.2", "regenerator-runtime": "^0.14.0" @@ -2213,33 +1824,28 @@ } }, "node_modules/@babel/template": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.24.7.tgz", - "integrity": "sha512-jYqfPrU9JTF0PmPy1tLYHW4Mp4KlgxJD9l2nP9fD6yT/ICi554DmrWBAEYpIelzjHf1msDP3PxJIRt/nFNfBig==", - "license": "MIT", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.25.9.tgz", + "integrity": 
"sha512-9DGttpmPvIxBb/2uwpVo3dqJ+O6RooAFOS+lB+xDqoE2PVCE8nfoHMdZLpfCQRLwvohzXISPZcgxt80xLfsuwg==", "dependencies": { - "@babel/code-frame": "^7.24.7", - "@babel/parser": "^7.24.7", - "@babel/types": "^7.24.7" + "@babel/code-frame": "^7.25.9", + "@babel/parser": "^7.25.9", + "@babel/types": "^7.25.9" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/traverse": { - "version": "7.24.8", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.24.8.tgz", - "integrity": "sha512-t0P1xxAPzEDcEPmjprAQq19NWum4K0EQPjMwZQZbHt+GiZqvjCHjj755Weq1YRPVzBI+3zSfvScfpnuIecVFJQ==", - "license": "MIT", - "dependencies": { - "@babel/code-frame": "^7.24.7", - "@babel/generator": "^7.24.8", - "@babel/helper-environment-visitor": "^7.24.7", - "@babel/helper-function-name": "^7.24.7", - "@babel/helper-hoist-variables": "^7.24.7", - "@babel/helper-split-export-declaration": "^7.24.7", - "@babel/parser": "^7.24.8", - "@babel/types": "^7.24.8", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.25.9.tgz", + "integrity": "sha512-ZCuvfwOwlz/bawvAuvcj8rrithP2/N55Tzz342AkTvq4qaWbGfmCk/tKhNaV2cthijKrPAA8SRJV5WWe7IBMJw==", + "dependencies": { + "@babel/code-frame": "^7.25.9", + "@babel/generator": "^7.25.9", + "@babel/parser": "^7.25.9", + "@babel/template": "^7.25.9", + "@babel/types": "^7.25.9", "debug": "^4.3.1", "globals": "^11.1.0" }, @@ -2248,14 +1854,12 @@ } }, "node_modules/@babel/types": { - "version": "7.24.8", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.24.8.tgz", - "integrity": "sha512-SkSBEHwwJRU52QEVZBmMBnE5Ux2/6WU1grdYyOhpbCNxbmJrDuDCphBzKZSO3taf0zztp+qkWlymE5tVL5l0TA==", - "license": "MIT", + "version": "7.26.0", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.26.0.tgz", + "integrity": "sha512-Z/yiTPj+lDVnF7lWeKCIJzaIkI0vYO87dMpZ4bg4TDrFe4XXLFWL1TbXU27gBP3QccxV9mZICCrnjnYlJjXHOA==", "dependencies": { - "@babel/helper-string-parser": "^7.24.8", - "@babel/helper-validator-identifier": "^7.24.7", - "to-fast-properties": "^2.0.0" + "@babel/helper-string-parser": "^7.25.9", + "@babel/helper-validator-identifier": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -2538,6 +2142,29 @@ "react-dom": "*" } }, + "node_modules/@docusaurus/plugin-client-redirects": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/@docusaurus/plugin-client-redirects/-/plugin-client-redirects-3.4.0.tgz", + "integrity": "sha512-Pr8kyh/+OsmYCvdZhc60jy/FnrY6flD2TEAhl4rJxeVFxnvvRgEhoaIVX8q9MuJmaQoh6frPk94pjs7/6YgBDQ==", + "dependencies": { + "@docusaurus/core": "3.4.0", + "@docusaurus/logger": "3.4.0", + "@docusaurus/utils": "3.4.0", + "@docusaurus/utils-common": "3.4.0", + "@docusaurus/utils-validation": "3.4.0", + "eta": "^2.2.0", + "fs-extra": "^11.1.1", + "lodash": "^4.17.21", + "tslib": "^2.6.0" + }, + "engines": { + "node": ">=18.0" + }, + "peerDependencies": { + "react": "^18.0.0", + "react-dom": "^18.0.0" + } + }, "node_modules/@docusaurus/plugin-content-blog": { "version": "3.4.0", "resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-blog/-/plugin-content-blog-3.4.0.tgz", @@ -4220,10 +3847,9 @@ } }, "node_modules/@types/estree": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.5.tgz", - "integrity": "sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw==", - "license": "MIT" + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.6.tgz", + "integrity": 
"sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw==" }, "node_modules/@types/estree-jsx": { "version": "1.0.5", @@ -4743,10 +4369,9 @@ } }, "node_modules/acorn": { - "version": "8.12.1", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.12.1.tgz", - "integrity": "sha512-tcpGyI9zbizT9JbV6oYE477V6mTlXvvi0T0G3SNIYE2apm/G5huBa1+K89VGeovbg+jycCrfhl3ADxErOuO6Jg==", - "license": "MIT", + "version": "8.14.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.14.0.tgz", + "integrity": "sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA==", "bin": { "acorn": "bin/acorn" }, @@ -4754,15 +4379,6 @@ "node": ">=0.4.0" } }, - "node_modules/acorn-import-attributes": { - "version": "1.9.5", - "resolved": "https://registry.npmjs.org/acorn-import-attributes/-/acorn-import-attributes-1.9.5.tgz", - "integrity": "sha512-n02Vykv5uA3eHGM/Z2dQrcD56kL8TyDb2p1+0P83PClMnC/nc+anbQRhIOWnSq4Ke/KvDPrY3C9hDtC/A3eHnQ==", - "license": "MIT", - "peerDependencies": { - "acorn": "^8" - } - }, "node_modules/acorn-jsx": { "version": "5.3.2", "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", @@ -5476,10 +5092,9 @@ } }, "node_modules/babel-loader": { - "version": "9.1.3", - "resolved": "https://registry.npmjs.org/babel-loader/-/babel-loader-9.1.3.tgz", - "integrity": "sha512-xG3ST4DglodGf8qSwv0MdeWLhrDsw/32QMdTO5T1ZIp9gQur0HkCyFs7Awskr10JKXFXwpAhiCuYX5oGXnRGbw==", - "license": "MIT", + "version": "9.2.1", + "resolved": "https://registry.npmjs.org/babel-loader/-/babel-loader-9.2.1.tgz", + "integrity": "sha512-fqe8naHt46e0yIdkjUZYqddSXfej3AHajX+CSO5X7oy0EmPc6o5Xh+RClNoHjnieWz9AW4kZxW9yyFMhVB1QLA==", "dependencies": { "find-cache-dir": "^4.0.0", "schema-utils": "^4.0.0" @@ -5545,13 +5160,12 @@ } }, "node_modules/babel-plugin-polyfill-corejs3": { - "version": "0.10.4", - "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.10.4.tgz", - "integrity": "sha512-25J6I8NGfa5YkCDogHRID3fVCadIR8/pGl1/spvCkzb6lVn6SR3ojpx9nOn9iEBcUsjY24AmdKm5khcfKdylcg==", - "license": "MIT", + "version": "0.10.6", + "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.10.6.tgz", + "integrity": "sha512-b37+KR2i/khY5sKmWNVQAnitvquQbNdWy6lJdsr0kmquCKEEUgMKK4SboVM3HtfnZilfjr4MMQ7vY58FVWDtIA==", "dependencies": { - "@babel/helper-define-polyfill-provider": "^0.6.1", - "core-js-compat": "^3.36.1" + "@babel/helper-define-polyfill-provider": "^0.6.2", + "core-js-compat": "^3.38.0" }, "peerDependencies": { "@babel/core": "^7.4.0 || ^8.0.0-0 <8.0.0" @@ -6642,9 +6256,9 @@ } }, "node_modules/browserslist": { - "version": "4.23.2", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.23.2.tgz", - "integrity": "sha512-qkqSyistMYdxAcw+CzbZwlBy8AGmS/eEWs+sEV5TnLRGDOL+C5M2EnH6tlZyg0YoAxGJAFKh61En9BR941GnHA==", + "version": "4.24.2", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.24.2.tgz", + "integrity": "sha512-ZIc+Q62revdMcqC6aChtW4jz3My3klmCO1fEmINZY/8J3EpBg5/A/D0AKmBveUh6pgoeycoMkVMko84tuYS+Gg==", "funding": [ { "type": "opencollective", @@ -6659,12 +6273,11 @@ "url": "https://github.com/sponsors/ai" } ], - "license": "MIT", "dependencies": { - "caniuse-lite": "^1.0.30001640", - "electron-to-chromium": "^1.4.820", - "node-releases": "^2.0.14", - "update-browserslist-db": "^1.1.0" + "caniuse-lite": "^1.0.30001669", + "electron-to-chromium": "^1.5.41", + "node-releases": "^2.0.18", + 
"update-browserslist-db": "^1.1.1" }, "bin": { "browserslist": "cli.js" @@ -6823,9 +6436,9 @@ } }, "node_modules/caniuse-lite": { - "version": "1.0.30001641", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001641.tgz", - "integrity": "sha512-Phv5thgl67bHYo1TtMY/MurjkHhV4EDaCosezRXgZ8jzA/Ub+wjxAvbGvjoFENStinwi5kCyOYV3mi5tOGykwA==", + "version": "1.0.30001677", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001677.tgz", + "integrity": "sha512-fmfjsOlJUpMWu+mAAtZZZHz7UEwsUxIIvu1TJfO1HqFQvB/B+ii0xr9B5HpbZY/mC4XZ8SvjHJqtAY6pDPQEog==", "funding": [ { "type": "opencollective", @@ -6839,8 +6452,7 @@ "type": "github", "url": "https://github.com/sponsors/ai" } - ], - "license": "CC-BY-4.0" + ] }, "node_modules/ccount": { "version": "2.0.1", @@ -7453,12 +7065,11 @@ } }, "node_modules/core-js-compat": { - "version": "3.37.1", - "resolved": "https://registry.npmjs.org/core-js-compat/-/core-js-compat-3.37.1.tgz", - "integrity": "sha512-9TNiImhKvQqSUkOvk/mMRZzOANTiEVC7WaBNhHcKM7x+/5E1l5NvsysR19zuDQScE8k+kfQXWRN3AtS/eOSHpg==", - "license": "MIT", + "version": "3.39.0", + "resolved": "https://registry.npmjs.org/core-js-compat/-/core-js-compat-3.39.0.tgz", + "integrity": "sha512-VgEUx3VwlExr5no0tXlBt+silBvhTryPwCXRI2Id1PN8WTKu7MreethvddqOubrYxkFdv/RnYrqlv1sFNAUelw==", "dependencies": { - "browserslist": "^4.23.0" + "browserslist": "^4.24.2" }, "funding": { "type": "opencollective", @@ -8486,10 +8097,9 @@ "license": "MIT" }, "node_modules/electron-to-chromium": { - "version": "1.4.827", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.827.tgz", - "integrity": "sha512-VY+J0e4SFcNfQy19MEoMdaIcZLmDCprqvBtkii1WTCTQHpRvf5N8+3kTYCgL/PcntvwQvmMJWTuDPsq+IlhWKQ==", - "license": "ISC" + "version": "1.5.52", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.52.tgz", + "integrity": "sha512-xtoijJTZ+qeucLBDNztDOuQBE1ksqjvNjvqFoST3nGC7fSpqJ+X6BdTBaY5BHG+IhWWmpc6b/KfpeuEDupEPOQ==" }, "node_modules/elliptic": { "version": "6.5.5", @@ -8562,10 +8172,9 @@ } }, "node_modules/enhanced-resolve": { - "version": "5.17.0", - "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.17.0.tgz", - "integrity": "sha512-dwDPwZL0dmye8Txp2gzFmA6sxALaSvdRDjPH0viLcKrtlOL3tw62nWWweVD1SdILDTJrbrL6tdWVN58Wo6U3eA==", - "license": "MIT", + "version": "5.17.1", + "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.17.1.tgz", + "integrity": "sha512-LMHl3dXhTcfv8gM4kEzIUeTQ+7fpdA0l2tUf34BddXPkz2A5xJ5L/Pchd5BL6rdccM9QGvu0sWZzK1Z1t4wwyg==", "dependencies": { "graceful-fs": "^4.2.4", "tapable": "^2.2.0" @@ -8623,10 +8232,9 @@ "license": "MIT" }, "node_modules/escalade": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.2.tgz", - "integrity": "sha512-ErCHMCae19vR8vQGe50xIsVomy19rg6gFu3+r3jkEO46suLMWBksvVyoGgQV+jOfl84ZSOSlmv6Gxa89PmTGmA==", - "license": "MIT", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", + "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", "engines": { "node": ">=6" } @@ -9112,21 +8720,6 @@ "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", "license": "MIT" }, - "node_modules/fast-url-parser": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/fast-url-parser/-/fast-url-parser-1.1.3.tgz", - "integrity": 
"sha512-5jOCVXADYNuRkKFzNJ0dCCewsZiYo0dz8QNYljkOpFC6r2U4OBmKtvm/Tsuh4w1YYdDqDb31a8TVhBJ2OJKdqQ==", - "license": "MIT", - "dependencies": { - "punycode": "^1.3.2" - } - }, - "node_modules/fast-url-parser/node_modules/punycode": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", - "integrity": "sha512-jmYNElW7yvO7TV33CjSmvSiE2yco3bV2czu/OzDKdMNVZQWfxCblURLhf+47syQRBntjfLdd/H0egrzIG+oaFQ==", - "license": "MIT" - }, "node_modules/fastq": { "version": "1.17.1", "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.17.1.tgz", @@ -11430,15 +11023,14 @@ } }, "node_modules/jsesc": { - "version": "2.5.2", - "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz", - "integrity": "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==", - "license": "MIT", + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.0.2.tgz", + "integrity": "sha512-xKqzzWXDttJuOcawBt4KnKHHIf5oQ/Cxax+0PWFG+DFDgHNAdi+TXECADI+RYiFUMmx8792xsMbbgXj4CwnP4g==", "bin": { "jsesc": "bin/jsesc" }, "engines": { - "node": ">=4" + "node": ">=6" } }, "node_modules/json-buffer": { @@ -14091,10 +13683,9 @@ } }, "node_modules/mini-css-extract-plugin": { - "version": "2.9.0", - "resolved": "https://registry.npmjs.org/mini-css-extract-plugin/-/mini-css-extract-plugin-2.9.0.tgz", - "integrity": "sha512-Zs1YsZVfemekSZG+44vBsYTLQORkPMwnlv+aehcxK/NLKC+EGhDB39/YePYYqx/sTk6NnYpuqikhSn7+JIevTA==", - "license": "MIT", + "version": "2.9.2", + "resolved": "https://registry.npmjs.org/mini-css-extract-plugin/-/mini-css-extract-plugin-2.9.2.tgz", + "integrity": "sha512-GJuACcS//jtq4kCtd5ii/M0SZf7OZRH+BxdqXZHaJfb8TJiVl+NgQRPwiYt2EuqeSkNydn/7vP+bcE27C5mb9w==", "dependencies": { "schema-utils": "^4.0.0", "tapable": "^2.2.1" @@ -14330,10 +13921,9 @@ } }, "node_modules/node-releases": { - "version": "2.0.14", - "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.14.tgz", - "integrity": "sha512-y10wOWt8yZpqXmOgRo77WaHEmhYQYGNA6y421PKsKYWEK8aW+cqAphborZDhqfyKrbZEN92CN1X2KbafY2s7Yw==", - "license": "MIT" + "version": "2.0.18", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.18.tgz", + "integrity": "sha512-d9VeXT4SJ7ZeOqGX6R5EM022wpL+eWPooLI+5UpWn2jCT1aosUQEhQP214x33Wkwx3JQMvIm+tIoVOdodFS40g==" }, "node_modules/normalize-path": { "version": "3.0.0", @@ -15239,10 +14829,9 @@ } }, "node_modules/picocolors": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.1.tgz", - "integrity": "sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew==", - "license": "ISC" + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==" }, "node_modules/picomatch": { "version": "2.3.1", @@ -16864,10 +16453,9 @@ "license": "MIT" }, "node_modules/regenerate-unicode-properties": { - "version": "10.1.1", - "resolved": "https://registry.npmjs.org/regenerate-unicode-properties/-/regenerate-unicode-properties-10.1.1.tgz", - "integrity": "sha512-X007RyZLsCJVVrjgEFVpLUTZwyOZk3oiL75ZcuYjlIWd6rNJtOjkBwQc5AsRrpbKVkxN6sklw/k/9m2jJYOf8Q==", - "license": "MIT", + "version": "10.2.0", + "resolved": "https://registry.npmjs.org/regenerate-unicode-properties/-/regenerate-unicode-properties-10.2.0.tgz", + "integrity": 
"sha512-DqHn3DwbmmPVzeKj9woBadqmXxLvQoQIwu7nopMc72ztvxVmVk2SBhSnx67zuye5TP+lJsb/TBQsjLKhnDf3MA==", "dependencies": { "regenerate": "^1.4.2" }, @@ -16885,21 +16473,19 @@ "version": "0.15.2", "resolved": "https://registry.npmjs.org/regenerator-transform/-/regenerator-transform-0.15.2.tgz", "integrity": "sha512-hfMp2BoF0qOk3uc5V20ALGDS2ddjQaLrdl7xrGXvAIow7qeWRM2VA2HuCHkUKk9slq3VwEwLNK3DFBqDfPGYtg==", - "license": "MIT", "dependencies": { "@babel/runtime": "^7.8.4" } }, "node_modules/regexpu-core": { - "version": "5.3.2", - "resolved": "https://registry.npmjs.org/regexpu-core/-/regexpu-core-5.3.2.tgz", - "integrity": "sha512-RAM5FlZz+Lhmo7db9L298p2vHP5ZywrVXmVXpmAD9GuL5MPH6t9ROw1iA/wfHkQ76Qe7AaPF0nGuim96/IrQMQ==", - "license": "MIT", + "version": "6.1.1", + "resolved": "https://registry.npmjs.org/regexpu-core/-/regexpu-core-6.1.1.tgz", + "integrity": "sha512-k67Nb9jvwJcJmVpw0jPttR1/zVfnKf8Km0IPatrU/zJ5XeG3+Slx0xLXs9HByJSzXzrlz5EDvN6yLNMDc2qdnw==", "dependencies": { - "@babel/regjsgen": "^0.8.0", "regenerate": "^1.4.2", - "regenerate-unicode-properties": "^10.1.0", - "regjsparser": "^0.9.1", + "regenerate-unicode-properties": "^10.2.0", + "regjsgen": "^0.8.0", + "regjsparser": "^0.11.0", "unicode-match-property-ecmascript": "^2.0.0", "unicode-match-property-value-ecmascript": "^2.1.0" }, @@ -16907,6 +16493,11 @@ "node": ">=4" } }, + "node_modules/regexpu-core/node_modules/regjsgen": { + "version": "0.8.0", + "resolved": "https://registry.npmjs.org/regjsgen/-/regjsgen-0.8.0.tgz", + "integrity": "sha512-RvwtGe3d7LvWiDQXeQw8p5asZUmfU1G/l6WbUXeHta7Y2PEIvBTwH6E2EfmYUK8pxcxEdEmaomqyp0vZZ7C+3Q==" + }, "node_modules/registry-auth-token": { "version": "5.0.2", "resolved": "https://registry.npmjs.org/registry-auth-token/-/registry-auth-token-5.0.2.tgz", @@ -16942,25 +16533,16 @@ "license": "MIT" }, "node_modules/regjsparser": { - "version": "0.9.1", - "resolved": "https://registry.npmjs.org/regjsparser/-/regjsparser-0.9.1.tgz", - "integrity": "sha512-dQUtn90WanSNl+7mQKcXAgZxvUe7Z0SqXlgzv0za4LwiUhyzBC58yQO3liFoUgu8GiJVInAhJjkj1N0EtQ5nkQ==", - "license": "BSD-2-Clause", + "version": "0.11.2", + "resolved": "https://registry.npmjs.org/regjsparser/-/regjsparser-0.11.2.tgz", + "integrity": "sha512-3OGZZ4HoLJkkAZx/48mTXJNlmqTGOzc0o9OWQPuWpkOlXXPbyN6OafCcoXUnBqE2D3f/T5L+pWc1kdEmnfnRsA==", "dependencies": { - "jsesc": "~0.5.0" + "jsesc": "~3.0.2" }, "bin": { "regjsparser": "bin/parser" } }, - "node_modules/regjsparser/node_modules/jsesc": { - "version": "0.5.0", - "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-0.5.0.tgz", - "integrity": "sha512-uZz5UnB7u4T9LvwmFqXii7pZSouaRPorGs5who1Ip7VO0wxanFvBL7GkM6dTHlgX+jhBApRetaWpnDabOeTcnA==", - "bin": { - "jsesc": "bin/jsesc" - } - }, "node_modules/rehype-raw": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/rehype-raw/-/rehype-raw-7.0.0.tgz", @@ -17648,18 +17230,16 @@ } }, "node_modules/serve-handler": { - "version": "6.1.5", - "resolved": "https://registry.npmjs.org/serve-handler/-/serve-handler-6.1.5.tgz", - "integrity": "sha512-ijPFle6Hwe8zfmBxJdE+5fta53fdIY0lHISJvuikXB3VYFafRjMRpOffSPvCYsbKyBA7pvy9oYr/BT1O3EArlg==", - "license": "MIT", + "version": "6.1.6", + "resolved": "https://registry.npmjs.org/serve-handler/-/serve-handler-6.1.6.tgz", + "integrity": "sha512-x5RL9Y2p5+Sh3D38Fh9i/iQ5ZK+e4xuXRd/pGbM4D13tgo/MGwbttUk8emytcr1YYzBYs+apnUngBDFYfpjPuQ==", "dependencies": { "bytes": "3.0.0", "content-disposition": "0.5.2", - "fast-url-parser": "1.1.3", "mime-types": "2.1.18", "minimatch": "3.1.2", "path-is-inside": "1.0.2", - "path-to-regexp": 
"2.2.1", + "path-to-regexp": "3.3.0", "range-parser": "1.2.0" } }, @@ -17685,10 +17265,9 @@ } }, "node_modules/serve-handler/node_modules/path-to-regexp": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-2.2.1.tgz", - "integrity": "sha512-gu9bD6Ta5bwGrrU8muHzVOBFFREpp2iRkVfhBJahwJ6p6Xw20SjT0MxLnwkjOibQmGSYhiUnf2FLe7k+jcFmGQ==", - "license": "MIT" + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-3.3.0.tgz", + "integrity": "sha512-qyCH421YQPS2WFDxDjftfc1ZR5WKQzVzqsp4n9M2kQhVOo/ByahFoUNJfl58kOcEGfQ//7weFTDhm+ss8Ecxgw==" }, "node_modules/serve-index": { "version": "1.9.1", @@ -18796,15 +18375,6 @@ "integrity": "sha512-lBN9zLN/oAf68o3zNXYrdCt1kP8WsiGW8Oo2ka41b2IM5JL/S1CTyX1rW0mb/zSuJun0ZUrDxx4sqvYS2FWzPA==", "license": "MIT" }, - "node_modules/to-fast-properties": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", - "integrity": "sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==", - "license": "MIT", - "engines": { - "node": ">=4" - } - }, "node_modules/to-regex-range": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", @@ -19025,10 +18595,9 @@ "license": "MIT" }, "node_modules/unicode-canonical-property-names-ecmascript": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-2.0.0.tgz", - "integrity": "sha512-yY5PpDlfVIU5+y/BSCxAJRBIS1Zc2dDG3Ujq+sR0U+JjUevW2JhocOF+soROYDSaAezOzOKuyyixhD6mBknSmQ==", - "license": "MIT", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-2.0.1.tgz", + "integrity": "sha512-dA8WbNeb2a6oQzAQ55YlT5vQAWGV9WXOsi3SskE3bcCdM0P4SDd+24zS/OCacdRq5BkdsRj9q3Pg6YyQoxIGqg==", "engines": { "node": ">=4" } @@ -19046,7 +18615,6 @@ "version": "2.0.0", "resolved": "https://registry.npmjs.org/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-2.0.0.tgz", "integrity": "sha512-5kaZCrbp5mmbz5ulBkDkbY0SsPOjKqVS35VpL9ulMPfSl0J0Xsm+9Evphv9CoIZFwre7aJoa94AY6seMKGVN5Q==", - "license": "MIT", "dependencies": { "unicode-canonical-property-names-ecmascript": "^2.0.0", "unicode-property-aliases-ecmascript": "^2.0.0" @@ -19056,10 +18624,9 @@ } }, "node_modules/unicode-match-property-value-ecmascript": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-2.1.0.tgz", - "integrity": "sha512-qxkjQt6qjg/mYscYMC0XKRn3Rh0wFPlfxB0xkt9CfyTvpX1Ra0+rAmdX2QyAobptSEvuy4RtpPRui6XkV+8wjA==", - "license": "MIT", + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-2.2.0.tgz", + "integrity": "sha512-4IehN3V/+kkr5YeSSDDQG8QLqO26XpL2XP3GQtqwlT/QYSECAwFztxVHjlbh0+gjJ3XmNLS0zDsbgs9jWKExLg==", "engines": { "node": ">=4" } @@ -19068,7 +18635,6 @@ "version": "2.1.0", "resolved": "https://registry.npmjs.org/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-2.1.0.tgz", "integrity": "sha512-6t3foTQI9qne+OZoVQB/8x8rk2k1eVy1gRXhV3oFQ5T6R1dqQ1xtin3XqSlx3+ATBkliTaR/hHyJBm+LVPNM8w==", - "license": "MIT", "engines": { "node": ">=4" } @@ -19221,9 +18787,9 @@ } }, "node_modules/update-browserslist-db": { - "version": "1.1.0", - "resolved": 
"https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.0.tgz", - "integrity": "sha512-EdRAaAyk2cUE1wOf2DkEhzxqOQvFOoRJFNS6NeyJ01Gp2beMRpBAINjM2iDXE3KCuKhwnvHIQCJm6ThL2Z+HzQ==", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.1.tgz", + "integrity": "sha512-R8UzCaa9Az+38REPiJ1tXlImTJXlVfgHZsglwBD/k6nj76ctsH1E3q4doGrukiLQd3sGQYu56r5+lo5r94l29A==", "funding": [ { "type": "opencollective", @@ -19238,10 +18804,9 @@ "url": "https://github.com/sponsors/ai" } ], - "license": "MIT", "dependencies": { - "escalade": "^3.1.2", - "picocolors": "^1.0.1" + "escalade": "^3.2.0", + "picocolors": "^1.1.0" }, "bin": { "update-browserslist-db": "cli.js" @@ -19611,21 +19176,19 @@ "license": "BSD-2-Clause" }, "node_modules/webpack": { - "version": "5.93.0", - "resolved": "https://registry.npmjs.org/webpack/-/webpack-5.93.0.tgz", - "integrity": "sha512-Y0m5oEY1LRuwly578VqluorkXbvXKh7U3rLoQCEO04M97ScRr44afGVkI0FQFsXzysk5OgFAxjZAb9rsGQVihA==", - "license": "MIT", + "version": "5.96.1", + "resolved": "https://registry.npmjs.org/webpack/-/webpack-5.96.1.tgz", + "integrity": "sha512-l2LlBSvVZGhL4ZrPwyr8+37AunkcYj5qh8o6u2/2rzoPc8gxFJkLj1WxNgooi9pnoc06jh0BjuXnamM4qlujZA==", "dependencies": { - "@types/eslint-scope": "^3.7.3", - "@types/estree": "^1.0.5", + "@types/eslint-scope": "^3.7.7", + "@types/estree": "^1.0.6", "@webassemblyjs/ast": "^1.12.1", "@webassemblyjs/wasm-edit": "^1.12.1", "@webassemblyjs/wasm-parser": "^1.12.1", - "acorn": "^8.7.1", - "acorn-import-attributes": "^1.9.5", - "browserslist": "^4.21.10", + "acorn": "^8.14.0", + "browserslist": "^4.24.0", "chrome-trace-event": "^1.0.2", - "enhanced-resolve": "^5.17.0", + "enhanced-resolve": "^5.17.1", "es-module-lexer": "^1.2.1", "eslint-scope": "5.1.1", "events": "^3.2.0", diff --git a/docs/package.json b/docs/package.json index e4e3cb58136c..842413799555 100644 --- a/docs/package.json +++ b/docs/package.json @@ -13,6 +13,8 @@ "dependencies": { "@code-hike/mdx": "^0.9.0", "@docusaurus/core": "^3.2.0", + "@docusaurus/plugin-client-redirects": "^3.4.0", + "@docusaurus/plugin-google-tag-manager": "^3.2.0", "@docusaurus/preset-classic": "^3.2.0", "@easyops-cn/docusaurus-search-local": "^0.44.3", "@mdx-js/react": "^3.0.1", diff --git a/docs/src/theme/Footer.js b/docs/src/theme/Footer.js index d2a56bf5a9ba..b7b2af3d4e31 100644 --- a/docs/src/theme/Footer.js +++ b/docs/src/theme/Footer.js @@ -37,7 +37,7 @@ export default function FooterWrapper(props) { const mendableFloatingButton = React.createElement(MendableFloatingButton, { floatingButtonStyle: { color: "#000000", backgroundColor: "#f6f6f6" }, - anon_key: 'b7f52734-297c-41dc-8737-edbd13196394', // Mendable Search Public ANON key, ok to be public + anon_key: customFields.mendableAnonKey, showSimpleSearch: true, icon: icon, }); diff --git a/docs/src/theme/SearchBar.js b/docs/src/theme/SearchBar.js index dcca0c945dbe..78ee36c05d53 100644 --- a/docs/src/theme/SearchBar.js +++ b/docs/src/theme/SearchBar.js @@ -2,4 +2,22 @@ // If you swizzled this, it is your responsibility to provide an implementation // Tip: swizzle the SearchBar from the Algolia theme for inspiration: // npm run swizzle @docusaurus/theme-search-algolia SearchBar -export {default} from '@docusaurus/Noop'; +import React from 'react' +import { MendableSearchBar } from '@mendable/search' +import useDocusaurusContext from '@docusaurus/useDocusaurusContext' + +export default function SearchBarWrapper() { + const { + siteConfig: { customFields }, + } = 
useDocusaurusContext() + return ( + <div className="mendable-search"> + <MendableSearchBar anon_key={customFields.mendableAnonKey} /> + </div>
+ ) +} diff --git a/docs/static/files/Google_Drive_Docs_Translations_Example.json b/docs/static/files/Google_Drive_Docs_Translations_Example.json new file mode 100644 index 000000000000..21addff7466a --- /dev/null +++ b/docs/static/files/Google_Drive_Docs_Translations_Example.json @@ -0,0 +1 @@ +{"id":"d0ff7355-fae7-411d-a7a4-16f82120fdfe","data":{"nodes":[{"id":"ParseData-U2bvS","type":"genericNode","position":{"x":484.9402084930625,"y":225.83768098401197},"data":{"type":"ParseData","node":{"template":{"_type":"Component","data":{"trace_as_metadata":true,"list":false,"trace_as_input":true,"required":false,"placeholder":"","show":true,"name":"data","value":"","display_name":"Data","advanced":false,"input_types":["Data"],"dynamic":false,"info":"The data to convert to text.","title_case":false,"type":"other","_input_type":"DataInput"},"code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.custom import Component\nfrom langflow.helpers.data import data_to_text\nfrom langflow.io import DataInput, MultilineInput, Output, StrInput\nfrom langflow.schema.message import Message\n\n\nclass ParseDataComponent(Component):\n display_name = \"Parse Data\"\n description = \"Convert Data into plain text following a specified template.\"\n icon = \"braces\"\n name = \"ParseData\"\n\n inputs = [\n DataInput(name=\"data\", display_name=\"Data\", info=\"The data to convert to text.\"),\n MultilineInput(\n name=\"template\",\n display_name=\"Template\",\n info=\"The template to use for formatting the data. \"\n \"It can contain the keys {text}, {data} or any other key in the Data.\",\n value=\"{text}\",\n ),\n StrInput(name=\"sep\", display_name=\"Separator\", advanced=True, value=\"\\n\"),\n ]\n\n outputs = [\n Output(display_name=\"Text\", name=\"text\", method=\"parse_data\"),\n ]\n\n def parse_data(self) -> Message:\n data = self.data if isinstance(self.data, list) else [self.data]\n template = self.template\n\n result_string = data_to_text(template, data, sep=self.sep)\n self.status = result_string\n return Message(text=result_string)\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"sep":{"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"sep","value":"\n","display_name":"Separator","advanced":true,"dynamic":false,"info":"","title_case":false,"type":"str","_input_type":"StrInput"},"template":{"trace_as_input":true,"multiline":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"template","value":"{text}","display_name":"Template","advanced":false,"input_types":["Message"],"dynamic":false,"info":"The template to use for formatting the data. 
It can contain the keys {text}, {data} or any other key in the Data.","title_case":false,"type":"str","_input_type":"MultilineInput"}},"description":"Convert Data into plain text following a specified template.","icon":"braces","base_classes":["Message"],"display_name":"Parse Data","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"text","display_name":"Text","method":"parse_data","value":"__UNDEFINED__","cache":true}],"field_order":["data","template","sep"],"beta":false,"edited":false,"metadata":{},"lf_version":"1.0.19.post1"},"id":"ParseData-U2bvS"},"selected":false,"width":384,"height":353,"positionAbsolute":{"x":484.9402084930625,"y":225.83768098401197},"dragging":false},{"id":"GoogleDriveComponent-yTOyB","type":"genericNode","position":{"x":893.2669087441287,"y":217.75489044938584},"data":{"type":"GoogleDriveComponent","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"import json\nfrom json.decoder import JSONDecodeError\n\nfrom google.auth.exceptions import RefreshError\nfrom google.oauth2.credentials import Credentials\nfrom langchain_google_community import GoogleDriveLoader\n\nfrom langflow.custom import Component\nfrom langflow.helpers.data import docs_to_data\nfrom langflow.inputs import MessageTextInput\nfrom langflow.io import SecretStrInput\nfrom langflow.schema import Data\nfrom langflow.template import Output\n\n\nclass GoogleDriveComponent(Component):\n display_name = \"Google Drive Loader\"\n description = \"Loads documents from Google Drive using provided credentials.\"\n icon = \"Google\"\n\n inputs = [\n SecretStrInput(\n name=\"json_string\",\n display_name=\"JSON String of the Service Account Token\",\n info=\"JSON string containing OAuth 2.0 access token information for service account access\",\n required=True,\n ),\n MessageTextInput(\n name=\"document_id\", display_name=\"Document ID\", info=\"Single Google Drive document ID\", required=True\n ),\n ]\n\n outputs = [\n Output(display_name=\"Loaded Documents\", name=\"docs\", method=\"load_documents\"),\n ]\n\n def load_documents(self) -> Data:\n class CustomGoogleDriveLoader(GoogleDriveLoader):\n creds: Credentials | None = None\n \"\"\"Credentials object to be passed directly.\"\"\"\n\n def _load_credentials(self):\n \"\"\"Load credentials from the provided creds attribute or fallback to the original method.\"\"\"\n if self.creds:\n return self.creds\n msg = \"No credentials provided.\"\n raise ValueError(msg)\n\n class Config:\n arbitrary_types_allowed = True\n\n json_string = self.json_string\n\n document_ids = [self.document_id]\n if len(document_ids) != 1:\n msg = \"Expected a single document ID\"\n raise ValueError(msg)\n\n # TODO: Add validation to check if the document ID is valid\n\n # Load the token information from the JSON string\n try:\n token_info = json.loads(json_string)\n except JSONDecodeError as e:\n msg = \"Invalid JSON string\"\n raise ValueError(msg) from e\n\n # Initialize the custom loader with the provided credentials and document IDs\n loader = CustomGoogleDriveLoader(\n creds=Credentials.from_authorized_user_info(token_info), document_ids=document_ids\n )\n\n # Load the documents\n try:\n docs = loader.load()\n # catch google.auth.exceptions.RefreshError\n except RefreshError as e:\n msg = \"Authentication error: Unable to refresh authentication token. 
Please try to reauthenticate.\"\n raise ValueError(msg) from e\n except Exception as e:\n msg = f\"Error loading documents: {e}\"\n raise ValueError(msg) from e\n\n assert len(docs) == 1, \"Expected a single document to be loaded.\"\n\n data = docs_to_data(docs)\n # Return the loaded documents\n self.status = data\n return Data(data={\"text\": data})\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"document_id":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":true,"placeholder":"","show":true,"name":"document_id","value":"YOUR-DOCUMENT-ID-HERE","display_name":"Document ID","advanced":false,"input_types":["Message"],"dynamic":false,"info":"Single Google Drive document ID","title_case":false,"type":"str","_input_type":"MessageTextInput"},"json_string":{"load_from_db":false,"required":true,"placeholder":"","show":true,"name":"json_string","value":"","display_name":"JSON String of the Service Account Token","advanced":false,"input_types":["Message"],"dynamic":false,"info":"JSON string containing OAuth 2.0 access token information for service account access","title_case":false,"password":true,"type":"str","_input_type":"SecretStrInput"}},"description":"Loads documents from Google Drive using provided credentials.","icon":"Google","base_classes":["Data"],"display_name":"Google Drive Loader","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Data"],"selected":"Data","name":"docs","display_name":"Loaded Documents","method":"load_documents","value":"__UNDEFINED__","cache":true}],"field_order":["json_string","document_id"],"beta":false,"edited":false,"metadata":{},"lf_version":"1.0.19.post1"},"id":"GoogleDriveComponent-yTOyB"},"selected":false,"width":384,"height":389,"positionAbsolute":{"x":893.2669087441287,"y":217.75489044938584},"dragging":false},{"id":"ParseData-Fo5CI","type":"genericNode","position":{"x":1297.9704666205964,"y":232.42701869317483},"data":{"type":"ParseData","node":{"template":{"_type":"Component","data":{"trace_as_metadata":true,"list":false,"trace_as_input":true,"required":false,"placeholder":"","show":true,"name":"data","value":"","display_name":"Data","advanced":false,"input_types":["Data"],"dynamic":false,"info":"The data to convert to text.","title_case":false,"type":"other","_input_type":"DataInput"},"code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.custom import Component\nfrom langflow.helpers.data import data_to_text\nfrom langflow.io import DataInput, MultilineInput, Output, StrInput\nfrom langflow.schema.message import Message\n\n\nclass ParseDataComponent(Component):\n display_name = \"Parse Data\"\n description = \"Convert Data into plain text following a specified template.\"\n icon = \"braces\"\n name = \"ParseData\"\n\n inputs = [\n DataInput(name=\"data\", display_name=\"Data\", info=\"The data to convert to text.\"),\n MultilineInput(\n name=\"template\",\n display_name=\"Template\",\n info=\"The template to use for formatting the data. 
\"\n \"It can contain the keys {text}, {data} or any other key in the Data.\",\n value=\"{text}\",\n ),\n StrInput(name=\"sep\", display_name=\"Separator\", advanced=True, value=\"\\n\"),\n ]\n\n outputs = [\n Output(display_name=\"Text\", name=\"text\", method=\"parse_data\"),\n ]\n\n def parse_data(self) -> Message:\n data = self.data if isinstance(self.data, list) else [self.data]\n template = self.template\n\n result_string = data_to_text(template, data, sep=self.sep)\n self.status = result_string\n return Message(text=result_string)\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"sep":{"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"sep","value":"\n","display_name":"Separator","advanced":true,"dynamic":false,"info":"","title_case":false,"type":"str","_input_type":"StrInput"},"template":{"trace_as_input":true,"multiline":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"template","value":"{text}","display_name":"Template","advanced":false,"input_types":["Message"],"dynamic":false,"info":"The template to use for formatting the data. It can contain the keys {text}, {data} or any other key in the Data.","title_case":false,"type":"str","_input_type":"MultilineInput"}},"description":"Convert Data into plain text following a specified template.","icon":"braces","base_classes":["Message"],"display_name":"Parse Data","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"text","display_name":"Text","method":"parse_data","value":"__UNDEFINED__","cache":true}],"field_order":["data","template","sep"],"beta":false,"edited":false,"metadata":{},"lf_version":"1.0.19.post1"},"id":"ParseData-Fo5CI"},"selected":false,"width":384,"height":353,"positionAbsolute":{"x":1297.9704666205964,"y":232.42701869317483},"dragging":false},{"id":"OpenAIModel-oE0wj","type":"genericNode","position":{"x":2133.8172349265606,"y":182.2210159995695},"data":{"type":"OpenAIModel","node":{"template":{"_type":"Component","output_parser":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"name":"output_parser","value":"","display_name":"Output Parser","advanced":true,"input_types":["OutputParser"],"dynamic":false,"info":"The parser to use to parse the output of the model","title_case":false,"type":"other","_input_type":"HandleInput"},"api_key":{"load_from_db":false,"required":false,"placeholder":"","show":true,"name":"api_key","value":"","display_name":"OpenAI API Key","advanced":false,"input_types":["Message"],"dynamic":false,"info":"The OpenAI API Key to use for the OpenAI model.","title_case":false,"password":true,"type":"str","_input_type":"SecretStrInput"},"code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"import operator\nfrom functools import reduce\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n SecretStrInput,\n StrInput,\n)\nfrom 
langflow.inputs.inputs import HandleInput\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = [\n *LCModelComponent._base_inputs,\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. \"\n \"You must pass the word JSON in the prompt. \"\n \"If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. \"\n \"Defaults to https://api.openai.com/v1. \"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n HandleInput(\n name=\"output_parser\",\n display_name=\"Output Parser\",\n info=\"The parser to use to parse the output of the model\",\n advanced=True,\n input_types=[\"OutputParser\"],\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n api_key = SecretStr(openai_api_key) if openai_api_key else None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature if temperature is not None else 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\")\n else:\n output = output.bind(response_format={\"type\": \"json_object\"})\n\n return output\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return None\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\")\n if message:\n return message\n return 
None\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"input_value":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"input_value","value":"","display_name":"Input","advanced":false,"input_types":["Message"],"dynamic":false,"info":"","title_case":false,"type":"str","_input_type":"MessageInput"},"json_mode":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"name":"json_mode","value":false,"display_name":"JSON Mode","advanced":true,"dynamic":false,"info":"If True, it will output JSON regardless of passing a schema.","title_case":false,"type":"bool","_input_type":"BoolInput"},"max_tokens":{"trace_as_metadata":true,"range_spec":{"step_type":"float","min":0,"max":128000,"step":0.1},"list":false,"required":false,"placeholder":"","show":true,"name":"max_tokens","value":"","display_name":"Max Tokens","advanced":true,"dynamic":false,"info":"The maximum number of tokens to generate. Set to 0 for unlimited tokens.","title_case":false,"type":"int","_input_type":"IntInput"},"model_kwargs":{"trace_as_input":true,"list":false,"required":false,"placeholder":"","show":true,"name":"model_kwargs","value":{},"display_name":"Model Kwargs","advanced":true,"dynamic":false,"info":"","title_case":false,"type":"dict","_input_type":"DictInput"},"model_name":{"trace_as_metadata":true,"options":["gpt-4o-mini","gpt-4o","gpt-4-turbo","gpt-4-turbo-preview","gpt-4","gpt-3.5-turbo","gpt-3.5-turbo-0125"],"combobox":false,"required":false,"placeholder":"","show":true,"name":"model_name","value":"gpt-4o-mini","display_name":"Model Name","advanced":false,"dynamic":false,"info":"","title_case":false,"type":"str","_input_type":"DropdownInput"},"openai_api_base":{"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"openai_api_base","value":"","display_name":"OpenAI API Base","advanced":true,"dynamic":false,"info":"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.","title_case":false,"type":"str","_input_type":"StrInput"},"output_schema":{"trace_as_input":true,"list":true,"required":false,"placeholder":"","show":true,"name":"output_schema","value":{},"display_name":"Schema","advanced":true,"dynamic":false,"info":"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.","title_case":false,"type":"dict","_input_type":"DictInput"},"seed":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"name":"seed","value":1,"display_name":"Seed","advanced":true,"dynamic":false,"info":"The seed controls the reproducibility of the job.","title_case":false,"type":"int","_input_type":"IntInput"},"stream":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"name":"stream","value":false,"display_name":"Stream","advanced":true,"dynamic":false,"info":"Stream the response from the model. 
Streaming works only in Chat.","title_case":false,"type":"bool","_input_type":"BoolInput"},"system_message":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"system_message","value":"","display_name":"System Message","advanced":true,"input_types":["Message"],"dynamic":false,"info":"System message to pass to the model.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"temperature":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"name":"temperature","value":0.1,"display_name":"Temperature","advanced":false,"dynamic":false,"info":"","title_case":false,"type":"float","_input_type":"FloatInput"}},"description":"Generates text using OpenAI LLMs.","icon":"OpenAI","base_classes":["LanguageModel","Message"],"display_name":"OpenAI","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"text_output","display_name":"Text","method":"text_response","value":"__UNDEFINED__","cache":true,"required_inputs":["input_value","stream","system_message"]},{"types":["LanguageModel"],"selected":"LanguageModel","name":"model_output","display_name":"Language Model","method":"build_model","value":"__UNDEFINED__","cache":true,"required_inputs":["api_key","json_mode","max_tokens","model_kwargs","model_name","openai_api_base","output_schema","seed","temperature"]}],"field_order":["input_value","system_message","stream","max_tokens","model_kwargs","json_mode","output_schema","model_name","openai_api_base","api_key","temperature","seed","output_parser"],"beta":false,"edited":false,"metadata":{},"lf_version":"1.0.19.post1"},"id":"OpenAIModel-oE0wj"},"selected":false,"width":384,"height":587,"positionAbsolute":{"x":2133.8172349265606,"y":182.2210159995695},"dragging":false},{"id":"Prompt-6GBpW","type":"genericNode","position":{"x":1716.4755993278068,"y":221.09946248488012},"data":{"type":"Prompt","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.inputs.inputs import DefaultPromptField\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(\n self,\n ) -> Message:\n prompt = await Message.from_template_and_variables(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def _update_template(self, frontend_node: dict):\n prompt_template = frontend_node[\"template\"][\"template\"][\"value\"]\n custom_fields = frontend_node[\"custom_fields\"]\n frontend_node_template = frontend_node[\"template\"]\n _ = process_prompt_template(\n template=prompt_template,\n name=\"template\",\n custom_fields=custom_fields,\n frontend_node_template=frontend_node_template,\n )\n return frontend_node\n\n def post_code_processing(self, new_frontend_node: dict, 
current_frontend_node: dict):\n \"\"\"\n This function is called after the code validation is done.\n \"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n # Kept it duplicated for backwards compatibility\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n\n def _get_fallback_input(self, **kwargs):\n return DefaultPromptField(**kwargs)\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"template":{"trace_as_input":true,"list":false,"required":false,"placeholder":"","show":true,"name":"template","value":"{context}\n\nTranslate the text you receive into Spanish!","display_name":"Template","advanced":false,"dynamic":false,"info":"","title_case":false,"type":"prompt","_input_type":"PromptInput"},"context":{"field_type":"str","required":false,"placeholder":"","list":false,"show":true,"multiline":true,"value":"","fileTypes":[],"file_path":"","name":"context","display_name":"context","advanced":false,"input_types":["Message","Text"],"dynamic":false,"info":"","load_from_db":false,"title_case":false,"type":"str"}},"description":"Create a prompt template with dynamic variables.","icon":"prompts","is_input":null,"is_output":null,"is_composition":null,"base_classes":["Message"],"name":"","display_name":"Prompt","documentation":"","custom_fields":{"template":["context"]},"output_types":[],"full_path":null,"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"prompt","hidden":null,"display_name":"Prompt Message","method":"build_prompt","value":"__UNDEFINED__","cache":true,"required_inputs":null}],"field_order":["template"],"beta":false,"error":null,"edited":false,"metadata":{},"lf_version":"1.0.19.post1"},"id":"Prompt-6GBpW"},"selected":false,"width":384,"height":391,"positionAbsolute":{"x":1716.4755993278068,"y":221.09946248488012},"dragging":false},{"id":"ChatOutput-CJyAq","type":"genericNode","position":{"x":2546.1883300096442,"y":346.06258013037495},"data":{"type":"ChatOutput","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n 
DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"data_template":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"data_template","value":"{text}","display_name":"Data Template","advanced":true,"input_types":["Message"],"dynamic":false,"info":"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"input_value":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"input_value","value":"","display_name":"Text","advanced":false,"input_types":["Message"],"dynamic":false,"info":"Message to be passed as output.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"sender":{"trace_as_metadata":true,"options":["Machine","User"],"combobox":false,"required":false,"placeholder":"","show":true,"name":"sender","value":"Machine","display_name":"Sender Type","advanced":true,"dynamic":false,"info":"Type of sender.","title_case":false,"type":"str","_input_type":"DropdownInput"},"sender_name":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"sender_name","value":"AI","display_name":"Sender Name","advanced":true,"input_types":["Message"],"dynamic":false,"info":"Name of the sender.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"session_id":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"session_id","value":"","display_name":"Session ID","advanced":true,"input_types":["Message"],"dynamic":false,"info":"The session ID of the chat. 
If empty, the current session ID parameter will be used.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"should_store_message":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"name":"should_store_message","value":true,"display_name":"Store Messages","advanced":true,"dynamic":false,"info":"Store the message in the history.","title_case":false,"type":"bool","_input_type":"BoolInput"}},"description":"Display a chat message in the Playground.","icon":"ChatOutput","base_classes":["Message"],"display_name":"Chat Output","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"message","display_name":"Message","method":"message_response","value":"__UNDEFINED__","cache":true}],"field_order":["input_value","should_store_message","sender","sender_name","session_id","data_template"],"beta":false,"edited":false,"metadata":{},"lf_version":"1.0.19.post1"},"id":"ChatOutput-CJyAq"},"selected":false,"width":384,"height":289,"positionAbsolute":{"x":2546.1883300096442,"y":346.06258013037495},"dragging":false},{"id":"note-5pHk3","type":"noteNode","position":{"x":-606.6654615534108,"y":34.012614295538015},"data":{"node":{"description":"**Google Drive Example Scopes**\n\n**Langflow - OAuth Integration Documentation**\n\n```\ndocs.langflow.org/integrations-setup-google-oauth-langflow\n```\n\n**Drive API Documentation**\n\nhttps://developers.google.com/drive/api/guides/api-specific-auth\n\n**Scope Used in This Example**\n\nPermission to view and download all your Drive files.\n\n```\nhttps://www.googleapis.com/auth/drive.readonly\n```\n\n**Example of How to Enter Scopes**\n\n```\nhttps://www.googleapis.com/auth/drive.apps.readonly, https://www.googleapis.com/auth/drive, https://www.googleapis.com/auth/drive.readonly, https://www.googleapis.com/auth/drive.activity\n```","display_name":"","documentation":"","template":{"backgroundColor":"indigo"}},"type":"note","id":"note-5pHk3"},"width":600,"height":631,"selected":false,"dragging":false,"positionAbsolute":{"x":-606.6654615534108,"y":34.012614295538015},"style":{"width":600,"height":631},"resizing":false},{"id":"GoogleOAuthJSONToken-wqXNt","type":"genericNode","position":{"x":9.33468520106726,"y":206.4834360253384},"data":{"type":"GoogleOAuthToken","node":{"template":{"_type":"Component","oauth_credentials":{"trace_as_metadata":true,"file_path":"","fileTypes":["json"],"list":false,"required":true,"placeholder":"","show":true,"name":"oauth_credentials","value":"","display_name":"Credentials File","advanced":false,"dynamic":false,"info":"Input OAuth Credentials file. (e.g. 
credentials.json)","title_case":false,"type":"file","_input_type":"FileInput","load_from_db":false},"code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.custom import Component\nfrom langflow.io import FileInput, Output\nfrom langflow.schema import Data\nfrom google_auth_oauthlib.flow import InstalledAppFlow\nfrom google.oauth2.credentials import Credentials\nfrom google.auth.transport.requests import Request \nimport json\nimport os\nimport re\n\n\nclass GoogleOAuthToken(Component):\n display_name = \"Google OAuth Token \"\n description = \"A component to generate a json string containing your Google OAuth token.\"\n documentation: str = \"https://developers.google.com/identity/protocols/oauth2/web-server?hl=pt-br#python_1\"\n icon = \"Google\"\n name = \"GoogleOAuthToken\"\n\n inputs = [\n StrInput(\n name=\"scopes\",\n display_name=\"Scopes\",\n info=\"Input a comma-separated list of scopes with the permissions required for your application.\",\n required=True\n ),\n FileInput(\n name=\"oauth_credentials\",\n display_name=\"Credentials File\",\n info=\"Input OAuth Credentials file. (e.g. credentials.json)\",\n file_types=[\"json\"],\n required=True\n ),\n ]\n\n outputs = [\n Output(display_name=\"Output\", name=\"output\", method=\"build_output\"),\n ]\n\n def validate_scopes(self, scopes):\n pattern = (\n r\"^(https:\\/\\/(www\\.googleapis\\.com\\/auth\\/[\\w\\.\\-]+\"\n r\"|mail\\.google\\.com\\/\"\n r\"|www\\.google\\.com\\/calendar\\/feeds\"\n r\"|www\\.google\\.com\\/m8\\/feeds))\"\n r\"(,\\s*https:\\/\\/(www\\.googleapis\\.com\\/auth\\/[\\w\\.\\-]+\"\n r\"|mail\\.google\\.com\\/\"\n r\"|www\\.google\\.com\\/calendar\\/feeds\"\n r\"|www\\.google\\.com\\/m8\\/feeds))*$\"\n )\n if not re.match(pattern, scopes):\n raise ValueError(\n \"Invalid format for scopes. Please ensure scopes are comma-separated, without quotes, and without extra characters. Also, check if each URL is correct.\"\n )\n\n def build_output(self) -> Data:\n self.validate_scopes(self.scopes)\n\n\n user_scopes = [scope.strip() for scope in self.scopes.split(',')]\n if self.scopes:\n SCOPES = user_scopes \n else:\n raise ValueError(\"Incorrect Scope, check if you filled in the scopes field correctly!\")\n\n creds = None\n token_path = 'token.json' \n\n if os.path.exists(token_path):\n with open(token_path, 'r') as token_file:\n creds = Credentials.from_authorized_user_file(token_path, SCOPES)\n\n if not creds or not creds.valid:\n if creds and creds.expired and creds.refresh_token:\n creds.refresh(Request())\n else:\n if self.oauth_credentials:\n CLIENT_SECRET_FILE = self.oauth_credentials\n else:\n raise ValueError(\"Oauth 2.0 Credentials file not provided. (e.g. 
the credentials.json)\")\n \n flow = InstalledAppFlow.from_client_secrets_file(CLIENT_SECRET_FILE, SCOPES)\n creds = flow.run_local_server(port=0)\n\n with open(token_path, 'w') as token_file:\n token_file.write(creds.to_json())\n\n return creds.to_json()\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"scopes":{"trace_as_metadata":true,"load_from_db":false,"list":false,"required":true,"placeholder":"","show":true,"name":"scopes","value":"https://www.googleapis.com/auth/drive.readonly","display_name":"Scopes","advanced":false,"dynamic":false,"info":"Input a comma-separated list of scopes with the permissions required for your application.","title_case":false,"type":"str","_input_type":"StrInput"}},"description":"A component to generate a json string containing your Google OAuth token.","icon":"Google","base_classes":["Data"],"display_name":"Google OAuth Token","documentation":"https://developers.google.com/identity/protocols/oauth2/web-server?hl=pt-br#python_1","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Data"],"selected":"Data","name":"output","display_name":"Output","method":"build_output","value":"__UNDEFINED__","cache":true}],"field_order":["scopes","oauth_credentials"],"beta":false,"edited":true,"metadata":{}},"id":"GoogleOAuthJSONToken-wqXNt"},"selected":true,"width":384,"height":391,"positionAbsolute":{"x":9.33468520106726,"y":206.4834360253384},"dragging":false}],"edges":[{"source":"ParseData-U2bvS","sourceHandle":"{œdataTypeœ:œParseDataœ,œidœ:œParseData-U2bvSœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}","target":"GoogleDriveComponent-yTOyB","targetHandle":"{œfieldNameœ:œjson_stringœ,œidœ:œGoogleDriveComponent-yTOyBœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","data":{"targetHandle":{"fieldName":"json_string","id":"GoogleDriveComponent-yTOyB","inputTypes":["Message"],"type":"str"},"sourceHandle":{"dataType":"ParseData","id":"ParseData-U2bvS","name":"text","output_types":["Message"]}},"id":"reactflow__edge-ParseData-U2bvS{œdataTypeœ:œParseDataœ,œidœ:œParseData-U2bvSœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-GoogleDriveComponent-yTOyB{œfieldNameœ:œjson_stringœ,œidœ:œGoogleDriveComponent-yTOyBœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","animated":false,"className":""},{"source":"GoogleDriveComponent-yTOyB","sourceHandle":"{œdataTypeœ:œGoogleDriveComponentœ,œidœ:œGoogleDriveComponent-yTOyBœ,œnameœ:œdocsœ,œoutput_typesœ:[œDataœ]}","target":"ParseData-Fo5CI","targetHandle":"{œfieldNameœ:œdataœ,œidœ:œParseData-Fo5CIœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}","data":{"targetHandle":{"fieldName":"data","id":"ParseData-Fo5CI","inputTypes":["Data"],"type":"other"},"sourceHandle":{"dataType":"GoogleDriveComponent","id":"GoogleDriveComponent-yTOyB","name":"docs","output_types":["Data"]}},"id":"reactflow__edge-GoogleDriveComponent-yTOyB{œdataTypeœ:œGoogleDriveComponentœ,œidœ:œGoogleDriveComponent-yTOyBœ,œnameœ:œdocsœ,œoutput_typesœ:[œDataœ]}-ParseData-Fo5CI{œfieldNameœ:œdataœ,œidœ:œParseData-Fo5CIœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}","animated":false,"className":""},{"source":"Prompt-6GBpW","sourceHandle":"{œdataTypeœ:œPromptœ,œidœ:œPrompt-6GBpWœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}","target":"OpenAIModel-oE0wj","targetHandle":"{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-oE0wjœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","data":{"targetHandle":{"fieldName":"input_value","id":"OpenAIModel-oE0wj","inputTypes":["Message"],"type":"str"},"s
ourceHandle":{"dataType":"Prompt","id":"Prompt-6GBpW","name":"prompt","output_types":["Message"]}},"id":"reactflow__edge-Prompt-6GBpW{œdataTypeœ:œPromptœ,œidœ:œPrompt-6GBpWœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-oE0wj{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-oE0wjœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","animated":false,"className":""},{"source":"ParseData-Fo5CI","sourceHandle":"{œdataTypeœ:œParseDataœ,œidœ:œParseData-Fo5CIœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}","target":"Prompt-6GBpW","targetHandle":"{œfieldNameœ:œcontextœ,œidœ:œPrompt-6GBpWœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}","data":{"targetHandle":{"fieldName":"context","id":"Prompt-6GBpW","inputTypes":["Message","Text"],"type":"str"},"sourceHandle":{"dataType":"ParseData","id":"ParseData-Fo5CI","name":"text","output_types":["Message"]}},"id":"reactflow__edge-ParseData-Fo5CI{œdataTypeœ:œParseDataœ,œidœ:œParseData-Fo5CIœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-Prompt-6GBpW{œfieldNameœ:œcontextœ,œidœ:œPrompt-6GBpWœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}","animated":false,"className":""},{"source":"OpenAIModel-oE0wj","sourceHandle":"{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-oE0wjœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}","target":"ChatOutput-CJyAq","targetHandle":"{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-CJyAqœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","data":{"targetHandle":{"fieldName":"input_value","id":"ChatOutput-CJyAq","inputTypes":["Message"],"type":"str"},"sourceHandle":{"dataType":"OpenAIModel","id":"OpenAIModel-oE0wj","name":"text_output","output_types":["Message"]}},"id":"reactflow__edge-OpenAIModel-oE0wj{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-oE0wjœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-CJyAq{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-CJyAqœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","animated":false,"className":""},{"source":"GoogleOAuthJSONToken-wqXNt","sourceHandle":"{œdataTypeœ:œGoogleOAuthTokenœ,œidœ:œGoogleOAuthJSONToken-wqXNtœ,œnameœ:œoutputœ,œoutput_typesœ:[œDataœ]}","target":"ParseData-U2bvS","targetHandle":"{œfieldNameœ:œdataœ,œidœ:œParseData-U2bvSœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}","data":{"targetHandle":{"fieldName":"data","id":"ParseData-U2bvS","inputTypes":["Data"],"type":"other"},"sourceHandle":{"dataType":"GoogleOAuthToken","id":"GoogleOAuthJSONToken-wqXNt","name":"output","output_types":["Data"]}},"id":"reactflow__edge-GoogleOAuthJSONToken-wqXNt{œdataTypeœ:œGoogleOAuthTokenœ,œidœ:œGoogleOAuthJSONToken-wqXNtœ,œnameœ:œoutputœ,œoutput_typesœ:[œDataœ]}-ParseData-U2bvS{œfieldNameœ:œdataœ,œidœ:œParseData-U2bvSœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}","animated":false,"className":""}],"viewport":{"x":362.3094941500808,"y":289.9558489915596,"zoom":0.4338730252876207}},"description":"An example of a flow that connects to Google Drive to access a text document, reads the content, translates it into the desired language, and returns the translated text in the chat, allowing for quick and efficient automation of the Google Docs file translation process.","name":"Google Drive Docs Translations Example","last_tested_version":"1.0.19.post1","endpoint_name":"google_drive_docs_example","is_component":false} \ No newline at end of file diff --git a/docs/static/img/google/create-a-google-cloud-project.gif b/docs/static/img/google/create-a-google-cloud-project.gif new file mode 100644 index 000000000000..177225a6a33f Binary files /dev/null and b/docs/static/img/google/create-a-google-cloud-project.gif differ diff --git 
a/docs/static/img/google/create-oauth-client-id.png b/docs/static/img/google/create-oauth-client-id.png new file mode 100644 index 000000000000..380e6bee2094 Binary files /dev/null and b/docs/static/img/google/create-oauth-client-id.png differ diff --git a/docs/static/img/google/setup-oauth-consent-screen.png b/docs/static/img/google/setup-oauth-consent-screen.png new file mode 100644 index 000000000000..68bc4e600a3d Binary files /dev/null and b/docs/static/img/google/setup-oauth-consent-screen.png differ diff --git a/docs/yarn.lock b/docs/yarn.lock index 44d04f436a3f..436163a33f93 100644 --- a/docs/yarn.lock +++ b/docs/yarn.lock @@ -176,11 +176,25 @@ "@babel/highlight" "^7.24.7" picocolors "^1.0.0" +"@babel/code-frame@^7.25.9", "@babel/code-frame@^7.26.0": + version "7.26.2" + resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.26.2.tgz#4b5fab97d33338eff916235055f0ebc21e573a85" + integrity sha512-RJlIHRueQgwWitWgF8OdFYGZX328Ax5BCemNGlqHfplnRT9ESi8JkFlvaVYbS+UubVY6dpv87Fs2u5M29iNFVQ== + dependencies: + "@babel/helper-validator-identifier" "^7.25.9" + js-tokens "^4.0.0" + picocolors "^1.0.0" + "@babel/compat-data@^7.22.6", "@babel/compat-data@^7.24.8": version "7.24.8" resolved "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.24.8.tgz" integrity sha512-c4IM7OTg6k1Q+AJ153e2mc2QVTezTwnb4VzquwcyiEzGnW0Kedv4do/TrkU98qPeC5LNiMt/QXwIjzYXLBpyZg== +"@babel/compat-data@^7.25.9", "@babel/compat-data@^7.26.0": + version "7.26.2" + resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.26.2.tgz#278b6b13664557de95b8f35b90d96785850bb56e" + integrity sha512-Z0WgzSEa+aUcdiJuCIqgujCshpMWgUpgOxXotrYPSA53hA3qopNaqcJpyr0hVb1FeWdnqFA35/fUtXgBK8srQg== + "@babel/core@^7.21.3", "@babel/core@^7.23.3": version "7.24.8" resolved "https://registry.npmjs.org/@babel/core/-/core-7.24.8.tgz" @@ -202,6 +216,27 @@ json5 "^2.2.3" semver "^6.3.1" +"@babel/core@^7.25.9": + version "7.26.0" + resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.26.0.tgz#d78b6023cc8f3114ccf049eb219613f74a747b40" + integrity sha512-i1SLeK+DzNnQ3LL/CswPCa/E5u4lh1k6IAEphON8F+cXt0t9euTshDru0q7/IqMa1PMPz5RnHuHscF8/ZJsStg== + dependencies: + "@ampproject/remapping" "^2.2.0" + "@babel/code-frame" "^7.26.0" + "@babel/generator" "^7.26.0" + "@babel/helper-compilation-targets" "^7.25.9" + "@babel/helper-module-transforms" "^7.26.0" + "@babel/helpers" "^7.26.0" + "@babel/parser" "^7.26.0" + "@babel/template" "^7.25.9" + "@babel/traverse" "^7.25.9" + "@babel/types" "^7.26.0" + convert-source-map "^2.0.0" + debug "^4.1.0" + gensync "^1.0.0-beta.2" + json5 "^2.2.3" + semver "^6.3.1" + "@babel/generator@^7.23.3", "@babel/generator@^7.24.8": version "7.24.8" resolved "https://registry.npmjs.org/@babel/generator/-/generator-7.24.8.tgz" @@ -212,6 +247,17 @@ "@jridgewell/trace-mapping" "^0.3.25" jsesc "^2.5.1" +"@babel/generator@^7.25.9", "@babel/generator@^7.26.0": + version "7.26.2" + resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.26.2.tgz#87b75813bec87916210e5e01939a4c823d6bb74f" + integrity sha512-zevQbhbau95nkoxSq3f/DC/SC+EEOUZd3DYqfSkMhY2/wfSeaHV1Ew4vk8e+x8lja31IbyuUa2uQ3JONqKbysw== + dependencies: + "@babel/parser" "^7.26.2" + "@babel/types" "^7.26.0" + "@jridgewell/gen-mapping" "^0.3.5" + "@jridgewell/trace-mapping" "^0.3.25" + jsesc "^3.0.2" + "@babel/helper-annotate-as-pure@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.24.7.tgz" @@ -219,6 +265,13 @@ dependencies: "@babel/types" "^7.24.7" 
+"@babel/helper-annotate-as-pure@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.25.9.tgz#d8eac4d2dc0d7b6e11fa6e535332e0d3184f06b4" + integrity sha512-gv7320KBUFJz1RnylIg5WWYPRXKZ884AGkYpgpWW02TH66Dl+HaC1t1CKd0z3R4b6hdYEcmrNZHUmfCP+1u3/g== + dependencies: + "@babel/types" "^7.25.9" + "@babel/helper-builder-binary-assignment-operator-visitor@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.24.7.tgz" @@ -227,6 +280,14 @@ "@babel/traverse" "^7.24.7" "@babel/types" "^7.24.7" +"@babel/helper-builder-binary-assignment-operator-visitor@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.25.9.tgz#f41752fe772a578e67286e6779a68a5a92de1ee9" + integrity sha512-C47lC7LIDCnz0h4vai/tpNOI95tCd5ZT3iBt/DBH5lXKHZsyNQv18yf1wIIg2ntiQNgmAvA+DgZ82iW8Qdym8g== + dependencies: + "@babel/traverse" "^7.25.9" + "@babel/types" "^7.25.9" + "@babel/helper-compilation-targets@^7.22.6", "@babel/helper-compilation-targets@^7.24.7", "@babel/helper-compilation-targets@^7.24.8": version "7.24.8" resolved "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.24.8.tgz" @@ -238,6 +299,17 @@ lru-cache "^5.1.1" semver "^6.3.1" +"@babel/helper-compilation-targets@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.25.9.tgz#55af025ce365be3cdc0c1c1e56c6af617ce88875" + integrity sha512-j9Db8Suy6yV/VHa4qzrj9yZfZxhLWQdVnRlXxmKLYlhWUVB1sB2G5sxuWYXk/whHD9iW76PmNzxZ4UCnTQTVEQ== + dependencies: + "@babel/compat-data" "^7.25.9" + "@babel/helper-validator-option" "^7.25.9" + browserslist "^4.24.0" + lru-cache "^5.1.1" + semver "^6.3.1" + "@babel/helper-create-class-features-plugin@^7.24.7", "@babel/helper-create-class-features-plugin@^7.24.8": version "7.24.8" resolved "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.24.8.tgz" @@ -253,6 +325,19 @@ "@babel/helper-split-export-declaration" "^7.24.7" semver "^6.3.1" +"@babel/helper-create-class-features-plugin@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.25.9.tgz#7644147706bb90ff613297d49ed5266bde729f83" + integrity sha512-UTZQMvt0d/rSz6KI+qdu7GQze5TIajwTS++GUozlw8VBJDEOAqSXwm1WvmYEZwqdqSGQshRocPDqrt4HBZB3fQ== + dependencies: + "@babel/helper-annotate-as-pure" "^7.25.9" + "@babel/helper-member-expression-to-functions" "^7.25.9" + "@babel/helper-optimise-call-expression" "^7.25.9" + "@babel/helper-replace-supers" "^7.25.9" + "@babel/helper-skip-transparent-expression-wrappers" "^7.25.9" + "@babel/traverse" "^7.25.9" + semver "^6.3.1" + "@babel/helper-create-regexp-features-plugin@^7.18.6", "@babel/helper-create-regexp-features-plugin@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.24.7.tgz" @@ -262,6 +347,15 @@ regexpu-core "^5.3.1" semver "^6.3.1" +"@babel/helper-create-regexp-features-plugin@^7.25.9": + version "7.25.9" + resolved 
"https://registry.yarnpkg.com/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.25.9.tgz#3e8999db94728ad2b2458d7a470e7770b7764e26" + integrity sha512-ORPNZ3h6ZRkOyAa/SaHU+XsLZr0UQzRwuDQ0cczIA17nAzZ+85G5cVkOJIj7QavLZGSe8QXUmNFxSZzjcZF9bw== + dependencies: + "@babel/helper-annotate-as-pure" "^7.25.9" + regexpu-core "^6.1.1" + semver "^6.3.1" + "@babel/helper-define-polyfill-provider@^0.6.1", "@babel/helper-define-polyfill-provider@^0.6.2": version "0.6.2" resolved "https://registry.npmjs.org/@babel/helper-define-polyfill-provider/-/helper-define-polyfill-provider-0.6.2.tgz" @@ -303,6 +397,14 @@ "@babel/traverse" "^7.24.8" "@babel/types" "^7.24.8" +"@babel/helper-member-expression-to-functions@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.25.9.tgz#9dfffe46f727005a5ea29051ac835fb735e4c1a3" + integrity sha512-wbfdZ9w5vk0C0oyHqAJbc62+vet5prjj01jjJ8sKn3j9h3MQQlflEdXYvuqRWjHnM12coDEqiC1IRCi0U/EKwQ== + dependencies: + "@babel/traverse" "^7.25.9" + "@babel/types" "^7.25.9" + "@babel/helper-module-imports@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.24.7.tgz" @@ -311,6 +413,14 @@ "@babel/traverse" "^7.24.7" "@babel/types" "^7.24.7" +"@babel/helper-module-imports@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.25.9.tgz#e7f8d20602ebdbf9ebbea0a0751fb0f2a4141715" + integrity sha512-tnUA4RsrmflIM6W6RFTLFSXITtl0wKjgpnLgXyowocVPrbYrLUXSBXDgTs8BlbmIzIdlBySRQjINYs2BAkiLtw== + dependencies: + "@babel/traverse" "^7.25.9" + "@babel/types" "^7.25.9" + "@babel/helper-module-transforms@^7.24.7", "@babel/helper-module-transforms@^7.24.8": version "7.24.8" resolved "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.24.8.tgz" @@ -322,6 +432,15 @@ "@babel/helper-split-export-declaration" "^7.24.7" "@babel/helper-validator-identifier" "^7.24.7" +"@babel/helper-module-transforms@^7.25.9", "@babel/helper-module-transforms@^7.26.0": + version "7.26.0" + resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.26.0.tgz#8ce54ec9d592695e58d84cd884b7b5c6a2fdeeae" + integrity sha512-xO+xu6B5K2czEnQye6BHA7DolFFmS3LB7stHZFaOLb1pAwO1HWLS8fXA+eh0A2yIvltPVmx3eNNDBJA2SLHXFw== + dependencies: + "@babel/helper-module-imports" "^7.25.9" + "@babel/helper-validator-identifier" "^7.25.9" + "@babel/traverse" "^7.25.9" + "@babel/helper-optimise-call-expression@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.24.7.tgz" @@ -329,11 +448,23 @@ dependencies: "@babel/types" "^7.24.7" +"@babel/helper-optimise-call-expression@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.25.9.tgz#3324ae50bae7e2ab3c33f60c9a877b6a0146b54e" + integrity sha512-FIpuNaz5ow8VyrYcnXQTDRGvV6tTjkNtCK/RYNDXGSLlUD6cBuQTSw43CShGxjvfBTfcUA/r6UhUCbtYqkhcuQ== + dependencies: + "@babel/types" "^7.25.9" + "@babel/helper-plugin-utils@^7.0.0", "@babel/helper-plugin-utils@^7.10.4", "@babel/helper-plugin-utils@^7.12.13", "@babel/helper-plugin-utils@^7.14.5", "@babel/helper-plugin-utils@^7.18.6", "@babel/helper-plugin-utils@^7.22.5", "@babel/helper-plugin-utils@^7.24.7", "@babel/helper-plugin-utils@^7.24.8", 
"@babel/helper-plugin-utils@^7.8.0", "@babel/helper-plugin-utils@^7.8.3": version "7.24.8" resolved "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.24.8.tgz" integrity sha512-FFWx5142D8h2Mgr/iPVGH5G7w6jDn4jUSpZTyDnQO0Yn7Ks2Kuz6Pci8H6MPCoUJegd/UZQ3tAvfLCxQSnWWwg== +"@babel/helper-plugin-utils@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.25.9.tgz#9cbdd63a9443a2c92a725cca7ebca12cc8dd9f46" + integrity sha512-kSMlyUVdWe25rEsRGviIgOWnoT/nfABVWlqt9N19/dIPWViAOW2s9wznP5tURbs/IDuNk4gPy3YdYRgH3uxhBw== + "@babel/helper-remap-async-to-generator@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.24.7.tgz" @@ -343,6 +474,15 @@ "@babel/helper-environment-visitor" "^7.24.7" "@babel/helper-wrap-function" "^7.24.7" +"@babel/helper-remap-async-to-generator@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.25.9.tgz#e53956ab3d5b9fb88be04b3e2f31b523afd34b92" + integrity sha512-IZtukuUeBbhgOcaW2s06OXTzVNJR0ybm4W5xC1opWFFJMZbwRj5LCk+ByYH7WdZPZTt8KnFwA8pvjN2yqcPlgw== + dependencies: + "@babel/helper-annotate-as-pure" "^7.25.9" + "@babel/helper-wrap-function" "^7.25.9" + "@babel/traverse" "^7.25.9" + "@babel/helper-replace-supers@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.24.7.tgz" @@ -352,6 +492,15 @@ "@babel/helper-member-expression-to-functions" "^7.24.7" "@babel/helper-optimise-call-expression" "^7.24.7" +"@babel/helper-replace-supers@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/helper-replace-supers/-/helper-replace-supers-7.25.9.tgz#ba447224798c3da3f8713fc272b145e33da6a5c5" + integrity sha512-IiDqTOTBQy0sWyeXyGSC5TBJpGFXBkRynjBeXsvbhQFKj2viwJC76Epz35YLU1fpe/Am6Vppb7W7zM4fPQzLsQ== + dependencies: + "@babel/helper-member-expression-to-functions" "^7.25.9" + "@babel/helper-optimise-call-expression" "^7.25.9" + "@babel/traverse" "^7.25.9" + "@babel/helper-simple-access@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.24.7.tgz" @@ -360,6 +509,14 @@ "@babel/traverse" "^7.24.7" "@babel/types" "^7.24.7" +"@babel/helper-simple-access@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.25.9.tgz#6d51783299884a2c74618d6ef0f86820ec2e7739" + integrity sha512-c6WHXuiaRsJTyHYLJV75t9IqsmTbItYfdj99PnzYGQZkYKvan5/2jKJ7gu31J3/BJ/A18grImSPModuyG/Eo0Q== + dependencies: + "@babel/traverse" "^7.25.9" + "@babel/types" "^7.25.9" + "@babel/helper-skip-transparent-expression-wrappers@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.24.7.tgz" @@ -368,6 +525,14 @@ "@babel/traverse" "^7.24.7" "@babel/types" "^7.24.7" +"@babel/helper-skip-transparent-expression-wrappers@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.25.9.tgz#0b2e1b62d560d6b1954893fd2b705dc17c91f0c9" + integrity sha512-K4Du3BFa3gvyhzgPcntrkDgZzQaq6uozzcpGbOO1OEJaI+EJdqWIMTLgFgQf6lrfiDFo5FU+BxKepI9RmZqahA== + dependencies: + "@babel/traverse" "^7.25.9" + "@babel/types" "^7.25.9" + 
"@babel/helper-split-export-declaration@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.24.7.tgz" @@ -380,16 +545,31 @@ resolved "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.24.8.tgz" integrity sha512-pO9KhhRcuUyGnJWwyEgnRJTSIZHiT+vMD0kPeD+so0l7mxkMT19g3pjY9GTnHySck/hDzq+dtW/4VgnMkippsQ== +"@babel/helper-string-parser@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/helper-string-parser/-/helper-string-parser-7.25.9.tgz#1aabb72ee72ed35789b4bbcad3ca2862ce614e8c" + integrity sha512-4A/SCr/2KLd5jrtOMFzaKjVtAei3+2r/NChoBNoZ3EyP/+GlhoaEGoWOZUmFmoITP7zOJyHIMm+DYRd8o3PvHA== + "@babel/helper-validator-identifier@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.24.7.tgz" integrity sha512-rR+PBcQ1SMQDDyF6X0wxtG8QyLCgUB0eRAGguqRLfkCA87l7yAP7ehq8SNj96OOGTO8OBV70KhuFYcIkHXOg0w== +"@babel/helper-validator-identifier@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.25.9.tgz#24b64e2c3ec7cd3b3c547729b8d16871f22cbdc7" + integrity sha512-Ed61U6XJc3CVRfkERJWDz4dJwKe7iLmmJsbOGu9wSloNSFttHV0I8g6UAgb7qnK5ly5bGLPd4oXZlxCdANBOWQ== + "@babel/helper-validator-option@^7.24.7", "@babel/helper-validator-option@^7.24.8": version "7.24.8" resolved "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.24.8.tgz" integrity sha512-xb8t9tD1MHLungh/AIoWYN+gVHaB9kwlu8gffXGSt3FFEIT7RjS+xWbc2vUD1UTZdIpKj/ab3rdqJ7ufngyi2Q== +"@babel/helper-validator-option@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.25.9.tgz#86e45bd8a49ab7e03f276577f96179653d41da72" + integrity sha512-e/zv1co8pp55dNdEcCynfj9X7nyUKUXoUEwfXqaZt0omVOmDe9oOTdKStH4GmAw6zxMFs50ZayuMfHDKlO7Tfw== + "@babel/helper-wrap-function@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/helper-wrap-function/-/helper-wrap-function-7.24.7.tgz" @@ -400,6 +580,15 @@ "@babel/traverse" "^7.24.7" "@babel/types" "^7.24.7" +"@babel/helper-wrap-function@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/helper-wrap-function/-/helper-wrap-function-7.25.9.tgz#d99dfd595312e6c894bd7d237470025c85eea9d0" + integrity sha512-ETzz9UTjQSTmw39GboatdymDq4XIQbR8ySgVrylRhPOFpsd+JrKHIuF0de7GCWmem+T4uC5z7EZguod7Wj4A4g== + dependencies: + "@babel/template" "^7.25.9" + "@babel/traverse" "^7.25.9" + "@babel/types" "^7.25.9" + "@babel/helpers@^7.24.8": version "7.24.8" resolved "https://registry.npmjs.org/@babel/helpers/-/helpers-7.24.8.tgz" @@ -408,6 +597,14 @@ "@babel/template" "^7.24.7" "@babel/types" "^7.24.8" +"@babel/helpers@^7.26.0": + version "7.26.0" + resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.26.0.tgz#30e621f1eba5aa45fe6f4868d2e9154d884119a4" + integrity sha512-tbhNuIxNcVb21pInl3ZSjksLCvgdZy9KwJ8brv993QtIVKJBBkYXz4q4ZbAv31GdnC+R90np23L5FbEBlthAEw== + dependencies: + "@babel/template" "^7.25.9" + "@babel/types" "^7.26.0" + "@babel/highlight@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/highlight/-/highlight-7.24.7.tgz" @@ -423,6 +620,13 @@ resolved "https://registry.npmjs.org/@babel/parser/-/parser-7.24.8.tgz" integrity sha512-WzfbgXOkGzZiXXCqk43kKwZjzwx4oulxZi3nq2TYL9mOjQv6kYwul9mz6ID36njuL7Xkp6nJEfok848Zj10j/w== +"@babel/parser@^7.25.9", "@babel/parser@^7.26.0", 
"@babel/parser@^7.26.2": + version "7.26.2" + resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.26.2.tgz#fd7b6f487cfea09889557ef5d4eeb9ff9a5abd11" + integrity sha512-DWMCZH9WA4Maitz2q21SRKHo9QXZxkDsbNZoVD62gusNtNBBqDg9i7uOhASfTfIGNzW+O+r7+jAlM8dwphcJKQ== + dependencies: + "@babel/types" "^7.26.0" + "@babel/plugin-bugfix-firefox-class-in-computed-class-key@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/plugin-bugfix-firefox-class-in-computed-class-key/-/plugin-bugfix-firefox-class-in-computed-class-key-7.24.7.tgz" @@ -431,6 +635,21 @@ "@babel/helper-environment-visitor" "^7.24.7" "@babel/helper-plugin-utils" "^7.24.7" +"@babel/plugin-bugfix-firefox-class-in-computed-class-key@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-bugfix-firefox-class-in-computed-class-key/-/plugin-bugfix-firefox-class-in-computed-class-key-7.25.9.tgz#cc2e53ebf0a0340777fff5ed521943e253b4d8fe" + integrity sha512-ZkRyVkThtxQ/J6nv3JFYv1RYY+JT5BvU0y3k5bWrmuG4woXypRa4PXmm9RhOwodRkYFWqC0C0cqcJ4OqR7kW+g== + dependencies: + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/traverse" "^7.25.9" + +"@babel/plugin-bugfix-safari-class-field-initializer-scope@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-bugfix-safari-class-field-initializer-scope/-/plugin-bugfix-safari-class-field-initializer-scope-7.25.9.tgz#af9e4fb63ccb8abcb92375b2fcfe36b60c774d30" + integrity sha512-MrGRLZxLD/Zjj0gdU15dfs+HH/OXvnw/U4jJD8vpcP2CJQapPEv1IWwjc/qMg7ItBlPwSv1hRBbb7LeuANdcnw== + dependencies: + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.24.7.tgz" @@ -438,6 +657,13 @@ dependencies: "@babel/helper-plugin-utils" "^7.24.7" +"@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.25.9.tgz#e8dc26fcd616e6c5bf2bd0d5a2c151d4f92a9137" + integrity sha512-2qUwwfAFpJLZqxd02YW9btUCZHl+RFvdDkNfZwaIJrvB8Tesjsk8pEQkTvGwZXLqXUx/2oyY3ySRhm6HOXuCug== + dependencies: + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining/-/plugin-bugfix-v8-spread-parameters-in-optional-chaining-7.24.7.tgz" @@ -447,6 +673,15 @@ "@babel/helper-skip-transparent-expression-wrappers" "^7.24.7" "@babel/plugin-transform-optional-chaining" "^7.24.7" +"@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining/-/plugin-bugfix-v8-spread-parameters-in-optional-chaining-7.25.9.tgz#807a667f9158acac6f6164b4beb85ad9ebc9e1d1" + integrity sha512-6xWgLZTJXwilVjlnV7ospI3xi+sl8lN8rXXbBD6vYn3UYDlGsag8wrZkKcSI8G6KgqKP7vNFaDgeDnfAABq61g== + dependencies: + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/helper-skip-transparent-expression-wrappers" "^7.25.9" + "@babel/plugin-transform-optional-chaining" "^7.25.9" + 
"@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly/-/plugin-bugfix-v8-static-class-fields-redefine-readonly-7.24.7.tgz" @@ -455,6 +690,14 @@ "@babel/helper-environment-visitor" "^7.24.7" "@babel/helper-plugin-utils" "^7.24.7" +"@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly/-/plugin-bugfix-v8-static-class-fields-redefine-readonly-7.25.9.tgz#de7093f1e7deaf68eadd7cc6b07f2ab82543269e" + integrity sha512-aLnMXYPnzwwqhYSCyXfKkIkYgJ8zv9RK+roo9DkTXz38ynIhd9XCbN08s3MGvqL2MYGVUGdRQLL/JqBIeJhJBg== + dependencies: + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/traverse" "^7.25.9" + "@babel/plugin-proposal-private-property-in-object@7.21.0-placeholder-for-preset-env.2": version "7.21.0-placeholder-for-preset-env.2" resolved "https://registry.npmjs.org/@babel/plugin-proposal-private-property-in-object/-/plugin-proposal-private-property-in-object-7.21.0-placeholder-for-preset-env.2.tgz" @@ -502,6 +745,13 @@ dependencies: "@babel/helper-plugin-utils" "^7.24.7" +"@babel/plugin-syntax-import-assertions@^7.26.0": + version "7.26.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-assertions/-/plugin-syntax-import-assertions-7.26.0.tgz#620412405058efa56e4a564903b79355020f445f" + integrity sha512-QCWT5Hh830hK5EQa7XzuqIkQU9tT/whqbDz7kuaZMHFl1inRRg7JnuAEOQ0Ur0QUl0NufCk1msK2BeY79Aj/eg== + dependencies: + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/plugin-syntax-import-attributes@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/plugin-syntax-import-attributes/-/plugin-syntax-import-attributes-7.24.7.tgz" @@ -509,6 +759,13 @@ dependencies: "@babel/helper-plugin-utils" "^7.24.7" +"@babel/plugin-syntax-import-attributes@^7.26.0": + version "7.26.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-attributes/-/plugin-syntax-import-attributes-7.26.0.tgz#3b1412847699eea739b4f2602c74ce36f6b0b0f7" + integrity sha512-e2dttdsJ1ZTpi3B9UYGLw41hifAubg19AtCu/2I/F1QNVclOBr1dYpTdmdyZ84Xiz43BS/tCUkMAZNLv12Pi+A== + dependencies: + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/plugin-syntax-import-meta@^7.10.4": version "7.10.4" resolved "https://registry.npmjs.org/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz" @@ -530,6 +787,13 @@ dependencies: "@babel/helper-plugin-utils" "^7.24.7" +"@babel/plugin-syntax-jsx@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.25.9.tgz#a34313a178ea56f1951599b929c1ceacee719290" + integrity sha512-ld6oezHQMZsZfp6pWtbjaNDF2tiiCYYDqQszHt5VV437lewP9aSi2Of99CK0D0XB21k7FLgnLcmQKyKzynfeAA== + dependencies: + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/plugin-syntax-logical-assignment-operators@^7.10.4": version "7.10.4" resolved "https://registry.npmjs.org/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz" @@ -593,6 +857,13 @@ dependencies: "@babel/helper-plugin-utils" "^7.24.7" +"@babel/plugin-syntax-typescript@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.25.9.tgz#67dda2b74da43727cf21d46cf9afef23f4365399" + integrity sha512-hjMgRy5hb8uJJjUcdWunWVcoi9bGpJp8p5Ol1229PoN6aytsLwNMgmdftO23wnCLMfVmTwZDWMPNq/D1SY60JQ== + 
dependencies: + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/plugin-syntax-unicode-sets-regex@^7.18.6": version "7.18.6" resolved "https://registry.npmjs.org/@babel/plugin-syntax-unicode-sets-regex/-/plugin-syntax-unicode-sets-regex-7.18.6.tgz" @@ -608,6 +879,13 @@ dependencies: "@babel/helper-plugin-utils" "^7.24.7" +"@babel/plugin-transform-arrow-functions@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.25.9.tgz#7821d4410bee5daaadbb4cdd9a6649704e176845" + integrity sha512-6jmooXYIwn9ca5/RylZADJ+EnSxVUS5sjeJ9UPk6RWRzXCmOJCy6dqItPJFpw2cuCangPK4OYr5uhGKcmrm5Qg== + dependencies: + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/plugin-transform-async-generator-functions@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/plugin-transform-async-generator-functions/-/plugin-transform-async-generator-functions-7.24.7.tgz" @@ -618,6 +896,15 @@ "@babel/helper-remap-async-to-generator" "^7.24.7" "@babel/plugin-syntax-async-generators" "^7.8.4" +"@babel/plugin-transform-async-generator-functions@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-async-generator-functions/-/plugin-transform-async-generator-functions-7.25.9.tgz#1b18530b077d18a407c494eb3d1d72da505283a2" + integrity sha512-RXV6QAzTBbhDMO9fWwOmwwTuYaiPbggWQ9INdZqAYeSHyG7FzQ+nOZaUUjNwKv9pV3aE4WFqFm1Hnbci5tBCAw== + dependencies: + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/helper-remap-async-to-generator" "^7.25.9" + "@babel/traverse" "^7.25.9" + "@babel/plugin-transform-async-to-generator@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.24.7.tgz" @@ -627,6 +914,15 @@ "@babel/helper-plugin-utils" "^7.24.7" "@babel/helper-remap-async-to-generator" "^7.24.7" +"@babel/plugin-transform-async-to-generator@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.25.9.tgz#c80008dacae51482793e5a9c08b39a5be7e12d71" + integrity sha512-NT7Ejn7Z/LjUH0Gv5KsBCxh7BH3fbLTV0ptHvpeMvrt3cPThHfJfst9Wrb7S8EvJ7vRTFI7z+VAvFVEQn/m5zQ== + dependencies: + "@babel/helper-module-imports" "^7.25.9" + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/helper-remap-async-to-generator" "^7.25.9" + "@babel/plugin-transform-block-scoped-functions@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.24.7.tgz" @@ -634,6 +930,13 @@ dependencies: "@babel/helper-plugin-utils" "^7.24.7" +"@babel/plugin-transform-block-scoped-functions@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.25.9.tgz#5700691dbd7abb93de300ca7be94203764fce458" + integrity sha512-toHc9fzab0ZfenFpsyYinOX0J/5dgJVA2fm64xPewu7CoYHWEivIWKxkK2rMi4r3yQqLnVmheMXRdG+k239CgA== + dependencies: + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/plugin-transform-block-scoping@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.24.7.tgz" @@ -641,6 +944,13 @@ dependencies: "@babel/helper-plugin-utils" "^7.24.7" +"@babel/plugin-transform-block-scoping@^7.25.9": + version "7.25.9" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.25.9.tgz#c33665e46b06759c93687ca0f84395b80c0473a1" + integrity sha512-1F05O7AYjymAtqbsFETboN1NvBdcnzMerO+zlMyJBEz6WkMdejvGWw9p05iTSjC85RLlBseHHQpYaM4gzJkBGg== + dependencies: + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/plugin-transform-class-properties@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/plugin-transform-class-properties/-/plugin-transform-class-properties-7.24.7.tgz" @@ -649,6 +959,14 @@ "@babel/helper-create-class-features-plugin" "^7.24.7" "@babel/helper-plugin-utils" "^7.24.7" +"@babel/plugin-transform-class-properties@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-class-properties/-/plugin-transform-class-properties-7.25.9.tgz#a8ce84fedb9ad512549984101fa84080a9f5f51f" + integrity sha512-bbMAII8GRSkcd0h0b4X+36GksxuheLFjP65ul9w6C3KgAamI3JqErNgSrosX6ZPj+Mpim5VvEbawXxJCyEUV3Q== + dependencies: + "@babel/helper-create-class-features-plugin" "^7.25.9" + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/plugin-transform-class-static-block@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/plugin-transform-class-static-block/-/plugin-transform-class-static-block-7.24.7.tgz" @@ -658,6 +976,14 @@ "@babel/helper-plugin-utils" "^7.24.7" "@babel/plugin-syntax-class-static-block" "^7.14.5" +"@babel/plugin-transform-class-static-block@^7.26.0": + version "7.26.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-class-static-block/-/plugin-transform-class-static-block-7.26.0.tgz#6c8da219f4eb15cae9834ec4348ff8e9e09664a0" + integrity sha512-6J2APTs7BDDm+UMqP1useWqhcRAXo0WIoVj26N7kPFB6S73Lgvyka4KTZYIxtgYXiN5HTyRObA72N2iu628iTQ== + dependencies: + "@babel/helper-create-class-features-plugin" "^7.25.9" + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/plugin-transform-classes@^7.24.8": version "7.24.8" resolved "https://registry.npmjs.org/@babel/plugin-transform-classes/-/plugin-transform-classes-7.24.8.tgz" @@ -672,6 +998,18 @@ "@babel/helper-split-export-declaration" "^7.24.7" globals "^11.1.0" +"@babel/plugin-transform-classes@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-classes/-/plugin-transform-classes-7.25.9.tgz#7152457f7880b593a63ade8a861e6e26a4469f52" + integrity sha512-mD8APIXmseE7oZvZgGABDyM34GUmK45Um2TXiBUt7PnuAxrgoSVf123qUzPxEr/+/BHrRn5NMZCdE2m/1F8DGg== + dependencies: + "@babel/helper-annotate-as-pure" "^7.25.9" + "@babel/helper-compilation-targets" "^7.25.9" + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/helper-replace-supers" "^7.25.9" + "@babel/traverse" "^7.25.9" + globals "^11.1.0" + "@babel/plugin-transform-computed-properties@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.24.7.tgz" @@ -680,6 +1018,14 @@ "@babel/helper-plugin-utils" "^7.24.7" "@babel/template" "^7.24.7" +"@babel/plugin-transform-computed-properties@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.25.9.tgz#db36492c78460e534b8852b1d5befe3c923ef10b" + integrity sha512-HnBegGqXZR12xbcTHlJ9HGxw1OniltT26J5YpfruGqtUHlz/xKf/G2ak9e+t0rVqrjXa9WOhvYPz1ERfMj23AA== + dependencies: + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/template" "^7.25.9" + "@babel/plugin-transform-destructuring@^7.24.8": version "7.24.8" resolved 
"https://registry.npmjs.org/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.24.8.tgz" @@ -687,6 +1033,13 @@ dependencies: "@babel/helper-plugin-utils" "^7.24.8" +"@babel/plugin-transform-destructuring@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.25.9.tgz#966ea2595c498224340883602d3cfd7a0c79cea1" + integrity sha512-WkCGb/3ZxXepmMiX101nnGiU+1CAdut8oHyEOHxkKuS1qKpU2SMXE2uSvfz8PBuLd49V6LEsbtyPhWC7fnkgvQ== + dependencies: + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/plugin-transform-dotall-regex@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.24.7.tgz" @@ -695,6 +1048,14 @@ "@babel/helper-create-regexp-features-plugin" "^7.24.7" "@babel/helper-plugin-utils" "^7.24.7" +"@babel/plugin-transform-dotall-regex@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.25.9.tgz#bad7945dd07734ca52fe3ad4e872b40ed09bb09a" + integrity sha512-t7ZQ7g5trIgSRYhI9pIJtRl64KHotutUJsh4Eze5l7olJv+mRSg4/MmbZ0tv1eeqRbdvo/+trvJD/Oc5DmW2cA== + dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.25.9" + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/plugin-transform-duplicate-keys@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.24.7.tgz" @@ -702,6 +1063,21 @@ dependencies: "@babel/helper-plugin-utils" "^7.24.7" +"@babel/plugin-transform-duplicate-keys@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.25.9.tgz#8850ddf57dce2aebb4394bb434a7598031059e6d" + integrity sha512-LZxhJ6dvBb/f3x8xwWIuyiAHy56nrRG3PeYTpBkkzkYRRQ6tJLu68lEF5VIqMUZiAV7a8+Tb78nEoMCMcqjXBw== + dependencies: + "@babel/helper-plugin-utils" "^7.25.9" + +"@babel/plugin-transform-duplicate-named-capturing-groups-regex@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-duplicate-named-capturing-groups-regex/-/plugin-transform-duplicate-named-capturing-groups-regex-7.25.9.tgz#6f7259b4de127721a08f1e5165b852fcaa696d31" + integrity sha512-0UfuJS0EsXbRvKnwcLjFtJy/Sxc5J5jhLHnFhy7u4zih97Hz6tJkLU+O+FMMrNZrosUPxDi6sYxJ/EA8jDiAog== + dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.25.9" + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/plugin-transform-dynamic-import@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/plugin-transform-dynamic-import/-/plugin-transform-dynamic-import-7.24.7.tgz" @@ -710,6 +1086,13 @@ "@babel/helper-plugin-utils" "^7.24.7" "@babel/plugin-syntax-dynamic-import" "^7.8.3" +"@babel/plugin-transform-dynamic-import@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-dynamic-import/-/plugin-transform-dynamic-import-7.25.9.tgz#23e917de63ed23c6600c5dd06d94669dce79f7b8" + integrity sha512-GCggjexbmSLaFhqsojeugBpeaRIgWNTcgKVq/0qIteFEqY2A+b9QidYadrWlnbWQUrW5fn+mCvf3tr7OeBFTyg== + dependencies: + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/plugin-transform-exponentiation-operator@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.24.7.tgz" @@ -718,6 +1101,14 @@ "@babel/helper-builder-binary-assignment-operator-visitor" "^7.24.7" 
"@babel/helper-plugin-utils" "^7.24.7" +"@babel/plugin-transform-exponentiation-operator@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.25.9.tgz#ece47b70d236c1d99c263a1e22b62dc20a4c8b0f" + integrity sha512-KRhdhlVk2nObA5AYa7QMgTMTVJdfHprfpAk4DjZVtllqRg9qarilstTKEhpVjyt+Npi8ThRyiV8176Am3CodPA== + dependencies: + "@babel/helper-builder-binary-assignment-operator-visitor" "^7.25.9" + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/plugin-transform-export-namespace-from@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/plugin-transform-export-namespace-from/-/plugin-transform-export-namespace-from-7.24.7.tgz" @@ -726,6 +1117,13 @@ "@babel/helper-plugin-utils" "^7.24.7" "@babel/plugin-syntax-export-namespace-from" "^7.8.3" +"@babel/plugin-transform-export-namespace-from@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-export-namespace-from/-/plugin-transform-export-namespace-from-7.25.9.tgz#90745fe55053394f554e40584cda81f2c8a402a2" + integrity sha512-2NsEz+CxzJIVOPx2o9UsW1rXLqtChtLoVnwYHHiB04wS5sgn7mrV45fWMBX0Kk+ub9uXytVYfNP2HjbVbCB3Ww== + dependencies: + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/plugin-transform-for-of@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.24.7.tgz" @@ -734,6 +1132,14 @@ "@babel/helper-plugin-utils" "^7.24.7" "@babel/helper-skip-transparent-expression-wrappers" "^7.24.7" +"@babel/plugin-transform-for-of@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.25.9.tgz#4bdc7d42a213397905d89f02350c5267866d5755" + integrity sha512-LqHxduHoaGELJl2uhImHwRQudhCM50pT46rIBNvtT/Oql3nqiS3wOwP+5ten7NpYSXrrVLgtZU3DZmPtWZo16A== + dependencies: + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/helper-skip-transparent-expression-wrappers" "^7.25.9" + "@babel/plugin-transform-function-name@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.24.7.tgz" @@ -743,6 +1149,15 @@ "@babel/helper-function-name" "^7.24.7" "@babel/helper-plugin-utils" "^7.24.7" +"@babel/plugin-transform-function-name@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.25.9.tgz#939d956e68a606661005bfd550c4fc2ef95f7b97" + integrity sha512-8lP+Yxjv14Vc5MuWBpJsoUCd3hD6V9DgBon2FVYL4jJgbnVQ9fTgYmonchzZJOVNgzEgbxp4OwAf6xz6M/14XA== + dependencies: + "@babel/helper-compilation-targets" "^7.25.9" + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/traverse" "^7.25.9" + "@babel/plugin-transform-json-strings@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/plugin-transform-json-strings/-/plugin-transform-json-strings-7.24.7.tgz" @@ -751,6 +1166,13 @@ "@babel/helper-plugin-utils" "^7.24.7" "@babel/plugin-syntax-json-strings" "^7.8.3" +"@babel/plugin-transform-json-strings@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-json-strings/-/plugin-transform-json-strings-7.25.9.tgz#c86db407cb827cded902a90c707d2781aaa89660" + integrity sha512-xoTMk0WXceiiIvsaquQQUaLLXSW1KJ159KP87VilruQm0LNNGxWzahxSS6T6i4Zg3ezp4vA4zuwiNUR53qmQAw== + dependencies: + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/plugin-transform-literals@^7.24.7": version "7.24.7" resolved 
"https://registry.npmjs.org/@babel/plugin-transform-literals/-/plugin-transform-literals-7.24.7.tgz" @@ -758,6 +1180,13 @@ dependencies: "@babel/helper-plugin-utils" "^7.24.7" +"@babel/plugin-transform-literals@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-literals/-/plugin-transform-literals-7.25.9.tgz#1a1c6b4d4aa59bc4cad5b6b3a223a0abd685c9de" + integrity sha512-9N7+2lFziW8W9pBl2TzaNht3+pgMIRP74zizeCSrtnSKVdUl8mAjjOP2OOVQAfZ881P2cNjDj1uAMEdeD50nuQ== + dependencies: + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/plugin-transform-logical-assignment-operators@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/plugin-transform-logical-assignment-operators/-/plugin-transform-logical-assignment-operators-7.24.7.tgz" @@ -766,6 +1195,13 @@ "@babel/helper-plugin-utils" "^7.24.7" "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4" +"@babel/plugin-transform-logical-assignment-operators@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-logical-assignment-operators/-/plugin-transform-logical-assignment-operators-7.25.9.tgz#b19441a8c39a2fda0902900b306ea05ae1055db7" + integrity sha512-wI4wRAzGko551Y8eVf6iOY9EouIDTtPb0ByZx+ktDGHwv6bHFimrgJM/2T021txPZ2s4c7bqvHbd+vXG6K948Q== + dependencies: + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/plugin-transform-member-expression-literals@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.24.7.tgz" @@ -773,6 +1209,13 @@ dependencies: "@babel/helper-plugin-utils" "^7.24.7" +"@babel/plugin-transform-member-expression-literals@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.25.9.tgz#63dff19763ea64a31f5e6c20957e6a25e41ed5de" + integrity sha512-PYazBVfofCQkkMzh2P6IdIUaCEWni3iYEerAsRWuVd8+jlM1S9S9cz1dF9hIzyoZ8IA3+OwVYIp9v9e+GbgZhA== + dependencies: + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/plugin-transform-modules-amd@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.24.7.tgz" @@ -781,6 +1224,14 @@ "@babel/helper-module-transforms" "^7.24.7" "@babel/helper-plugin-utils" "^7.24.7" +"@babel/plugin-transform-modules-amd@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.25.9.tgz#49ba478f2295101544abd794486cd3088dddb6c5" + integrity sha512-g5T11tnI36jVClQlMlt4qKDLlWnG5pP9CSM4GhdRciTNMRgkfpo5cR6b4rGIOYPgRRuFAvwjPQ/Yk+ql4dyhbw== + dependencies: + "@babel/helper-module-transforms" "^7.25.9" + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/plugin-transform-modules-commonjs@^7.24.7", "@babel/plugin-transform-modules-commonjs@^7.24.8": version "7.24.8" resolved "https://registry.npmjs.org/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.24.8.tgz" @@ -790,6 +1241,15 @@ "@babel/helper-plugin-utils" "^7.24.8" "@babel/helper-simple-access" "^7.24.7" +"@babel/plugin-transform-modules-commonjs@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.25.9.tgz#d165c8c569a080baf5467bda88df6425fc060686" + integrity sha512-dwh2Ol1jWwL2MgkCzUSOvfmKElqQcuswAZypBSUsScMXvgdT8Ekq5YA6TtqpTVWH+4903NmboMuH1o9i8Rxlyg== + dependencies: + 
"@babel/helper-module-transforms" "^7.25.9" + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/helper-simple-access" "^7.25.9" + "@babel/plugin-transform-modules-systemjs@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.24.7.tgz" @@ -800,6 +1260,16 @@ "@babel/helper-plugin-utils" "^7.24.7" "@babel/helper-validator-identifier" "^7.24.7" +"@babel/plugin-transform-modules-systemjs@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.25.9.tgz#8bd1b43836269e3d33307151a114bcf3ba6793f8" + integrity sha512-hyss7iIlH/zLHaehT+xwiymtPOpsiwIIRlCAOwBB04ta5Tt+lNItADdlXw3jAWZ96VJ2jlhl/c+PNIQPKNfvcA== + dependencies: + "@babel/helper-module-transforms" "^7.25.9" + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/helper-validator-identifier" "^7.25.9" + "@babel/traverse" "^7.25.9" + "@babel/plugin-transform-modules-umd@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.24.7.tgz" @@ -808,6 +1278,14 @@ "@babel/helper-module-transforms" "^7.24.7" "@babel/helper-plugin-utils" "^7.24.7" +"@babel/plugin-transform-modules-umd@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.25.9.tgz#6710079cdd7c694db36529a1e8411e49fcbf14c9" + integrity sha512-bS9MVObUgE7ww36HEfwe6g9WakQ0KF07mQF74uuXdkoziUPfKyu/nIm663kz//e5O1nPInPFx36z7WJmJ4yNEw== + dependencies: + "@babel/helper-module-transforms" "^7.25.9" + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/plugin-transform-named-capturing-groups-regex@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.24.7.tgz" @@ -816,6 +1294,14 @@ "@babel/helper-create-regexp-features-plugin" "^7.24.7" "@babel/helper-plugin-utils" "^7.24.7" +"@babel/plugin-transform-named-capturing-groups-regex@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.25.9.tgz#454990ae6cc22fd2a0fa60b3a2c6f63a38064e6a" + integrity sha512-oqB6WHdKTGl3q/ItQhpLSnWWOpjUJLsOCLVyeFgeTktkBSCiurvPOsyt93gibI9CmuKvTUEtWmG5VhZD+5T/KA== + dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.25.9" + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/plugin-transform-new-target@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.24.7.tgz" @@ -823,6 +1309,13 @@ dependencies: "@babel/helper-plugin-utils" "^7.24.7" +"@babel/plugin-transform-new-target@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.25.9.tgz#42e61711294b105c248336dcb04b77054ea8becd" + integrity sha512-U/3p8X1yCSoKyUj2eOBIx3FOn6pElFOKvAAGf8HTtItuPyB+ZeOqfn+mvTtg9ZlOAjsPdK3ayQEjqHjU/yLeVQ== + dependencies: + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/plugin-transform-nullish-coalescing-operator@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/plugin-transform-nullish-coalescing-operator/-/plugin-transform-nullish-coalescing-operator-7.24.7.tgz" @@ -831,6 +1324,13 @@ "@babel/helper-plugin-utils" "^7.24.7" "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" 
+"@babel/plugin-transform-nullish-coalescing-operator@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-nullish-coalescing-operator/-/plugin-transform-nullish-coalescing-operator-7.25.9.tgz#bcb1b0d9e948168102d5f7104375ca21c3266949" + integrity sha512-ENfftpLZw5EItALAD4WsY/KUWvhUlZndm5GC7G3evUsVeSJB6p0pBeLQUnRnBCBx7zV0RKQjR9kCuwrsIrjWog== + dependencies: + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/plugin-transform-numeric-separator@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/plugin-transform-numeric-separator/-/plugin-transform-numeric-separator-7.24.7.tgz" @@ -839,6 +1339,13 @@ "@babel/helper-plugin-utils" "^7.24.7" "@babel/plugin-syntax-numeric-separator" "^7.10.4" +"@babel/plugin-transform-numeric-separator@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-numeric-separator/-/plugin-transform-numeric-separator-7.25.9.tgz#bfed75866261a8b643468b0ccfd275f2033214a1" + integrity sha512-TlprrJ1GBZ3r6s96Yq8gEQv82s8/5HnCVHtEJScUj90thHQbwe+E5MLhi2bbNHBEJuzrvltXSru+BUxHDoog7Q== + dependencies: + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/plugin-transform-object-rest-spread@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/plugin-transform-object-rest-spread/-/plugin-transform-object-rest-spread-7.24.7.tgz" @@ -849,6 +1356,15 @@ "@babel/plugin-syntax-object-rest-spread" "^7.8.3" "@babel/plugin-transform-parameters" "^7.24.7" +"@babel/plugin-transform-object-rest-spread@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-object-rest-spread/-/plugin-transform-object-rest-spread-7.25.9.tgz#0203725025074164808bcf1a2cfa90c652c99f18" + integrity sha512-fSaXafEE9CVHPweLYw4J0emp1t8zYTXyzN3UuG+lylqkvYd7RMrsOQ8TYx5RF231be0vqtFC6jnx3UmpJmKBYg== + dependencies: + "@babel/helper-compilation-targets" "^7.25.9" + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/plugin-transform-parameters" "^7.25.9" + "@babel/plugin-transform-object-super@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.24.7.tgz" @@ -857,6 +1373,14 @@ "@babel/helper-plugin-utils" "^7.24.7" "@babel/helper-replace-supers" "^7.24.7" +"@babel/plugin-transform-object-super@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.25.9.tgz#385d5de135162933beb4a3d227a2b7e52bb4cf03" + integrity sha512-Kj/Gh+Rw2RNLbCK1VAWj2U48yxxqL2x0k10nPtSdRa0O2xnHXalD0s+o1A6a0W43gJ00ANo38jxkQreckOzv5A== + dependencies: + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/helper-replace-supers" "^7.25.9" + "@babel/plugin-transform-optional-catch-binding@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/plugin-transform-optional-catch-binding/-/plugin-transform-optional-catch-binding-7.24.7.tgz" @@ -865,6 +1389,13 @@ "@babel/helper-plugin-utils" "^7.24.7" "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" +"@babel/plugin-transform-optional-catch-binding@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-optional-catch-binding/-/plugin-transform-optional-catch-binding-7.25.9.tgz#10e70d96d52bb1f10c5caaac59ac545ea2ba7ff3" + integrity sha512-qM/6m6hQZzDcZF3onzIhZeDHDO43bkNNlOX0i8n3lR6zLbu0GN2d8qfM/IERJZYauhAHSLHy39NF0Ctdvcid7g== + dependencies: + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/plugin-transform-optional-chaining@^7.24.7", 
"@babel/plugin-transform-optional-chaining@^7.24.8": version "7.24.8" resolved "https://registry.npmjs.org/@babel/plugin-transform-optional-chaining/-/plugin-transform-optional-chaining-7.24.8.tgz" @@ -874,6 +1405,14 @@ "@babel/helper-skip-transparent-expression-wrappers" "^7.24.7" "@babel/plugin-syntax-optional-chaining" "^7.8.3" +"@babel/plugin-transform-optional-chaining@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-optional-chaining/-/plugin-transform-optional-chaining-7.25.9.tgz#e142eb899d26ef715435f201ab6e139541eee7dd" + integrity sha512-6AvV0FsLULbpnXeBjrY4dmWF8F7gf8QnvTEoO/wX/5xm/xE1Xo8oPuD3MPS+KS9f9XBEAWN7X1aWr4z9HdOr7A== + dependencies: + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/helper-skip-transparent-expression-wrappers" "^7.25.9" + "@babel/plugin-transform-parameters@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.24.7.tgz" @@ -881,6 +1420,13 @@ dependencies: "@babel/helper-plugin-utils" "^7.24.7" +"@babel/plugin-transform-parameters@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.25.9.tgz#b856842205b3e77e18b7a7a1b94958069c7ba257" + integrity sha512-wzz6MKwpnshBAiRmn4jR8LYz/g8Ksg0o80XmwZDlordjwEk9SxBzTWC7F5ef1jhbrbOW2DJ5J6ayRukrJmnr0g== + dependencies: + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/plugin-transform-private-methods@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/plugin-transform-private-methods/-/plugin-transform-private-methods-7.24.7.tgz" @@ -889,6 +1435,14 @@ "@babel/helper-create-class-features-plugin" "^7.24.7" "@babel/helper-plugin-utils" "^7.24.7" +"@babel/plugin-transform-private-methods@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-private-methods/-/plugin-transform-private-methods-7.25.9.tgz#847f4139263577526455d7d3223cd8bda51e3b57" + integrity sha512-D/JUozNpQLAPUVusvqMxyvjzllRaF8/nSrP1s2YGQT/W4LHK4xxsMcHjhOGTS01mp9Hda8nswb+FblLdJornQw== + dependencies: + "@babel/helper-create-class-features-plugin" "^7.25.9" + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/plugin-transform-private-property-in-object@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/plugin-transform-private-property-in-object/-/plugin-transform-private-property-in-object-7.24.7.tgz" @@ -899,6 +1453,15 @@ "@babel/helper-plugin-utils" "^7.24.7" "@babel/plugin-syntax-private-property-in-object" "^7.14.5" +"@babel/plugin-transform-private-property-in-object@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-private-property-in-object/-/plugin-transform-private-property-in-object-7.25.9.tgz#9c8b73e64e6cc3cbb2743633885a7dd2c385fe33" + integrity sha512-Evf3kcMqzXA3xfYJmZ9Pg1OvKdtqsDMSWBDzZOPLvHiTt36E75jLDQo5w1gtRU95Q4E5PDttrTf25Fw8d/uWLw== + dependencies: + "@babel/helper-annotate-as-pure" "^7.25.9" + "@babel/helper-create-class-features-plugin" "^7.25.9" + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/plugin-transform-property-literals@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.24.7.tgz" @@ -906,6 +1469,13 @@ dependencies: "@babel/helper-plugin-utils" "^7.24.7" +"@babel/plugin-transform-property-literals@^7.25.9": + version "7.25.9" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.25.9.tgz#d72d588bd88b0dec8b62e36f6fda91cedfe28e3f" + integrity sha512-IvIUeV5KrS/VPavfSM/Iu+RE6llrHrYIKY1yfCzyO/lMXHQ+p7uGhonmGVisv6tSBSVgWzMBohTcvkC9vQcQFA== + dependencies: + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/plugin-transform-react-constant-elements@^7.21.3": version "7.24.7" resolved "https://registry.npmjs.org/@babel/plugin-transform-react-constant-elements/-/plugin-transform-react-constant-elements-7.24.7.tgz" @@ -920,6 +1490,13 @@ dependencies: "@babel/helper-plugin-utils" "^7.24.7" +"@babel/plugin-transform-react-display-name@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-display-name/-/plugin-transform-react-display-name-7.25.9.tgz#4b79746b59efa1f38c8695065a92a9f5afb24f7d" + integrity sha512-KJfMlYIUxQB1CJfO3e0+h0ZHWOTLCPP115Awhaz8U0Zpq36Gl/cXlpoyMRnUWlhNUBAzldnCiAZNvCDj7CrKxQ== + dependencies: + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/plugin-transform-react-jsx-development@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-development/-/plugin-transform-react-jsx-development-7.24.7.tgz" @@ -927,6 +1504,13 @@ dependencies: "@babel/plugin-transform-react-jsx" "^7.24.7" +"@babel/plugin-transform-react-jsx-development@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-jsx-development/-/plugin-transform-react-jsx-development-7.25.9.tgz#8fd220a77dd139c07e25225a903b8be8c829e0d7" + integrity sha512-9mj6rm7XVYs4mdLIpbZnHOYdpW42uoiBCTVowg7sP1thUOiANgMb4UtpRivR0pp5iL+ocvUv7X4mZgFRpJEzGw== + dependencies: + "@babel/plugin-transform-react-jsx" "^7.25.9" + "@babel/plugin-transform-react-jsx@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/plugin-transform-react-jsx/-/plugin-transform-react-jsx-7.24.7.tgz" @@ -938,6 +1522,17 @@ "@babel/plugin-syntax-jsx" "^7.24.7" "@babel/types" "^7.24.7" +"@babel/plugin-transform-react-jsx@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-jsx/-/plugin-transform-react-jsx-7.25.9.tgz#06367940d8325b36edff5e2b9cbe782947ca4166" + integrity sha512-s5XwpQYCqGerXl+Pu6VDL3x0j2d82eiV77UJ8a2mDHAW7j9SWRqQ2y1fNo1Z74CdcYipl5Z41zvjj4Nfzq36rw== + dependencies: + "@babel/helper-annotate-as-pure" "^7.25.9" + "@babel/helper-module-imports" "^7.25.9" + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/plugin-syntax-jsx" "^7.25.9" + "@babel/types" "^7.25.9" + "@babel/plugin-transform-react-pure-annotations@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/plugin-transform-react-pure-annotations/-/plugin-transform-react-pure-annotations-7.24.7.tgz" @@ -946,6 +1541,14 @@ "@babel/helper-annotate-as-pure" "^7.24.7" "@babel/helper-plugin-utils" "^7.24.7" +"@babel/plugin-transform-react-pure-annotations@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-pure-annotations/-/plugin-transform-react-pure-annotations-7.25.9.tgz#ea1c11b2f9dbb8e2d97025f43a3b5bc47e18ae62" + integrity sha512-KQ/Takk3T8Qzj5TppkS1be588lkbTp5uj7w6a0LeQaTMSckU/wK0oJ/pih+T690tkgI5jfmg2TqDJvd41Sj1Cg== + dependencies: + "@babel/helper-annotate-as-pure" "^7.25.9" + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/plugin-transform-regenerator@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.24.7.tgz" @@ -954,6 +1557,22 @@ 
"@babel/helper-plugin-utils" "^7.24.7" regenerator-transform "^0.15.2" +"@babel/plugin-transform-regenerator@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.25.9.tgz#03a8a4670d6cebae95305ac6defac81ece77740b" + integrity sha512-vwDcDNsgMPDGP0nMqzahDWE5/MLcX8sv96+wfX7as7LoF/kr97Bo/7fI00lXY4wUXYfVmwIIyG80fGZ1uvt2qg== + dependencies: + "@babel/helper-plugin-utils" "^7.25.9" + regenerator-transform "^0.15.2" + +"@babel/plugin-transform-regexp-modifiers@^7.26.0": + version "7.26.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-regexp-modifiers/-/plugin-transform-regexp-modifiers-7.26.0.tgz#2f5837a5b5cd3842a919d8147e9903cc7455b850" + integrity sha512-vN6saax7lrA2yA/Pak3sCxuD6F5InBjn9IcrIKQPjpsLvuHYLVroTxjdlVRHjjBWxKOqIwpTXDkOssYT4BFdRw== + dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.25.9" + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/plugin-transform-reserved-words@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.24.7.tgz" @@ -961,6 +1580,13 @@ dependencies: "@babel/helper-plugin-utils" "^7.24.7" +"@babel/plugin-transform-reserved-words@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.25.9.tgz#0398aed2f1f10ba3f78a93db219b27ef417fb9ce" + integrity sha512-7DL7DKYjn5Su++4RXu8puKZm2XBPHyjWLUidaPEkCUBbE7IPcsrkRHggAOOKydH1dASWdcUBxrkOGNxUv5P3Jg== + dependencies: + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/plugin-transform-runtime@^7.22.9": version "7.24.7" resolved "https://registry.npmjs.org/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.24.7.tgz" @@ -973,6 +1599,18 @@ babel-plugin-polyfill-regenerator "^0.6.1" semver "^6.3.1" +"@babel/plugin-transform-runtime@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.25.9.tgz#62723ea3f5b31ffbe676da9d6dae17138ae580ea" + integrity sha512-nZp7GlEl+yULJrClz0SwHPqir3lc0zsPrDHQUcxGspSL7AKrexNSEfTbfqnDNJUO13bgKyfuOLMF8Xqtu8j3YQ== + dependencies: + "@babel/helper-module-imports" "^7.25.9" + "@babel/helper-plugin-utils" "^7.25.9" + babel-plugin-polyfill-corejs2 "^0.4.10" + babel-plugin-polyfill-corejs3 "^0.10.6" + babel-plugin-polyfill-regenerator "^0.6.1" + semver "^6.3.1" + "@babel/plugin-transform-shorthand-properties@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.24.7.tgz" @@ -980,6 +1618,13 @@ dependencies: "@babel/helper-plugin-utils" "^7.24.7" +"@babel/plugin-transform-shorthand-properties@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.25.9.tgz#bb785e6091f99f826a95f9894fc16fde61c163f2" + integrity sha512-MUv6t0FhO5qHnS/W8XCbHmiRWOphNufpE1IVxhK5kuN3Td9FT1x4rx4K42s3RYdMXCXpfWkGSbCSd0Z64xA7Ng== + dependencies: + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/plugin-transform-spread@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/plugin-transform-spread/-/plugin-transform-spread-7.24.7.tgz" @@ -988,6 +1633,14 @@ "@babel/helper-plugin-utils" "^7.24.7" "@babel/helper-skip-transparent-expression-wrappers" "^7.24.7" +"@babel/plugin-transform-spread@^7.25.9": + version "7.25.9" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-spread/-/plugin-transform-spread-7.25.9.tgz#24a35153931b4ba3d13cec4a7748c21ab5514ef9" + integrity sha512-oNknIB0TbURU5pqJFVbOOFspVlrpVwo2H1+HUIsVDvp5VauGGDP1ZEvO8Nn5xyMEs3dakajOxlmkNW7kNgSm6A== + dependencies: + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/helper-skip-transparent-expression-wrappers" "^7.25.9" + "@babel/plugin-transform-sticky-regex@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.24.7.tgz" @@ -995,6 +1648,13 @@ dependencies: "@babel/helper-plugin-utils" "^7.24.7" +"@babel/plugin-transform-sticky-regex@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.25.9.tgz#c7f02b944e986a417817b20ba2c504dfc1453d32" + integrity sha512-WqBUSgeVwucYDP9U/xNRQam7xV8W5Zf+6Eo7T2SRVUFlhRiMNFdFz58u0KZmCVVqs2i7SHgpRnAhzRNmKfi2uA== + dependencies: + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/plugin-transform-template-literals@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.24.7.tgz" @@ -1002,6 +1662,13 @@ dependencies: "@babel/helper-plugin-utils" "^7.24.7" +"@babel/plugin-transform-template-literals@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.25.9.tgz#6dbd4a24e8fad024df76d1fac6a03cf413f60fe1" + integrity sha512-o97AE4syN71M/lxrCtQByzphAdlYluKPDBzDVzMmfCobUjjhAryZV0AIpRPrxN0eAkxXO6ZLEScmt+PNhj2OTw== + dependencies: + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/plugin-transform-typeof-symbol@^7.24.8": version "7.24.8" resolved "https://registry.npmjs.org/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.24.8.tgz" @@ -1009,6 +1676,13 @@ dependencies: "@babel/helper-plugin-utils" "^7.24.8" +"@babel/plugin-transform-typeof-symbol@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.25.9.tgz#224ba48a92869ddbf81f9b4a5f1204bbf5a2bc4b" + integrity sha512-v61XqUMiueJROUv66BVIOi0Fv/CUuZuZMl5NkRoCVxLAnMexZ0A3kMe7vvZ0nulxMuMp0Mk6S5hNh48yki08ZA== + dependencies: + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/plugin-transform-typescript@^7.24.7": version "7.24.8" resolved "https://registry.npmjs.org/@babel/plugin-transform-typescript/-/plugin-transform-typescript-7.24.8.tgz" @@ -1019,6 +1693,17 @@ "@babel/helper-plugin-utils" "^7.24.8" "@babel/plugin-syntax-typescript" "^7.24.7" +"@babel/plugin-transform-typescript@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typescript/-/plugin-transform-typescript-7.25.9.tgz#69267905c2b33c2ac6d8fe765e9dc2ddc9df3849" + integrity sha512-7PbZQZP50tzv2KGGnhh82GSyMB01yKY9scIjf1a+GfZCtInOWqUH5+1EBU4t9fyR5Oykkkc9vFTs4OHrhHXljQ== + dependencies: + "@babel/helper-annotate-as-pure" "^7.25.9" + "@babel/helper-create-class-features-plugin" "^7.25.9" + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/helper-skip-transparent-expression-wrappers" "^7.25.9" + "@babel/plugin-syntax-typescript" "^7.25.9" + "@babel/plugin-transform-unicode-escapes@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/plugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.24.7.tgz" @@ -1026,6 +1711,13 @@ dependencies: "@babel/helper-plugin-utils" "^7.24.7" 
+"@babel/plugin-transform-unicode-escapes@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.25.9.tgz#a75ef3947ce15363fccaa38e2dd9bc70b2788b82" + integrity sha512-s5EDrE6bW97LtxOcGj1Khcx5AaXwiMmi4toFWRDP9/y0Woo6pXC+iyPu/KuhKtfSrNFd7jJB+/fkOtZy6aIC6Q== + dependencies: + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/plugin-transform-unicode-property-regex@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/plugin-transform-unicode-property-regex/-/plugin-transform-unicode-property-regex-7.24.7.tgz" @@ -1034,6 +1726,14 @@ "@babel/helper-create-regexp-features-plugin" "^7.24.7" "@babel/helper-plugin-utils" "^7.24.7" +"@babel/plugin-transform-unicode-property-regex@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-property-regex/-/plugin-transform-unicode-property-regex-7.25.9.tgz#a901e96f2c1d071b0d1bb5dc0d3c880ce8f53dd3" + integrity sha512-Jt2d8Ga+QwRluxRQ307Vlxa6dMrYEMZCgGxoPR8V52rxPyldHu3hdlHspxaqYmE7oID5+kB+UKUB/eWS+DkkWg== + dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.25.9" + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/plugin-transform-unicode-regex@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.24.7.tgz" @@ -1042,6 +1742,14 @@ "@babel/helper-create-regexp-features-plugin" "^7.24.7" "@babel/helper-plugin-utils" "^7.24.7" +"@babel/plugin-transform-unicode-regex@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.25.9.tgz#5eae747fe39eacf13a8bd006a4fb0b5d1fa5e9b1" + integrity sha512-yoxstj7Rg9dlNn9UQxzk4fcNivwv4nUYz7fYXBaKxvw/lnmPuOm/ikoELygbYq68Bls3D/D+NBPHiLwZdZZ4HA== + dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.25.9" + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/plugin-transform-unicode-sets-regex@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/plugin-transform-unicode-sets-regex/-/plugin-transform-unicode-sets-regex-7.24.7.tgz" @@ -1050,6 +1758,14 @@ "@babel/helper-create-regexp-features-plugin" "^7.24.7" "@babel/helper-plugin-utils" "^7.24.7" +"@babel/plugin-transform-unicode-sets-regex@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-sets-regex/-/plugin-transform-unicode-sets-regex-7.25.9.tgz#65114c17b4ffc20fa5b163c63c70c0d25621fabe" + integrity sha512-8BYqO3GeVNHtx69fdPshN3fnzUNLrWdHhk/icSwigksJGczKSizZ+Z6SBCxTs723Fr5VSNorTIK7a+R2tISvwQ== + dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.25.9" + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/preset-env@^7.20.2", "@babel/preset-env@^7.22.9": version "7.24.8" resolved "https://registry.npmjs.org/@babel/preset-env/-/preset-env-7.24.8.tgz" @@ -1137,6 +1853,81 @@ core-js-compat "^3.37.1" semver "^6.3.1" +"@babel/preset-env@^7.25.9": + version "7.26.0" + resolved "https://registry.yarnpkg.com/@babel/preset-env/-/preset-env-7.26.0.tgz#30e5c6bc1bcc54865bff0c5a30f6d4ccdc7fa8b1" + integrity sha512-H84Fxq0CQJNdPFT2DrfnylZ3cf5K43rGfWK4LJGPpjKHiZlk0/RzwEus3PDDZZg+/Er7lCA03MVacueUuXdzfw== + dependencies: + "@babel/compat-data" "^7.26.0" + "@babel/helper-compilation-targets" "^7.25.9" + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/helper-validator-option" "^7.25.9" + "@babel/plugin-bugfix-firefox-class-in-computed-class-key" "^7.25.9" + 
"@babel/plugin-bugfix-safari-class-field-initializer-scope" "^7.25.9" + "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression" "^7.25.9" + "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining" "^7.25.9" + "@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly" "^7.25.9" + "@babel/plugin-proposal-private-property-in-object" "7.21.0-placeholder-for-preset-env.2" + "@babel/plugin-syntax-import-assertions" "^7.26.0" + "@babel/plugin-syntax-import-attributes" "^7.26.0" + "@babel/plugin-syntax-unicode-sets-regex" "^7.18.6" + "@babel/plugin-transform-arrow-functions" "^7.25.9" + "@babel/plugin-transform-async-generator-functions" "^7.25.9" + "@babel/plugin-transform-async-to-generator" "^7.25.9" + "@babel/plugin-transform-block-scoped-functions" "^7.25.9" + "@babel/plugin-transform-block-scoping" "^7.25.9" + "@babel/plugin-transform-class-properties" "^7.25.9" + "@babel/plugin-transform-class-static-block" "^7.26.0" + "@babel/plugin-transform-classes" "^7.25.9" + "@babel/plugin-transform-computed-properties" "^7.25.9" + "@babel/plugin-transform-destructuring" "^7.25.9" + "@babel/plugin-transform-dotall-regex" "^7.25.9" + "@babel/plugin-transform-duplicate-keys" "^7.25.9" + "@babel/plugin-transform-duplicate-named-capturing-groups-regex" "^7.25.9" + "@babel/plugin-transform-dynamic-import" "^7.25.9" + "@babel/plugin-transform-exponentiation-operator" "^7.25.9" + "@babel/plugin-transform-export-namespace-from" "^7.25.9" + "@babel/plugin-transform-for-of" "^7.25.9" + "@babel/plugin-transform-function-name" "^7.25.9" + "@babel/plugin-transform-json-strings" "^7.25.9" + "@babel/plugin-transform-literals" "^7.25.9" + "@babel/plugin-transform-logical-assignment-operators" "^7.25.9" + "@babel/plugin-transform-member-expression-literals" "^7.25.9" + "@babel/plugin-transform-modules-amd" "^7.25.9" + "@babel/plugin-transform-modules-commonjs" "^7.25.9" + "@babel/plugin-transform-modules-systemjs" "^7.25.9" + "@babel/plugin-transform-modules-umd" "^7.25.9" + "@babel/plugin-transform-named-capturing-groups-regex" "^7.25.9" + "@babel/plugin-transform-new-target" "^7.25.9" + "@babel/plugin-transform-nullish-coalescing-operator" "^7.25.9" + "@babel/plugin-transform-numeric-separator" "^7.25.9" + "@babel/plugin-transform-object-rest-spread" "^7.25.9" + "@babel/plugin-transform-object-super" "^7.25.9" + "@babel/plugin-transform-optional-catch-binding" "^7.25.9" + "@babel/plugin-transform-optional-chaining" "^7.25.9" + "@babel/plugin-transform-parameters" "^7.25.9" + "@babel/plugin-transform-private-methods" "^7.25.9" + "@babel/plugin-transform-private-property-in-object" "^7.25.9" + "@babel/plugin-transform-property-literals" "^7.25.9" + "@babel/plugin-transform-regenerator" "^7.25.9" + "@babel/plugin-transform-regexp-modifiers" "^7.26.0" + "@babel/plugin-transform-reserved-words" "^7.25.9" + "@babel/plugin-transform-shorthand-properties" "^7.25.9" + "@babel/plugin-transform-spread" "^7.25.9" + "@babel/plugin-transform-sticky-regex" "^7.25.9" + "@babel/plugin-transform-template-literals" "^7.25.9" + "@babel/plugin-transform-typeof-symbol" "^7.25.9" + "@babel/plugin-transform-unicode-escapes" "^7.25.9" + "@babel/plugin-transform-unicode-property-regex" "^7.25.9" + "@babel/plugin-transform-unicode-regex" "^7.25.9" + "@babel/plugin-transform-unicode-sets-regex" "^7.25.9" + "@babel/preset-modules" "0.1.6-no-external-plugins" + babel-plugin-polyfill-corejs2 "^0.4.10" + babel-plugin-polyfill-corejs3 "^0.10.6" + babel-plugin-polyfill-regenerator "^0.6.1" + core-js-compat 
"^3.38.1" + semver "^6.3.1" + "@babel/preset-modules@0.1.6-no-external-plugins": version "0.1.6-no-external-plugins" resolved "https://registry.npmjs.org/@babel/preset-modules/-/preset-modules-0.1.6-no-external-plugins.tgz" @@ -1158,6 +1949,18 @@ "@babel/plugin-transform-react-jsx-development" "^7.24.7" "@babel/plugin-transform-react-pure-annotations" "^7.24.7" +"@babel/preset-react@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/preset-react/-/preset-react-7.25.9.tgz#5f473035dc2094bcfdbc7392d0766bd42dce173e" + integrity sha512-D3to0uSPiWE7rBrdIICCd0tJSIGpLaaGptna2+w7Pft5xMqLpA1sz99DK5TZ1TjGbdQ/VI1eCSZ06dv3lT4JOw== + dependencies: + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/helper-validator-option" "^7.25.9" + "@babel/plugin-transform-react-display-name" "^7.25.9" + "@babel/plugin-transform-react-jsx" "^7.25.9" + "@babel/plugin-transform-react-jsx-development" "^7.25.9" + "@babel/plugin-transform-react-pure-annotations" "^7.25.9" + "@babel/preset-typescript@^7.21.0", "@babel/preset-typescript@^7.22.5": version "7.24.7" resolved "https://registry.npmjs.org/@babel/preset-typescript/-/preset-typescript-7.24.7.tgz" @@ -1169,6 +1972,17 @@ "@babel/plugin-transform-modules-commonjs" "^7.24.7" "@babel/plugin-transform-typescript" "^7.24.7" +"@babel/preset-typescript@^7.25.9": + version "7.26.0" + resolved "https://registry.yarnpkg.com/@babel/preset-typescript/-/preset-typescript-7.26.0.tgz#4a570f1b8d104a242d923957ffa1eaff142a106d" + integrity sha512-NMk1IGZ5I/oHhoXEElcm+xUnL/szL6xflkFZmoEU9xj1qSJXpiS7rsspYo92B4DRCDvZn2erT5LdsCeXAKNCkg== + dependencies: + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/helper-validator-option" "^7.25.9" + "@babel/plugin-syntax-jsx" "^7.25.9" + "@babel/plugin-transform-modules-commonjs" "^7.25.9" + "@babel/plugin-transform-typescript" "^7.25.9" + "@babel/regjsgen@^0.8.0": version "0.8.0" resolved "https://registry.npmjs.org/@babel/regjsgen/-/regjsgen-0.8.0.tgz" @@ -1182,6 +1996,14 @@ core-js-pure "^3.30.2" regenerator-runtime "^0.14.0" +"@babel/runtime-corejs3@^7.25.9": + version "7.26.0" + resolved "https://registry.yarnpkg.com/@babel/runtime-corejs3/-/runtime-corejs3-7.26.0.tgz#5af6bed16073eb4a0191233d61e158a5c768c430" + integrity sha512-YXHu5lN8kJCb1LOb9PgV6pvak43X2h4HvRApcN5SdWeaItQOzfn1hgP6jasD6KWQyJDBxrVmA9o9OivlnNJK/w== + dependencies: + core-js-pure "^3.30.2" + regenerator-runtime "^0.14.0" + "@babel/runtime@^7.1.2", "@babel/runtime@^7.10.3", "@babel/runtime@^7.12.13", "@babel/runtime@^7.12.5", "@babel/runtime@^7.22.6", "@babel/runtime@^7.8.4": version "7.24.8" resolved "https://registry.npmjs.org/@babel/runtime/-/runtime-7.24.8.tgz" @@ -1189,6 +2011,13 @@ dependencies: regenerator-runtime "^0.14.0" +"@babel/runtime@^7.25.9": + version "7.26.0" + resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.26.0.tgz#8600c2f595f277c60815256418b85356a65173c1" + integrity sha512-FDSOghenHTiToteC/QRlv2q3DhPZ/oOXTBoirfWNx1Cx3TMVcGWQtMMmQcSvb/JjpNeGzx8Pq/b4fKEJuWm1sw== + dependencies: + regenerator-runtime "^0.14.0" + "@babel/template@^7.24.7": version "7.24.7" resolved "https://registry.npmjs.org/@babel/template/-/template-7.24.7.tgz" @@ -1198,6 +2027,15 @@ "@babel/parser" "^7.24.7" "@babel/types" "^7.24.7" +"@babel/template@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.25.9.tgz#ecb62d81a8a6f5dc5fe8abfc3901fc52ddf15016" + integrity sha512-9DGttpmPvIxBb/2uwpVo3dqJ+O6RooAFOS+lB+xDqoE2PVCE8nfoHMdZLpfCQRLwvohzXISPZcgxt80xLfsuwg== + dependencies: + "@babel/code-frame" 
"^7.25.9" + "@babel/parser" "^7.25.9" + "@babel/types" "^7.25.9" + "@babel/traverse@^7.22.8", "@babel/traverse@^7.24.7", "@babel/traverse@^7.24.8": version "7.24.8" resolved "https://registry.npmjs.org/@babel/traverse/-/traverse-7.24.8.tgz" @@ -1214,6 +2052,19 @@ debug "^4.3.1" globals "^11.1.0" +"@babel/traverse@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.25.9.tgz#a50f8fe49e7f69f53de5bea7e413cd35c5e13c84" + integrity sha512-ZCuvfwOwlz/bawvAuvcj8rrithP2/N55Tzz342AkTvq4qaWbGfmCk/tKhNaV2cthijKrPAA8SRJV5WWe7IBMJw== + dependencies: + "@babel/code-frame" "^7.25.9" + "@babel/generator" "^7.25.9" + "@babel/parser" "^7.25.9" + "@babel/template" "^7.25.9" + "@babel/types" "^7.25.9" + debug "^4.3.1" + globals "^11.1.0" + "@babel/types@^7.21.3", "@babel/types@^7.24.7", "@babel/types@^7.24.8", "@babel/types@^7.4.4": version "7.24.8" resolved "https://registry.npmjs.org/@babel/types/-/types-7.24.8.tgz" @@ -1223,6 +2074,14 @@ "@babel/helper-validator-identifier" "^7.24.7" to-fast-properties "^2.0.0" +"@babel/types@^7.25.9", "@babel/types@^7.26.0": + version "7.26.0" + resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.26.0.tgz#deabd08d6b753bc8e0f198f8709fb575e31774ff" + integrity sha512-Z/yiTPj+lDVnF7lWeKCIJzaIkI0vYO87dMpZ4bg4TDrFe4XXLFWL1TbXU27gBP3QccxV9mZICCrnjnYlJjXHOA== + dependencies: + "@babel/helper-string-parser" "^7.25.9" + "@babel/helper-validator-identifier" "^7.25.9" + "@code-hike/lighter@0.7.0": version "0.7.0" resolved "https://registry.npmjs.org/@code-hike/lighter/-/lighter-0.7.0.tgz" @@ -1268,7 +2127,59 @@ "@docsearch/css" "3.6.0" algoliasearch "^4.19.1" -"@docusaurus/core@^3.2.0", "@docusaurus/core@3.4.0": +"@docusaurus/babel@3.6.0": + version "3.6.0" + resolved "https://registry.yarnpkg.com/@docusaurus/babel/-/babel-3.6.0.tgz#735a003207925bd782dd08ffa5d8b3503c1f8d72" + integrity sha512-7CsoQFiadoq7AHSUIQNkI/lGfg9AQ2ZBzsf9BqfZGXkHwWDy6twuohEaG0PgQv1npSRSAB2dioVxhRSErnqKNA== + dependencies: + "@babel/core" "^7.25.9" + "@babel/generator" "^7.25.9" + "@babel/plugin-syntax-dynamic-import" "^7.8.3" + "@babel/plugin-transform-runtime" "^7.25.9" + "@babel/preset-env" "^7.25.9" + "@babel/preset-react" "^7.25.9" + "@babel/preset-typescript" "^7.25.9" + "@babel/runtime" "^7.25.9" + "@babel/runtime-corejs3" "^7.25.9" + "@babel/traverse" "^7.25.9" + "@docusaurus/logger" "3.6.0" + "@docusaurus/utils" "3.6.0" + babel-plugin-dynamic-import-node "^2.3.3" + fs-extra "^11.1.1" + tslib "^2.6.0" + +"@docusaurus/bundler@3.6.0": + version "3.6.0" + resolved "https://registry.yarnpkg.com/@docusaurus/bundler/-/bundler-3.6.0.tgz#bdd060ba4d009211348e4e973a3bf4861cf0996b" + integrity sha512-o5T9HXkPKH0OQAifTxEXaebcO8kaz3tU1+wlIShZ2DKJHlsyWX3N4rToWBHroWnV/ZCT2XN3kLRzXASqrnb9Tw== + dependencies: + "@babel/core" "^7.25.9" + "@docusaurus/babel" "3.6.0" + "@docusaurus/cssnano-preset" "3.6.0" + "@docusaurus/logger" "3.6.0" + "@docusaurus/types" "3.6.0" + "@docusaurus/utils" "3.6.0" + autoprefixer "^10.4.14" + babel-loader "^9.2.1" + clean-css "^5.3.2" + copy-webpack-plugin "^11.0.0" + css-loader "^6.8.1" + css-minimizer-webpack-plugin "^5.0.1" + cssnano "^6.1.2" + file-loader "^6.2.0" + html-minifier-terser "^7.2.0" + mini-css-extract-plugin "^2.9.1" + null-loader "^4.0.1" + postcss "^8.4.26" + postcss-loader "^7.3.3" + react-dev-utils "^12.0.1" + terser-webpack-plugin "^5.3.9" + tslib "^2.6.0" + url-loader "^4.1.1" + webpack "^5.95.0" + webpackbar "^6.0.1" + +"@docusaurus/core@3.4.0", "@docusaurus/core@^3.2.0": version "3.4.0" resolved 
"https://registry.npmjs.org/@docusaurus/core/-/core-3.4.0.tgz" integrity sha512-g+0wwmN2UJsBqy2fQRQ6fhXruoEa62JDeEa5d8IdTJlMoaDaEDfHh7WjwGRn4opuTQWpjAwP/fbcgyHKlE+64w== @@ -1342,6 +2253,55 @@ webpack-merge "^5.9.0" webpackbar "^5.0.2" +"@docusaurus/core@3.6.0": + version "3.6.0" + resolved "https://registry.yarnpkg.com/@docusaurus/core/-/core-3.6.0.tgz#b23fc7e253a49cc3e5ac9e091354f497cc0b101b" + integrity sha512-lvRgMoKJJSRDt9+HhAqFcICV4kp/mw1cJJrLxIw4Q2XZnFGM1XUuwcbuaqWmGog+NcOLZaPCcCtZbn60EMCtjQ== + dependencies: + "@docusaurus/babel" "3.6.0" + "@docusaurus/bundler" "3.6.0" + "@docusaurus/logger" "3.6.0" + "@docusaurus/mdx-loader" "3.6.0" + "@docusaurus/utils" "3.6.0" + "@docusaurus/utils-common" "3.6.0" + "@docusaurus/utils-validation" "3.6.0" + boxen "^6.2.1" + chalk "^4.1.2" + chokidar "^3.5.3" + cli-table3 "^0.6.3" + combine-promises "^1.1.0" + commander "^5.1.0" + core-js "^3.31.1" + del "^6.1.1" + detect-port "^1.5.1" + escape-html "^1.0.3" + eta "^2.2.0" + eval "^0.1.8" + fs-extra "^11.1.1" + html-tags "^3.3.1" + html-webpack-plugin "^5.6.0" + leven "^3.1.0" + lodash "^4.17.21" + p-map "^4.0.0" + prompts "^2.4.2" + react-dev-utils "^12.0.1" + react-helmet-async "^1.3.0" + react-loadable "npm:@docusaurus/react-loadable@6.0.0" + react-loadable-ssr-addon-v5-slorber "^1.0.1" + react-router "^5.3.4" + react-router-config "^5.1.1" + react-router-dom "^5.3.4" + rtl-detect "^1.0.4" + semver "^7.5.4" + serve-handler "^6.1.6" + shelljs "^0.8.5" + tslib "^2.6.0" + update-notifier "^6.0.2" + webpack "^5.95.0" + webpack-bundle-analyzer "^4.10.2" + webpack-dev-server "^4.15.2" + webpack-merge "^6.0.1" + "@docusaurus/cssnano-preset@3.4.0": version "3.4.0" resolved "https://registry.npmjs.org/@docusaurus/cssnano-preset/-/cssnano-preset-3.4.0.tgz" @@ -1352,6 +2312,16 @@ postcss-sort-media-queries "^5.2.0" tslib "^2.6.0" +"@docusaurus/cssnano-preset@3.6.0": + version "3.6.0" + resolved "https://registry.yarnpkg.com/@docusaurus/cssnano-preset/-/cssnano-preset-3.6.0.tgz#02378e53e9568ed5fc8871d4fc158ea96fd7421c" + integrity sha512-h3jlOXqqzNSoU+C4CZLNpFtD+v2xr1UBf4idZpwMgqid9r6lb5GS7tWKnQnauio6OipacbHbDXEX3JyT1PlDkg== + dependencies: + cssnano-preset-advanced "^6.1.2" + postcss "^8.4.38" + postcss-sort-media-queries "^5.2.0" + tslib "^2.6.0" + "@docusaurus/logger@3.4.0": version "3.4.0" resolved "https://registry.npmjs.org/@docusaurus/logger/-/logger-3.4.0.tgz" @@ -1360,6 +2330,14 @@ chalk "^4.1.2" tslib "^2.6.0" +"@docusaurus/logger@3.6.0": + version "3.6.0" + resolved "https://registry.yarnpkg.com/@docusaurus/logger/-/logger-3.6.0.tgz#c7349c2636087f55f573a60a3c7f69b87d59974d" + integrity sha512-BcQhoXilXW0607cH/kO6P5Gt5KxCGfoJ+QDKNf3yO2S09/RsITlW+0QljXPbI3DklTrHrhRDmgGk1yX4nUhWTA== + dependencies: + chalk "^4.1.2" + tslib "^2.6.0" + "@docusaurus/mdx-loader@3.4.0": version "3.4.0" resolved "https://registry.npmjs.org/@docusaurus/mdx-loader/-/mdx-loader-3.4.0.tgz" @@ -1390,7 +2368,37 @@ vfile "^6.0.1" webpack "^5.88.1" -"@docusaurus/module-type-aliases@^3.2.0", "@docusaurus/module-type-aliases@3.4.0": +"@docusaurus/mdx-loader@3.6.0": + version "3.6.0" + resolved "https://registry.yarnpkg.com/@docusaurus/mdx-loader/-/mdx-loader-3.6.0.tgz#f8ba7af9d59473a7182f6a9307e0432f8dce905b" + integrity sha512-GhRzL1Af/AdSSrGesSPOU/iP/aXadTGmVKuysCxZDrQR2RtBtubQZ9aw+KvdFVV7R4K/CsbgD6J5oqrXlEPk3Q== + dependencies: + "@docusaurus/logger" "3.6.0" + "@docusaurus/utils" "3.6.0" + "@docusaurus/utils-validation" "3.6.0" + "@mdx-js/mdx" "^3.0.0" + "@slorber/remark-comment" "^1.0.0" + escape-html "^1.0.3" + 
estree-util-value-to-estree "^3.0.1" + file-loader "^6.2.0" + fs-extra "^11.1.1" + image-size "^1.0.2" + mdast-util-mdx "^3.0.0" + mdast-util-to-string "^4.0.0" + rehype-raw "^7.0.0" + remark-directive "^3.0.0" + remark-emoji "^4.0.0" + remark-frontmatter "^5.0.0" + remark-gfm "^4.0.0" + stringify-object "^3.3.0" + tslib "^2.6.0" + unified "^11.0.3" + unist-util-visit "^5.0.0" + url-loader "^4.1.1" + vfile "^6.0.1" + webpack "^5.88.1" + +"@docusaurus/module-type-aliases@3.4.0", "@docusaurus/module-type-aliases@^3.2.0": version "3.4.0" resolved "https://registry.npmjs.org/@docusaurus/module-type-aliases/-/module-type-aliases-3.4.0.tgz" integrity sha512-A1AyS8WF5Bkjnb8s+guTDuYmUiwJzNrtchebBHpc0gz0PyHJNMaybUlSrmJjHVcGrya0LKI4YcR3lBDQfXRYLw== @@ -1403,6 +2411,21 @@ react-helmet-async "*" react-loadable "npm:@docusaurus/react-loadable@6.0.0" +"@docusaurus/plugin-client-redirects@^3.4.0": + version "3.6.0" + resolved "https://registry.yarnpkg.com/@docusaurus/plugin-client-redirects/-/plugin-client-redirects-3.6.0.tgz#54155826e9e5da625e296eb93e346c11c1541a78" + integrity sha512-LIqRT6dtgxdENQH8XYwOOyxfKuzFD1ayJmIDCp9Yi/rbdcPE4vvTcESLGXOKvcyWZSfyCu+JA8Tyk4qpp+2J0w== + dependencies: + "@docusaurus/core" "3.6.0" + "@docusaurus/logger" "3.6.0" + "@docusaurus/utils" "3.6.0" + "@docusaurus/utils-common" "3.6.0" + "@docusaurus/utils-validation" "3.6.0" + eta "^2.2.0" + fs-extra "^11.1.1" + lodash "^4.17.21" + tslib "^2.6.0" + "@docusaurus/plugin-content-blog@3.4.0": version "3.4.0" resolved "https://registry.npmjs.org/@docusaurus/plugin-content-blog/-/plugin-content-blog-3.4.0.tgz" @@ -1426,7 +2449,7 @@ utility-types "^3.10.0" webpack "^5.88.1" -"@docusaurus/plugin-content-docs@^2 || ^3", "@docusaurus/plugin-content-docs@3.4.0": +"@docusaurus/plugin-content-docs@3.4.0", "@docusaurus/plugin-content-docs@^2 || ^3": version "3.4.0" resolved "https://registry.npmjs.org/@docusaurus/plugin-content-docs/-/plugin-content-docs-3.4.0.tgz" integrity sha512-HkUCZffhBo7ocYheD9oZvMcDloRnGhBMOZRyVcAQRFmZPmNqSyISlXA1tQCIxW+r478fty97XXAGjNYzBjpCsg== @@ -1495,7 +2518,7 @@ "@types/gtag.js" "^0.0.12" tslib "^2.6.0" -"@docusaurus/plugin-google-tag-manager@3.4.0": +"@docusaurus/plugin-google-tag-manager@3.4.0", "@docusaurus/plugin-google-tag-manager@^3.2.0": version "3.4.0" resolved "https://registry.npmjs.org/@docusaurus/plugin-google-tag-manager/-/plugin-google-tag-manager-3.4.0.tgz" integrity sha512-O9tX1BTwxIhgXpOLpFDueYA9DWk69WCbDRrjYoMQtFHSkTyE7RhNgyjSPREUWJb9i+YUg3OrsvrBYRl64FCPCQ== @@ -1613,7 +2636,7 @@ tslib "^2.6.0" utility-types "^3.10.0" -"@docusaurus/theme-translations@^2 || ^3", "@docusaurus/theme-translations@3.4.0": +"@docusaurus/theme-translations@3.4.0", "@docusaurus/theme-translations@^2 || ^3": version "3.4.0" resolved "https://registry.npmjs.org/@docusaurus/theme-translations/-/theme-translations-3.4.0.tgz" integrity sha512-zSxCSpmQCCdQU5Q4CnX/ID8CSUUI3fvmq4hU/GNP/XoAWtXo9SAVnM3TzpU8Gb//H3WCsT8mJcTfyOk3d9ftNg== @@ -1636,14 +2659,36 @@ webpack "^5.88.1" webpack-merge "^5.9.0" -"@docusaurus/utils-common@^2 || ^3", "@docusaurus/utils-common@3.4.0": +"@docusaurus/types@3.6.0": + version "3.6.0" + resolved "https://registry.yarnpkg.com/@docusaurus/types/-/types-3.6.0.tgz#8fa82332a7c7b8093b5c55e1115f5854ce484978" + integrity sha512-jADLgoZGWhAzThr+mRiyuFD4OUzt6jHnb7NRArRKorgxckqUBaPyFOau9hhbcSTHtU6ceyeWjN7FDt7uG2Hplw== + dependencies: + "@mdx-js/mdx" "^3.0.0" + "@types/history" "^4.7.11" + "@types/react" "*" + commander "^5.1.0" + joi "^17.9.2" + react-helmet-async "^1.3.0" + utility-types "^3.10.0" + webpack 
"^5.95.0" + webpack-merge "^5.9.0" + +"@docusaurus/utils-common@3.4.0", "@docusaurus/utils-common@^2 || ^3": version "3.4.0" resolved "https://registry.npmjs.org/@docusaurus/utils-common/-/utils-common-3.4.0.tgz" integrity sha512-NVx54Wr4rCEKsjOH5QEVvxIqVvm+9kh7q8aYTU5WzUU9/Hctd6aTrcZ3G0Id4zYJ+AeaG5K5qHA4CY5Kcm2iyQ== dependencies: tslib "^2.6.0" -"@docusaurus/utils-validation@^2 || ^3", "@docusaurus/utils-validation@3.4.0": +"@docusaurus/utils-common@3.6.0": + version "3.6.0" + resolved "https://registry.yarnpkg.com/@docusaurus/utils-common/-/utils-common-3.6.0.tgz#11855ea503132bbcaba6ca4d351293ff10a75d34" + integrity sha512-diUDNfbw33GaZMmKwdTckT2IBfVouXLXRD+zphH9ywswuaEIKqixvuf5g41H7MBBrlMsxhna3uTMoB4B/OPDcA== + dependencies: + tslib "^2.6.0" + +"@docusaurus/utils-validation@3.4.0", "@docusaurus/utils-validation@^2 || ^3": version "3.4.0" resolved "https://registry.npmjs.org/@docusaurus/utils-validation/-/utils-validation-3.4.0.tgz" integrity sha512-hYQ9fM+AXYVTWxJOT1EuNaRnrR2WGpRdLDQG07O8UOpsvCPWUVOeo26Rbm0JWY2sGLfzAb+tvJ62yF+8F+TV0g== @@ -1657,7 +2702,21 @@ lodash "^4.17.21" tslib "^2.6.0" -"@docusaurus/utils@^2 || ^3", "@docusaurus/utils@3.4.0": +"@docusaurus/utils-validation@3.6.0": + version "3.6.0" + resolved "https://registry.yarnpkg.com/@docusaurus/utils-validation/-/utils-validation-3.6.0.tgz#5557ca14fa64ac29e6f70e61006be721395ecde5" + integrity sha512-CRHiKKJEKA0GFlfOf71JWHl7PtwOyX0+Zg9ep9NFEZv6Lcx3RJ9nhl7p8HRjPL6deyYceavM//BsfW4pCI4BtA== + dependencies: + "@docusaurus/logger" "3.6.0" + "@docusaurus/utils" "3.6.0" + "@docusaurus/utils-common" "3.6.0" + fs-extra "^11.2.0" + joi "^17.9.2" + js-yaml "^4.1.0" + lodash "^4.17.21" + tslib "^2.6.0" + +"@docusaurus/utils@3.4.0", "@docusaurus/utils@^2 || ^3": version "3.4.0" resolved "https://registry.npmjs.org/@docusaurus/utils/-/utils-3.4.0.tgz" integrity sha512-fRwnu3L3nnWaXOgs88BVBmG1yGjcQqZNHG+vInhEa2Sz2oQB+ZjbEMO5Rh9ePFpZ0YDiDUhpaVjwmS+AU2F14g== @@ -1683,6 +2742,32 @@ utility-types "^3.10.0" webpack "^5.88.1" +"@docusaurus/utils@3.6.0": + version "3.6.0" + resolved "https://registry.yarnpkg.com/@docusaurus/utils/-/utils-3.6.0.tgz#192785da6fd62dfd83d6f1879c3aa45547f5df23" + integrity sha512-VKczAutI4mptiAw/WcYEu5WeVhQ6Q1zdIUl64SGw9K++9lziH+Kt10Ee8l2dMpRkiUk6zzK20kMNlX2WCUwXYQ== + dependencies: + "@docusaurus/logger" "3.6.0" + "@docusaurus/utils-common" "3.6.0" + "@svgr/webpack" "^8.1.0" + escape-string-regexp "^4.0.0" + file-loader "^6.2.0" + fs-extra "^11.1.1" + github-slugger "^1.5.0" + globby "^11.1.0" + gray-matter "^4.0.3" + jiti "^1.20.0" + js-yaml "^4.1.0" + lodash "^4.17.21" + micromatch "^4.0.5" + prompts "^2.4.2" + resolve-pathname "^3.0.0" + shelljs "^0.8.5" + tslib "^2.6.0" + url-loader "^4.1.1" + utility-types "^3.10.0" + webpack "^5.88.1" + "@easyops-cn/autocomplete.js@^0.38.1": version "0.38.1" resolved "https://registry.npmjs.org/@easyops-cn/autocomplete.js/-/autocomplete.js-0.38.1.tgz" @@ -1713,6 +2798,28 @@ mark.js "^8.11.1" tslib "^2.4.0" +"@emnapi/core@^1.1.0": + version "1.3.1" + resolved "https://registry.yarnpkg.com/@emnapi/core/-/core-1.3.1.tgz#9c62d185372d1bddc94682b87f376e03dfac3f16" + integrity sha512-pVGjBIt1Y6gg3EJN8jTcfpP/+uuRksIo055oE/OBkDNcjZqVbfkWCksG1Jp4yZnj3iKWyWX8fdG/j6UDYPbFog== + dependencies: + "@emnapi/wasi-threads" "1.0.1" + tslib "^2.4.0" + +"@emnapi/runtime@^1.1.0": + version "1.3.1" + resolved "https://registry.yarnpkg.com/@emnapi/runtime/-/runtime-1.3.1.tgz#0fcaa575afc31f455fd33534c19381cfce6c6f60" + integrity 
sha512-kEBmG8KyqtxJZv+ygbEim+KCGtIq1fC22Ms3S4ziXmYKm8uyoLX0MHONVKwp+9opg390VaKRNt4a7A9NwmpNhw== + dependencies: + tslib "^2.4.0" + +"@emnapi/wasi-threads@1.0.1": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@emnapi/wasi-threads/-/wasi-threads-1.0.1.tgz#d7ae71fd2166b1c916c6cd2d0df2ef565a2e1a5b" + integrity sha512-iIBu7mwkq4UQGeMEM8bLwNK962nXdhodeScX4slfQnRhEMMzvYivHhutCIk8uojvmASXXPC2WNEjwxFWk72Oqw== + dependencies: + tslib "^2.4.0" + "@hapi/hoek@^9.0.0", "@hapi/hoek@^9.3.0": version "9.3.0" resolved "https://registry.npmjs.org/@hapi/hoek/-/hoek-9.3.0.tgz" @@ -1788,14 +2895,6 @@ resolved "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz" integrity sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ== -"@jridgewell/trace-mapping@^0.3.18", "@jridgewell/trace-mapping@^0.3.20", "@jridgewell/trace-mapping@^0.3.24", "@jridgewell/trace-mapping@^0.3.25": - version "0.3.25" - resolved "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz" - integrity sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ== - dependencies: - "@jridgewell/resolve-uri" "^3.1.0" - "@jridgewell/sourcemap-codec" "^1.4.14" - "@jridgewell/trace-mapping@0.3.9": version "0.3.9" resolved "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz" @@ -1804,6 +2903,14 @@ "@jridgewell/resolve-uri" "^3.0.3" "@jridgewell/sourcemap-codec" "^1.4.10" +"@jridgewell/trace-mapping@^0.3.18", "@jridgewell/trace-mapping@^0.3.20", "@jridgewell/trace-mapping@^0.3.24", "@jridgewell/trace-mapping@^0.3.25": + version "0.3.25" + resolved "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz" + integrity sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ== + dependencies: + "@jridgewell/resolve-uri" "^3.1.0" + "@jridgewell/sourcemap-codec" "^1.4.14" + "@leichtgewicht/ip-codec@^2.0.1": version "2.0.5" resolved "https://registry.npmjs.org/@leichtgewicht/ip-codec/-/ip-codec-2.0.5.tgz" @@ -1845,18 +2952,94 @@ dependencies: "@types/mdx" "^2.0.0" -"@mendable/search@^0.0.206": - version "0.0.206" - resolved "https://registry.npmjs.org/@mendable/search/-/search-0.0.206.tgz" - integrity sha512-T1qvSL4S0YXnQXaBjJ7DVzBDv+EpaPOm7ovacjL6qg5AtxdK8csF6T2rxj82hJBLcFzKmghEq8A8dQkfNiHLLw== +"@mendable/search@^0.0.206": + version "0.0.206" + resolved "https://registry.npmjs.org/@mendable/search/-/search-0.0.206.tgz" + integrity sha512-T1qvSL4S0YXnQXaBjJ7DVzBDv+EpaPOm7ovacjL6qg5AtxdK8csF6T2rxj82hJBLcFzKmghEq8A8dQkfNiHLLw== + dependencies: + html-react-parser "^4.2.0" + posthog-js "^1.45.1" + +"@napi-rs/wasm-runtime@^0.2.3": + version "0.2.5" + resolved "https://registry.yarnpkg.com/@napi-rs/wasm-runtime/-/wasm-runtime-0.2.5.tgz#b6f5079408305fe6a3529ccb2bb8ba8d9b7a02e7" + integrity sha512-kwUxR7J9WLutBbulqg1dfOrMTwhMdXLdcGUhcbCcGwnPLt3gz19uHVdwH1syKVDbE022ZS2vZxOWflFLS0YTjw== + dependencies: + "@emnapi/core" "^1.1.0" + "@emnapi/runtime" "^1.1.0" + "@tybys/wasm-util" "^0.9.0" + +"@node-rs/jieba-android-arm-eabi@1.10.3": + version "1.10.3" + resolved "https://registry.yarnpkg.com/@node-rs/jieba-android-arm-eabi/-/jieba-android-arm-eabi-1.10.3.tgz#821af26a4953b3fbdf2f80a4d08a9d9114b40bea" + integrity sha512-fuqVtaYlUKZg3cqagYFxj1DSa7ZHKXLle4iGH2kbQWg7Kw6cf7aCYBHIUZuH5sliK10M/CWccZ+SGRUwcSGfbg== + +"@node-rs/jieba-android-arm64@1.10.3": + version "1.10.3" + resolved 
"https://registry.yarnpkg.com/@node-rs/jieba-android-arm64/-/jieba-android-arm64-1.10.3.tgz#e5c285fb8de71739dfa3a83d894adcadb799c404" + integrity sha512-iuZZZq5yD9lT+AgaXpFe19gtAsIecUODRLLaBFbavjgjLk5cumv38ytWjS36s/eqptwI15MQfysSYOlWtMEG5g== + +"@node-rs/jieba-darwin-arm64@1.10.3": + version "1.10.3" + resolved "https://registry.npmjs.org/@node-rs/jieba-darwin-arm64/-/jieba-darwin-arm64-1.10.3.tgz" + integrity sha512-dwPhkav1tEARskwPz91UUXL2NXy4h0lJYTuJzpGgwXxm552zBM2JJ41kjah1364j+EOq5At3NQvf5r5rH89phQ== + +"@node-rs/jieba-darwin-x64@1.10.3": + version "1.10.3" + resolved "https://registry.yarnpkg.com/@node-rs/jieba-darwin-x64/-/jieba-darwin-x64-1.10.3.tgz#ffdc8a63335294d7c68d3aebec870ec0824ebe98" + integrity sha512-kjxvV6G1baQo/2I3mELv5qGv4Q0rhd5srwXhypSxMWZFtSpNwCDsLcIOR5bvMBci6QVFfZOs6WD6DKiWVz0SlA== + +"@node-rs/jieba-freebsd-x64@1.10.3": + version "1.10.3" + resolved "https://registry.yarnpkg.com/@node-rs/jieba-freebsd-x64/-/jieba-freebsd-x64-1.10.3.tgz#188349a9074b200af4a3e8a0ea169f45efd6c162" + integrity sha512-QYTsn+zlWRil+MuBeLfTK5Md4GluOf2lHnFqjrOZW2oMgNOvxB3qoLV4TUf70S/E2XHeP6PUdjCKItX8C7GQPg== + +"@node-rs/jieba-linux-arm-gnueabihf@1.10.3": + version "1.10.3" + resolved "https://registry.yarnpkg.com/@node-rs/jieba-linux-arm-gnueabihf/-/jieba-linux-arm-gnueabihf-1.10.3.tgz#e1831b7b08a32904b12860555978c50222a97b54" + integrity sha512-UFB43kDOvqmbRl99e3GPwaTuwJZaAvgLaMTvBkmxww4MpQH6G1k31RLzMW/S21uSQso2lj6W/Mm59gaJk2FiyA== + +"@node-rs/jieba-linux-arm64-gnu@1.10.3": + version "1.10.3" + resolved "https://registry.yarnpkg.com/@node-rs/jieba-linux-arm64-gnu/-/jieba-linux-arm64-gnu-1.10.3.tgz#326712eb7418f9796b113af93afe59ab64c37add" + integrity sha512-bu++yWi10wZtnS5uLcwxzxKmHVT77NgQMK8JiQr1TWCl3Y1Th7CnEHQtxfVB489edDK8l644h1/4zSTe5fRnOQ== + +"@node-rs/jieba-linux-arm64-musl@1.10.3": + version "1.10.3" + resolved "https://registry.yarnpkg.com/@node-rs/jieba-linux-arm64-musl/-/jieba-linux-arm64-musl-1.10.3.tgz#6a3149d5abbe09f7c7748da219d5c39522b36c8a" + integrity sha512-pJh+SzrK1HaKakhdFM+ew9vXwpZqMxy9u0U7J4GT+3GvOwnAZ+KjeaHebIfgOz7ZHvp/T4YBNf8oWW4zwj3AJw== + +"@node-rs/jieba-linux-x64-gnu@1.10.3": + version "1.10.3" + resolved "https://registry.yarnpkg.com/@node-rs/jieba-linux-x64-gnu/-/jieba-linux-x64-gnu-1.10.3.tgz#5d75fbc62a36cbb79137284abe4f432da06c2c80" + integrity sha512-GF5cfvu/0wXO2fVX/XV3WYH/xEGWzMBvfqLhGiA1OA1xHIufnA1T7uU3ZXkyoNi5Bzf6dmxnwtE4CJL0nvhwjQ== + +"@node-rs/jieba-linux-x64-musl@1.10.3": + version "1.10.3" + resolved "https://registry.yarnpkg.com/@node-rs/jieba-linux-x64-musl/-/jieba-linux-x64-musl-1.10.3.tgz#fce3aa9c394dbc51b4b3e92d29b385b4c4f23aec" + integrity sha512-h45HMVU/hgzQ0saXNsK9fKlGdah1i1cXZULpB5vQRlRL2ZIaGp+ULtWTogS7vkoo2K8s2l4tqakWMg9eUjIJ2A== + +"@node-rs/jieba-wasm32-wasi@1.10.3": + version "1.10.3" + resolved "https://registry.yarnpkg.com/@node-rs/jieba-wasm32-wasi/-/jieba-wasm32-wasi-1.10.3.tgz#b852eb2c9b8c81c5514ed8bb76d74c1cdf66fe76" + integrity sha512-vuoQ62vVoedNGcBmIi4UWdtNBOZG8B+vDYfjx3FD6rNg6g/RgwbVjYXbOVMOQwX06Ob9CfrutICXdUGHgoxzEQ== dependencies: - html-react-parser "^4.2.0" - posthog-js "^1.45.1" + "@napi-rs/wasm-runtime" "^0.2.3" -"@node-rs/jieba-darwin-arm64@1.10.3": +"@node-rs/jieba-win32-arm64-msvc@1.10.3": version "1.10.3" - resolved "https://registry.npmjs.org/@node-rs/jieba-darwin-arm64/-/jieba-darwin-arm64-1.10.3.tgz" - integrity sha512-dwPhkav1tEARskwPz91UUXL2NXy4h0lJYTuJzpGgwXxm552zBM2JJ41kjah1364j+EOq5At3NQvf5r5rH89phQ== + resolved 
"https://registry.yarnpkg.com/@node-rs/jieba-win32-arm64-msvc/-/jieba-win32-arm64-msvc-1.10.3.tgz#eefce48df8ec0496a0e45593d0b5f8981bb32b80" + integrity sha512-B8t4dh56TZnMLBoYWDkopf1ed37Ru/iU1qiIeBkbZWXGmNBChNZUOd//eaPOFjx8m9Sfc8bkj3FBRWt/kTAhmw== + +"@node-rs/jieba-win32-ia32-msvc@1.10.3": + version "1.10.3" + resolved "https://registry.yarnpkg.com/@node-rs/jieba-win32-ia32-msvc/-/jieba-win32-ia32-msvc-1.10.3.tgz#edfb74e880a32f66a6810502957b62f9b042b487" + integrity sha512-SKuPGZJ5T+X4jOn1S8LklOSZ6HC7UBiw0hwi2z9uqX6WgElquLjGi/xfZ2gPqffeR/5K/PUu7aqYUUPL1XonVQ== + +"@node-rs/jieba-win32-x64-msvc@1.10.3": + version "1.10.3" + resolved "https://registry.yarnpkg.com/@node-rs/jieba-win32-x64-msvc/-/jieba-win32-x64-msvc-1.10.3.tgz#285a24134d9c367b11d73060bdc37c351c3e60b5" + integrity sha512-j9I4+a/tf2hsLu8Sr0NhcLBVNBBQctO2mzcjemMpRa1SlEeODyic9RIyP8Ljz3YTN6MYqKh1KA9iR1xvxjxYFg== "@node-rs/jieba@^1.6.0": version "1.10.3" @@ -1886,7 +3069,7 @@ "@nodelib/fs.stat" "2.0.5" run-parallel "^1.1.9" -"@nodelib/fs.stat@^2.0.2", "@nodelib/fs.stat@2.0.5": +"@nodelib/fs.stat@2.0.5", "@nodelib/fs.stat@^2.0.2": version "2.0.5" resolved "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz" integrity sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A== @@ -2160,6 +3343,13 @@ resolved "https://registry.npmjs.org/@tsconfig/node16/-/node16-1.0.4.tgz" integrity sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA== +"@tybys/wasm-util@^0.9.0": + version "0.9.0" + resolved "https://registry.yarnpkg.com/@tybys/wasm-util/-/wasm-util-0.9.0.tgz#3e75eb00604c8d6db470bf18c37b7d984a0e3355" + integrity sha512-6+7nlbMVX/PVDCwaIQ8nTOPveOcFLSt8GcXdx8hD0bt39uWxYT88uXzqTd4fTvqta7oeUJqudepapKNt2DYJFw== + dependencies: + tslib "^2.4.0" + "@types/acorn@^4.0.0": version "4.0.6" resolved "https://registry.npmjs.org/@types/acorn/-/acorn-4.0.6.tgz" @@ -2214,7 +3404,7 @@ dependencies: "@types/ms" "*" -"@types/eslint-scope@^3.7.3": +"@types/eslint-scope@^3.7.3", "@types/eslint-scope@^3.7.7": version "3.7.7" resolved "https://registry.npmjs.org/@types/eslint-scope/-/eslint-scope-3.7.7.tgz" integrity sha512-MzMFlSLBqNF2gcHWO0G1vP/YQyfvrxZ0bF+u7mzUdZ1/xK4A4sru+nraZz5i3iEIk1l1uyicaDVTB4QbbEkAYg== @@ -2242,6 +3432,11 @@ resolved "https://registry.npmjs.org/@types/estree/-/estree-1.0.5.tgz" integrity sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw== +"@types/estree@^1.0.6": + version "1.0.6" + resolved "https://registry.yarnpkg.com/@types/estree/-/estree-1.0.6.tgz#628effeeae2064a1b4e79f78e81d87b7e5fc7b50" + integrity sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw== + "@types/express-serve-static-core@*", "@types/express-serve-static-core@^4.17.33": version "4.19.5" resolved "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-4.19.5.tgz" @@ -2524,7 +3719,7 @@ resolved "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.2.0.tgz" integrity sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ== -"@webassemblyjs/ast@^1.12.1", "@webassemblyjs/ast@1.12.1": +"@webassemblyjs/ast@1.12.1", "@webassemblyjs/ast@^1.12.1": version "1.12.1" resolved "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.12.1.tgz" integrity sha512-EKfMUOPRRUTy5UII4qJDGPpqfwjOmZ5jeGFwid9mnoqIFK+e0vqoi1qH56JpmZSzEL53jKnNzScdmftJyG5xWg== @@ -2625,7 +3820,7 @@ "@webassemblyjs/wasm-gen" "1.12.1" 
"@webassemblyjs/wasm-parser" "1.12.1" -"@webassemblyjs/wasm-parser@^1.12.1", "@webassemblyjs/wasm-parser@1.12.1": +"@webassemblyjs/wasm-parser@1.12.1", "@webassemblyjs/wasm-parser@^1.12.1": version "1.12.1" resolved "https://registry.npmjs.org/@webassemblyjs/wasm-parser/-/wasm-parser-1.12.1.tgz" integrity sha512-xikIi7c2FHXysxXe3COrVUPSheuBtpcfhbpFj4gmu7KRLYOzANztwUU0IbsqvMqzuNK2+glRGWCEqZo1WCLyAQ== @@ -2685,6 +3880,11 @@ acorn@^8.0.0, acorn@^8.0.4, acorn@^8.11.0, acorn@^8.4.1, acorn@^8.7.1, acorn@^8. resolved "https://registry.npmjs.org/acorn/-/acorn-8.12.1.tgz" integrity sha512-tcpGyI9zbizT9JbV6oYE477V6mTlXvvi0T0G3SNIYE2apm/G5huBa1+K89VGeovbg+jycCrfhl3ADxErOuO6Jg== +acorn@^8.14.0: + version "8.14.0" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.14.0.tgz#063e2c70cac5fb4f6467f0b11152e04c682795b0" + integrity sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA== + address@^1.0.1, address@^1.1.2: version "1.2.2" resolved "https://registry.npmjs.org/address/-/address-1.2.2.tgz" @@ -2705,12 +3905,7 @@ ajv-formats@^2.1.1: dependencies: ajv "^8.0.0" -ajv-keywords@^3.4.1: - version "3.5.2" - resolved "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.5.2.tgz" - integrity sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ== - -ajv-keywords@^3.5.2: +ajv-keywords@^3.4.1, ajv-keywords@^3.5.2: version "3.5.2" resolved "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.5.2.tgz" integrity sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ== @@ -2722,17 +3917,7 @@ ajv-keywords@^5.1.0: dependencies: fast-deep-equal "^3.1.3" -ajv@^6.12.2: - version "6.12.6" - resolved "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz" - integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== - dependencies: - fast-deep-equal "^3.1.1" - fast-json-stable-stringify "^2.0.0" - json-schema-traverse "^0.4.1" - uri-js "^4.2.2" - -ajv@^6.12.5: +ajv@^6.12.2, ajv@^6.12.5: version "6.12.6" resolved "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz" integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== @@ -2787,6 +3972,13 @@ ansi-align@^3.0.1: dependencies: string-width "^4.1.0" +ansi-escapes@^4.3.2: + version "4.3.2" + resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-4.3.2.tgz#6b2291d1db7d98b6521d5f1efa42d0f3a9feb65e" + integrity sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ== + dependencies: + type-fest "^0.21.3" + ansi-html-community@^0.0.8: version "0.0.8" resolved "https://registry.npmjs.org/ansi-html-community/-/ansi-html-community-0.0.8.tgz" @@ -3118,6 +4310,14 @@ babel-loader@^9.1.3: find-cache-dir "^4.0.0" schema-utils "^4.0.0" +babel-loader@^9.2.1: + version "9.2.1" + resolved "https://registry.yarnpkg.com/babel-loader/-/babel-loader-9.2.1.tgz#04c7835db16c246dd19ba0914418f3937797587b" + integrity sha512-fqe8naHt46e0yIdkjUZYqddSXfej3AHajX+CSO5X7oy0EmPc6o5Xh+RClNoHjnieWz9AW4kZxW9yyFMhVB1QLA== + dependencies: + find-cache-dir "^4.0.0" + schema-utils "^4.0.0" + babel-messages@^6.23.0: version "6.23.0" resolved "https://registry.npmjs.org/babel-messages/-/babel-messages-6.23.0.tgz" @@ -3156,6 +4356,14 @@ babel-plugin-polyfill-corejs3@^0.10.1, babel-plugin-polyfill-corejs3@^0.10.4: "@babel/helper-define-polyfill-provider" "^0.6.1" core-js-compat "^3.36.1" +babel-plugin-polyfill-corejs3@^0.10.6: + version "0.10.6" + 
resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.10.6.tgz#2deda57caef50f59c525aeb4964d3b2f867710c7" + integrity sha512-b37+KR2i/khY5sKmWNVQAnitvquQbNdWy6lJdsr0kmquCKEEUgMKK4SboVM3HtfnZilfjr4MMQ7vY58FVWDtIA== + dependencies: + "@babel/helper-define-polyfill-provider" "^0.6.2" + core-js-compat "^3.38.0" + babel-plugin-polyfill-regenerator@^0.6.1: version "0.6.2" resolved "https://registry.npmjs.org/babel-plugin-polyfill-regenerator/-/babel-plugin-polyfill-regenerator-0.6.2.tgz" @@ -3678,17 +4886,7 @@ binary-extensions@^2.0.0: resolved "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz" integrity sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw== -bn.js@^4.0.0: - version "4.12.0" - resolved "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz" - integrity sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA== - -bn.js@^4.1.0: - version "4.12.0" - resolved "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz" - integrity sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA== - -bn.js@^4.11.9: +bn.js@^4.0.0, bn.js@^4.1.0, bn.js@^4.11.9: version "4.12.0" resolved "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz" integrity sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA== @@ -3856,6 +5054,16 @@ browserslist@^4.0.0, browserslist@^4.18.1, browserslist@^4.21.10, browserslist@^ node-releases "^2.0.14" update-browserslist-db "^1.1.0" +browserslist@^4.24.0, browserslist@^4.24.2: + version "4.24.2" + resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.24.2.tgz#f5845bc91069dbd55ee89faf9822e1d885d16580" + integrity sha512-ZIc+Q62revdMcqC6aChtW4jz3My3klmCO1fEmINZY/8J3EpBg5/A/D0AKmBveUh6pgoeycoMkVMko84tuYS+Gg== + dependencies: + caniuse-lite "^1.0.30001669" + electron-to-chromium "^1.5.41" + node-releases "^2.0.18" + update-browserslist-db "^1.1.1" + buffer-from@^1.0.0: version "1.1.2" resolved "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz" @@ -3979,6 +5187,11 @@ caniuse-lite@^1.0.0, caniuse-lite@^1.0.30001599, caniuse-lite@^1.0.30001640: resolved "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001641.tgz" integrity sha512-Phv5thgl67bHYo1TtMY/MurjkHhV4EDaCosezRXgZ8jzA/Ub+wjxAvbGvjoFENStinwi5kCyOYV3mi5tOGykwA== +caniuse-lite@^1.0.30001669: + version "1.0.30001678" + resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001678.tgz#b930b04cd0b295136405634aa32ad540d7eeb71e" + integrity sha512-RR+4U/05gNtps58PEBDZcPWTgEO2MBeoPZ96aQcjmfkBWRIDfN451fW2qyDA9/+HohLLIL5GqiMwA+IB1pWarw== + ccount@^2.0.0: version "2.0.1" resolved "https://registry.npmjs.org/ccount/-/ccount-2.0.1.tgz" @@ -4171,16 +5384,16 @@ color-convert@^2.0.1: dependencies: color-name "~1.1.4" -color-name@~1.1.4: - version "1.1.4" - resolved "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz" - integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== - color-name@1.1.3: version "1.1.3" resolved "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz" integrity sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw== +color-name@~1.1.4: + version "1.1.4" + resolved "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz" + integrity 
sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== + colord@^2.9.3: version "2.9.3" resolved "https://registry.npmjs.org/colord/-/colord-2.9.3.tgz" @@ -4302,6 +5515,11 @@ consola@^2.15.3: resolved "https://registry.npmjs.org/consola/-/consola-2.15.3.tgz" integrity sha512-9vAdYbHj6x2fLKC4+oPH0kFzY/orMZyG2Aj+kNylHxKGJ/Ed4dpNyAQYwJOdqO4zdM7XpVHmyejQDcQHrnuXbw== +consola@^3.2.3: + version "3.2.3" + resolved "https://registry.yarnpkg.com/consola/-/consola-3.2.3.tgz#0741857aa88cfa0d6fd53f1cff0375136e98502f" + integrity sha512-I5qxpzLv+sJhTVEoLYNcTW+bThDCPsit0vLNKShZx6rLtpilNpmmeTPaeqJb9ZE9dV3DGaeby6Vuhrw38WjeyQ== + console-browserify@^1.2.0: version "1.2.0" resolved "https://registry.npmjs.org/console-browserify/-/console-browserify-1.2.0.tgz" @@ -4373,17 +5591,19 @@ core-js-compat@^3.36.1, core-js-compat@^3.37.1: dependencies: browserslist "^4.23.0" +core-js-compat@^3.38.0, core-js-compat@^3.38.1: + version "3.39.0" + resolved "https://registry.yarnpkg.com/core-js-compat/-/core-js-compat-3.39.0.tgz#b12dccb495f2601dc860bdbe7b4e3ffa8ba63f61" + integrity sha512-VgEUx3VwlExr5no0tXlBt+silBvhTryPwCXRI2Id1PN8WTKu7MreethvddqOubrYxkFdv/RnYrqlv1sFNAUelw== + dependencies: + browserslist "^4.24.2" + core-js-pure@^3.30.2: version "3.37.1" resolved "https://registry.npmjs.org/core-js-pure/-/core-js-pure-3.37.1.tgz" integrity sha512-J/r5JTHSmzTxbiYYrzXg9w1VpqrYt+gexenBE9pugeyhwPZTAEJddyiReJWsLO6uNQ8xJZFbod6XC7KKwatCiA== -core-js@^2.4.0: - version "2.6.12" - resolved "https://registry.npmjs.org/core-js/-/core-js-2.6.12.tgz" - integrity sha512-Kb2wC0fvsWfQrgk8HU5lW6U/Lcs8+9aaYcy4ZFc6DDlo4nZ7n70dEgE5rtR0oG6ufKDUnrwfWL1mXR5ljDatrQ== - -core-js@^2.5.0: +core-js@^2.4.0, core-js@^2.5.0: version "2.6.12" resolved "https://registry.npmjs.org/core-js/-/core-js-2.6.12.tgz" integrity sha512-Kb2wC0fvsWfQrgk8HU5lW6U/Lcs8+9aaYcy4ZFc6DDlo4nZ7n70dEgE5rtR0oG6ufKDUnrwfWL1mXR5ljDatrQ== @@ -4469,16 +5689,7 @@ cross-spawn@^5.0.1: shebang-command "^1.2.0" which "^1.2.9" -cross-spawn@^7.0.0: - version "7.0.3" - resolved "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz" - integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w== - dependencies: - path-key "^3.1.0" - shebang-command "^2.0.0" - which "^2.0.1" - -cross-spawn@^7.0.3: +cross-spawn@^7.0.0, cross-spawn@^7.0.3: version "7.0.3" resolved "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz" integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w== @@ -4680,41 +5891,20 @@ debounce@^1.2.1: resolved "https://registry.npmjs.org/debounce/-/debounce-1.2.1.tgz" integrity sha512-XRRe6Glud4rd/ZGQfiV1ruXSfbvfJedlV9Y6zOlP+2K04vBYiJEte6stfFkCP03aMnY5tsipamumUjL14fofug== -debug@^2.6.0: - version "2.6.9" - resolved "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz" - integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== - dependencies: - ms "2.0.0" - -debug@^2.6.8: +debug@2.6.9, debug@^2.6.0, debug@^2.6.8, debug@^2.6.9: version "2.6.9" resolved "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz" integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== dependencies: ms "2.0.0" -debug@^2.6.9: - version "2.6.9" - resolved "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz" - integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== - dependencies: - ms "2.0.0" - -debug@^4.0.0, 
debug@^4.1.0, debug@^4.1.1, debug@^4.2.0, debug@^4.3.1, debug@4: +debug@4, debug@^4.0.0, debug@^4.1.0, debug@^4.1.1, debug@^4.2.0, debug@^4.3.1: version "4.3.5" resolved "https://registry.npmjs.org/debug/-/debug-4.3.5.tgz" integrity sha512-pt0bNEmneDIvdL1Xsd9oDQ/wrQRkXDT4AUWlNZNPKvW5x/jyO9VFXkJUP07vQ2upmw5PlaITaPKc31jK13V+jg== dependencies: ms "2.1.2" -debug@2.6.9: - version "2.6.9" - resolved "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz" - integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== - dependencies: - ms "2.0.0" - decode-named-character-reference@^1.0.0: version "1.0.2" resolved "https://registry.npmjs.org/decode-named-character-reference/-/decode-named-character-reference-1.0.2.tgz" @@ -4793,16 +5983,16 @@ delayed-stream@~1.0.0: resolved "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz" integrity sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ== -depd@~1.1.2: - version "1.1.2" - resolved "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz" - integrity sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ== - depd@2.0.0: version "2.0.0" resolved "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz" integrity sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw== +depd@~1.1.2: + version "1.1.2" + resolved "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz" + integrity sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ== + dequal@^2.0.0: version "2.0.3" resolved "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz" @@ -4946,6 +6136,13 @@ domelementtype@^2.0.1, domelementtype@^2.2.0, domelementtype@^2.3.0: resolved "https://registry.npmjs.org/domelementtype/-/domelementtype-2.3.0.tgz" integrity sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw== +domhandler@5.0.3, domhandler@^5.0.2, domhandler@^5.0.3: + version "5.0.3" + resolved "https://registry.npmjs.org/domhandler/-/domhandler-5.0.3.tgz" + integrity sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w== + dependencies: + domelementtype "^2.3.0" + domhandler@^4.0.0, domhandler@^4.2.0, domhandler@^4.3.1: version "4.3.1" resolved "https://registry.npmjs.org/domhandler/-/domhandler-4.3.1.tgz" @@ -4953,13 +6150,6 @@ domhandler@^4.0.0, domhandler@^4.2.0, domhandler@^4.3.1: dependencies: domelementtype "^2.2.0" -domhandler@^5.0.2, domhandler@^5.0.3, domhandler@5.0.3: - version "5.0.3" - resolved "https://registry.npmjs.org/domhandler/-/domhandler-5.0.3.tgz" - integrity sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w== - dependencies: - domelementtype "^2.3.0" - domutils@^2.5.2, domutils@^2.8.0: version "2.8.0" resolved "https://registry.npmjs.org/domutils/-/domutils-2.8.0.tgz" @@ -5013,10 +6203,15 @@ electron-to-chromium@^1.4.820: resolved "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.827.tgz" integrity sha512-VY+J0e4SFcNfQy19MEoMdaIcZLmDCprqvBtkii1WTCTQHpRvf5N8+3kTYCgL/PcntvwQvmMJWTuDPsq+IlhWKQ== +electron-to-chromium@^1.5.41: + version "1.5.52" + resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.5.52.tgz#2bed832c95a56a195504f918150e548474687da8" + integrity sha512-xtoijJTZ+qeucLBDNztDOuQBE1ksqjvNjvqFoST3nGC7fSpqJ+X6BdTBaY5BHG+IhWWmpc6b/KfpeuEDupEPOQ== + elliptic@^6.5.3, elliptic@^6.5.5: - version 
"6.5.5" - resolved "https://registry.npmjs.org/elliptic/-/elliptic-6.5.5.tgz" - integrity sha512-7EjbcmUm17NQFu4Pmgmq2olYMj8nwMnpcddByChSUjArp8F5DQWcIcpriwO4ZToLNAJig0yiyjswfyGNje/ixw== + version "6.6.0" + resolved "https://registry.yarnpkg.com/elliptic/-/elliptic-6.6.0.tgz#5919ec723286c1edf28685aa89261d4761afa210" + integrity sha512-dpwoQcLc/2WLQvJvLRHKZ+f9FgOdjnq11rurqwekGQygGPsYSK29OMMD2WalatiqQ+XGFDglTNixpPfI+lpaAA== dependencies: bn.js "^4.11.9" brorand "^1.1.0" @@ -5071,6 +6266,14 @@ enhanced-resolve@^5.17.0: graceful-fs "^4.2.4" tapable "^2.2.0" +enhanced-resolve@^5.17.1: + version "5.17.1" + resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-5.17.1.tgz#67bfbbcc2f81d511be77d686a90267ef7f898a15" + integrity sha512-LMHl3dXhTcfv8gM4kEzIUeTQ+7fpdA0l2tUf34BddXPkz2A5xJ5L/Pchd5BL6rdccM9QGvu0sWZzK1Z1t4wwyg== + dependencies: + graceful-fs "^4.2.4" + tapable "^2.2.0" + entities@^2.0.0: version "2.2.0" resolved "https://registry.npmjs.org/entities/-/entities-2.2.0.tgz" @@ -5110,6 +6313,11 @@ escalade@^3.1.1, escalade@^3.1.2: resolved "https://registry.npmjs.org/escalade/-/escalade-3.1.2.tgz" integrity sha512-ErCHMCae19vR8vQGe50xIsVomy19rg6gFu3+r3jkEO46suLMWBksvVyoGgQV+jOfl84ZSOSlmv6Gxa89PmTGmA== +escalade@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.2.0.tgz#011a3f69856ba189dffa7dc8fcce99d2a87903e5" + integrity sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA== + escape-goat@^4.0.0: version "4.0.0" resolved "https://registry.npmjs.org/escape-goat/-/escape-goat-4.0.0.tgz" @@ -5120,12 +6328,7 @@ escape-html@^1.0.3, escape-html@~1.0.3: resolved "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz" integrity sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow== -escape-string-regexp@^1.0.2: - version "1.0.5" - resolved "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz" - integrity sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg== - -escape-string-regexp@^1.0.5: +escape-string-regexp@^1.0.2, escape-string-regexp@^1.0.5: version "1.0.5" resolved "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz" integrity sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg== @@ -5394,6 +6597,13 @@ fflate@^0.4.8: resolved "https://registry.npmjs.org/fflate/-/fflate-0.4.8.tgz" integrity sha512-FJqqoDBR00Mdj9ppamLa/Y7vxm+PRmNWA67N846RvsoYVMKB4q3y/de5PA7gUmRMYK/8CMz2GDZQmCRN1wBcWA== +figures@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/figures/-/figures-3.2.0.tgz#625c18bd293c604dc4a8ddb2febf0c88341746af" + integrity sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg== + dependencies: + escape-string-regexp "^1.0.5" + file-loader@^6.2.0: version "6.2.0" resolved "https://registry.npmjs.org/file-loader/-/file-loader-6.2.0.tgz" @@ -5550,16 +6760,7 @@ fresh@0.5.2: resolved "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz" integrity sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q== -fs-extra@^10.0.0: - version "10.1.0" - resolved "https://registry.npmjs.org/fs-extra/-/fs-extra-10.1.0.tgz" - integrity sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ== - dependencies: - graceful-fs "^4.2.0" - jsonfile "^6.0.1" - universalify "^2.0.0" - -fs-extra@^10.1.0: 
+fs-extra@^10.0.0, fs-extra@^10.1.0: version "10.1.0" resolved "https://registry.npmjs.org/fs-extra/-/fs-extra-10.1.0.tgz" integrity sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ== @@ -5635,12 +6836,7 @@ get-stream@^5.1.0: dependencies: pump "^3.0.0" -get-stream@^6.0.0: - version "6.0.1" - resolved "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz" - integrity sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg== - -get-stream@^6.0.1: +get-stream@^6.0.0, get-stream@^6.0.1: version "6.0.1" resolved "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz" integrity sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg== @@ -5657,14 +6853,7 @@ glob-parent@^5.1.2, glob-parent@~5.1.2: dependencies: is-glob "^4.0.1" -glob-parent@^6.0.1: - version "6.0.2" - resolved "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz" - integrity sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A== - dependencies: - is-glob "^4.0.3" - -glob-parent@^6.0.2: +glob-parent@^6.0.1, glob-parent@^6.0.2: version "6.0.2" resolved "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz" integrity sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A== @@ -5807,16 +6996,16 @@ got@^12.1.0: p-cancelable "^3.0.0" responselike "^3.0.0" -graceful-fs@^4.1.11, graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.11, graceful-fs@^4.2.4, graceful-fs@^4.2.6, graceful-fs@^4.2.9: - version "4.2.11" - resolved "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz" - integrity sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ== - graceful-fs@4.2.10: version "4.2.10" resolved "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.10.tgz" integrity sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA== +graceful-fs@^4.1.11, graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.11, graceful-fs@^4.2.4, graceful-fs@^4.2.6, graceful-fs@^4.2.9: + version "4.2.11" + resolved "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz" + integrity sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ== + gray-matter@^4.0.3: version "4.0.3" resolved "https://registry.npmjs.org/gray-matter/-/gray-matter-4.0.3.tgz" @@ -6148,6 +7337,27 @@ html-webpack-plugin@^5.5.3: pretty-error "^4.0.0" tapable "^2.0.0" +html-webpack-plugin@^5.6.0: + version "5.6.3" + resolved "https://registry.yarnpkg.com/html-webpack-plugin/-/html-webpack-plugin-5.6.3.tgz#a31145f0fee4184d53a794f9513147df1e653685" + integrity sha512-QSf1yjtSAsmf7rYBV7XX86uua4W/vkhIt0xNXKbsi2foEeW7vjJQz4bhnpL3xH+l1ryl1680uNv968Z+X6jSYg== + dependencies: + "@types/html-minifier-terser" "^6.0.0" + html-minifier-terser "^6.0.2" + lodash "^4.17.21" + pretty-error "^4.0.0" + tapable "^2.0.0" + +htmlparser2@9.0.0: + version "9.0.0" + resolved "https://registry.npmjs.org/htmlparser2/-/htmlparser2-9.0.0.tgz" + integrity sha512-uxbSI98wmFT/G4P2zXx4OVx04qWUmyFPrD2/CNepa2Zo3GPNaCaaxElDgwUrwYWkK1nr9fft0Ya8dws8coDLLQ== + dependencies: + domelementtype "^2.3.0" + domhandler "^5.0.3" + domutils "^3.1.0" + entities "^4.5.0" + htmlparser2@^6.1.0: version "6.1.0" resolved "https://registry.npmjs.org/htmlparser2/-/htmlparser2-6.1.0.tgz" @@ -6168,16 +7378,6 @@ htmlparser2@^8.0.1: 
domutils "^3.0.1" entities "^4.4.0" -htmlparser2@9.0.0: - version "9.0.0" - resolved "https://registry.npmjs.org/htmlparser2/-/htmlparser2-9.0.0.tgz" - integrity sha512-uxbSI98wmFT/G4P2zXx4OVx04qWUmyFPrD2/CNepa2Zo3GPNaCaaxElDgwUrwYWkK1nr9fft0Ya8dws8coDLLQ== - dependencies: - domelementtype "^2.3.0" - domhandler "^5.0.3" - domutils "^3.1.0" - entities "^4.5.0" - http-cache-semantics@^4.0.0, http-cache-semantics@^4.1.1: version "4.1.1" resolved "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-4.1.1.tgz" @@ -6188,16 +7388,6 @@ http-deceiver@^1.2.7: resolved "https://registry.npmjs.org/http-deceiver/-/http-deceiver-1.2.7.tgz" integrity sha512-LmpOGxTfbpgtGVxJrj5k7asXHCgNZp5nLfp+hWc8QQRqtb7fUy6kRY3BO1h9ddF6yIPYUARgxGOwB42DnxIaNw== -http-errors@~1.6.2: - version "1.6.3" - resolved "https://registry.npmjs.org/http-errors/-/http-errors-1.6.3.tgz" - integrity sha512-lks+lVC8dgGyh97jxvxeYTWQFvh4uw4yC12gVl63Cg30sjPX4wuGcdkICVXDAESr6OJGjqGA8Iz5mkeN6zlD7A== - dependencies: - depd "~1.1.2" - inherits "2.0.3" - setprototypeof "1.1.0" - statuses ">= 1.4.0 < 2" - http-errors@2.0.0: version "2.0.0" resolved "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz" @@ -6209,15 +7399,25 @@ http-errors@2.0.0: statuses "2.0.1" toidentifier "1.0.1" +http-errors@~1.6.2: + version "1.6.3" + resolved "https://registry.npmjs.org/http-errors/-/http-errors-1.6.3.tgz" + integrity sha512-lks+lVC8dgGyh97jxvxeYTWQFvh4uw4yC12gVl63Cg30sjPX4wuGcdkICVXDAESr6OJGjqGA8Iz5mkeN6zlD7A== + dependencies: + depd "~1.1.2" + inherits "2.0.3" + setprototypeof "1.1.0" + statuses ">= 1.4.0 < 2" + http-parser-js@>=0.5.1: version "0.5.8" resolved "https://registry.npmjs.org/http-parser-js/-/http-parser-js-0.5.8.tgz" integrity sha512-SGeBX54F94Wgu5RH3X5jsDtf4eHyRogWX1XGT3b4HuW3tQPM4AaBzoUji/4AAJNXCEOWZ5O0DgZmJw1947gD5Q== http-proxy-middleware@^2.0.3: - version "2.0.6" - resolved "https://registry.npmjs.org/http-proxy-middleware/-/http-proxy-middleware-2.0.6.tgz" - integrity sha512-ya/UeJ6HVBYxrgYotAZo1KvPWlgB48kUJLDePFeneHsVujFaW5WNj2NgWCAE//B1Dl02BIfYlpNgBy8Kf8Rjmw== + version "2.0.7" + resolved "https://registry.yarnpkg.com/http-proxy-middleware/-/http-proxy-middleware-2.0.7.tgz#915f236d92ae98ef48278a95dedf17e991936ec6" + integrity sha512-fgVY8AV7qU7z/MmXJ/rxwbrtQH4jBQ9m7kp3llF0liB7glmFeVZFBepQb32T3y8n8k2+AEYuMPCpinYW+/CuRA== dependencies: "@types/http-proxy" "^1.17.8" http-proxy "^1.18.1" @@ -6335,7 +7535,7 @@ inflight@^1.0.4: once "^1.3.0" wrappy "1" -inherits@^2.0.1, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.3, inherits@~2.0.4, inherits@2, inherits@2.0.4: +inherits@2, inherits@2.0.4, inherits@^2.0.1, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.3, inherits@~2.0.4: version "2.0.4" resolved "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz" integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== @@ -6345,16 +7545,16 @@ inherits@2.0.3: resolved "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz" integrity sha512-x00IRNXNy63jwGkJmzPigoySHbaqpNuzKbBOmzK+g2OdZpQ9w+sxCN+VSB3ja7IAge2OP2qpfxTjeNcyjmW1uw== -ini@^1.3.4, ini@^1.3.5, ini@~1.3.0: - version "1.3.8" - resolved "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz" - integrity sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew== - ini@2.0.0: version "2.0.0" resolved "https://registry.npmjs.org/ini/-/ini-2.0.0.tgz" integrity sha512-7PnF4oN3CvZF23ADhA5wRaYEQpJ8qygSkbtTXWBeXWXmEVRXK+1ITciHWwHhsjv1TmW0MgacIv6hEi5pX5NQdA== +ini@^1.3.4, ini@^1.3.5, 
ini@~1.3.0: + version "1.3.8" + resolved "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz" + integrity sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew== + inline-style-parser@0.1.1: version "0.1.1" resolved "https://registry.npmjs.org/inline-style-parser/-/inline-style-parser-0.1.1.tgz" @@ -6382,16 +7582,16 @@ invariant@^2.2.2, invariant@^2.2.4: dependencies: loose-envify "^1.0.0" -ipaddr.js@^2.0.1: - version "2.2.0" - resolved "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-2.2.0.tgz" - integrity sha512-Ag3wB2o37wslZS19hZqorUnrnzSkpOVy+IiiDEiTqNubEYpYuHWIf6K4psgN2ZWKExS4xhVCrRVfb/wfW8fWJA== - ipaddr.js@1.9.1: version "1.9.1" resolved "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz" integrity sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g== +ipaddr.js@^2.0.1: + version "2.2.0" + resolved "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-2.2.0.tgz" + integrity sha512-Ag3wB2o37wslZS19hZqorUnrnzSkpOVy+IiiDEiTqNubEYpYuHWIf6K4psgN2ZWKExS4xhVCrRVfb/wfW8fWJA== + is-alphabetical@^2.0.0: version "2.0.1" resolved "https://registry.npmjs.org/is-alphabetical/-/is-alphabetical-2.0.1.tgz" @@ -6602,16 +7802,16 @@ is-yarn-global@^0.4.0: resolved "https://registry.npmjs.org/is-yarn-global/-/is-yarn-global-0.4.1.tgz" integrity sha512-/kppl+R+LO5VmhYSEWARUFjodS25D68gvj8W7z0I7OWhUla5xWu8KL6CtB2V0R6yqhnRgbcaREMr4EEM6htLPQ== -isarray@~1.0.0: - version "1.0.0" - resolved "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz" - integrity sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ== - isarray@0.0.1: version "0.0.1" resolved "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" integrity sha512-D2S+3GLxWH+uhrNEcoh/fnmYeP8E8/zHl644d/jdA0g2uyXvy3sb0qxotE+ne0LtccHknQzWwZEzhak7oJ0COQ== +isarray@~1.0.0: + version "1.0.0" + resolved "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz" + integrity sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ== + isexe@^2.0.0: version "2.0.0" resolved "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz" @@ -6713,6 +7913,11 @@ jsesc@^2.5.1: resolved "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz" integrity sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA== +jsesc@^3.0.2, jsesc@~3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-3.0.2.tgz#bb8b09a6597ba426425f2e4a07245c3d00b9343e" + integrity sha512-xKqzzWXDttJuOcawBt4KnKHHIf5oQ/Cxax+0PWFG+DFDgHNAdi+TXECADI+RYiFUMmx8792xsMbbgXj4CwnP4g== + jsesc@~0.5.0: version "0.5.0" resolved "https://registry.npmjs.org/jsesc/-/jsesc-0.5.0.tgz" @@ -7690,22 +8895,15 @@ miller-rabin@^4.0.0: bn.js "^4.0.0" brorand "^1.0.1" -"mime-db@>= 1.43.0 < 2", mime-db@1.52.0: +mime-db@1.52.0, "mime-db@>= 1.43.0 < 2": version "1.52.0" resolved "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz" integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg== mime-db@~1.33.0: - version "1.33.0" - resolved "https://registry.npmjs.org/mime-db/-/mime-db-1.33.0.tgz" - integrity sha512-BHJ/EKruNIqJf/QahvxwQZXKygOQ256myeN/Ew+THcAa5q+PjyTTMMeNQC4DZw5AwfvelsUrA6B67NKMqXDbzQ== - -mime-types@^2.1.12, mime-types@^2.1.27, mime-types@^2.1.31, mime-types@~2.1.17, mime-types@~2.1.24, mime-types@~2.1.34: - version "2.1.35" - resolved "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz" - integrity 
sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== - dependencies: - mime-db "1.52.0" + version "1.33.0" + resolved "https://registry.npmjs.org/mime-db/-/mime-db-1.33.0.tgz" + integrity sha512-BHJ/EKruNIqJf/QahvxwQZXKygOQ256myeN/Ew+THcAa5q+PjyTTMMeNQC4DZw5AwfvelsUrA6B67NKMqXDbzQ== mime-types@2.1.18: version "2.1.18" @@ -7714,6 +8912,13 @@ mime-types@2.1.18: dependencies: mime-db "~1.33.0" +mime-types@^2.1.12, mime-types@^2.1.27, mime-types@^2.1.31, mime-types@~2.1.17, mime-types@~2.1.24, mime-types@~2.1.34: + version "2.1.35" + resolved "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz" + integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== + dependencies: + mime-db "1.52.0" + mime@1.6.0: version "1.6.0" resolved "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz" @@ -7747,6 +8952,14 @@ mini-css-extract-plugin@^2.7.6: schema-utils "^4.0.0" tapable "^2.2.1" +mini-css-extract-plugin@^2.9.1: + version "2.9.2" + resolved "https://registry.yarnpkg.com/mini-css-extract-plugin/-/mini-css-extract-plugin-2.9.2.tgz#966031b468917a5446f4c24a80854b2947503c5b" + integrity sha512-GJuACcS//jtq4kCtd5ii/M0SZf7OZRH+BxdqXZHaJfb8TJiVl+NgQRPwiYt2EuqeSkNydn/7vP+bcE27C5mb9w== + dependencies: + schema-utils "^4.0.0" + tapable "^2.2.1" + minimalistic-assert@^1.0.0, minimalistic-assert@^1.0.1: version "1.0.1" resolved "https://registry.npmjs.org/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz" @@ -7757,7 +8970,7 @@ minimalistic-crypto-utils@^1.0.1: resolved "https://registry.npmjs.org/minimalistic-crypto-utils/-/minimalistic-crypto-utils-1.0.1.tgz" integrity sha512-JIYlbt6g8i5jKfJ3xz7rF0LXmv2TkDxBLUkiBeZ7bAx4GnnNMr8xFpGnOxn6GhTEHx3SjRrZEoU+j04prX1ktg== -minimatch@^3.0.4, minimatch@^3.0.5, minimatch@^3.1.1, minimatch@3.1.2: +minimatch@3.1.2, minimatch@^3.0.4, minimatch@^3.0.5, minimatch@^3.1.1: version "3.1.2" resolved "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz" integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw== @@ -7788,12 +9001,7 @@ minipass@^4.2.4: resolved "https://registry.npmjs.org/minipass/-/minipass-4.2.8.tgz" integrity sha512-fNzuVyifolSLFL4NzpF+wEF4qrgqaaKX0haXPQEdQ7NKAN+WecoKMHV09YcuL/DHxrUsYQOK3MiuDf7Ip2OXfQ== -"minipass@^5.0.0 || ^6.0.2 || ^7.0.0": - version "7.1.2" - resolved "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz" - integrity sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw== - -minipass@^7.1.2: +"minipass@^5.0.0 || ^6.0.2 || ^7.0.0", minipass@^7.1.2: version "7.1.2" resolved "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz" integrity sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw== @@ -7875,7 +9083,7 @@ node-emoji@^2.1.0: emojilib "^2.4.0" skin-tone "^2.0.0" -node-fetch@^2.0.0, node-fetch@^2.6.1, node-fetch@2: +node-fetch@2, node-fetch@^2.0.0, node-fetch@^2.6.1: version "2.7.0" resolved "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz" integrity sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A== @@ -7929,6 +9137,11 @@ node-releases@^2.0.14: resolved "https://registry.npmjs.org/node-releases/-/node-releases-2.0.14.tgz" integrity sha512-y10wOWt8yZpqXmOgRo77WaHEmhYQYGNA6y421PKsKYWEK8aW+cqAphborZDhqfyKrbZEN92CN1X2KbafY2s7Yw== +node-releases@^2.0.18: + version "2.0.18" + resolved 
"https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.18.tgz#f010e8d35e2fe8d6b2944f03f70213ecedc4ca3f" + integrity sha512-d9VeXT4SJ7ZeOqGX6R5EM022wpL+eWPooLI+5UpWn2jCT1aosUQEhQP214x33Wkwx3JQMvIm+tIoVOdodFS40g== + normalize-path@^3.0.0, normalize-path@~3.0.0: version "3.0.0" resolved "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz" @@ -7999,6 +9212,14 @@ nth-check@^2.0.1: dependencies: boolbase "^1.0.0" +null-loader@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/null-loader/-/null-loader-4.0.1.tgz#8e63bd3a2dd3c64236a4679428632edd0a6dbc6a" + integrity sha512-pxqVbi4U6N26lq+LmgIbB5XATP0VdZKOG25DhHi8btMmJJefGArFyDg1yc4U3hWCJbMqSrw0qyrz1UQX+qYXqg== + dependencies: + loader-utils "^2.0.0" + schema-utils "^3.0.0" + object-assign@^4.0.1, object-assign@^4.1.1: version "4.1.1" resolved "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz" @@ -8353,13 +9574,6 @@ path-scurry@^1.11.1, path-scurry@^1.6.1: lru-cache "^10.2.0" minipass "^5.0.0 || ^6.0.2 || ^7.0.0" -path-to-regexp@^1.7.0: - version "1.8.0" - resolved "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.8.0.tgz" - integrity sha512-n43JRhlUKUAlibEJhPeir1ncUID16QnEjNpwzNdO3Lm4ywrBpBZ5oLD0I6br9evr1Y9JTqwRtAh7JLoOzAQdVA== - dependencies: - isarray "0.0.1" - path-to-regexp@0.1.7: version "0.1.7" resolved "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz" @@ -8370,6 +9584,18 @@ path-to-regexp@2.2.1: resolved "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-2.2.1.tgz" integrity sha512-gu9bD6Ta5bwGrrU8muHzVOBFFREpp2iRkVfhBJahwJ6p6Xw20SjT0MxLnwkjOibQmGSYhiUnf2FLe7k+jcFmGQ== +path-to-regexp@3.3.0: + version "3.3.0" + resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-3.3.0.tgz#f7f31d32e8518c2660862b644414b6d5c63a611b" + integrity sha512-qyCH421YQPS2WFDxDjftfc1ZR5WKQzVzqsp4n9M2kQhVOo/ByahFoUNJfl58kOcEGfQ//7weFTDhm+ss8Ecxgw== + +path-to-regexp@^1.7.0: + version "1.8.0" + resolved "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.8.0.tgz" + integrity sha512-n43JRhlUKUAlibEJhPeir1ncUID16QnEjNpwzNdO3Lm4ywrBpBZ5oLD0I6br9evr1Y9JTqwRtAh7JLoOzAQdVA== + dependencies: + isarray "0.0.1" + path-type@^4.0.0: version "4.0.0" resolved "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz" @@ -8413,6 +9639,11 @@ picocolors@^1.0.0, picocolors@^1.0.1: resolved "https://registry.npmjs.org/picocolors/-/picocolors-1.0.1.tgz" integrity sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew== +picocolors@^1.1.0: + version "1.1.1" + resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.1.1.tgz#3d321af3eab939b083c8f929a1d12cda81c26b6b" + integrity sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA== + picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.2.3, picomatch@^2.3.1: version "2.3.1" resolved "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz" @@ -8887,12 +10118,7 @@ pump@^3.0.0: end-of-stream "^1.1.0" once "^1.3.1" -punycode@^1.3.2: - version "1.4.1" - resolved "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz" - integrity sha512-jmYNElW7yvO7TV33CjSmvSiE2yco3bV2czu/OzDKdMNVZQWfxCblURLhf+47syQRBntjfLdd/H0egrzIG+oaFQ== - -punycode@^1.4.1: +punycode@^1.3.2, punycode@^1.4.1: version "1.4.1" resolved "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz" integrity sha512-jmYNElW7yvO7TV33CjSmvSiE2yco3bV2czu/OzDKdMNVZQWfxCblURLhf+47syQRBntjfLdd/H0egrzIG+oaFQ== @@ -8909,13 +10135,6 @@ pupa@^3.1.0: dependencies: escape-goat 
"^4.0.0" -qs@^6.11.2: - version "6.12.3" - resolved "https://registry.npmjs.org/qs/-/qs-6.12.3.tgz" - integrity sha512-AWJm14H1vVaO/iNZ4/hO+HyaTehuy9nRqVdkTqlJt0HWvBiBIEXFmb4C0DGeYo3Xes9rrEW+TxHsaigCbN5ICQ== - dependencies: - side-channel "^1.0.6" - qs@6.11.0: version "6.11.0" resolved "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz" @@ -8923,6 +10142,13 @@ qs@6.11.0: dependencies: side-channel "^1.0.4" +qs@^6.11.2: + version "6.12.3" + resolved "https://registry.npmjs.org/qs/-/qs-6.12.3.tgz" + integrity sha512-AWJm14H1vVaO/iNZ4/hO+HyaTehuy9nRqVdkTqlJt0HWvBiBIEXFmb4C0DGeYo3Xes9rrEW+TxHsaigCbN5ICQ== + dependencies: + side-channel "^1.0.6" + querystring-es3@^0.2.1: version "0.2.1" resolved "https://registry.npmjs.org/querystring-es3/-/querystring-es3-0.2.1.tgz" @@ -8960,21 +10186,16 @@ randomfill@^1.0.3: randombytes "^2.0.5" safe-buffer "^5.1.0" -range-parser@^1.2.1: - version "1.2.1" - resolved "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz" - integrity sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg== - -range-parser@~1.2.1: - version "1.2.1" - resolved "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz" - integrity sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg== - range-parser@1.2.0: version "1.2.0" resolved "https://registry.npmjs.org/range-parser/-/range-parser-1.2.0.tgz" integrity sha512-kA5WQoNVo4t9lNx2kQNFCxKeBl5IbbSNBl1M/tLkw9WCn+hxNBAW5Qh8gdhs63CJnhjJ2zQWFoqPJP2sK1AV5A== +range-parser@^1.2.1, range-parser@~1.2.1: + version "1.2.1" + resolved "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz" + integrity sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg== + raw-body@2.5.2: version "2.5.2" resolved "https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz" @@ -9121,7 +10342,7 @@ react-router-dom@^5.3.4: tiny-invariant "^1.0.2" tiny-warning "^1.0.0" -react-router@^5.3.4, react-router@5.3.4: +react-router@5.3.4, react-router@^5.3.4: version "5.3.4" resolved "https://registry.npmjs.org/react-router/-/react-router-5.3.4.tgz" integrity sha512-Ys9K+ppnJah3QuaRiLxk+jDWOR1MekYQrlytiXxC1RyfbdsZkS5pvKAzCCr031xHixZwpnsYNT5xysdFHQaYsA== @@ -9150,20 +10371,7 @@ read-cache@^1.0.0: dependencies: pify "^2.3.0" -readable-stream@^2.0.1: - version "2.3.8" - resolved "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz" - integrity sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA== - dependencies: - core-util-is "~1.0.0" - inherits "~2.0.3" - isarray "~1.0.0" - process-nextick-args "~2.0.0" - safe-buffer "~5.1.1" - string_decoder "~1.1.1" - util-deprecate "~1.0.1" - -readable-stream@^2.3.8: +readable-stream@^2.0.1, readable-stream@^2.3.8: version "2.3.8" resolved "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz" integrity sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA== @@ -9225,6 +10433,13 @@ regenerate-unicode-properties@^10.1.0: dependencies: regenerate "^1.4.2" +regenerate-unicode-properties@^10.2.0: + version "10.2.0" + resolved "https://registry.yarnpkg.com/regenerate-unicode-properties/-/regenerate-unicode-properties-10.2.0.tgz#626e39df8c372338ea9b8028d1f99dc3fd9c3db0" + integrity sha512-DqHn3DwbmmPVzeKj9woBadqmXxLvQoQIwu7nopMc72ztvxVmVk2SBhSnx67zuye5TP+lJsb/TBQsjLKhnDf3MA== + dependencies: + regenerate "^1.4.2" + regenerate@^1.2.1, regenerate@^1.4.2: version "1.4.2" resolved 
"https://registry.npmjs.org/regenerate/-/regenerate-1.4.2.tgz" @@ -9277,6 +10492,18 @@ regexpu-core@^5.3.1: unicode-match-property-ecmascript "^2.0.0" unicode-match-property-value-ecmascript "^2.1.0" +regexpu-core@^6.1.1: + version "6.1.1" + resolved "https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-6.1.1.tgz#b469b245594cb2d088ceebc6369dceb8c00becac" + integrity sha512-k67Nb9jvwJcJmVpw0jPttR1/zVfnKf8Km0IPatrU/zJ5XeG3+Slx0xLXs9HByJSzXzrlz5EDvN6yLNMDc2qdnw== + dependencies: + regenerate "^1.4.2" + regenerate-unicode-properties "^10.2.0" + regjsgen "^0.8.0" + regjsparser "^0.11.0" + unicode-match-property-ecmascript "^2.0.0" + unicode-match-property-value-ecmascript "^2.1.0" + registry-auth-token@^5.0.1: version "5.0.2" resolved "https://registry.npmjs.org/registry-auth-token/-/registry-auth-token-5.0.2.tgz" @@ -9296,6 +10523,11 @@ regjsgen@^0.2.0: resolved "https://registry.npmjs.org/regjsgen/-/regjsgen-0.2.0.tgz" integrity sha512-x+Y3yA24uF68m5GA+tBjbGYo64xXVJpbToBaWCoSNSc1hdk6dfctaRWrNFTVJZIIhL5GxW8zwjoixbnifnK59g== +regjsgen@^0.8.0: + version "0.8.0" + resolved "https://registry.yarnpkg.com/regjsgen/-/regjsgen-0.8.0.tgz#df23ff26e0c5b300a6470cad160a9d090c3a37ab" + integrity sha512-RvwtGe3d7LvWiDQXeQw8p5asZUmfU1G/l6WbUXeHta7Y2PEIvBTwH6E2EfmYUK8pxcxEdEmaomqyp0vZZ7C+3Q== + regjsparser@^0.1.4: version "0.1.5" resolved "https://registry.npmjs.org/regjsparser/-/regjsparser-0.1.5.tgz" @@ -9303,6 +10535,13 @@ regjsparser@^0.1.4: dependencies: jsesc "~0.5.0" +regjsparser@^0.11.0: + version "0.11.2" + resolved "https://registry.yarnpkg.com/regjsparser/-/regjsparser-0.11.2.tgz#7404ad42be00226d72bcf1f003f1f441861913d8" + integrity sha512-3OGZZ4HoLJkkAZx/48mTXJNlmqTGOzc0o9OWQPuWpkOlXXPbyN6OafCcoXUnBqE2D3f/T5L+pWc1kdEmnfnRsA== + dependencies: + jsesc "~3.0.2" + regjsparser@^0.9.1: version "0.9.1" resolved "https://registry.npmjs.org/regjsparser/-/regjsparser-0.9.1.tgz" @@ -9542,20 +10781,15 @@ run-parallel@^1.1.9: dependencies: queue-microtask "^1.2.2" -safe-buffer@^5.0.1, safe-buffer@^5.1.0, safe-buffer@^5.1.1, safe-buffer@^5.1.2, safe-buffer@^5.2.1, safe-buffer@>=5.1.0, safe-buffer@~5.2.0, safe-buffer@5.2.1: - version "5.2.1" - resolved "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz" - integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== - -safe-buffer@~5.1.0, safe-buffer@~5.1.1: +safe-buffer@5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1: version "5.1.2" resolved "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz" integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== -safe-buffer@5.1.2: - version "5.1.2" - resolved "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz" - integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== +safe-buffer@5.2.1, safe-buffer@>=5.1.0, safe-buffer@^5.0.1, safe-buffer@^5.1.0, safe-buffer@^5.1.1, safe-buffer@^5.1.2, safe-buffer@^5.2.1, safe-buffer@~5.2.0: + version "5.2.1" + resolved "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz" + integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== "safer-buffer@>= 2.1.2 < 3": version "2.1.2" @@ -9581,25 +10815,16 @@ scheduler@^0.23.2: dependencies: loose-envify "^1.1.0" -schema-utils@^3.0.0: - version "3.3.0" - resolved "https://registry.npmjs.org/schema-utils/-/schema-utils-3.3.0.tgz" - integrity 
sha512-pN/yOAvcC+5rQ5nERGuwrjLlYvLTbCibnZ1I7B1LaiAz9BRBlE9GMgE/eqV30P7aJQUf7Ddimy/RsbYO/GrVGg== - dependencies: - "@types/json-schema" "^7.0.8" - ajv "^6.12.5" - ajv-keywords "^3.5.2" - -schema-utils@^3.1.1: - version "3.3.0" - resolved "https://registry.npmjs.org/schema-utils/-/schema-utils-3.3.0.tgz" - integrity sha512-pN/yOAvcC+5rQ5nERGuwrjLlYvLTbCibnZ1I7B1LaiAz9BRBlE9GMgE/eqV30P7aJQUf7Ddimy/RsbYO/GrVGg== +schema-utils@2.7.0: + version "2.7.0" + resolved "https://registry.npmjs.org/schema-utils/-/schema-utils-2.7.0.tgz" + integrity sha512-0ilKFI6QQF5nxDZLFn2dMjvc4hjg/Wkg7rHd3jK6/A4a1Hl9VFdQWvgB1UMGoU94pad1P/8N7fMcEnLnSiju8A== dependencies: - "@types/json-schema" "^7.0.8" - ajv "^6.12.5" - ajv-keywords "^3.5.2" + "@types/json-schema" "^7.0.4" + ajv "^6.12.2" + ajv-keywords "^3.4.1" -schema-utils@^3.2.0: +schema-utils@^3.0.0, schema-utils@^3.1.1, schema-utils@^3.2.0: version "3.3.0" resolved "https://registry.npmjs.org/schema-utils/-/schema-utils-3.3.0.tgz" integrity sha512-pN/yOAvcC+5rQ5nERGuwrjLlYvLTbCibnZ1I7B1LaiAz9BRBlE9GMgE/eqV30P7aJQUf7Ddimy/RsbYO/GrVGg== @@ -9618,15 +10843,6 @@ schema-utils@^4.0.0, schema-utils@^4.0.1: ajv-formats "^2.1.1" ajv-keywords "^5.1.0" -schema-utils@2.7.0: - version "2.7.0" - resolved "https://registry.npmjs.org/schema-utils/-/schema-utils-2.7.0.tgz" - integrity sha512-0ilKFI6QQF5nxDZLFn2dMjvc4hjg/Wkg7rHd3jK6/A4a1Hl9VFdQWvgB1UMGoU94pad1P/8N7fMcEnLnSiju8A== - dependencies: - "@types/json-schema" "^7.0.4" - ajv "^6.12.2" - ajv-keywords "^3.4.1" - section-matter@^1.0.0: version "1.0.0" resolved "https://registry.npmjs.org/section-matter/-/section-matter-1.0.0.tgz" @@ -9705,6 +10921,19 @@ serve-handler@^6.1.5: path-to-regexp "2.2.1" range-parser "1.2.0" +serve-handler@^6.1.6: + version "6.1.6" + resolved "https://registry.yarnpkg.com/serve-handler/-/serve-handler-6.1.6.tgz#50803c1d3e947cd4a341d617f8209b22bd76cfa1" + integrity sha512-x5RL9Y2p5+Sh3D38Fh9i/iQ5ZK+e4xuXRd/pGbM4D13tgo/MGwbttUk8emytcr1YYzBYs+apnUngBDFYfpjPuQ== + dependencies: + bytes "3.0.0" + content-disposition "0.5.2" + mime-types "2.1.18" + minimatch "3.1.2" + path-is-inside "1.0.2" + path-to-regexp "3.3.0" + range-parser "1.2.0" + serve-index@^1.9.1: version "1.9.1" resolved "https://registry.npmjs.org/serve-index/-/serve-index-1.9.1.tgz" @@ -9921,17 +11150,12 @@ source-map-support@~0.5.20: buffer-from "^1.0.0" source-map "^0.6.0" -source-map@^0.5.6: - version "0.5.7" - resolved "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz" - integrity sha512-LbrmJOMUSdEVxIKvdcJzQC+nQhe8FUZQTXQy6+I75skNgn3OoQ0DZA8YnFa7gp8tqtL3KPf1kmo0R5DoApeSGQ== - -source-map@^0.5.7: +source-map@^0.5.6, source-map@^0.5.7: version "0.5.7" resolved "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz" integrity sha512-LbrmJOMUSdEVxIKvdcJzQC+nQhe8FUZQTXQy6+I75skNgn3OoQ0DZA8YnFa7gp8tqtL3KPf1kmo0R5DoApeSGQ== -source-map@^0.6.0: +source-map@^0.6.0, source-map@~0.6.0: version "0.6.1" resolved "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz" integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== @@ -9941,11 +11165,6 @@ source-map@^0.7.0: resolved "https://registry.npmjs.org/source-map/-/source-map-0.7.4.tgz" integrity sha512-l3BikUxvPOcn5E74dZiq5BGsTb5yEwhaTSzccU6t4sDOH8NWJCstKO5QT2CvtFoK6F0saL7p9xHAqHOlCPJygA== -source-map@~0.6.0: - version "0.6.1" - resolved "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz" - integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== - 
space-separated-tokens@^2.0.0: version "2.0.2" resolved "https://registry.npmjs.org/space-separated-tokens/-/space-separated-tokens-2.0.2.tgz" @@ -9984,17 +11203,17 @@ srcset@^4.0.0: resolved "https://registry.npmjs.org/srcset/-/srcset-4.0.0.tgz" integrity sha512-wvLeHgcVHKO8Sc/H/5lkGreJQVeYMm9rlmt8PuR1xE31rIuXhuzznUUqAt8MqLhB3MqJdFzlNAfpcWnxiFUcPw== -"statuses@>= 1.4.0 < 2": - version "1.5.0" - resolved "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz" - integrity sha512-OpZ3zP+jT1PI7I8nemJX4AKmAX070ZkYPVWV/AaKTJl+tXCTGyVdC1a4SL8RUQYEwk/f34ZX8UTykN68FwrqAA== - statuses@2.0.1: version "2.0.1" resolved "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz" integrity sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ== -std-env@^3.0.1: +"statuses@>= 1.4.0 < 2": + version "1.5.0" + resolved "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz" + integrity sha512-OpZ3zP+jT1PI7I8nemJX4AKmAX070ZkYPVWV/AaKTJl+tXCTGyVdC1a4SL8RUQYEwk/f34ZX8UTykN68FwrqAA== + +std-env@^3.0.1, std-env@^3.7.0: version "3.7.0" resolved "https://registry.npmjs.org/std-env/-/std-env-3.7.0.tgz" integrity sha512-JPbdCEQLj1w5GilpiHAx3qJvFndqybBysA3qUOnznweH4QbNYUsW/ea8QzSrnh0vNsezMMw5bcVool8lM0gwzg== @@ -10017,20 +11236,6 @@ stream-http@^3.2.0: readable-stream "^3.6.0" xtend "^4.0.2" -string_decoder@^1.1.1, string_decoder@^1.3.0: - version "1.3.0" - resolved "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz" - integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA== - dependencies: - safe-buffer "~5.2.0" - -string_decoder@~1.1.1: - version "1.1.1" - resolved "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz" - integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== - dependencies: - safe-buffer "~5.1.0" - "string-width-cjs@npm:string-width@^4.2.0": version "4.2.3" resolved "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz" @@ -10040,16 +11245,7 @@ string_decoder@~1.1.1: is-fullwidth-code-point "^3.0.0" strip-ansi "^6.0.1" -string-width@^4.1.0: - version "4.2.3" - resolved "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz" - integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== - dependencies: - emoji-regex "^8.0.0" - is-fullwidth-code-point "^3.0.0" - strip-ansi "^6.0.1" - -string-width@^4.2.0: +string-width@^4.1.0, string-width@^4.2.0: version "4.2.3" resolved "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz" integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== @@ -10067,6 +11263,20 @@ string-width@^5.0.1, string-width@^5.1.2: emoji-regex "^9.2.2" strip-ansi "^7.0.1" +string_decoder@^1.1.1, string_decoder@^1.3.0: + version "1.3.0" + resolved "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz" + integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA== + dependencies: + safe-buffer "~5.2.0" + +string_decoder@~1.1.1: + version "1.1.1" + resolved "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz" + integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== + dependencies: + safe-buffer "~5.1.0" + stringify-entities@^4.0.0: version "4.0.4" resolved "https://registry.npmjs.org/stringify-entities/-/stringify-entities-4.0.4.tgz" @@ -10147,6 +11357,13 
@@ style-to-js@1.1.8: dependencies: style-to-object "1.0.3" +style-to-object@1.0.3: + version "1.0.3" + resolved "https://registry.npmjs.org/style-to-object/-/style-to-object-1.0.3.tgz" + integrity sha512-xOpx7S53E0V3DpVsvt7ySvoiumRpfXiC99PUXLqGB3wiAnN9ybEIpuzlZ8LAZg+h1sl9JkEUwtSQXxcCgFqbbg== + dependencies: + inline-style-parser "0.2.2" + style-to-object@^0.4.0: version "0.4.4" resolved "https://registry.npmjs.org/style-to-object/-/style-to-object-0.4.4.tgz" @@ -10161,13 +11378,6 @@ style-to-object@^1.0.0: dependencies: inline-style-parser "0.2.3" -style-to-object@1.0.3: - version "1.0.3" - resolved "https://registry.npmjs.org/style-to-object/-/style-to-object-1.0.3.tgz" - integrity sha512-xOpx7S53E0V3DpVsvt7ySvoiumRpfXiC99PUXLqGB3wiAnN9ybEIpuzlZ8LAZg+h1sl9JkEUwtSQXxcCgFqbbg== - dependencies: - inline-style-parser "0.2.2" - stylehacks@^6.1.1: version "6.1.1" resolved "https://registry.npmjs.org/stylehacks/-/stylehacks-6.1.1.tgz" @@ -10434,6 +11644,11 @@ tty-browserify@^0.0.1: resolved "https://registry.npmjs.org/tty-browserify/-/tty-browserify-0.0.1.tgz" integrity sha512-C3TaO7K81YvjCgQH9Q1S3R3P3BtN3RIM8n+OvX4il1K1zgE8ZhI0op7kClgkxtutIE8hQrcrHBXvIheqKUUCxw== +type-fest@^0.21.3: + version "0.21.3" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.21.3.tgz#d260a24b0198436e133fa26a524a6d65fa3b2e37" + integrity sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w== + type-fest@^1.0.1: version "1.4.0" resolved "https://registry.npmjs.org/type-fest/-/type-fest-1.4.0.tgz" @@ -10575,7 +11790,7 @@ universalify@^2.0.0: resolved "https://registry.npmjs.org/universalify/-/universalify-2.0.1.tgz" integrity sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw== -unpipe@~1.0.0, unpipe@1.0.0: +unpipe@1.0.0, unpipe@~1.0.0: version "1.0.0" resolved "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz" integrity sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ== @@ -10588,6 +11803,14 @@ update-browserslist-db@^1.1.0: escalade "^3.1.2" picocolors "^1.0.1" +update-browserslist-db@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/update-browserslist-db/-/update-browserslist-db-1.1.1.tgz#80846fba1d79e82547fb661f8d141e0945755fe5" + integrity sha512-R8UzCaa9Az+38REPiJ1tXlImTJXlVfgHZsglwBD/k6nj76ctsH1E3q4doGrukiLQd3sGQYu56r5+lo5r94l29A== + dependencies: + escalade "^3.2.0" + picocolors "^1.1.0" + update-notifier@^6.0.2: version "6.0.2" resolved "https://registry.npmjs.org/update-notifier/-/update-notifier-6.0.2.tgz" @@ -10763,7 +11986,7 @@ webidl-conversions@^3.0.0: resolved "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz" integrity sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ== -webpack-bundle-analyzer@^4.9.0: +webpack-bundle-analyzer@^4.10.2, webpack-bundle-analyzer@^4.9.0: version "4.10.2" resolved "https://registry.npmjs.org/webpack-bundle-analyzer/-/webpack-bundle-analyzer-4.10.2.tgz" integrity sha512-vJptkMm9pk5si4Bv922ZbKLV8UTT4zib4FPgXMhgzUny0bfDDkLXAVQs3ly3fS4/TN9ROFtb0NFrm04UXFE/Vw== @@ -10792,7 +12015,7 @@ webpack-dev-middleware@^5.3.4: range-parser "^1.2.1" schema-utils "^4.0.0" -webpack-dev-server@^4.15.1: +webpack-dev-server@^4.15.1, webpack-dev-server@^4.15.2: version "4.15.2" resolved "https://registry.npmjs.org/webpack-dev-server/-/webpack-dev-server-4.15.2.tgz" integrity 
sha512-0XavAZbNJ5sDrCbkpWL8mia0o5WPOd2YGtxrEiZkBK9FjLppIUK2TgxK6qGD2P3hUXTJNNPVibrerKcx5WkR1g== @@ -10837,6 +12060,15 @@ webpack-merge@^5.9.0: flat "^5.0.2" wildcard "^2.0.0" +webpack-merge@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/webpack-merge/-/webpack-merge-6.0.1.tgz#50c776868e080574725abc5869bd6e4ef0a16c6a" + integrity sha512-hXXvrjtx2PLYx4qruKl+kyRSLc52V+cCvMxRjmKwoA+CBbbF5GfIBtR6kCvl0fYGqTUPKB+1ktVmTHqMOzgCBg== + dependencies: + clone-deep "^4.0.1" + flat "^5.0.2" + wildcard "^2.0.1" + webpack-sources@^3.2.3: version "3.2.3" resolved "https://registry.npmjs.org/webpack-sources/-/webpack-sources-3.2.3.tgz" @@ -10872,6 +12104,35 @@ webpack@^5.88.1: watchpack "^2.4.1" webpack-sources "^3.2.3" +webpack@^5.95.0: + version "5.96.1" + resolved "https://registry.yarnpkg.com/webpack/-/webpack-5.96.1.tgz#3676d1626d8312b6b10d0c18cc049fba7ac01f0c" + integrity sha512-l2LlBSvVZGhL4ZrPwyr8+37AunkcYj5qh8o6u2/2rzoPc8gxFJkLj1WxNgooi9pnoc06jh0BjuXnamM4qlujZA== + dependencies: + "@types/eslint-scope" "^3.7.7" + "@types/estree" "^1.0.6" + "@webassemblyjs/ast" "^1.12.1" + "@webassemblyjs/wasm-edit" "^1.12.1" + "@webassemblyjs/wasm-parser" "^1.12.1" + acorn "^8.14.0" + browserslist "^4.24.0" + chrome-trace-event "^1.0.2" + enhanced-resolve "^5.17.1" + es-module-lexer "^1.2.1" + eslint-scope "5.1.1" + events "^3.2.0" + glob-to-regexp "^0.4.1" + graceful-fs "^4.2.11" + json-parse-even-better-errors "^2.3.1" + loader-runner "^4.2.0" + mime-types "^2.1.27" + neo-async "^2.6.2" + schema-utils "^3.2.0" + tapable "^2.1.1" + terser-webpack-plugin "^5.3.10" + watchpack "^2.4.1" + webpack-sources "^3.2.3" + webpackbar@^5.0.2: version "5.0.2" resolved "https://registry.npmjs.org/webpackbar/-/webpackbar-5.0.2.tgz" @@ -10882,7 +12143,21 @@ webpackbar@^5.0.2: pretty-time "^1.1.0" std-env "^3.0.1" -websocket-driver@^0.7.4, websocket-driver@>=0.5.1: +webpackbar@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/webpackbar/-/webpackbar-6.0.1.tgz#5ef57d3bf7ced8b19025477bc7496ea9d502076b" + integrity sha512-TnErZpmuKdwWBdMoexjio3KKX6ZtoKHRVvLIU0A47R0VVBDtx3ZyOJDktgYixhoJokZTYTt1Z37OkO9pnGJa9Q== + dependencies: + ansi-escapes "^4.3.2" + chalk "^4.1.2" + consola "^3.2.3" + figures "^3.2.0" + markdown-table "^2.0.0" + pretty-time "^1.1.0" + std-env "^3.7.0" + wrap-ansi "^7.0.0" + +websocket-driver@>=0.5.1, websocket-driver@^0.7.4: version "0.7.4" resolved "https://registry.npmjs.org/websocket-driver/-/websocket-driver-0.7.4.tgz" integrity sha512-b17KeDIQVjvb0ssuSDF2cYXSg2iztliJ4B9WdsuB6J952qCPKmnVq4DyW5motImXHDC1cBT/1UezrJVsKw5zjg== @@ -10936,7 +12211,7 @@ widest-line@^4.0.1: dependencies: string-width "^5.0.1" -wildcard@^2.0.0: +wildcard@^2.0.0, wildcard@^2.0.1: version "2.0.1" resolved "https://registry.npmjs.org/wildcard/-/wildcard-2.0.1.tgz" integrity sha512-CC1bOL87PIWSBhDcTrdeLo6eGT7mCFtrg0uIJtqJUFyK+eJnzl8A1niH56uu7KMa5XFrtiV+AQuHO3n7DsHnLQ== @@ -10950,6 +12225,15 @@ wildcard@^2.0.0: string-width "^4.1.0" strip-ansi "^6.0.0" +wrap-ansi@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" + integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== + dependencies: + ansi-styles "^4.0.0" + string-width "^4.1.0" + strip-ansi "^6.0.0" + wrap-ansi@^8.0.1, wrap-ansi@^8.1.0: version "8.1.0" resolved "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz" diff --git a/poetry.lock b/poetry.lock deleted file mode 100644 index 0706ef76c5e0..000000000000 
--- a/poetry.lock +++ /dev/null @@ -1,11563 +0,0 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. - -[[package]] -name = "aenum" -version = "3.1.15" -description = "Advanced Enumerations (compatible with Python's stdlib Enum), NamedTuples, and NamedConstants" -optional = false -python-versions = "*" -files = [ - {file = "aenum-3.1.15-py2-none-any.whl", hash = "sha256:27b1710b9d084de6e2e695dab78fe9f269de924b51ae2850170ee7e1ca6288a5"}, - {file = "aenum-3.1.15-py3-none-any.whl", hash = "sha256:e0dfaeea4c2bd362144b87377e2c61d91958c5ed0b4daf89cb6f45ae23af6288"}, - {file = "aenum-3.1.15.tar.gz", hash = "sha256:8cbd76cd18c4f870ff39b24284d3ea028fbe8731a58df3aa581e434c575b9559"}, -] - -[[package]] -name = "aiofiles" -version = "24.1.0" -description = "File support for asyncio." -optional = false -python-versions = ">=3.8" -files = [ - {file = "aiofiles-24.1.0-py3-none-any.whl", hash = "sha256:b4ec55f4195e3eb5d7abd1bf7e061763e864dd4954231fb8539a0ef8bb8260e5"}, - {file = "aiofiles-24.1.0.tar.gz", hash = "sha256:22a075c9e5a3810f0c2e48f3008c94d68c65d763b9b03857924c99e57355166c"}, -] - -[[package]] -name = "aiohappyeyeballs" -version = "2.3.5" -description = "Happy Eyeballs for asyncio" -optional = false -python-versions = ">=3.8" -files = [ - {file = "aiohappyeyeballs-2.3.5-py3-none-any.whl", hash = "sha256:4d6dea59215537dbc746e93e779caea8178c866856a721c9c660d7a5a7b8be03"}, - {file = "aiohappyeyeballs-2.3.5.tar.gz", hash = "sha256:6fa48b9f1317254f122a07a131a86b71ca6946ca989ce6326fff54a99a920105"}, -] - -[[package]] -name = "aiohttp" -version = "3.10.3" -description = "Async http client/server framework (asyncio)" -optional = false -python-versions = ">=3.8" -files = [ - {file = "aiohttp-3.10.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cc36cbdedf6f259371dbbbcaae5bb0e95b879bc501668ab6306af867577eb5db"}, - {file = "aiohttp-3.10.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:85466b5a695c2a7db13eb2c200af552d13e6a9313d7fa92e4ffe04a2c0ea74c1"}, - {file = "aiohttp-3.10.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:71bb1d97bfe7e6726267cea169fdf5df7658831bb68ec02c9c6b9f3511e108bb"}, - {file = "aiohttp-3.10.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:baec1eb274f78b2de54471fc4c69ecbea4275965eab4b556ef7a7698dee18bf2"}, - {file = "aiohttp-3.10.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:13031e7ec1188274bad243255c328cc3019e36a5a907978501256000d57a7201"}, - {file = "aiohttp-3.10.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2bbc55a964b8eecb341e492ae91c3bd0848324d313e1e71a27e3d96e6ee7e8e8"}, - {file = "aiohttp-3.10.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8cc0564b286b625e673a2615ede60a1704d0cbbf1b24604e28c31ed37dc62aa"}, - {file = "aiohttp-3.10.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f817a54059a4cfbc385a7f51696359c642088710e731e8df80d0607193ed2b73"}, - {file = "aiohttp-3.10.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:8542c9e5bcb2bd3115acdf5adc41cda394e7360916197805e7e32b93d821ef93"}, - {file = "aiohttp-3.10.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:671efce3a4a0281060edf9a07a2f7e6230dca3a1cbc61d110eee7753d28405f7"}, - {file = "aiohttp-3.10.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:0974f3b5b0132edcec92c3306f858ad4356a63d26b18021d859c9927616ebf27"}, - {file = 
"aiohttp-3.10.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:44bb159b55926b57812dca1b21c34528e800963ffe130d08b049b2d6b994ada7"}, - {file = "aiohttp-3.10.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:6ae9ae382d1c9617a91647575255ad55a48bfdde34cc2185dd558ce476bf16e9"}, - {file = "aiohttp-3.10.3-cp310-cp310-win32.whl", hash = "sha256:aed12a54d4e1ee647376fa541e1b7621505001f9f939debf51397b9329fd88b9"}, - {file = "aiohttp-3.10.3-cp310-cp310-win_amd64.whl", hash = "sha256:b51aef59370baf7444de1572f7830f59ddbabd04e5292fa4218d02f085f8d299"}, - {file = "aiohttp-3.10.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e021c4c778644e8cdc09487d65564265e6b149896a17d7c0f52e9a088cc44e1b"}, - {file = "aiohttp-3.10.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:24fade6dae446b183e2410a8628b80df9b7a42205c6bfc2eff783cbeedc224a2"}, - {file = "aiohttp-3.10.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bc8e9f15939dacb0e1f2d15f9c41b786051c10472c7a926f5771e99b49a5957f"}, - {file = "aiohttp-3.10.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5a9ec959b5381271c8ec9310aae1713b2aec29efa32e232e5ef7dcca0df0279"}, - {file = "aiohttp-3.10.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2a5d0ea8a6467b15d53b00c4e8ea8811e47c3cc1bdbc62b1aceb3076403d551f"}, - {file = "aiohttp-3.10.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c9ed607dbbdd0d4d39b597e5bf6b0d40d844dfb0ac6a123ed79042ef08c1f87e"}, - {file = "aiohttp-3.10.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3e66d5b506832e56add66af88c288c1d5ba0c38b535a1a59e436b300b57b23e"}, - {file = "aiohttp-3.10.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fda91ad797e4914cca0afa8b6cccd5d2b3569ccc88731be202f6adce39503189"}, - {file = "aiohttp-3.10.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:61ccb867b2f2f53df6598eb2a93329b5eee0b00646ee79ea67d68844747a418e"}, - {file = "aiohttp-3.10.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6d881353264e6156f215b3cb778c9ac3184f5465c2ece5e6fce82e68946868ef"}, - {file = "aiohttp-3.10.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:b031ce229114825f49cec4434fa844ccb5225e266c3e146cb4bdd025a6da52f1"}, - {file = "aiohttp-3.10.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:5337cc742a03f9e3213b097abff8781f79de7190bbfaa987bd2b7ceb5bb0bdec"}, - {file = "aiohttp-3.10.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ab3361159fd3dcd0e48bbe804006d5cfb074b382666e6c064112056eb234f1a9"}, - {file = "aiohttp-3.10.3-cp311-cp311-win32.whl", hash = "sha256:05d66203a530209cbe40f102ebaac0b2214aba2a33c075d0bf825987c36f1f0b"}, - {file = "aiohttp-3.10.3-cp311-cp311-win_amd64.whl", hash = "sha256:70b4a4984a70a2322b70e088d654528129783ac1ebbf7dd76627b3bd22db2f17"}, - {file = "aiohttp-3.10.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:166de65e2e4e63357cfa8417cf952a519ac42f1654cb2d43ed76899e2319b1ee"}, - {file = "aiohttp-3.10.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7084876352ba3833d5d214e02b32d794e3fd9cf21fdba99cff5acabeb90d9806"}, - {file = "aiohttp-3.10.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d98c604c93403288591d7d6d7d6cc8a63459168f8846aeffd5b3a7f3b3e5e09"}, - {file = "aiohttp-3.10.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d73b073a25a0bb8bf014345374fe2d0f63681ab5da4c22f9d2025ca3e3ea54fc"}, - {file = 
"aiohttp-3.10.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8da6b48c20ce78f5721068f383e0e113dde034e868f1b2f5ee7cb1e95f91db57"}, - {file = "aiohttp-3.10.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3a9dcdccf50284b1b0dc72bc57e5bbd3cc9bf019060dfa0668f63241ccc16aa7"}, - {file = "aiohttp-3.10.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56fb94bae2be58f68d000d046172d8b8e6b1b571eb02ceee5535e9633dcd559c"}, - {file = "aiohttp-3.10.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bf75716377aad2c718cdf66451c5cf02042085d84522aec1f9246d3e4b8641a6"}, - {file = "aiohttp-3.10.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6c51ed03e19c885c8e91f574e4bbe7381793f56f93229731597e4a499ffef2a5"}, - {file = "aiohttp-3.10.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b84857b66fa6510a163bb083c1199d1ee091a40163cfcbbd0642495fed096204"}, - {file = "aiohttp-3.10.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c124b9206b1befe0491f48185fd30a0dd51b0f4e0e7e43ac1236066215aff272"}, - {file = "aiohttp-3.10.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:3461d9294941937f07bbbaa6227ba799bc71cc3b22c40222568dc1cca5118f68"}, - {file = "aiohttp-3.10.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:08bd0754d257b2db27d6bab208c74601df6f21bfe4cb2ec7b258ba691aac64b3"}, - {file = "aiohttp-3.10.3-cp312-cp312-win32.whl", hash = "sha256:7f9159ae530297f61a00116771e57516f89a3de6ba33f314402e41560872b50a"}, - {file = "aiohttp-3.10.3-cp312-cp312-win_amd64.whl", hash = "sha256:e1128c5d3a466279cb23c4aa32a0f6cb0e7d2961e74e9e421f90e74f75ec1edf"}, - {file = "aiohttp-3.10.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d1100e68e70eb72eadba2b932b185ebf0f28fd2f0dbfe576cfa9d9894ef49752"}, - {file = "aiohttp-3.10.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a541414578ff47c0a9b0b8b77381ea86b0c8531ab37fc587572cb662ccd80b88"}, - {file = "aiohttp-3.10.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d5548444ef60bf4c7b19ace21f032fa42d822e516a6940d36579f7bfa8513f9c"}, - {file = "aiohttp-3.10.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ba2e838b5e6a8755ac8297275c9460e729dc1522b6454aee1766c6de6d56e5e"}, - {file = "aiohttp-3.10.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:48665433bb59144aaf502c324694bec25867eb6630fcd831f7a893ca473fcde4"}, - {file = "aiohttp-3.10.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bac352fceed158620ce2d701ad39d4c1c76d114255a7c530e057e2b9f55bdf9f"}, - {file = "aiohttp-3.10.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2b0f670502100cdc567188c49415bebba947eb3edaa2028e1a50dd81bd13363f"}, - {file = "aiohttp-3.10.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43b09f38a67679e32d380fe512189ccb0b25e15afc79b23fbd5b5e48e4fc8fd9"}, - {file = "aiohttp-3.10.3-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:cd788602e239ace64f257d1c9d39898ca65525583f0fbf0988bcba19418fe93f"}, - {file = "aiohttp-3.10.3-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:214277dcb07ab3875f17ee1c777d446dcce75bea85846849cc9d139ab8f5081f"}, - {file = "aiohttp-3.10.3-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:32007fdcaab789689c2ecaaf4b71f8e37bf012a15cd02c0a9db8c4d0e7989fa8"}, - {file = "aiohttp-3.10.3-cp38-cp38-musllinux_1_2_s390x.whl", hash = 
"sha256:123e5819bfe1b87204575515cf448ab3bf1489cdeb3b61012bde716cda5853e7"}, - {file = "aiohttp-3.10.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:812121a201f0c02491a5db335a737b4113151926a79ae9ed1a9f41ea225c0e3f"}, - {file = "aiohttp-3.10.3-cp38-cp38-win32.whl", hash = "sha256:b97dc9a17a59f350c0caa453a3cb35671a2ffa3a29a6ef3568b523b9113d84e5"}, - {file = "aiohttp-3.10.3-cp38-cp38-win_amd64.whl", hash = "sha256:3731a73ddc26969d65f90471c635abd4e1546a25299b687e654ea6d2fc052394"}, - {file = "aiohttp-3.10.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38d91b98b4320ffe66efa56cb0f614a05af53b675ce1b8607cdb2ac826a8d58e"}, - {file = "aiohttp-3.10.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9743fa34a10a36ddd448bba8a3adc2a66a1c575c3c2940301bacd6cc896c6bf1"}, - {file = "aiohttp-3.10.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7c126f532caf238031c19d169cfae3c6a59129452c990a6e84d6e7b198a001dc"}, - {file = "aiohttp-3.10.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:926e68438f05703e500b06fe7148ef3013dd6f276de65c68558fa9974eeb59ad"}, - {file = "aiohttp-3.10.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:434b3ab75833accd0b931d11874e206e816f6e6626fd69f643d6a8269cd9166a"}, - {file = "aiohttp-3.10.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d35235a44ec38109b811c3600d15d8383297a8fab8e3dec6147477ec8636712a"}, - {file = "aiohttp-3.10.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59c489661edbd863edb30a8bd69ecb044bd381d1818022bc698ba1b6f80e5dd1"}, - {file = "aiohttp-3.10.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50544fe498c81cb98912afabfc4e4d9d85e89f86238348e3712f7ca6a2f01dab"}, - {file = "aiohttp-3.10.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:09bc79275737d4dc066e0ae2951866bb36d9c6b460cb7564f111cc0427f14844"}, - {file = "aiohttp-3.10.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:af4dbec58e37f5afff4f91cdf235e8e4b0bd0127a2a4fd1040e2cad3369d2f06"}, - {file = "aiohttp-3.10.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:b22cae3c9dd55a6b4c48c63081d31c00fc11fa9db1a20c8a50ee38c1a29539d2"}, - {file = "aiohttp-3.10.3-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ba562736d3fbfe9241dad46c1a8994478d4a0e50796d80e29d50cabe8fbfcc3f"}, - {file = "aiohttp-3.10.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:f25d6c4e82d7489be84f2b1c8212fafc021b3731abdb61a563c90e37cced3a21"}, - {file = "aiohttp-3.10.3-cp39-cp39-win32.whl", hash = "sha256:b69d832e5f5fa15b1b6b2c8eb6a9fd2c0ec1fd7729cb4322ed27771afc9fc2ac"}, - {file = "aiohttp-3.10.3-cp39-cp39-win_amd64.whl", hash = "sha256:673bb6e3249dc8825df1105f6ef74e2eab779b7ff78e96c15cadb78b04a83752"}, - {file = "aiohttp-3.10.3.tar.gz", hash = "sha256:21650e7032cc2d31fc23d353d7123e771354f2a3d5b05a5647fc30fea214e696"}, -] - -[package.dependencies] -aiohappyeyeballs = ">=2.3.0" -aiosignal = ">=1.1.2" -async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""} -attrs = ">=17.3.0" -frozenlist = ">=1.1.1" -multidict = ">=4.5,<7.0" -yarl = ">=1.0,<2.0" - -[package.extras] -speedups = ["Brotli", "aiodns (>=3.2.0)", "brotlicffi"] - -[[package]] -name = "aiolimiter" -version = "1.1.0" -description = "asyncio rate limiter, a leaky bucket implementation" -optional = false -python-versions = ">=3.7,<4.0" -files = [ - {file = "aiolimiter-1.1.0-py3-none-any.whl", hash = "sha256:0b4997961fc58b8df40279e739f9cf0d3e255e63e9a44f64df567a8c17241e24"}, 
- {file = "aiolimiter-1.1.0.tar.gz", hash = "sha256:461cf02f82a29347340d031626c92853645c099cb5ff85577b831a7bd21132b5"}, -] - -[[package]] -name = "aiosignal" -version = "1.3.1" -description = "aiosignal: a list of registered asynchronous callbacks" -optional = false -python-versions = ">=3.7" -files = [ - {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, - {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, -] - -[package.dependencies] -frozenlist = ">=1.1.0" - -[[package]] -name = "alembic" -version = "1.13.2" -description = "A database migration tool for SQLAlchemy." -optional = false -python-versions = ">=3.8" -files = [ - {file = "alembic-1.13.2-py3-none-any.whl", hash = "sha256:6b8733129a6224a9a711e17c99b08462dbf7cc9670ba8f2e2ae9af860ceb1953"}, - {file = "alembic-1.13.2.tar.gz", hash = "sha256:1ff0ae32975f4fd96028c39ed9bb3c867fe3af956bd7bb37343b54c9fe7445ef"}, -] - -[package.dependencies] -Mako = "*" -SQLAlchemy = ">=1.3.0" -typing-extensions = ">=4" - -[package.extras] -tz = ["backports.zoneinfo"] - -[[package]] -name = "amqp" -version = "5.2.0" -description = "Low-level AMQP client for Python (fork of amqplib)." -optional = true -python-versions = ">=3.6" -files = [ - {file = "amqp-5.2.0-py3-none-any.whl", hash = "sha256:827cb12fb0baa892aad844fd95258143bce4027fdac4fccddbc43330fd281637"}, - {file = "amqp-5.2.0.tar.gz", hash = "sha256:a1ecff425ad063ad42a486c902807d1482311481c8ad95a72694b2975e75f7fd"}, -] - -[package.dependencies] -vine = ">=5.0.0,<6.0.0" - -[[package]] -name = "annotated-types" -version = "0.7.0" -description = "Reusable constraint types to use with typing.Annotated" -optional = false -python-versions = ">=3.8" -files = [ - {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, - {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, -] - -[[package]] -name = "anthropic" -version = "0.34.0" -description = "The official Python library for the anthropic API" -optional = false -python-versions = ">=3.7" -files = [ - {file = "anthropic-0.34.0-py3-none-any.whl", hash = "sha256:4f4b3b5cb7647f5879ee72c22543a10af6da83b18c8401938053b9b4965a9595"}, - {file = "anthropic-0.34.0.tar.gz", hash = "sha256:820d6cc77cfcbbe56f0920a58f7e2bacf119b9e80f6de1bf92a422799ee4680f"}, -] - -[package.dependencies] -anyio = ">=3.5.0,<5" -distro = ">=1.7.0,<2" -httpx = ">=0.23.0,<1" -jiter = ">=0.4.0,<1" -pydantic = ">=1.9.0,<3" -sniffio = "*" -tokenizers = ">=0.13.0" -typing-extensions = ">=4.7,<5" - -[package.extras] -bedrock = ["boto3 (>=1.28.57)", "botocore (>=1.31.57)"] -vertex = ["google-auth (>=2,<3)"] - -[[package]] -name = "anyio" -version = "4.4.0" -description = "High level compatibility layer for multiple asynchronous event loop implementations" -optional = false -python-versions = ">=3.8" -files = [ - {file = "anyio-4.4.0-py3-none-any.whl", hash = "sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7"}, - {file = "anyio-4.4.0.tar.gz", hash = "sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94"}, -] - -[package.dependencies] -exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} -idna = ">=2.8" -sniffio = ">=1.1" -typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} - -[package.extras] -doc = ["Sphinx (>=7)", 
"packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] -trio = ["trio (>=0.23)"] - -[[package]] -name = "appdirs" -version = "1.4.4" -description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." -optional = false -python-versions = "*" -files = [ - {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, - {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, -] - -[[package]] -name = "appnope" -version = "0.1.4" -description = "Disable App Nap on macOS >= 10.9" -optional = false -python-versions = ">=3.6" -files = [ - {file = "appnope-0.1.4-py2.py3-none-any.whl", hash = "sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c"}, - {file = "appnope-0.1.4.tar.gz", hash = "sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee"}, -] - -[[package]] -name = "asgiref" -version = "3.8.1" -description = "ASGI specs, helper code, and adapters" -optional = false -python-versions = ">=3.8" -files = [ - {file = "asgiref-3.8.1-py3-none-any.whl", hash = "sha256:3e1e3ecc849832fe52ccf2cb6686b7a55f82bb1d6aee72a58826471390335e47"}, - {file = "asgiref-3.8.1.tar.gz", hash = "sha256:c343bd80a0bec947a9860adb4c432ffa7db769836c64238fc34bdc3fec84d590"}, -] - -[package.dependencies] -typing-extensions = {version = ">=4", markers = "python_version < \"3.11\""} - -[package.extras] -tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] - -[[package]] -name = "assemblyai" -version = "0.26.0" -description = "AssemblyAI Python SDK" -optional = false -python-versions = ">=3.8" -files = [ - {file = "assemblyai-0.26.0-py3-none-any.whl", hash = "sha256:46689abfe1bf9bccd595f65314aab7deec3b4859630f6882099165862d305421"}, - {file = "assemblyai-0.26.0.tar.gz", hash = "sha256:7cd7cf3231333e9ea14a130b7a72bf710c66c5d1877bbfd68ab13ff546920e33"}, -] - -[package.dependencies] -httpx = ">=0.19.0" -pydantic = ">=1.7.0,<1.10.7 || >1.10.7" -typing-extensions = ">=3.7" -websockets = ">=11.0" - -[package.extras] -extras = ["pyaudio (>=0.2.13)"] - -[[package]] -name = "astra-assistants" -version = "2.0.24" -description = "Astra Assistants API - drop in replacement for OpenAI Assistants, powered by AstraDB" -optional = false -python-versions = "<4.0,>=3.10" -files = [ - {file = "astra_assistants-2.0.24-py3-none-any.whl", hash = "sha256:2742dd4eb60fab5f00db644d430ab5d9bee9b49b457d3d74e7e8164541826486"}, - {file = "astra_assistants-2.0.24.tar.gz", hash = "sha256:aa8a72eb4c5605964251147c55965dc2d8af1f3d62f610d2ea4b7a105098c50d"}, -] - -[package.dependencies] -aiohttp = ">=3.9.4,<4.0.0" -boto3 = ">=1.34.31,<2.0.0" -httpx = ">=0.27.0,<0.28.0" -litellm = ">=1.36.0,<2.0.0" -openai = ">=1.20.0,<2.0.0" -python-dotenv = ">=1.0.1,<2.0.0" - -[[package]] -name = "astrapy" -version = "1.4.1" -description = "AstraPy is a Pythonic SDK for DataStax Astra and its Data API" -optional = false -python-versions = "<4.0.0,>=3.8.0" -files = [ - {file = "astrapy-1.4.1-py3-none-any.whl", hash = "sha256:f2f6ca3a19cfab9422f306b3941401079fb940e286f3d17c776b71ff76eb9f73"}, - {file = "astrapy-1.4.1.tar.gz", hash = "sha256:ea4ed0ec44f9d7281d034c9bd829b0db844438424d492c9c27136456d1a82719"}, -] - -[package.dependencies] -cassio = ">=0.1.4,<0.2.0" 
-deprecation = ">=2.1.0,<2.2.0" -httpx = {version = ">=0.25.2,<1", extras = ["http2"]} -pymongo = ">=3" -toml = ">=0.10.2,<0.11.0" -uuid6 = ">=2024.1.12,<2024.2.0" - -[[package]] -name = "asttokens" -version = "2.4.1" -description = "Annotate AST trees with source code positions" -optional = false -python-versions = "*" -files = [ - {file = "asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24"}, - {file = "asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0"}, -] - -[package.dependencies] -six = ">=1.12.0" - -[package.extras] -astroid = ["astroid (>=1,<2)", "astroid (>=2,<4)"] -test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"] - -[[package]] -name = "async-timeout" -version = "4.0.3" -description = "Timeout context manager for asyncio programs" -optional = false -python-versions = ">=3.7" -files = [ - {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, - {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, -] - -[[package]] -name = "asyncer" -version = "0.0.5" -description = "Asyncer, async and await, focused on developer experience." -optional = false -python-versions = ">=3.8,<4.0" -files = [ - {file = "asyncer-0.0.5-py3-none-any.whl", hash = "sha256:ba06d6de3c750763868dffacf89b18d40b667605b0241d31c2ee43f188e2ab74"}, - {file = "asyncer-0.0.5.tar.gz", hash = "sha256:2979f3e04cbedfe5cfeb79027dcf7d004fcc4430a0ca0066ae20490f218ec06e"}, -] - -[package.dependencies] -anyio = ">=3.4.0,<5.0" - -[[package]] -name = "attrs" -version = "24.2.0" -description = "Classes Without Boilerplate" -optional = false -python-versions = ">=3.7" -files = [ - {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, - {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, -] - -[package.extras] -benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] -tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] - -[[package]] -name = "authlib" -version = "1.3.1" -description = "The ultimate Python library in building OAuth and OpenID Connect servers and clients." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "Authlib-1.3.1-py2.py3-none-any.whl", hash = "sha256:d35800b973099bbadc49b42b256ecb80041ad56b7fe1216a362c7943c088f377"}, - {file = "authlib-1.3.1.tar.gz", hash = "sha256:7ae843f03c06c5c0debd63c9db91f9fda64fa62a42a77419fa15fbb7e7a58917"}, -] - -[package.dependencies] -cryptography = "*" - -[[package]] -name = "backoff" -version = "2.2.1" -description = "Function decoration for backoff and retry" -optional = false -python-versions = ">=3.7,<4.0" -files = [ - {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, - {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, -] - -[[package]] -name = "backports-tarfile" -version = "1.2.0" -description = "Backport of CPython tarfile module" -optional = false -python-versions = ">=3.8" -files = [ - {file = "backports.tarfile-1.2.0-py3-none-any.whl", hash = "sha256:77e284d754527b01fb1e6fa8a1afe577858ebe4e9dad8919e34c862cb399bc34"}, - {file = "backports_tarfile-1.2.0.tar.gz", hash = "sha256:d75e02c268746e1b8144c278978b6e98e85de6ad16f8e4b0844a154557eca991"}, -] - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["jaraco.test", "pytest (!=8.0.*)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)"] - -[[package]] -name = "bce-python-sdk" -version = "0.9.19" -description = "BCE SDK for python" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,<4,>=2.7" -files = [ - {file = "bce_python_sdk-0.9.19-py3-none-any.whl", hash = "sha256:99f573ada57e0853873ed792ff383f44636f7c8996273af048f97801fd404386"}, - {file = "bce_python_sdk-0.9.19.tar.gz", hash = "sha256:b578f5ca545eee7daadad8c62d53ac8e341d5d2378089ce366e39037e8c1da04"}, -] - -[package.dependencies] -future = ">=0.6.0" -pycryptodome = ">=3.8.0" -six = ">=1.4.0" - -[[package]] -name = "bcrypt" -version = "4.0.1" -description = "Modern password hashing for your software and your servers" -optional = false -python-versions = ">=3.6" -files = [ - {file = "bcrypt-4.0.1-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:b1023030aec778185a6c16cf70f359cbb6e0c289fd564a7cfa29e727a1c38f8f"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:08d2947c490093a11416df18043c27abe3921558d2c03e2076ccb28a116cb6d0"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0eaa47d4661c326bfc9d08d16debbc4edf78778e6aaba29c1bc7ce67214d4410"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae88eca3024bb34bb3430f964beab71226e761f51b912de5133470b649d82344"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:a522427293d77e1c29e303fc282e2d71864579527a04ddcfda6d4f8396c6c36a"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:fbdaec13c5105f0c4e5c52614d04f0bca5f5af007910daa8b6b12095edaa67b3"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:ca3204d00d3cb2dfed07f2d74a25f12fc12f73e606fcaa6975d1f7ae69cacbb2"}, - {file = "bcrypt-4.0.1-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:089098effa1bc35dc055366740a067a2fc76987e8ec75349eb9484061c54f535"}, - {file = "bcrypt-4.0.1-cp36-abi3-musllinux_1_1_x86_64.whl", hash = 
"sha256:e9a51bbfe7e9802b5f3508687758b564069ba937748ad7b9e890086290d2f79e"}, - {file = "bcrypt-4.0.1-cp36-abi3-win32.whl", hash = "sha256:2caffdae059e06ac23fce178d31b4a702f2a3264c20bfb5ff541b338194d8fab"}, - {file = "bcrypt-4.0.1-cp36-abi3-win_amd64.whl", hash = "sha256:8a68f4341daf7522fe8d73874de8906f3a339048ba406be6ddc1b3ccb16fc0d9"}, - {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf4fa8b2ca74381bb5442c089350f09a3f17797829d958fad058d6e44d9eb83c"}, - {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:67a97e1c405b24f19d08890e7ae0c4f7ce1e56a712a016746c8b2d7732d65d4b"}, - {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b3b85202d95dd568efcb35b53936c5e3b3600c7cdcc6115ba461df3a8e89f38d"}, - {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cbb03eec97496166b704ed663a53680ab57c5084b2fc98ef23291987b525cb7d"}, - {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:5ad4d32a28b80c5fa6671ccfb43676e8c1cc232887759d1cd7b6f56ea4355215"}, - {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b57adba8a1444faf784394de3436233728a1ecaeb6e07e8c22c8848f179b893c"}, - {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:705b2cea8a9ed3d55b4491887ceadb0106acf7c6387699fca771af56b1cdeeda"}, - {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:2b3ac11cf45161628f1f3733263e63194f22664bf4d0c0f3ab34099c02134665"}, - {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3100851841186c25f127731b9fa11909ab7b1df6fc4b9f8353f4f1fd952fbf71"}, - {file = "bcrypt-4.0.1.tar.gz", hash = "sha256:27d375903ac8261cfe4047f6709d16f7d18d39b1ec92aaf72af989552a650ebd"}, -] - -[package.extras] -tests = ["pytest (>=3.2.1,!=3.3.0)"] -typecheck = ["mypy"] - -[[package]] -name = "beautifulsoup4" -version = "4.12.3" -description = "Screen-scraping library" -optional = false -python-versions = ">=3.6.0" -files = [ - {file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"}, - {file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"}, -] - -[package.dependencies] -soupsieve = ">1.2" - -[package.extras] -cchardet = ["cchardet"] -chardet = ["chardet"] -charset-normalizer = ["charset-normalizer"] -html5lib = ["html5lib"] -lxml = ["lxml"] - -[[package]] -name = "billiard" -version = "4.2.0" -description = "Python multiprocessing fork with improvements and bugfixes" -optional = true -python-versions = ">=3.7" -files = [ - {file = "billiard-4.2.0-py3-none-any.whl", hash = "sha256:07aa978b308f334ff8282bd4a746e681b3513db5c9a514cbdd810cbbdc19714d"}, - {file = "billiard-4.2.0.tar.gz", hash = "sha256:9a3c3184cb275aa17a732f93f65b20c525d3d9f253722d26a82194803ade5a2c"}, -] - -[[package]] -name = "boto3" -version = "1.34.162" -description = "The AWS SDK for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "boto3-1.34.162-py3-none-any.whl", hash = "sha256:d6f6096bdab35a0c0deff469563b87d184a28df7689790f7fe7be98502b7c590"}, - {file = "boto3-1.34.162.tar.gz", hash = "sha256:873f8f5d2f6f85f1018cbb0535b03cceddc7b655b61f66a0a56995238804f41f"}, -] - -[package.dependencies] -botocore = ">=1.34.162,<1.35.0" -jmespath = ">=0.7.1,<2.0.0" -s3transfer = ">=0.10.0,<0.11.0" - 
-[package.extras] -crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] - -[[package]] -name = "botocore" -version = "1.34.162" -description = "Low-level, data-driven core of boto 3." -optional = false -python-versions = ">=3.8" -files = [ - {file = "botocore-1.34.162-py3-none-any.whl", hash = "sha256:2d918b02db88d27a75b48275e6fb2506e9adaaddbec1ffa6a8a0898b34e769be"}, - {file = "botocore-1.34.162.tar.gz", hash = "sha256:adc23be4fb99ad31961236342b7cbf3c0bfc62532cd02852196032e8c0d682f3"}, -] - -[package.dependencies] -jmespath = ">=0.7.1,<2.0.0" -python-dateutil = ">=2.1,<3.0.0" -urllib3 = {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""} - -[package.extras] -crt = ["awscrt (==0.21.2)"] - -[[package]] -name = "build" -version = "1.2.1" -description = "A simple, correct Python build frontend" -optional = false -python-versions = ">=3.8" -files = [ - {file = "build-1.2.1-py3-none-any.whl", hash = "sha256:75e10f767a433d9a86e50d83f418e83efc18ede923ee5ff7df93b6cb0306c5d4"}, - {file = "build-1.2.1.tar.gz", hash = "sha256:526263f4870c26f26c433545579475377b2b7588b6f1eac76a001e873ae3e19d"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "os_name == \"nt\""} -importlib-metadata = {version = ">=4.6", markers = "python_full_version < \"3.10.2\""} -packaging = ">=19.1" -pyproject_hooks = "*" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} - -[package.extras] -docs = ["furo (>=2023.08.17)", "sphinx (>=7.0,<8.0)", "sphinx-argparse-cli (>=1.5)", "sphinx-autodoc-typehints (>=1.10)", "sphinx-issues (>=3.0.0)"] -test = ["build[uv,virtualenv]", "filelock (>=3)", "pytest (>=6.2.4)", "pytest-cov (>=2.12)", "pytest-mock (>=2)", "pytest-rerunfailures (>=9.1)", "pytest-xdist (>=1.34)", "setuptools (>=42.0.0)", "setuptools (>=56.0.0)", "setuptools (>=56.0.0)", "setuptools (>=67.8.0)", "wheel (>=0.36.0)"] -typing = ["build[uv]", "importlib-metadata (>=5.1)", "mypy (>=1.9.0,<1.10.0)", "tomli", "typing-extensions (>=3.7.4.3)"] -uv = ["uv (>=0.1.18)"] -virtualenv = ["virtualenv (>=20.0.35)"] - -[[package]] -name = "cachetools" -version = "5.4.0" -description = "Extensible memoizing collections and decorators" -optional = false -python-versions = ">=3.7" -files = [ - {file = "cachetools-5.4.0-py3-none-any.whl", hash = "sha256:3ae3b49a3d5e28a77a0be2b37dbcb89005058959cb2323858c2657c4a8cab474"}, - {file = "cachetools-5.4.0.tar.gz", hash = "sha256:b8adc2e7c07f105ced7bc56dbb6dfbe7c4a00acce20e2227b3f355be89bc6827"}, -] - -[[package]] -name = "cassandra-driver" -version = "3.29.1" -description = "DataStax Driver for Apache Cassandra" -optional = false -python-versions = "*" -files = [ - {file = "cassandra-driver-3.29.1.tar.gz", hash = "sha256:38e9c2a2f2a9664bb03f1f852d5fccaeff2163942b5db35dffcf8bf32a51cfe5"}, - {file = "cassandra_driver-3.29.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a8f175c7616a63ca48cb8bd4acc443e2a3d889964d5157cead761f23cc8db7bd"}, - {file = "cassandra_driver-3.29.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7d66398952b9cd21c40edff56e22b6d3bce765edc94b207ddb5896e7bc9aa088"}, - {file = "cassandra_driver-3.29.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bbc6f575ef109ce5d4abfa2033bf36c394032abd83e32ab671159ce68e7e17b"}, - {file = "cassandra_driver-3.29.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78f241af75696adb3e470209e2fbb498804c99e2b197d24d74774eee6784f283"}, - {file = "cassandra_driver-3.29.1-cp310-cp310-win32.whl", hash = 
"sha256:54d9e651a742d6ca3d874ef8d06a40fa032d2dba97142da2d36f60c5675e39f8"}, - {file = "cassandra_driver-3.29.1-cp310-cp310-win_amd64.whl", hash = "sha256:630dc5423cd40eba0ee9db31065e2238098ff1a25a6b1bd36360f85738f26e4b"}, - {file = "cassandra_driver-3.29.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0b841d38c96bb878d31df393954863652d6d3a85f47bcc00fd1d70a5ea73023f"}, - {file = "cassandra_driver-3.29.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:19cc7375f673e215bd4cbbefae2de9f07830be7dabef55284a2d2ff8d8691efe"}, - {file = "cassandra_driver-3.29.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b74b355be3dcafe652fffda8f14f385ccc1a8dae9df28e6080cc660da39b45f"}, - {file = "cassandra_driver-3.29.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e6dac7eddd3f4581859f180383574068a3f113907811b4dad755a8ace4c3fbd"}, - {file = "cassandra_driver-3.29.1-cp311-cp311-win32.whl", hash = "sha256:293a79dba417112b56320ed0013d71fd7520f5fc4a5fd2ac8000c762c6dd5b07"}, - {file = "cassandra_driver-3.29.1-cp311-cp311-win_amd64.whl", hash = "sha256:7c2374fdf1099047a6c9c8329c79d71ad11e61d9cca7de92a0f49655da4bdd8a"}, - {file = "cassandra_driver-3.29.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4431a0c836f33a33c733c84997fbdb6398be005c4d18a8c8525c469fdc29393c"}, - {file = "cassandra_driver-3.29.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d23b08381b171a9e42ace483a82457edcddada9e8367e31677b97538cde2dc34"}, - {file = "cassandra_driver-3.29.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4beb29a0139e63a10a5b9a3c7b72c30a4e6e20c9f0574f9d22c0d4144fe3d348"}, - {file = "cassandra_driver-3.29.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b206423cc454a78f16b411e7cb641dddc26168ac2e18f2c13665f5f3c89868c"}, - {file = "cassandra_driver-3.29.1-cp312-cp312-win32.whl", hash = "sha256:ac898cca7303a3a2a3070513eee12ef0f1be1a0796935c5b8aa13dae8c0a7f7e"}, - {file = "cassandra_driver-3.29.1-cp312-cp312-win_amd64.whl", hash = "sha256:4ad0c9fb2229048ad6ff8c6ddbf1fdc78b111f2b061c66237c2257fcc4a31b14"}, - {file = "cassandra_driver-3.29.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4282c5deac462e4bb0f6fd0553a33d514dbd5ee99d0812594210080330ddd1a2"}, - {file = "cassandra_driver-3.29.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:41ca7eea069754002418d3bdfbd3dfd150ea12cb9db474ab1a01fa4679a05bcb"}, - {file = "cassandra_driver-3.29.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6639ccb268c4dc754bc45e03551711780d0e02cb298ab26cde1f42b7bcc74f8"}, - {file = "cassandra_driver-3.29.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a9d7d3b1be24a7f113b5404186ccccc977520401303a8fe78ba34134cad2482"}, - {file = "cassandra_driver-3.29.1-cp38-cp38-win32.whl", hash = "sha256:81c8fd556c6e1bb93577e69c1f10a3fadf7ddb93958d226ccbb72389396e9a92"}, - {file = "cassandra_driver-3.29.1-cp38-cp38-win_amd64.whl", hash = "sha256:cfe70ed0f27af949de2767ea9cef4092584e8748759374a55bf23c30746c7b23"}, - {file = "cassandra_driver-3.29.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a2c03c1d834ac1a0ae39f9af297a8cd38829003ce910b08b324fb3abe488ce2b"}, - {file = "cassandra_driver-3.29.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9a3e1e2b01f3b7a5cf75c97401bce830071d99c42464352087d7475e0161af93"}, - {file = "cassandra_driver-3.29.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:90c42006665a4e490b0766b70f3d637f36a30accbef2da35d6d4081c0e0bafc3"}, - {file = "cassandra_driver-3.29.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c1aca41f45772f9759e8246030907d92bc35fbbdc91525a3cb9b49939b80ad7"}, - {file = "cassandra_driver-3.29.1-cp39-cp39-win32.whl", hash = "sha256:ce4a66245d4a0c8b07fdcb6398698c2c42eb71245fb49cff39435bb702ff7be6"}, - {file = "cassandra_driver-3.29.1-cp39-cp39-win_amd64.whl", hash = "sha256:4cae69ceb1b1d9383e988a1b790115253eacf7867ceb15ed2adb736e3ce981be"}, -] - -[package.dependencies] -geomet = ">=0.1,<0.3" - -[package.extras] -cle = ["cryptography (>=35.0)"] -graph = ["gremlinpython (==3.4.6)"] - -[[package]] -name = "cassio" -version = "0.1.8" -description = "A framework-agnostic Python library to seamlessly integrate Apache Cassandra(R) with ML/LLM/genAI workloads." -optional = false -python-versions = "<4.0,>=3.8" -files = [ - {file = "cassio-0.1.8-py3-none-any.whl", hash = "sha256:c09e7c884ba7227ff5277c86f3b0f31c523672ea407f56d093c7227e69c54d94"}, - {file = "cassio-0.1.8.tar.gz", hash = "sha256:4e09929506cb3dd6fad217e89846d0a1a59069afd24b82c72526ef6f2e9271af"}, -] - -[package.dependencies] -cassandra-driver = ">=3.28.0,<4.0.0" -numpy = ">=1.0" -requests = ">=2.31.0,<3.0.0" - -[[package]] -name = "celery" -version = "5.4.0" -description = "Distributed Task Queue." -optional = true -python-versions = ">=3.8" -files = [ - {file = "celery-5.4.0-py3-none-any.whl", hash = "sha256:369631eb580cf8c51a82721ec538684994f8277637edde2dfc0dacd73ed97f64"}, - {file = "celery-5.4.0.tar.gz", hash = "sha256:504a19140e8d3029d5acad88330c541d4c3f64c789d85f94756762d8bca7e706"}, -] - -[package.dependencies] -billiard = ">=4.2.0,<5.0" -click = ">=8.1.2,<9.0" -click-didyoumean = ">=0.3.0" -click-plugins = ">=1.1.1" -click-repl = ">=0.2.0" -kombu = ">=5.3.4,<6.0" -python-dateutil = ">=2.8.2" -redis = {version = ">=4.5.2,<4.5.5 || >4.5.5,<6.0.0", optional = true, markers = "extra == \"redis\""} -tzdata = ">=2022.7" -vine = ">=5.1.0,<6.0" - -[package.extras] -arangodb = ["pyArango (>=2.0.2)"] -auth = ["cryptography (==42.0.5)"] -azureblockblob = ["azure-storage-blob (>=12.15.0)"] -brotli = ["brotli (>=1.0.0)", "brotlipy (>=0.7.0)"] -cassandra = ["cassandra-driver (>=3.25.0,<4)"] -consul = ["python-consul2 (==0.1.5)"] -cosmosdbsql = ["pydocumentdb (==2.3.5)"] -couchbase = ["couchbase (>=3.0.0)"] -couchdb = ["pycouchdb (==1.14.2)"] -django = ["Django (>=2.2.28)"] -dynamodb = ["boto3 (>=1.26.143)"] -elasticsearch = ["elastic-transport (<=8.13.0)", "elasticsearch (<=8.13.0)"] -eventlet = ["eventlet (>=0.32.0)"] -gcs = ["google-cloud-storage (>=2.10.0)"] -gevent = ["gevent (>=1.5.0)"] -librabbitmq = ["librabbitmq (>=2.0.0)"] -memcache = ["pylibmc (==1.6.3)"] -mongodb = ["pymongo[srv] (>=4.0.2)"] -msgpack = ["msgpack (==1.0.8)"] -pymemcache = ["python-memcached (>=1.61)"] -pyro = ["pyro4 (==4.82)"] -pytest = ["pytest-celery[all] (>=1.0.0)"] -redis = ["redis (>=4.5.2,!=4.5.5,<6.0.0)"] -s3 = ["boto3 (>=1.26.143)"] -slmq = ["softlayer-messaging (>=1.0.3)"] -solar = ["ephem (==4.1.5)"] -sqlalchemy = ["sqlalchemy (>=1.4.48,<2.1)"] -sqs = ["boto3 (>=1.26.143)", "kombu[sqs] (>=5.3.4)", "pycurl (>=7.43.0.5)", "urllib3 (>=1.26.16)"] -tblib = ["tblib (>=1.3.0)", "tblib (>=1.5.0)"] -yaml = ["PyYAML (>=3.10)"] -zookeeper = ["kazoo (>=1.3.1)"] -zstd = ["zstandard (==0.22.0)"] - -[[package]] -name = "certifi" -version = "2024.7.4" -description = "Python package for providing Mozilla's CA Bundle." 
-optional = false -python-versions = ">=3.6" -files = [ - {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, - {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, -] - -[[package]] -name = "cffi" -version = "1.17.0" -description = "Foreign Function Interface for Python calling C code." -optional = false -python-versions = ">=3.8" -files = [ - {file = "cffi-1.17.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f9338cc05451f1942d0d8203ec2c346c830f8e86469903d5126c1f0a13a2bcbb"}, - {file = "cffi-1.17.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0ce71725cacc9ebf839630772b07eeec220cbb5f03be1399e0457a1464f8e1a"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c815270206f983309915a6844fe994b2fa47e5d05c4c4cef267c3b30e34dbe42"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6bdcd415ba87846fd317bee0774e412e8792832e7805938987e4ede1d13046d"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a98748ed1a1df4ee1d6f927e151ed6c1a09d5ec21684de879c7ea6aa96f58f2"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0a048d4f6630113e54bb4b77e315e1ba32a5a31512c31a273807d0027a7e69ab"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24aa705a5f5bd3a8bcfa4d123f03413de5d86e497435693b638cbffb7d5d8a1b"}, - {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:856bf0924d24e7f93b8aee12a3a1095c34085600aa805693fb7f5d1962393206"}, - {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:4304d4416ff032ed50ad6bb87416d802e67139e31c0bde4628f36a47a3164bfa"}, - {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:331ad15c39c9fe9186ceaf87203a9ecf5ae0ba2538c9e898e3a6967e8ad3db6f"}, - {file = "cffi-1.17.0-cp310-cp310-win32.whl", hash = "sha256:669b29a9eca6146465cc574659058ed949748f0809a2582d1f1a324eb91054dc"}, - {file = "cffi-1.17.0-cp310-cp310-win_amd64.whl", hash = "sha256:48b389b1fd5144603d61d752afd7167dfd205973a43151ae5045b35793232aa2"}, - {file = "cffi-1.17.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c5d97162c196ce54af6700949ddf9409e9833ef1003b4741c2b39ef46f1d9720"}, - {file = "cffi-1.17.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5ba5c243f4004c750836f81606a9fcb7841f8874ad8f3bf204ff5e56332b72b9"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bb9333f58fc3a2296fb1d54576138d4cf5d496a2cc118422bd77835e6ae0b9cb"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:435a22d00ec7d7ea533db494da8581b05977f9c37338c80bc86314bec2619424"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1df34588123fcc88c872f5acb6f74ae59e9d182a2707097f9e28275ec26a12d"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df8bb0010fdd0a743b7542589223a2816bdde4d94bb5ad67884348fa2c1c67e8"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8b5b9712783415695663bd463990e2f00c6750562e6ad1d28e072a611c5f2a6"}, - {file = 
"cffi-1.17.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ffef8fd58a36fb5f1196919638f73dd3ae0db1a878982b27a9a5a176ede4ba91"}, - {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e67d26532bfd8b7f7c05d5a766d6f437b362c1bf203a3a5ce3593a645e870b8"}, - {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:45f7cd36186db767d803b1473b3c659d57a23b5fa491ad83c6d40f2af58e4dbb"}, - {file = "cffi-1.17.0-cp311-cp311-win32.whl", hash = "sha256:a9015f5b8af1bb6837a3fcb0cdf3b874fe3385ff6274e8b7925d81ccaec3c5c9"}, - {file = "cffi-1.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:b50aaac7d05c2c26dfd50c3321199f019ba76bb650e346a6ef3616306eed67b0"}, - {file = "cffi-1.17.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aec510255ce690d240f7cb23d7114f6b351c733a74c279a84def763660a2c3bc"}, - {file = "cffi-1.17.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2770bb0d5e3cc0e31e7318db06efcbcdb7b31bcb1a70086d3177692a02256f59"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db9a30ec064129d605d0f1aedc93e00894b9334ec74ba9c6bdd08147434b33eb"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a47eef975d2b8b721775a0fa286f50eab535b9d56c70a6e62842134cf7841195"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f3e0992f23bbb0be00a921eae5363329253c3b86287db27092461c887b791e5e"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6107e445faf057c118d5050560695e46d272e5301feffda3c41849641222a828"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb862356ee9391dc5a0b3cbc00f416b48c1b9a52d252d898e5b7696a5f9fe150"}, - {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c1c13185b90bbd3f8b5963cd8ce7ad4ff441924c31e23c975cb150e27c2bf67a"}, - {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:17c6d6d3260c7f2d94f657e6872591fe8733872a86ed1345bda872cfc8c74885"}, - {file = "cffi-1.17.0-cp312-cp312-win32.whl", hash = "sha256:c3b8bd3133cd50f6b637bb4322822c94c5ce4bf0d724ed5ae70afce62187c492"}, - {file = "cffi-1.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:dca802c8db0720ce1c49cce1149ff7b06e91ba15fa84b1d59144fef1a1bc7ac2"}, - {file = "cffi-1.17.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6ce01337d23884b21c03869d2f68c5523d43174d4fc405490eb0091057943118"}, - {file = "cffi-1.17.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cab2eba3830bf4f6d91e2d6718e0e1c14a2f5ad1af68a89d24ace0c6b17cced7"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:14b9cbc8f7ac98a739558eb86fabc283d4d564dafed50216e7f7ee62d0d25377"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b00e7bcd71caa0282cbe3c90966f738e2db91e64092a877c3ff7f19a1628fdcb"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:41f4915e09218744d8bae14759f983e466ab69b178de38066f7579892ff2a555"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4760a68cab57bfaa628938e9c2971137e05ce48e762a9cb53b76c9b569f1204"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:011aff3524d578a9412c8b3cfaa50f2c0bd78e03eb7af7aa5e0df59b158efb2f"}, - {file = "cffi-1.17.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:a003ac9edc22d99ae1286b0875c460351f4e101f8c9d9d2576e78d7e048f64e0"}, - {file = "cffi-1.17.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ef9528915df81b8f4c7612b19b8628214c65c9b7f74db2e34a646a0a2a0da2d4"}, - {file = "cffi-1.17.0-cp313-cp313-win32.whl", hash = "sha256:70d2aa9fb00cf52034feac4b913181a6e10356019b18ef89bc7c12a283bf5f5a"}, - {file = "cffi-1.17.0-cp313-cp313-win_amd64.whl", hash = "sha256:b7b6ea9e36d32582cda3465f54c4b454f62f23cb083ebc7a94e2ca6ef011c3a7"}, - {file = "cffi-1.17.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:964823b2fc77b55355999ade496c54dde161c621cb1f6eac61dc30ed1b63cd4c"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:516a405f174fd3b88829eabfe4bb296ac602d6a0f68e0d64d5ac9456194a5b7e"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dec6b307ce928e8e112a6bb9921a1cb00a0e14979bf28b98e084a4b8a742bd9b"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4094c7b464cf0a858e75cd14b03509e84789abf7b79f8537e6a72152109c76e"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2404f3de742f47cb62d023f0ba7c5a916c9c653d5b368cc966382ae4e57da401"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3aa9d43b02a0c681f0bfbc12d476d47b2b2b6a3f9287f11ee42989a268a1833c"}, - {file = "cffi-1.17.0-cp38-cp38-win32.whl", hash = "sha256:0bb15e7acf8ab35ca8b24b90af52c8b391690ef5c4aec3d31f38f0d37d2cc499"}, - {file = "cffi-1.17.0-cp38-cp38-win_amd64.whl", hash = "sha256:93a7350f6706b31f457c1457d3a3259ff9071a66f312ae64dc024f049055f72c"}, - {file = "cffi-1.17.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1a2ddbac59dc3716bc79f27906c010406155031a1c801410f1bafff17ea304d2"}, - {file = "cffi-1.17.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6327b572f5770293fc062a7ec04160e89741e8552bf1c358d1a23eba68166759"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbc183e7bef690c9abe5ea67b7b60fdbca81aa8da43468287dae7b5c046107d4"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bdc0f1f610d067c70aa3737ed06e2726fd9d6f7bfee4a351f4c40b6831f4e82"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6d872186c1617d143969defeadac5a904e6e374183e07977eedef9c07c8953bf"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0d46ee4764b88b91f16661a8befc6bfb24806d885e27436fdc292ed7e6f6d058"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f76a90c345796c01d85e6332e81cab6d70de83b829cf1d9762d0a3da59c7932"}, - {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0e60821d312f99d3e1569202518dddf10ae547e799d75aef3bca3a2d9e8ee693"}, - {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:eb09b82377233b902d4c3fbeeb7ad731cdab579c6c6fda1f763cd779139e47c3"}, - {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:24658baf6224d8f280e827f0a50c46ad819ec8ba380a42448e24459daf809cf4"}, - {file = "cffi-1.17.0-cp39-cp39-win32.whl", hash = 
"sha256:0fdacad9e0d9fc23e519efd5ea24a70348305e8d7d85ecbb1a5fa66dc834e7fb"}, - {file = "cffi-1.17.0-cp39-cp39-win_amd64.whl", hash = "sha256:7cbc78dc018596315d4e7841c8c3a7ae31cc4d638c9b627f87d52e8abaaf2d29"}, - {file = "cffi-1.17.0.tar.gz", hash = "sha256:f3157624b7558b914cb039fd1af735e5e8049a87c817cc215109ad1c8779df76"}, -] - -[package.dependencies] -pycparser = "*" - -[[package]] -name = "cfgv" -version = "3.4.0" -description = "Validate configuration and produce human readable error messages." -optional = false -python-versions = ">=3.8" -files = [ - {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, - {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, -] - -[[package]] -name = "chardet" -version = "5.2.0" -description = "Universal encoding detector for Python 3" -optional = false -python-versions = ">=3.7" -files = [ - {file = "chardet-5.2.0-py3-none-any.whl", hash = "sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970"}, - {file = "chardet-5.2.0.tar.gz", hash = "sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7"}, -] - -[[package]] -name = "charset-normalizer" -version = "3.3.2" -description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = 
"sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, - {file = 
"charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = 
"sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, - {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, -] - -[[package]] -name = "chroma-hnswlib" -version = "0.7.3" -description = "Chromas fork of hnswlib" -optional = false -python-versions = "*" -files = [ - {file = "chroma-hnswlib-0.7.3.tar.gz", hash = "sha256:b6137bedde49fffda6af93b0297fe00429fc61e5a072b1ed9377f909ed95a932"}, - {file = "chroma_hnswlib-0.7.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:59d6a7c6f863c67aeb23e79a64001d537060b6995c3eca9a06e349ff7b0998ca"}, - {file = "chroma_hnswlib-0.7.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d71a3f4f232f537b6152947006bd32bc1629a8686df22fd97777b70f416c127a"}, - {file = "chroma_hnswlib-0.7.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c92dc1ebe062188e53970ba13f6b07e0ae32e64c9770eb7f7ffa83f149d4210"}, - {file = "chroma_hnswlib-0.7.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49da700a6656fed8753f68d44b8cc8ae46efc99fc8a22a6d970dc1697f49b403"}, - {file = "chroma_hnswlib-0.7.3-cp310-cp310-win_amd64.whl", hash = "sha256:108bc4c293d819b56476d8f7865803cb03afd6ca128a2a04d678fffc139af029"}, - {file = "chroma_hnswlib-0.7.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:11e7ca93fb8192214ac2b9c0943641ac0daf8f9d4591bb7b73be808a83835667"}, - {file = "chroma_hnswlib-0.7.3-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:6f552e4d23edc06cdeb553cdc757d2fe190cdeb10d43093d6a3319f8d4bf1c6b"}, - {file = "chroma_hnswlib-0.7.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f96f4d5699e486eb1fb95849fe35ab79ab0901265805be7e60f4eaa83ce263ec"}, - {file = "chroma_hnswlib-0.7.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:368e57fe9ebae05ee5844840fa588028a023d1182b0cfdb1d13f607c9ea05756"}, - {file = "chroma_hnswlib-0.7.3-cp311-cp311-win_amd64.whl", hash = "sha256:b7dca27b8896b494456db0fd705b689ac6b73af78e186eb6a42fea2de4f71c6f"}, - {file = "chroma_hnswlib-0.7.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:70f897dc6218afa1d99f43a9ad5eb82f392df31f57ff514ccf4eeadecd62f544"}, - {file = "chroma_hnswlib-0.7.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5aef10b4952708f5a1381c124a29aead0c356f8d7d6e0b520b778aaa62a356f4"}, - {file = "chroma_hnswlib-0.7.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ee2d8d1529fca3898d512079144ec3e28a81d9c17e15e0ea4665697a7923253"}, - {file = "chroma_hnswlib-0.7.3-cp37-cp37m-win_amd64.whl", hash = "sha256:a4021a70e898783cd6f26e00008b494c6249a7babe8774e90ce4766dd288c8ba"}, - {file = "chroma_hnswlib-0.7.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a8f61fa1d417fda848e3ba06c07671f14806a2585272b175ba47501b066fe6b1"}, - {file = "chroma_hnswlib-0.7.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d7563be58bc98e8f0866907368e22ae218d6060601b79c42f59af4eccbbd2e0a"}, - {file = "chroma_hnswlib-0.7.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:51b8d411486ee70d7b66ec08cc8b9b6620116b650df9c19076d2d8b6ce2ae914"}, - {file = "chroma_hnswlib-0.7.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d706782b628e4f43f1b8a81e9120ac486837fbd9bcb8ced70fe0d9b95c72d77"}, - {file = "chroma_hnswlib-0.7.3-cp38-cp38-win_amd64.whl", hash = "sha256:54f053dedc0e3ba657f05fec6e73dd541bc5db5b09aa8bc146466ffb734bdc86"}, - {file = "chroma_hnswlib-0.7.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e607c5a71c610a73167a517062d302c0827ccdd6e259af6e4869a5c1306ffb5d"}, - {file = "chroma_hnswlib-0.7.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c2358a795870156af6761890f9eb5ca8cade57eb10c5f046fe94dae1faa04b9e"}, - {file = "chroma_hnswlib-0.7.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7cea425df2e6b8a5e201fff0d922a1cc1d165b3cfe762b1408075723c8892218"}, - {file = "chroma_hnswlib-0.7.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:454df3dd3e97aa784fba7cf888ad191e0087eef0fd8c70daf28b753b3b591170"}, - {file = "chroma_hnswlib-0.7.3-cp39-cp39-win_amd64.whl", hash = "sha256:df587d15007ca701c6de0ee7d5585dd5e976b7edd2b30ac72bc376b3c3f85882"}, -] - -[package.dependencies] -numpy = "*" - -[[package]] -name = "chromadb" -version = "0.4.24" -description = "Chroma." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "chromadb-0.4.24-py3-none-any.whl", hash = "sha256:3a08e237a4ad28b5d176685bd22429a03717fe09d35022fb230d516108da01da"}, - {file = "chromadb-0.4.24.tar.gz", hash = "sha256:a5c80b4e4ad9b236ed2d4899a5b9e8002b489293f2881cb2cadab5b199ee1c72"}, -] - -[package.dependencies] -bcrypt = ">=4.0.1" -build = ">=1.0.3" -chroma-hnswlib = "0.7.3" -fastapi = ">=0.95.2" -grpcio = ">=1.58.0" -importlib-resources = "*" -kubernetes = ">=28.1.0" -mmh3 = ">=4.0.1" -numpy = ">=1.22.5" -onnxruntime = ">=1.14.1" -opentelemetry-api = ">=1.2.0" -opentelemetry-exporter-otlp-proto-grpc = ">=1.2.0" -opentelemetry-instrumentation-fastapi = ">=0.41b0" -opentelemetry-sdk = ">=1.2.0" -orjson = ">=3.9.12" -overrides = ">=7.3.1" -posthog = ">=2.4.0" -pulsar-client = ">=3.1.0" -pydantic = ">=1.9" -pypika = ">=0.48.9" -PyYAML = ">=6.0.0" -requests = ">=2.28" -tenacity = ">=8.2.3" -tokenizers = ">=0.13.2" -tqdm = ">=4.65.0" -typer = ">=0.9.0" -typing-extensions = ">=4.5.0" -uvicorn = {version = ">=0.18.3", extras = ["standard"]} - -[[package]] -name = "clevercsv" -version = "0.8.2" -description = "A Python package for handling messy CSV files" -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "clevercsv-0.8.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:67ab7dc8490ed391add1f26db262d09067796be7e76948bde0a9c6f1dddb7508"}, - {file = "clevercsv-0.8.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bcf2402578f2f1c655ed21370e668f44098d9734129804f0fba1779dab7f2c47"}, - {file = "clevercsv-0.8.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7a65d9722303e3db439124418ee312fcab6a75897175842690eae94bdf51b72b"}, - {file = "clevercsv-0.8.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:755369a540d40295ea2898343c44dc8a4886e7c9e2fd5f5a780d2995a5516e1d"}, - {file = "clevercsv-0.8.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fe155a8e39160692869f3b9b8a8bca9ba215cc350b9c804437edaa90ede4d16"}, - {file = "clevercsv-0.8.2-cp310-cp310-win_amd64.whl", hash = "sha256:1e38761cd3f1977f8298a1a4cac3981c953aaf2226c0f1cc3f1ccf2172100ba4"}, - {file = "clevercsv-0.8.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3502c7af7a4b7a50b923a5972a9357ae2a37aa857dd96c7489c201d104e5d0b9"}, - {file = "clevercsv-0.8.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1ed99467ba2d47a2e1e81e990f74c7542d2cd0da120d922c5c992c17ac3ba026"}, - {file = "clevercsv-0.8.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1be9c6f2e73117a77d0b0491c07738fd177ba5e2bf996ac9a221417b223162d7"}, - {file = "clevercsv-0.8.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b942ee944264e5c4dbf276de579a502d11d3571daec97af5ebe54e6cadf2b77"}, - {file = "clevercsv-0.8.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48a8a81c1f324e2a5589e0de9b6bd965f1dd19b54b0e9e7f97cab5edf888d486"}, - {file = "clevercsv-0.8.2-cp311-cp311-win_amd64.whl", hash = "sha256:e474cc07167010c4cb6b1a65588309bc855537cae01ab63cdf61a511e69b4722"}, - {file = "clevercsv-0.8.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:2bfa4fe39b3b51bcf07d2f8cf033d7ac53bac5292ef7b9a88bae7c9e6689f366"}, - {file = "clevercsv-0.8.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3e2528f89ee483878c3c8030a2d2da4eef2a8a7ac3036adad0767c1025a99df7"}, - {file = "clevercsv-0.8.2-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:8832093b49defb2f224a98158265fe0bbee9ed6a70a8105cf8d7c4b949a8e95b"}, - {file = "clevercsv-0.8.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b68a7fbddef0e1746d3ec5e4d114e1900eb1a86d71250b0b208622daa5d2c7c"}, - {file = "clevercsv-0.8.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9cb807e7bea5a18cca4d51d1abc58e2f975759b7a0bcb78e1677c56ac7827e9a"}, - {file = "clevercsv-0.8.2-cp312-cp312-win_amd64.whl", hash = "sha256:74133c7225037052247584cf2df99b052fce4659a423c30f0ea84532e0a30924"}, - {file = "clevercsv-0.8.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:cb9241fe5d6a2e3330c52c04fd18b7c279bbdeb7d0ecef8c4267f14336021d78"}, - {file = "clevercsv-0.8.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c1129f1328c0940b13b9b08a72f04c8b0a85a6a021994999f34cd3abe19ca206"}, - {file = "clevercsv-0.8.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7f75f46a4d6b75380f2c0b8190fed6fbb7c1400246ce52a71c68f59baf1ec362"}, - {file = "clevercsv-0.8.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bf6058a4930fde2ff00ab82e0ec961247b6e2dd503f67a16f51382b8654cbc2"}, - {file = "clevercsv-0.8.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d9f29bb3fb4c0d35416cc0baf9221d1476f3bee4c367c3618f81ac0f45b71af"}, - {file = "clevercsv-0.8.2-cp38-cp38-win_amd64.whl", hash = "sha256:17455184e44ce60bb4d21eddd477c7f505b735faff82012d5c857dd31a22e0aa"}, - {file = "clevercsv-0.8.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:dcf560e643c1fb37d3523a11b0dfbce0bda63ac831d8c71fa2973b262bc1603f"}, - {file = "clevercsv-0.8.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:41513c6ff653baded084a96318e4bdc078a9cce4ff5f9b4656d49aa2421e2e74"}, - {file = "clevercsv-0.8.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:71b88f1181fba6e5f6a46292d27f068bdc50200b5660b82a770adfcfb5ae076e"}, - {file = "clevercsv-0.8.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff57768d060ac6509dd33a65fa1d2a0fbb70cd62d0075d80a96367a2a9150765"}, - {file = "clevercsv-0.8.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f9514a169270fd095827698d1d5d80a8a3785f600441a11fb3a469ad5209eeb"}, - {file = "clevercsv-0.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:850f9c112377a73b0bac093df45b846513d34509ba3774fe04061ba4a4afca92"}, - {file = "clevercsv-0.8.2.tar.gz", hash = "sha256:fac1b9671bd77e7835f5fe22898df88b1a11cfd924ff71fc2d0f066065dea940"}, -] - -[package.dependencies] -chardet = ">=3.0" -packaging = ">=23.0" -regex = ">=2018.11" - -[package.extras] -dev = ["faust-cchardet (>=2.1.18)", "furo", "green", "m2r2", "pandas (>=1.0.0)", "pytest (>=2.6)", "sphinx", "tabview (>=1.4)", "termcolor", "wilderness (>=0.1.5)"] -docs = ["furo", "m2r2", "sphinx"] -full = ["faust-cchardet (>=2.1.18)", "pandas (>=1.0.0)", "tabview (>=1.4)", "wilderness (>=0.1.5)"] -precommit = ["wilderness (>=0.1.5)"] -tests = ["faust-cchardet (>=2.1.18)", "pandas (>=1.0.0)", "tabview (>=1.4)", "wilderness (>=0.1.5)"] - -[[package]] -name = "click" -version = "8.1.7" -description = "Composable command line interface toolkit" -optional = false -python-versions = ">=3.7" -files = [ - {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, - {file = "click-8.1.7.tar.gz", hash = 
"sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[[package]] -name = "click-didyoumean" -version = "0.3.1" -description = "Enables git-like *did-you-mean* feature in click" -optional = true -python-versions = ">=3.6.2" -files = [ - {file = "click_didyoumean-0.3.1-py3-none-any.whl", hash = "sha256:5c4bb6007cfea5f2fd6583a2fb6701a22a41eb98957e63d0fac41c10e7c3117c"}, - {file = "click_didyoumean-0.3.1.tar.gz", hash = "sha256:4f82fdff0dbe64ef8ab2279bd6aa3f6a99c3b28c05aa09cbfc07c9d7fbb5a463"}, -] - -[package.dependencies] -click = ">=7" - -[[package]] -name = "click-plugins" -version = "1.1.1" -description = "An extension module for click to enable registering CLI commands via setuptools entry-points." -optional = true -python-versions = "*" -files = [ - {file = "click-plugins-1.1.1.tar.gz", hash = "sha256:46ab999744a9d831159c3411bb0c79346d94a444df9a3a3742e9ed63645f264b"}, - {file = "click_plugins-1.1.1-py2.py3-none-any.whl", hash = "sha256:5d262006d3222f5057fd81e1623d4443e41dcda5dc815c06b442aa3c02889fc8"}, -] - -[package.dependencies] -click = ">=4.0" - -[package.extras] -dev = ["coveralls", "pytest (>=3.6)", "pytest-cov", "wheel"] - -[[package]] -name = "click-repl" -version = "0.3.0" -description = "REPL plugin for Click" -optional = true -python-versions = ">=3.6" -files = [ - {file = "click-repl-0.3.0.tar.gz", hash = "sha256:17849c23dba3d667247dc4defe1757fff98694e90fe37474f3feebb69ced26a9"}, - {file = "click_repl-0.3.0-py3-none-any.whl", hash = "sha256:fb7e06deb8da8de86180a33a9da97ac316751c094c6899382da7feeeeb51b812"}, -] - -[package.dependencies] -click = ">=7.0" -prompt-toolkit = ">=3.0.36" - -[package.extras] -testing = ["pytest (>=7.2.1)", "pytest-cov (>=4.0.0)", "tox (>=4.4.3)"] - -[[package]] -name = "codespell" -version = "2.3.0" -description = "Codespell" -optional = false -python-versions = ">=3.8" -files = [ - {file = "codespell-2.3.0-py3-none-any.whl", hash = "sha256:a9c7cef2501c9cfede2110fd6d4e5e62296920efe9abfb84648df866e47f58d1"}, - {file = "codespell-2.3.0.tar.gz", hash = "sha256:360c7d10f75e65f67bad720af7007e1060a5d395670ec11a7ed1fed9dd17471f"}, -] - -[package.extras] -dev = ["Pygments", "build", "chardet", "pre-commit", "pytest", "pytest-cov", "pytest-dependency", "ruff", "tomli", "twine"] -hard-encoding-detection = ["chardet"] -toml = ["tomli"] -types = ["chardet (>=5.1.0)", "mypy", "pytest", "pytest-cov", "pytest-dependency"] - -[[package]] -name = "cohere" -version = "5.8.0" -description = "" -optional = false -python-versions = "<4.0,>=3.8" -files = [ - {file = "cohere-5.8.0-py3-none-any.whl", hash = "sha256:f87f709be6dfe3dce57bef0dd5e90924e8828fb8d334c96fc27663b6a7298c6b"}, - {file = "cohere-5.8.0.tar.gz", hash = "sha256:c4e1ab064d66cc0170091f614b4ea22f55e079f2c7fe9e0de8752fd46f8d2a70"}, -] - -[package.dependencies] -boto3 = ">=1.34.0,<2.0.0" -fastavro = ">=1.9.4,<2.0.0" -httpx = ">=0.21.2" -httpx-sse = "0.4.0" -parameterized = ">=0.9.0,<0.10.0" -pydantic = ">=1.9.2" -pydantic-core = ">=2.18.2,<3.0.0" -requests = ">=2.0.0,<3.0.0" -tokenizers = ">=0.15,<1" -types-requests = ">=2.0.0,<3.0.0" -typing_extensions = ">=4.0.0" - -[[package]] -name = "colorama" -version = "0.4.6" -description = "Cross-platform colored terminal text." 
-optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -files = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] - -[[package]] -name = "coloredlogs" -version = "15.0.1" -description = "Colored terminal output for Python's logging module" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "coloredlogs-15.0.1-py2.py3-none-any.whl", hash = "sha256:612ee75c546f53e92e70049c9dbfcc18c935a2b9a53b66085ce9ef6a6e5c0934"}, - {file = "coloredlogs-15.0.1.tar.gz", hash = "sha256:7c991aa71a4577af2f82600d8f8f3a89f936baeaf9b50a9c197da014e5bf16b0"}, -] - -[package.dependencies] -humanfriendly = ">=9.1" - -[package.extras] -cron = ["capturer (>=2.4)"] - -[[package]] -name = "colorlog" -version = "6.8.2" -description = "Add colours to the output of Python's logging module." -optional = false -python-versions = ">=3.6" -files = [ - {file = "colorlog-6.8.2-py3-none-any.whl", hash = "sha256:4dcbb62368e2800cb3c5abd348da7e53f6c362dda502ec27c560b2e58a66bd33"}, - {file = "colorlog-6.8.2.tar.gz", hash = "sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "sys_platform == \"win32\""} - -[package.extras] -development = ["black", "flake8", "mypy", "pytest", "types-colorama"] - -[[package]] -name = "comm" -version = "0.2.2" -description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." -optional = false -python-versions = ">=3.8" -files = [ - {file = "comm-0.2.2-py3-none-any.whl", hash = "sha256:e6fb86cb70ff661ee8c9c14e7d36d6de3b4066f1441be4063df9c5009f0a64d3"}, - {file = "comm-0.2.2.tar.gz", hash = "sha256:3fd7a84065306e07bea1773df6eb8282de51ba82f77c72f9c85716ab11fe980e"}, -] - -[package.dependencies] -traitlets = ">=4" - -[package.extras] -test = ["pytest"] - -[[package]] -name = "composio-core" -version = "0.5.3" -description = "Core package to act as a bridge between composio platform and other services." -optional = false -python-versions = "<4,>=3.9" -files = [ - {file = "composio_core-0.5.3-py3-none-any.whl", hash = "sha256:9a0fd28e27a2310b2d510ab131f99bb99ee9dc58bb19bdc47588469ef90f761a"}, - {file = "composio_core-0.5.3.tar.gz", hash = "sha256:15315a38adc6af4ffd7addac9cf998436db85f9b3fefe8b56774055f1e559a9a"}, -] - -[package.dependencies] -aiohttp = "*" -click = "*" -docker = ">=7.1.0" -e2b-code-interpreter = "*" -fastapi = "*" -gql = "*" -importlib-metadata = ">=4.8.1" -inflection = ">=0.5.1" -jsonref = ">=1.1.0" -jsonschema = ">=4.21.1,<5" -paramiko = ">=3.4.1" -pydantic = ">=2.6.4,<3" -pyperclip = ">=1.8.2,<2" -pysher = "1.0.8" -requests = ">=2.31.0,<3" -requests-toolbelt = "*" -rich = ">=13.7.1,<14" -sentry-sdk = ">=2.0.0" -uvicorn = "*" - -[package.extras] -all = ["diskcache", "flake8", "networkx", "pathspec", "pygments", "ruff", "transformers", "tree-sitter (==0.21.3)", "tree-sitter-languages"] - -[[package]] -name = "composio-langchain" -version = "0.5.3" -description = "Use Composio to get an array of tools with your LangChain agent." 
-optional = false -python-versions = "<4,>=3.9" -files = [ - {file = "composio_langchain-0.5.3-py3-none-any.whl", hash = "sha256:a4d35e551675943f3721bee94c58230f22d85d6a9947052f9d9577a582a0d618"}, - {file = "composio_langchain-0.5.3.tar.gz", hash = "sha256:0168f2919c0d3241a38b69bc11d0b339e68c09c2ce7f0cc7efe19d41932bfa48"}, -] - -[package.dependencies] -composio-core = "0.5.3" -langchain = ">=0.1.0" -langchain-openai = ">=0.0.2.post1" -langchainhub = ">=0.1.15" -pydantic = ">=2.6.4" - -[[package]] -name = "coolname" -version = "2.2.0" -description = "Random name and slug generator" -optional = false -python-versions = "*" -files = [ - {file = "coolname-2.2.0-py2.py3-none-any.whl", hash = "sha256:4d1563186cfaf71b394d5df4c744f8c41303b6846413645e31d31915cdeb13e8"}, - {file = "coolname-2.2.0.tar.gz", hash = "sha256:6c5d5731759104479e7ca195a9b64f7900ac5bead40183c09323c7d0be9e75c7"}, -] - -[[package]] -name = "couchbase" -version = "4.3.0" -description = "Python Client for Couchbase" -optional = true -python-versions = ">=3.7" -files = [ - {file = "couchbase-4.3.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:c55a036b84eb2e248c4c581e6b867087f361eaa7d86d2d5dfdaf991f12666ee0"}, - {file = "couchbase-4.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f8e9c379ddd304ebbedf51d541935ca433609236d4e8b6ae6dda383ae0f83852"}, - {file = "couchbase-4.3.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:043ae64f6d4df9ca753cfa507873ca525eaa9bd5fa8c63a37569e844ff1da676"}, - {file = "couchbase-4.3.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:84c0ab168d62b19df4314e781f8b34d276ac08c5a34738a5451d49070895e1b4"}, - {file = "couchbase-4.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:af3a05e6e7f03d3711062896f1ef4572d92b1df138bd2cbe8c359449d32f51ac"}, - {file = "couchbase-4.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:fbeec2297e64d251476fbc4d082b317ef87ce7a09b2315267d0db34b6c3a78f1"}, - {file = "couchbase-4.3.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:b04f542610e99691cf76bf7fe3a2106f48211d94c8841db3fdf32a6d28ec0b39"}, - {file = "couchbase-4.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9f599e13e949a6c1b5763de37845b9daaefd945c509f11adefa8c01ccd5b65f1"}, - {file = "couchbase-4.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4fd502d5a42eefd9715ac4b70c8888f9c0d7f53c072328b61908edab8f1cba28"}, - {file = "couchbase-4.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:758954ab6f6eb613dcf198e2f843790b49843a5ff881bd2ef7e8d52135001b3e"}, - {file = "couchbase-4.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fecdd3cb13d3faee3114d5361d7a0b59f0baaeedee56cf65fcc7d6c3adb3234f"}, - {file = "couchbase-4.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:2201862ce7acbe5026da6bb44a4b5a65a1ad42aab5b3a27b47311b511676bf36"}, - {file = "couchbase-4.3.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:e45f56483dcc17ff38dd871ed477edbf5d590ade75bf56aca4d4faae59a04abc"}, - {file = "couchbase-4.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:01372e361cd1157f3de82b914d04a9065e0c5e9e7c13437a27f596ed680825ac"}, - {file = "couchbase-4.3.0-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4a7f0d9fd6251cf32a17a5cde666a902fcad0da440c47beba6a15b129f76d4e5"}, - {file = "couchbase-4.3.0-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:63cc1bd3d7b8748e478229b622a0d247ab80357e32f2b8d52bc01321701094c2"}, - {file = 
"couchbase-4.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c687aa2e24ca83c75b6544830eec82dc5733e40c8faeabe7f69dff51b4cd5013"}, - {file = "couchbase-4.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:38cb3734d7e77cd4fff287feb1f424d8626963b7e878317e8ca43346c37ff52c"}, - {file = "couchbase-4.3.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:35da8ffd1f02ffd1d1bd44f07546e06ce72a163a0989d0554e8c3ab5a5602bef"}, - {file = "couchbase-4.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b65ad1fd1f1ef790698feb8cadfc20d7694826bd1cbc32516c9bd5ebf16a0db8"}, - {file = "couchbase-4.3.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c2527f78dcc7013a9c3946f50e5377d98efc77d0d9936ddc1561c0f46d08c1f9"}, - {file = "couchbase-4.3.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0679715dedd057d5dae7dfee07eb833d0bd3d951a82d1a76590db534186c1d5c"}, - {file = "couchbase-4.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2335517a5e772530e67a496b308b97939360bd388deced850bde1cba2229f7de"}, - {file = "couchbase-4.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:685cf11f9d44f25ee144c17a28f459976507066493e367ba4d899bb57c475f2a"}, - {file = "couchbase-4.3.0.tar.gz", hash = "sha256:70f6bbbc4818413c4c63eed411ba65c8b980e138c6093ac0eb38f1d7c9ea3bc2"}, -] - -[[package]] -name = "coverage" -version = "7.6.1" -description = "Code coverage measurement for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16"}, - {file = "coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36"}, - {file = "coverage-7.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02"}, - {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc"}, - {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23"}, - {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34"}, - {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c"}, - {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959"}, - {file = "coverage-7.6.1-cp310-cp310-win32.whl", hash = "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232"}, - {file = "coverage-7.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0"}, - {file = "coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93"}, - {file = "coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3"}, - {file = "coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff"}, - {file = 
"coverage-7.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d"}, - {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6"}, - {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56"}, - {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234"}, - {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133"}, - {file = "coverage-7.6.1-cp311-cp311-win32.whl", hash = "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c"}, - {file = "coverage-7.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6"}, - {file = "coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778"}, - {file = "coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391"}, - {file = "coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8"}, - {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d"}, - {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca"}, - {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163"}, - {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a"}, - {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d"}, - {file = "coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5"}, - {file = "coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb"}, - {file = "coverage-7.6.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106"}, - {file = "coverage-7.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9"}, - {file = "coverage-7.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c"}, - {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a"}, - {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060"}, - {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862"}, - {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388"}, - {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155"}, - {file = "coverage-7.6.1-cp313-cp313-win32.whl", hash = "sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a"}, - {file = "coverage-7.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129"}, - {file = "coverage-7.6.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e"}, - {file = "coverage-7.6.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962"}, - {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb"}, - {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704"}, - {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b"}, - {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f"}, - {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223"}, - {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3"}, - {file = "coverage-7.6.1-cp313-cp313t-win32.whl", hash = "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f"}, - {file = "coverage-7.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657"}, - {file = "coverage-7.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6db04803b6c7291985a761004e9060b2bca08da6d04f26a7f2294b8623a0c1a0"}, - {file = "coverage-7.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f1adfc8ac319e1a348af294106bc6a8458a0f1633cc62a1446aebc30c5fa186a"}, - {file = "coverage-7.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a95324a9de9650a729239daea117df21f4b9868ce32e63f8b650ebe6cef5595b"}, - {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b43c03669dc4618ec25270b06ecd3ee4fa94c7f9b3c14bae6571ca00ef98b0d3"}, - {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8929543a7192c13d177b770008bc4e8119f2e1f881d563fc6b6305d2d0ebe9de"}, - {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:a09ece4a69cf399510c8ab25e0950d9cf2b42f7b3cb0374f95d2e2ff594478a6"}, - {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9054a0754de38d9dbd01a46621636689124d666bad1936d76c0341f7d71bf569"}, - {file = 
"coverage-7.6.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0dbde0f4aa9a16fa4d754356a8f2e36296ff4d83994b2c9d8398aa32f222f989"}, - {file = "coverage-7.6.1-cp38-cp38-win32.whl", hash = "sha256:da511e6ad4f7323ee5702e6633085fb76c2f893aaf8ce4c51a0ba4fc07580ea7"}, - {file = "coverage-7.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:3f1156e3e8f2872197af3840d8ad307a9dd18e615dc64d9ee41696f287c57ad8"}, - {file = "coverage-7.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abd5fd0db5f4dc9289408aaf34908072f805ff7792632250dcb36dc591d24255"}, - {file = "coverage-7.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:547f45fa1a93154bd82050a7f3cddbc1a7a4dd2a9bf5cb7d06f4ae29fe94eaf8"}, - {file = "coverage-7.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645786266c8f18a931b65bfcefdbf6952dd0dea98feee39bd188607a9d307ed2"}, - {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e0b2df163b8ed01d515807af24f63de04bebcecbd6c3bfeff88385789fdf75a"}, - {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:609b06f178fe8e9f89ef676532760ec0b4deea15e9969bf754b37f7c40326dbc"}, - {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:702855feff378050ae4f741045e19a32d57d19f3e0676d589df0575008ea5004"}, - {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2bdb062ea438f22d99cba0d7829c2ef0af1d768d1e4a4f528087224c90b132cb"}, - {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9c56863d44bd1c4fe2abb8a4d6f5371d197f1ac0ebdee542f07f35895fc07f36"}, - {file = "coverage-7.6.1-cp39-cp39-win32.whl", hash = "sha256:6e2cd258d7d927d09493c8df1ce9174ad01b381d4729a9d8d4e38670ca24774c"}, - {file = "coverage-7.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:06a737c882bd26d0d6ee7269b20b12f14a8704807a01056c80bb881a4b2ce6ca"}, - {file = "coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df"}, - {file = "coverage-7.6.1.tar.gz", hash = "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d"}, -] - -[package.dependencies] -tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} - -[package.extras] -toml = ["tomli"] - -[[package]] -name = "crewai" -version = "0.36.1" -description = "Cutting-edge framework for orchestrating role-playing, autonomous AI agents. By fostering collaborative intelligence, CrewAI empowers agents to work together seamlessly, tackling complex tasks." 
-optional = false -python-versions = "<=3.13,>=3.10" -files = [ - {file = "crewai-0.36.1-py3-none-any.whl", hash = "sha256:dbaa50d102542ea0c790bd62511b35234b2f5fa8d2333a6598beb84f407f0e00"}, - {file = "crewai-0.36.1.tar.gz", hash = "sha256:ea50ec5d3ef2df85e1b520efd9331bebb49ed7143e6cd1feec645da49217d2b0"}, -] - -[package.dependencies] -appdirs = ">=1.4.4,<2.0.0" -click = ">=8.1.7,<9.0.0" -crewai-tools = {version = ">=0.4.7,<0.5.0", optional = true, markers = "extra == \"tools\""} -embedchain = ">=0.1.114,<0.2.0" -instructor = "1.3.3" -jsonref = ">=1.1.0,<2.0.0" -langchain = ">0.2,<=0.3" -openai = ">=1.13.3,<2.0.0" -opentelemetry-api = ">=1.22.0,<2.0.0" -opentelemetry-exporter-otlp-proto-http = ">=1.22.0,<2.0.0" -opentelemetry-sdk = ">=1.22.0,<2.0.0" -pydantic = ">=2.4.2,<3.0.0" -python-dotenv = ">=1.0.0,<2.0.0" -regex = ">=2023.12.25,<2024.0.0" - -[package.extras] -agentops = ["agentops (>=0.1.9,<0.2.0)"] -tools = ["crewai-tools (>=0.4.7,<0.5.0)"] - -[[package]] -name = "crewai-tools" -version = "0.4.26" -description = "Set of tools for the crewAI framework" -optional = false -python-versions = "<=3.13,>=3.10" -files = [ - {file = "crewai_tools-0.4.26-py3-none-any.whl", hash = "sha256:c5d062a613f65bda4af2621eba66b14a6227b0e1b12ef385828a28259f5d6ce5"}, - {file = "crewai_tools-0.4.26.tar.gz", hash = "sha256:f73569b543a886f0ef52cc8ae5dc7a3a287be9f7614518e4c64886454aeb5c4b"}, -] - -[package.dependencies] -beautifulsoup4 = ">=4.12.3,<5.0.0" -chromadb = ">=0.4.22,<0.5.0" -docker = ">=7.1.0,<8.0.0" -docx2txt = ">=0.8,<0.9" -embedchain = ">=0.1.114,<0.2.0" -lancedb = ">=0.5.4,<0.6.0" -langchain = ">0.2,<=0.3" -openai = ">=1.12.0,<2.0.0" -pydantic = ">=2.6.1,<3.0.0" -pyright = ">=1.1.350,<2.0.0" -pytest = ">=8.0.0,<9.0.0" -pytube = ">=15.0.0,<16.0.0" -requests = ">=2.31.0,<3.0.0" -selenium = ">=4.18.1,<5.0.0" - -[[package]] -name = "cryptography" -version = "42.0.8" -description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:81d8a521705787afe7a18d5bfb47ea9d9cc068206270aad0b96a725022e18d2e"}, - {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:961e61cefdcb06e0c6d7e3a1b22ebe8b996eb2bf50614e89384be54c48c6b63d"}, - {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3ec3672626e1b9e55afd0df6d774ff0e953452886e06e0f1eb7eb0c832e8902"}, - {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e599b53fd95357d92304510fb7bda8523ed1f79ca98dce2f43c115950aa78801"}, - {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5226d5d21ab681f432a9c1cf8b658c0cb02533eece706b155e5fbd8a0cdd3949"}, - {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:6b7c4f03ce01afd3b76cf69a5455caa9cfa3de8c8f493e0d3ab7d20611c8dae9"}, - {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:2346b911eb349ab547076f47f2e035fc8ff2c02380a7cbbf8d87114fa0f1c583"}, - {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ad803773e9df0b92e0a817d22fd8a3675493f690b96130a5e24f1b8fabbea9c7"}, - {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2f66d9cd9147ee495a8374a45ca445819f8929a3efcd2e3df6428e46c3cbb10b"}, - {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:d45b940883a03e19e944456a558b67a41160e367a719833c53de6911cabba2b7"}, - {file = "cryptography-42.0.8-cp37-abi3-win32.whl", hash = "sha256:a0c5b2b0585b6af82d7e385f55a8bc568abff8923af147ee3c07bd8b42cda8b2"}, - {file = "cryptography-42.0.8-cp37-abi3-win_amd64.whl", hash = "sha256:57080dee41209e556a9a4ce60d229244f7a66ef52750f813bfbe18959770cfba"}, - {file = "cryptography-42.0.8-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:dea567d1b0e8bc5764b9443858b673b734100c2871dc93163f58c46a97a83d28"}, - {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4783183f7cb757b73b2ae9aed6599b96338eb957233c58ca8f49a49cc32fd5e"}, - {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0608251135d0e03111152e41f0cc2392d1e74e35703960d4190b2e0f4ca9c70"}, - {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dc0fdf6787f37b1c6b08e6dfc892d9d068b5bdb671198c72072828b80bd5fe4c"}, - {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:9c0c1716c8447ee7dbf08d6db2e5c41c688544c61074b54fc4564196f55c25a7"}, - {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fff12c88a672ab9c9c1cf7b0c80e3ad9e2ebd9d828d955c126be4fd3e5578c9e"}, - {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:cafb92b2bc622cd1aa6a1dce4b93307792633f4c5fe1f46c6b97cf67073ec961"}, - {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:31f721658a29331f895a5a54e7e82075554ccfb8b163a18719d342f5ffe5ecb1"}, - {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b297f90c5723d04bcc8265fc2a0f86d4ea2e0f7ab4b6994459548d3a6b992a14"}, - {file = "cryptography-42.0.8-cp39-abi3-win32.whl", hash = "sha256:2f88d197e66c65be5e42cd72e5c18afbfae3f741742070e3019ac8f4ac57262c"}, - {file = "cryptography-42.0.8-cp39-abi3-win_amd64.whl", hash = 
"sha256:fa76fbb7596cc5839320000cdd5d0955313696d9511debab7ee7278fc8b5c84a"}, - {file = "cryptography-42.0.8-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ba4f0a211697362e89ad822e667d8d340b4d8d55fae72cdd619389fb5912eefe"}, - {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:81884c4d096c272f00aeb1f11cf62ccd39763581645b0812e99a91505fa48e0c"}, - {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c9bb2ae11bfbab395bdd072985abde58ea9860ed84e59dbc0463a5d0159f5b71"}, - {file = "cryptography-42.0.8-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7016f837e15b0a1c119d27ecd89b3515f01f90a8615ed5e9427e30d9cdbfed3d"}, - {file = "cryptography-42.0.8-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5a94eccb2a81a309806027e1670a358b99b8fe8bfe9f8d329f27d72c094dde8c"}, - {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dec9b018df185f08483f294cae6ccac29e7a6e0678996587363dc352dc65c842"}, - {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:343728aac38decfdeecf55ecab3264b015be68fc2816ca800db649607aeee648"}, - {file = "cryptography-42.0.8-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:013629ae70b40af70c9a7a5db40abe5d9054e6f4380e50ce769947b73bf3caad"}, - {file = "cryptography-42.0.8.tar.gz", hash = "sha256:8d09d05439ce7baa8e9e95b07ec5b6c886f548deb7e0f69ef25f64b3bce842f2"}, -] - -[package.dependencies] -cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} - -[package.extras] -docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] -docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] -nox = ["nox"] -pep8test = ["check-sdist", "click", "mypy", "ruff"] -sdist = ["build"] -ssh = ["bcrypt (>=3.1.5)"] -test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] -test-randomorder = ["pytest-randomly"] - -[[package]] -name = "ctransformers" -version = "0.2.27" -description = "Python bindings for the Transformer models implemented in C/C++ using GGML library." -optional = true -python-versions = "*" -files = [ - {file = "ctransformers-0.2.27-py3-none-any.whl", hash = "sha256:6a3ba47556471850d95fdbc59299a82ab91c9dc8b40201c5e7e82d71360772d9"}, - {file = "ctransformers-0.2.27.tar.gz", hash = "sha256:25653d4be8a5ed4e2d3756544c1e9881bf95404be5371c3ed506a256c28663d5"}, -] - -[package.dependencies] -huggingface-hub = "*" -py-cpuinfo = ">=9.0.0,<10.0.0" - -[package.extras] -cuda = ["nvidia-cublas-cu12", "nvidia-cuda-runtime-cu12"] -gptq = ["exllama (==0.1.0)"] -tests = ["pytest"] - -[[package]] -name = "dataclasses-json" -version = "0.6.7" -description = "Easily serialize dataclasses to and from JSON." 
-optional = false -python-versions = "<4.0,>=3.7" -files = [ - {file = "dataclasses_json-0.6.7-py3-none-any.whl", hash = "sha256:0dbf33f26c8d5305befd61b39d2b3414e8a407bedc2834dea9b8d642666fb40a"}, - {file = "dataclasses_json-0.6.7.tar.gz", hash = "sha256:b6b3e528266ea45b9535223bc53ca645f5208833c29229e847b3f26a1cc55fc0"}, -] - -[package.dependencies] -marshmallow = ">=3.18.0,<4.0.0" -typing-inspect = ">=0.4.0,<1" - -[[package]] -name = "datasets" -version = "2.19.2" -description = "HuggingFace community-driven open-source library of datasets" -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "datasets-2.19.2-py3-none-any.whl", hash = "sha256:e07ff15d75b1af75c87dd96323ba2a361128d495136652f37fd62f918d17bb4e"}, - {file = "datasets-2.19.2.tar.gz", hash = "sha256:eccb82fb3bb5ee26ccc6d7a15b7f1f834e2cc4e59b7cff7733a003552bad51ef"}, -] - -[package.dependencies] -aiohttp = "*" -dill = ">=0.3.0,<0.3.9" -filelock = "*" -fsspec = {version = ">=2023.1.0,<=2024.3.1", extras = ["http"]} -huggingface-hub = ">=0.21.2" -multiprocess = "*" -numpy = ">=1.17" -packaging = "*" -pandas = "*" -pyarrow = ">=12.0.0" -pyarrow-hotfix = "*" -pyyaml = ">=5.1" -requests = ">=2.32.1" -tqdm = ">=4.62.1" -xxhash = "*" - -[package.extras] -apache-beam = ["apache-beam (>=2.26.0)"] -audio = ["librosa", "soundfile (>=0.12.1)"] -benchmarks = ["tensorflow (==2.12.0)", "torch (==2.0.1)", "transformers (==4.30.1)"] -dev = ["Pillow (>=9.4.0)", "absl-py", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "faiss-cpu (>=1.6.4)", "jax (>=0.3.14)", "jaxlib (>=0.3.14)", "joblib (<1.3.0)", "joblibspark", "librosa", "lz4", "polars[timezone] (>=0.20.0)", "protobuf (<4.0.0)", "py7zr", "pyspark (>=3.4)", "pytest", "pytest-datadir", "pytest-xdist", "rarfile (>=4.0)", "ruff (>=0.3.0)", "s3fs", "s3fs (>=2021.11.1)", "soundfile (>=0.12.1)", "sqlalchemy", "tensorflow (>=2.6.0)", "tiktoken", "torch", "torch (>=2.0.0)", "transformers", "typing-extensions (>=4.6.1)", "zstandard"] -docs = ["s3fs", "tensorflow (>=2.6.0)", "torch", "transformers"] -jax = ["jax (>=0.3.14)", "jaxlib (>=0.3.14)"] -metrics-tests = ["Werkzeug (>=1.0.1)", "accelerate", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "requests-file (>=1.5.1)", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "six (>=1.15.0,<1.16.0)", "spacy (>=3.0.0)", "texttable (>=1.6.3)", "tldextract", "tldextract (>=3.1.0)", "toml (>=0.10.1)", "typer (<0.5.0)"] -quality = ["ruff (>=0.3.0)"] -s3 = ["s3fs"] -tensorflow = ["tensorflow (>=2.6.0)"] -tensorflow-gpu = ["tensorflow (>=2.6.0)"] -tests = ["Pillow (>=9.4.0)", "absl-py", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "faiss-cpu (>=1.6.4)", "jax (>=0.3.14)", "jaxlib (>=0.3.14)", "joblib (<1.3.0)", "joblibspark", "librosa", "lz4", "polars[timezone] (>=0.20.0)", "protobuf (<4.0.0)", "py7zr", "pyspark (>=3.4)", "pytest", "pytest-datadir", "pytest-xdist", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "soundfile (>=0.12.1)", "sqlalchemy", "tensorflow (>=2.6.0)", "tiktoken", "torch (>=2.0.0)", "transformers", "typing-extensions (>=4.6.1)", "zstandard"] -torch = ["torch"] -vision = ["Pillow (>=9.4.0)"] - -[[package]] -name = "debugpy" -version = "1.8.5" -description = "An implementation of the Debug Adapter Protocol for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "debugpy-1.8.5-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:7e4d594367d6407a120b76bdaa03886e9eb652c05ba7f87e37418426ad2079f7"}, - {file = 
"debugpy-1.8.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4413b7a3ede757dc33a273a17d685ea2b0c09dbd312cc03f5534a0fd4d40750a"}, - {file = "debugpy-1.8.5-cp310-cp310-win32.whl", hash = "sha256:dd3811bd63632bb25eda6bd73bea8e0521794cda02be41fa3160eb26fc29e7ed"}, - {file = "debugpy-1.8.5-cp310-cp310-win_amd64.whl", hash = "sha256:b78c1250441ce893cb5035dd6f5fc12db968cc07f91cc06996b2087f7cefdd8e"}, - {file = "debugpy-1.8.5-cp311-cp311-macosx_12_0_universal2.whl", hash = "sha256:606bccba19f7188b6ea9579c8a4f5a5364ecd0bf5a0659c8a5d0e10dcee3032a"}, - {file = "debugpy-1.8.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db9fb642938a7a609a6c865c32ecd0d795d56c1aaa7a7a5722d77855d5e77f2b"}, - {file = "debugpy-1.8.5-cp311-cp311-win32.whl", hash = "sha256:4fbb3b39ae1aa3e5ad578f37a48a7a303dad9a3d018d369bc9ec629c1cfa7408"}, - {file = "debugpy-1.8.5-cp311-cp311-win_amd64.whl", hash = "sha256:345d6a0206e81eb68b1493ce2fbffd57c3088e2ce4b46592077a943d2b968ca3"}, - {file = "debugpy-1.8.5-cp312-cp312-macosx_12_0_universal2.whl", hash = "sha256:5b5c770977c8ec6c40c60d6f58cacc7f7fe5a45960363d6974ddb9b62dbee156"}, - {file = "debugpy-1.8.5-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0a65b00b7cdd2ee0c2cf4c7335fef31e15f1b7056c7fdbce9e90193e1a8c8cb"}, - {file = "debugpy-1.8.5-cp312-cp312-win32.whl", hash = "sha256:c9f7c15ea1da18d2fcc2709e9f3d6de98b69a5b0fff1807fb80bc55f906691f7"}, - {file = "debugpy-1.8.5-cp312-cp312-win_amd64.whl", hash = "sha256:28ced650c974aaf179231668a293ecd5c63c0a671ae6d56b8795ecc5d2f48d3c"}, - {file = "debugpy-1.8.5-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:3df6692351172a42af7558daa5019651f898fc67450bf091335aa8a18fbf6f3a"}, - {file = "debugpy-1.8.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1cd04a73eb2769eb0bfe43f5bfde1215c5923d6924b9b90f94d15f207a402226"}, - {file = "debugpy-1.8.5-cp38-cp38-win32.whl", hash = "sha256:8f913ee8e9fcf9d38a751f56e6de12a297ae7832749d35de26d960f14280750a"}, - {file = "debugpy-1.8.5-cp38-cp38-win_amd64.whl", hash = "sha256:a697beca97dad3780b89a7fb525d5e79f33821a8bc0c06faf1f1289e549743cf"}, - {file = "debugpy-1.8.5-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:0a1029a2869d01cb777216af8c53cda0476875ef02a2b6ff8b2f2c9a4b04176c"}, - {file = "debugpy-1.8.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e84c276489e141ed0b93b0af648eef891546143d6a48f610945416453a8ad406"}, - {file = "debugpy-1.8.5-cp39-cp39-win32.whl", hash = "sha256:ad84b7cde7fd96cf6eea34ff6c4a1b7887e0fe2ea46e099e53234856f9d99a34"}, - {file = "debugpy-1.8.5-cp39-cp39-win_amd64.whl", hash = "sha256:7b0fe36ed9d26cb6836b0a51453653f8f2e347ba7348f2bbfe76bfeb670bfb1c"}, - {file = "debugpy-1.8.5-py2.py3-none-any.whl", hash = "sha256:55919dce65b471eff25901acf82d328bbd5b833526b6c1364bd5133754777a44"}, - {file = "debugpy-1.8.5.zip", hash = "sha256:b2112cfeb34b4507399d298fe7023a16656fc553ed5246536060ca7bd0e668d0"}, -] - -[[package]] -name = "decorator" -version = "5.1.1" -description = "Decorators for Humans" -optional = false -python-versions = ">=3.5" -files = [ - {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, - {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, -] - -[[package]] -name = "deepdiff" -version = "7.0.1" -description = "Deep Difference and Search of any Python 
object/data. Recreate objects by adding adding deltas to each other." -optional = false -python-versions = ">=3.8" -files = [ - {file = "deepdiff-7.0.1-py3-none-any.whl", hash = "sha256:447760081918216aa4fd4ca78a4b6a848b81307b2ea94c810255334b759e1dc3"}, - {file = "deepdiff-7.0.1.tar.gz", hash = "sha256:260c16f052d4badbf60351b4f77e8390bee03a0b516246f6839bc813fb429ddf"}, -] - -[package.dependencies] -ordered-set = ">=4.1.0,<4.2.0" - -[package.extras] -cli = ["click (==8.1.7)", "pyyaml (==6.0.1)"] -optimize = ["orjson"] - -[[package]] -name = "defusedxml" -version = "0.7.1" -description = "XML bomb protection for Python stdlib modules" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, - {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, -] - -[[package]] -name = "deprecated" -version = "1.2.14" -description = "Python @deprecated decorator to deprecate old python classes, functions or methods." -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, - {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, -] - -[package.dependencies] -wrapt = ">=1.10,<2" - -[package.extras] -dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] - -[[package]] -name = "deprecation" -version = "2.1.0" -description = "A library to handle automated deprecations" -optional = false -python-versions = "*" -files = [ - {file = "deprecation-2.1.0-py2.py3-none-any.whl", hash = "sha256:a10811591210e1fb0e768a8c25517cabeabcba6f0bf96564f8ff45189f90b14a"}, - {file = "deprecation-2.1.0.tar.gz", hash = "sha256:72b3bde64e5d778694b0cf68178aed03d15e15477116add3fb773e581f9518ff"}, -] - -[package.dependencies] -packaging = "*" - -[[package]] -name = "dictdiffer" -version = "0.9.0" -description = "Dictdiffer is a library that helps you to diff and patch dictionaries." 
-optional = false -python-versions = "*" -files = [ - {file = "dictdiffer-0.9.0-py2.py3-none-any.whl", hash = "sha256:442bfc693cfcadaf46674575d2eba1c53b42f5e404218ca2c2ff549f2df56595"}, - {file = "dictdiffer-0.9.0.tar.gz", hash = "sha256:17bacf5fbfe613ccf1b6d512bd766e6b21fb798822a133aa86098b8ac9997578"}, -] - -[package.extras] -all = ["Sphinx (>=3)", "check-manifest (>=0.42)", "mock (>=1.3.0)", "numpy (>=1.13.0)", "numpy (>=1.15.0)", "numpy (>=1.18.0)", "numpy (>=1.20.0)", "pytest (==5.4.3)", "pytest (>=6)", "pytest-cov (>=2.10.1)", "pytest-isort (>=1.2.0)", "pytest-pycodestyle (>=2)", "pytest-pycodestyle (>=2.2.0)", "pytest-pydocstyle (>=2)", "pytest-pydocstyle (>=2.2.0)", "sphinx (>=3)", "sphinx-rtd-theme (>=0.2)", "tox (>=3.7.0)"] -docs = ["Sphinx (>=3)", "sphinx-rtd-theme (>=0.2)"] -numpy = ["numpy (>=1.13.0)", "numpy (>=1.15.0)", "numpy (>=1.18.0)", "numpy (>=1.20.0)"] -tests = ["check-manifest (>=0.42)", "mock (>=1.3.0)", "pytest (==5.4.3)", "pytest (>=6)", "pytest-cov (>=2.10.1)", "pytest-isort (>=1.2.0)", "pytest-pycodestyle (>=2)", "pytest-pycodestyle (>=2.2.0)", "pytest-pydocstyle (>=2)", "pytest-pydocstyle (>=2.2.0)", "sphinx (>=3)", "tox (>=3.7.0)"] - -[[package]] -name = "dill" -version = "0.3.8" -description = "serialize all of Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "dill-0.3.8-py3-none-any.whl", hash = "sha256:c36ca9ffb54365bdd2f8eb3eff7d2a21237f8452b57ace88b1ac615b7e815bd7"}, - {file = "dill-0.3.8.tar.gz", hash = "sha256:3ebe3c479ad625c4553aca177444d89b486b1d84982eeacded644afc0cf797ca"}, -] - -[package.extras] -graph = ["objgraph (>=1.7.2)"] -profile = ["gprof2dot (>=2022.7.29)"] - -[[package]] -name = "diskcache" -version = "5.6.3" -description = "Disk Cache -- Disk and file backed persistent cache." 
-optional = false -python-versions = ">=3" -files = [ - {file = "diskcache-5.6.3-py3-none-any.whl", hash = "sha256:5e31b2d5fbad117cc363ebaf6b689474db18a1f6438bc82358b024abd4c2ca19"}, - {file = "diskcache-5.6.3.tar.gz", hash = "sha256:2c3a3fa2743d8535d832ec61c2054a1641f41775aa7c556758a109941e33e4fc"}, -] - -[[package]] -name = "distlib" -version = "0.3.8" -description = "Distribution utilities" -optional = false -python-versions = "*" -files = [ - {file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"}, - {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"}, -] - -[[package]] -name = "distro" -version = "1.9.0" -description = "Distro - an OS platform information API" -optional = false -python-versions = ">=3.6" -files = [ - {file = "distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2"}, - {file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"}, -] - -[[package]] -name = "dnspython" -version = "2.6.1" -description = "DNS toolkit" -optional = false -python-versions = ">=3.8" -files = [ - {file = "dnspython-2.6.1-py3-none-any.whl", hash = "sha256:5ef3b9680161f6fa89daf8ad451b5f1a33b18ae8a1c6778cdf4b43f08c0a6e50"}, - {file = "dnspython-2.6.1.tar.gz", hash = "sha256:e8f0f9c23a7b7cb99ded64e6c3a6f3e701d78f50c55e002b839dea7225cff7cc"}, -] - -[package.extras] -dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "mypy (>=1.8)", "pylint (>=3)", "pytest (>=7.4)", "pytest-cov (>=4.1.0)", "sphinx (>=7.2.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"] -dnssec = ["cryptography (>=41)"] -doh = ["h2 (>=4.1.0)", "httpcore (>=1.0.0)", "httpx (>=0.26.0)"] -doq = ["aioquic (>=0.9.25)"] -idna = ["idna (>=3.6)"] -trio = ["trio (>=0.23)"] -wmi = ["wmi (>=1.5.1)"] - -[[package]] -name = "docker" -version = "7.1.0" -description = "A Python library for the Docker Engine API." -optional = false -python-versions = ">=3.8" -files = [ - {file = "docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0"}, - {file = "docker-7.1.0.tar.gz", hash = "sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c"}, -] - -[package.dependencies] -pywin32 = {version = ">=304", markers = "sys_platform == \"win32\""} -requests = ">=2.26.0" -urllib3 = ">=1.26.0" - -[package.extras] -dev = ["coverage (==7.2.7)", "pytest (==7.4.2)", "pytest-cov (==4.1.0)", "pytest-timeout (==2.1.0)", "ruff (==0.1.8)"] -docs = ["myst-parser (==0.18.0)", "sphinx (==5.1.1)"] -ssh = ["paramiko (>=2.4.3)"] -websockets = ["websocket-client (>=1.3.0)"] - -[[package]] -name = "docstring-parser" -version = "0.16" -description = "Parse Python docstrings in reST, Google and Numpydoc format" -optional = false -python-versions = ">=3.6,<4.0" -files = [ - {file = "docstring_parser-0.16-py3-none-any.whl", hash = "sha256:bf0a1387354d3691d102edef7ec124f219ef639982d096e26e3b60aeffa90637"}, - {file = "docstring_parser-0.16.tar.gz", hash = "sha256:538beabd0af1e2db0146b6bd3caa526c35a34d61af9fd2887f3a8a27a739aa6e"}, -] - -[[package]] -name = "docx2txt" -version = "0.8" -description = "A pure python-based utility to extract text and images from docx files." 
-optional = false -python-versions = "*" -files = [ - {file = "docx2txt-0.8.tar.gz", hash = "sha256:2c06d98d7cfe2d3947e5760a57d924e3ff07745b379c8737723922e7009236e5"}, -] - -[[package]] -name = "dspy-ai" -version = "2.4.13" -description = "DSPy" -optional = false -python-versions = ">=3.9" -files = [ - {file = "dspy-ai-2.4.13.tar.gz", hash = "sha256:0ed5648d8267b6a4ebe5b72ec5dbcca9fa194d800885a0182cad93c312cd3166"}, - {file = "dspy_ai-2.4.13-py3-none-any.whl", hash = "sha256:b43aa117b4b6fcb009274f61adcfb0a1dbe1cbb4a370da3bd14cd4d230f17665"}, -] - -[package.dependencies] -backoff = "*" -datasets = "*" -joblib = ">=1.3,<2.0" -openai = ">=0.28.1,<2.0.0" -optuna = "*" -pandas = "*" -pydantic = ">=2.0,<3.0" -regex = "*" -requests = "*" -structlog = "*" -tqdm = "*" -ujson = "*" - -[package.extras] -chromadb = ["chromadb (>=0.4.14,<0.5.0)"] -faiss-cpu = ["faiss-cpu", "sentence-transformers"] -fastembed = ["fastembed"] -google-vertex-ai = ["google-cloud-aiplatform (==1.43.0)"] -groq = ["groq (>=0.8.0,<0.9.0)"] -marqo = ["marqo (>=3.1.0,<3.2.0)"] -milvus = ["pymilvus (>=2.3.7,<2.4.0)"] -mongodb = ["pymongo (>=3.12.0,<3.13.0)"] -myscale = ["clickhouse-connect"] -pinecone = ["pinecone-client (>=2.2.4,<2.3.0)"] -qdrant = ["fastembed", "qdrant-client"] -snowflake = ["snowflake-snowpark-python"] -weaviate = ["weaviate-client (>=4.6.5,<4.7.0)"] - -[[package]] -name = "duckdb" -version = "1.0.0" -description = "DuckDB in-process database" -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "duckdb-1.0.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:4a8ce2d1f9e1c23b9bab3ae4ca7997e9822e21563ff8f646992663f66d050211"}, - {file = "duckdb-1.0.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:19797670f20f430196e48d25d082a264b66150c264c1e8eae8e22c64c2c5f3f5"}, - {file = "duckdb-1.0.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:b71c342090fe117b35d866a91ad6bffce61cd6ff3e0cff4003f93fc1506da0d8"}, - {file = "duckdb-1.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25dd69f44ad212c35ae2ea736b0e643ea2b70f204b8dff483af1491b0e2a4cec"}, - {file = "duckdb-1.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8da5f293ecb4f99daa9a9352c5fd1312a6ab02b464653a0c3a25ab7065c45d4d"}, - {file = "duckdb-1.0.0-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3207936da9967ddbb60644ec291eb934d5819b08169bc35d08b2dedbe7068c60"}, - {file = "duckdb-1.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:1128d6c9c33e883b1f5df6b57c1eb46b7ab1baf2650912d77ee769aaa05111f9"}, - {file = "duckdb-1.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:02310d263474d0ac238646677feff47190ffb82544c018b2ff732a4cb462c6ef"}, - {file = "duckdb-1.0.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:75586791ab2702719c284157b65ecefe12d0cca9041da474391896ddd9aa71a4"}, - {file = "duckdb-1.0.0-cp311-cp311-macosx_12_0_universal2.whl", hash = "sha256:83bb415fc7994e641344f3489e40430ce083b78963cb1057bf714ac3a58da3ba"}, - {file = "duckdb-1.0.0-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:bee2e0b415074e84c5a2cefd91f6b5ebeb4283e7196ba4ef65175a7cef298b57"}, - {file = "duckdb-1.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fa5a4110d2a499312609544ad0be61e85a5cdad90e5b6d75ad16b300bf075b90"}, - {file = "duckdb-1.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fa389e6a382d4707b5f3d1bc2087895925ebb92b77e9fe3bfb23c9b98372fdc"}, - {file = 
"duckdb-1.0.0-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7ede6f5277dd851f1a4586b0c78dc93f6c26da45e12b23ee0e88c76519cbdbe0"}, - {file = "duckdb-1.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0b88cdbc0d5c3e3d7545a341784dc6cafd90fc035f17b2f04bf1e870c68456e5"}, - {file = "duckdb-1.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:fd1693cdd15375156f7fff4745debc14e5c54928589f67b87fb8eace9880c370"}, - {file = "duckdb-1.0.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:c65a7fe8a8ce21b985356ee3ec0c3d3b3b2234e288e64b4cfb03356dbe6e5583"}, - {file = "duckdb-1.0.0-cp312-cp312-macosx_12_0_universal2.whl", hash = "sha256:e5a8eda554379b3a43b07bad00968acc14dd3e518c9fbe8f128b484cf95e3d16"}, - {file = "duckdb-1.0.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:a1b6acdd54c4a7b43bd7cb584975a1b2ff88ea1a31607a2b734b17960e7d3088"}, - {file = "duckdb-1.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a677bb1b6a8e7cab4a19874249d8144296e6e39dae38fce66a80f26d15e670df"}, - {file = "duckdb-1.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:752e9d412b0a2871bf615a2ede54be494c6dc289d076974eefbf3af28129c759"}, - {file = "duckdb-1.0.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3aadb99d098c5e32d00dc09421bc63a47134a6a0de9d7cd6abf21780b678663c"}, - {file = "duckdb-1.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:83b7091d4da3e9301c4f9378833f5ffe934fb1ad2b387b439ee067b2c10c8bb0"}, - {file = "duckdb-1.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:6a8058d0148b544694cb5ea331db44f6c2a00a7b03776cc4dd1470735c3d5ff7"}, - {file = "duckdb-1.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e40cb20e5ee19d44bc66ec99969af791702a049079dc5f248c33b1c56af055f4"}, - {file = "duckdb-1.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7bce1bc0de9af9f47328e24e6e7e39da30093179b1c031897c042dd94a59c8e"}, - {file = "duckdb-1.0.0-cp37-cp37m-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8355507f7a04bc0a3666958f4414a58e06141d603e91c0fa5a7c50e49867fb6d"}, - {file = "duckdb-1.0.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:39f1a46f5a45ad2886dc9b02ce5b484f437f90de66c327f86606d9ba4479d475"}, - {file = "duckdb-1.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:a6d29ba477b27ae41676b62c8fae8d04ee7cbe458127a44f6049888231ca58fa"}, - {file = "duckdb-1.0.0-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:1bea713c1925918714328da76e79a1f7651b2b503511498ccf5e007a7e67d49e"}, - {file = "duckdb-1.0.0-cp38-cp38-macosx_12_0_universal2.whl", hash = "sha256:bfe67f3bcf181edbf6f918b8c963eb060e6aa26697d86590da4edc5707205450"}, - {file = "duckdb-1.0.0-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:dbc6093a75242f002be1d96a6ace3fdf1d002c813e67baff52112e899de9292f"}, - {file = "duckdb-1.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba1881a2b11c507cee18f8fd9ef10100be066fddaa2c20fba1f9a664245cd6d8"}, - {file = "duckdb-1.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:445d0bb35087c522705c724a75f9f1c13f1eb017305b694d2686218d653c8142"}, - {file = "duckdb-1.0.0-cp38-cp38-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:224553432e84432ffb9684f33206572477049b371ce68cc313a01e214f2fbdda"}, - {file = "duckdb-1.0.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:d3914032e47c4e76636ad986d466b63fdea65e37be8a6dfc484ed3f462c4fde4"}, - {file = 
"duckdb-1.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:af9128a2eb7e1bb50cd2c2020d825fb2946fdad0a2558920cd5411d998999334"}, - {file = "duckdb-1.0.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:dd2659a5dbc0df0de68f617a605bf12fe4da85ba24f67c08730984a0892087e8"}, - {file = "duckdb-1.0.0-cp39-cp39-macosx_12_0_universal2.whl", hash = "sha256:ac5a4afb0bc20725e734e0b2c17e99a274de4801aff0d4e765d276b99dad6d90"}, - {file = "duckdb-1.0.0-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:2c5a53bee3668d6e84c0536164589d5127b23d298e4c443d83f55e4150fafe61"}, - {file = "duckdb-1.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b980713244d7708b25ee0a73de0c65f0e5521c47a0e907f5e1b933d79d972ef6"}, - {file = "duckdb-1.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21cbd4f9fe7b7a56eff96c3f4d6778770dd370469ca2212eddbae5dd63749db5"}, - {file = "duckdb-1.0.0-cp39-cp39-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ed228167c5d49888c5ef36f6f9cbf65011c2daf9dcb53ea8aa7a041ce567b3e4"}, - {file = "duckdb-1.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:46d8395fbcea7231fd5032a250b673cc99352fef349b718a23dea2c0dd2b8dec"}, - {file = "duckdb-1.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:6ad1fc1a4d57e7616944166a5f9417bdbca1ea65c490797e3786e3a42e162d8a"}, - {file = "duckdb-1.0.0.tar.gz", hash = "sha256:a2a059b77bc7d5b76ae9d88e267372deff19c291048d59450c431e166233d453"}, -] - -[[package]] -name = "e2b" -version = "0.17.1" -description = "E2B SDK that give agents cloud environments" -optional = false -python-versions = "<4.0,>=3.8" -files = [ - {file = "e2b-0.17.1-py3-none-any.whl", hash = "sha256:c0698fd03b639f4dd88eed167a98af4d450668c0ae9805122a98f62f36f2491f"}, - {file = "e2b-0.17.1.tar.gz", hash = "sha256:9e69a059cb73334bac7db189287552af9321fb3ac8ced52557907e10c4310733"}, -] - -[package.dependencies] -aenum = ">=3.1.11" -aiohttp = ">=3.8.4" -jsonrpcclient = ">=4.0.3" -pydantic = "*" -python-dateutil = ">=2.8.2" -requests = ">=2.31.0" -typing-extensions = ">=4.8.0" -urllib3 = ">=1.25.3" -websockets = ">=11.0.3" - -[[package]] -name = "e2b-code-interpreter" -version = "0.0.10" -description = "E2B Code Interpreter - Stateful code execution" -optional = false -python-versions = "<4.0,>=3.8" -files = [ - {file = "e2b_code_interpreter-0.0.10-py3-none-any.whl", hash = "sha256:85700fad734334678a11e6b8cfea9dfd5af7f2f16b8f9a5950cf06b1877c02da"}, - {file = "e2b_code_interpreter-0.0.10.tar.gz", hash = "sha256:2882197b819e657c5b03083b2330c8e06117e7a584ca93e6d1acded9da517622"}, -] - -[package.dependencies] -e2b = ">=0.17.1" -pydantic = "*" -websocket-client = ">=1.7.0,<2.0.0" - -[[package]] -name = "ecdsa" -version = "0.19.0" -description = "ECDSA cryptographic signature library (pure python)" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.6" -files = [ - {file = "ecdsa-0.19.0-py2.py3-none-any.whl", hash = "sha256:2cea9b88407fdac7bbeca0833b189e4c9c53f2ef1e1eaa29f6224dbc809b707a"}, - {file = "ecdsa-0.19.0.tar.gz", hash = "sha256:60eaad1199659900dd0af521ed462b793bbdf867432b3948e87416ae4caf6bf8"}, -] - -[package.dependencies] -six = ">=1.9.0" - -[package.extras] -gmpy = ["gmpy"] -gmpy2 = ["gmpy2"] - -[[package]] -name = "elastic-transport" -version = "8.15.0" -description = "Transport classes and utilities shared among Python Elastic client libraries" -optional = false -python-versions = ">=3.8" -files = [ - {file = "elastic_transport-8.15.0-py3-none-any.whl", hash = 
"sha256:d7080d1dada2b4eee69e7574f9c17a76b42f2895eff428e562f94b0360e158c0"}, - {file = "elastic_transport-8.15.0.tar.gz", hash = "sha256:85d62558f9baafb0868c801233a59b235e61d7b4804c28c2fadaa866b6766233"}, -] - -[package.dependencies] -certifi = "*" -urllib3 = ">=1.26.2,<3" - -[package.extras] -develop = ["aiohttp", "furo", "httpx", "opentelemetry-api", "opentelemetry-sdk", "orjson", "pytest", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests", "respx", "sphinx (>2)", "sphinx-autodoc-typehints", "trustme"] - -[[package]] -name = "elasticsearch" -version = "8.15.0" -description = "Python client for Elasticsearch" -optional = false -python-versions = ">=3.8" -files = [ - {file = "elasticsearch-8.15.0-py3-none-any.whl", hash = "sha256:7ec4d0771b8f6f76c6331e61b408261f9307264735ec7f2539f580824faaafe3"}, - {file = "elasticsearch-8.15.0.tar.gz", hash = "sha256:d4702b4fa698ef154c6f1187988192343fb5505aee97fc4450a6d20df1683f35"}, -] - -[package.dependencies] -elastic-transport = ">=8.13,<9" - -[package.extras] -async = ["aiohttp (>=3,<4)"] -dev = ["aiohttp", "black", "build", "coverage", "isort", "jinja2", "mapbox-vector-tile", "nox", "numpy", "orjson", "pandas", "pyarrow", "pytest", "pytest-asyncio", "pytest-cov", "python-dateutil", "pyyaml (>=5.4)", "requests (>=2,<3)", "simsimd", "twine", "unasync"] -docs = ["sphinx", "sphinx-autodoc-typehints", "sphinx-rtd-theme"] -orjson = ["orjson (>=3)"] -pyarrow = ["pyarrow (>=1)"] -requests = ["requests (>=2.4.0,!=2.32.2,<3.0.0)"] -vectorstore-mmr = ["numpy (>=1)", "simsimd (>=3)"] - -[[package]] -name = "email-validator" -version = "2.2.0" -description = "A robust email address syntax and deliverability validation library." -optional = false -python-versions = ">=3.8" -files = [ - {file = "email_validator-2.2.0-py3-none-any.whl", hash = "sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631"}, - {file = "email_validator-2.2.0.tar.gz", hash = "sha256:cb690f344c617a714f22e66ae771445a1ceb46821152df8e165c5f9a364582b7"}, -] - -[package.dependencies] -dnspython = ">=2.0.0" -idna = ">=2.0.0" - -[[package]] -name = "embedchain" -version = "0.1.120" -description = "Simplest open source retrieval (RAG) framework" -optional = false -python-versions = "<=3.13,>=3.9" -files = [ - {file = "embedchain-0.1.120-py3-none-any.whl", hash = "sha256:9eaa946f8a7b394080c56067849d7852a78361dd5e7b099ebf42989c07a1814d"}, - {file = "embedchain-0.1.120.tar.gz", hash = "sha256:6061c261a054d677e5b9c4062146d45e04e8572c67152120913d61aee4c22ae3"}, -] - -[package.dependencies] -alembic = ">=1.13.1,<2.0.0" -beautifulsoup4 = ">=4.12.2,<5.0.0" -chromadb = ">=0.4.24,<0.5.0" -cohere = ">=5.3,<6.0" -google-cloud-aiplatform = ">=1.26.1,<2.0.0" -gptcache = ">=0.1.43,<0.2.0" -langchain = ">0.2,<=0.3" -langchain-cohere = ">=0.1.4,<0.2.0" -langchain-community = ">=0.2.6,<0.3.0" -langchain-openai = ">=0.1.7,<0.2.0" -mem0ai = ">=0.0.9,<0.0.10" -openai = ">=1.1.1" -posthog = ">=3.0.2,<4.0.0" -pypdf = ">=4.0.1,<5.0.0" -pysbd = ">=0.3.4,<0.4.0" -python-dotenv = ">=1.0.0,<2.0.0" -rich = ">=13.7.0,<14.0.0" -schema = ">=0.7.5,<0.8.0" -sqlalchemy = ">=2.0.27,<3.0.0" -tiktoken = ">=0.7.0,<0.8.0" - -[package.extras] -aws = ["langchain-aws (>=0.1.10,<0.2.0)"] -elasticsearch = ["elasticsearch (>=8.9.0,<9.0.0)"] -gmail = ["google-api-core (>=2.15.0,<3.0.0)", "google-api-python-client (>=2.111.0,<3.0.0)", "google-auth (>=2.25.2,<3.0.0)", "google-auth-httplib2 (>=0.2.0,<0.3.0)", "google-auth-oauthlib (>=1.2.0,<2.0.0)", "requests (>=2.31.0,<3.0.0)"] -google = 
["google-generativeai (>=0.3.0,<0.4.0)"] -googledrive = ["google-api-python-client (>=2.111.0,<3.0.0)", "google-auth-httplib2 (>=0.2.0,<0.3.0)", "google-auth-oauthlib (>=1.2.0,<2.0.0)"] -lancedb = ["lancedb (>=0.6.2,<0.7.0)"] -llama2 = ["replicate (>=0.15.4,<0.16.0)"] -milvus = ["pymilvus (==2.4.3)"] -mistralai = ["langchain-mistralai (>=0.1.9,<0.2.0)"] -mysql = ["mysql-connector-python (>=8.1.0,<9.0.0)"] -opensearch = ["opensearch-py (==2.3.1)"] -opensource = ["gpt4all (==2.0.2)", "sentence-transformers (>=2.2.2,<3.0.0)", "torch (==2.3.0)"] -postgres = ["psycopg (>=3.1.12,<4.0.0)", "psycopg-binary (>=3.1.12,<4.0.0)", "psycopg-pool (>=3.1.8,<4.0.0)"] -qdrant = ["qdrant-client (>=1.6.3,<2.0.0)"] -together = ["together (>=1.2.1,<2.0.0)"] -vertexai = ["langchain-google-vertexai (>=1.0.6,<2.0.0)"] -weaviate = ["weaviate-client (>=3.24.1,<4.0.0)"] - -[[package]] -name = "emoji" -version = "2.12.1" -description = "Emoji for Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "emoji-2.12.1-py3-none-any.whl", hash = "sha256:a00d62173bdadc2510967a381810101624a2f0986145b8da0cffa42e29430235"}, - {file = "emoji-2.12.1.tar.gz", hash = "sha256:4aa0488817691aa58d83764b6c209f8a27c0b3ab3f89d1b8dceca1a62e4973eb"}, -] - -[package.dependencies] -typing-extensions = ">=4.7.0" - -[package.extras] -dev = ["coverage", "pytest (>=7.4.4)"] - -[[package]] -name = "environs" -version = "9.5.0" -description = "simplified environment variable parsing" -optional = false -python-versions = ">=3.6" -files = [ - {file = "environs-9.5.0-py2.py3-none-any.whl", hash = "sha256:1e549569a3de49c05f856f40bce86979e7d5ffbbc4398e7f338574c220189124"}, - {file = "environs-9.5.0.tar.gz", hash = "sha256:a76307b36fbe856bdca7ee9161e6c466fd7fcffc297109a118c59b54e27e30c9"}, -] - -[package.dependencies] -marshmallow = ">=3.0.0" -python-dotenv = "*" - -[package.extras] -dev = ["dj-database-url", "dj-email-url", "django-cache-url", "flake8 (==4.0.1)", "flake8-bugbear (==21.9.2)", "mypy (==0.910)", "pre-commit (>=2.4,<3.0)", "pytest", "tox"] -django = ["dj-database-url", "dj-email-url", "django-cache-url"] -lint = ["flake8 (==4.0.1)", "flake8-bugbear (==21.9.2)", "mypy (==0.910)", "pre-commit (>=2.4,<3.0)"] -tests = ["dj-database-url", "dj-email-url", "django-cache-url", "pytest"] - -[[package]] -name = "exceptiongroup" -version = "1.2.2" -description = "Backport of PEP 654 (exception groups)" -optional = false -python-versions = ">=3.7" -files = [ - {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, - {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, -] - -[package.extras] -test = ["pytest (>=6)"] - -[[package]] -name = "execnet" -version = "2.1.1" -description = "execnet: rapid multi-Python deployment" -optional = false -python-versions = ">=3.8" -files = [ - {file = "execnet-2.1.1-py3-none-any.whl", hash = "sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc"}, - {file = "execnet-2.1.1.tar.gz", hash = "sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3"}, -] - -[package.extras] -testing = ["hatch", "pre-commit", "pytest", "tox"] - -[[package]] -name = "executing" -version = "2.0.1" -description = "Get the currently executing AST node of a frame, and other information" -optional = false -python-versions = ">=3.5" -files = [ - {file = "executing-2.0.1-py2.py3-none-any.whl", hash = 
"sha256:eac49ca94516ccc753f9fb5ce82603156e590b27525a8bc32cce8ae302eb61bc"}, - {file = "executing-2.0.1.tar.gz", hash = "sha256:35afe2ce3affba8ee97f2d69927fa823b08b472b7b994e36a52a964b93d16147"}, -] - -[package.extras] -tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"] - -[[package]] -name = "faiss-cpu" -version = "1.8.0.post1" -description = "A library for efficient similarity search and clustering of dense vectors." -optional = false -python-versions = ">=3.8" -files = [ - {file = "faiss_cpu-1.8.0.post1-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:fd84721eb599aa1da19b1b36345bb8705a60bb1d2887bbbc395a29e3d36a1a62"}, - {file = "faiss_cpu-1.8.0.post1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b78ff9079d15fd0f156bf5dd8a2975a8abffac1854a86ece263eec1500a2e836"}, - {file = "faiss_cpu-1.8.0.post1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9de25c943d1789e35fe06a20884c88cd32aedbb1a33bb8da2238cdea7bd9633f"}, - {file = "faiss_cpu-1.8.0.post1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:adae0f1b144e7216da696f14bc4991ca4300c94baaa59247c3d322588e661c95"}, - {file = "faiss_cpu-1.8.0.post1-cp310-cp310-win_amd64.whl", hash = "sha256:00345290680a444a4b4cb2d98a3844bb5c401a2160fee547c7631d759fd2ec3e"}, - {file = "faiss_cpu-1.8.0.post1-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:8d4bade10cb63e9f9ff261751edd7eb097b1f4bf30be4d0d25d6f688559d795e"}, - {file = "faiss_cpu-1.8.0.post1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:20bd43eca3b7d77e71ea56b7a558cc28e900d8abff417eb285e2d92e95d934d4"}, - {file = "faiss_cpu-1.8.0.post1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8542a87743a7f94ac656fd3e9592ad57e58b04d961ad2fe654a22a8ca59defdb"}, - {file = "faiss_cpu-1.8.0.post1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed46928de3dc20170b10fec89c54075a11383c2aaf4f119c63e0f6ae5a507d74"}, - {file = "faiss_cpu-1.8.0.post1-cp311-cp311-win_amd64.whl", hash = "sha256:4fa5fc8ea210b919aa469e27d6687e50052db906e7fec3f2257178b1384fa18b"}, - {file = "faiss_cpu-1.8.0.post1-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:96aec0d08a3099883af3a9b6356cfe736e8bd879318a940a27e9d1ae6f33d788"}, - {file = "faiss_cpu-1.8.0.post1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:92b06147fa84732ecdc965922e8ef50dc7011ef8be65821ff4abb2118cb5dce0"}, - {file = "faiss_cpu-1.8.0.post1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:709ef9394d1148aef70dbe890edbde8c282a4a2e06a8b69ab64f65e90f5ba572"}, - {file = "faiss_cpu-1.8.0.post1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:327a9c30971bf72cd8392b15eb4aff5d898c453212eae656dfaa3ba555b9ca0c"}, - {file = "faiss_cpu-1.8.0.post1-cp312-cp312-win_amd64.whl", hash = "sha256:8756f1d93faba56349883fa2f5d47fe36bb2f11f789200c6b1c691ef805485f2"}, - {file = "faiss_cpu-1.8.0.post1-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:f4a3045909c447bf1955b70083891e80f2c87c5427f20cae25245e08ec5c9e52"}, - {file = "faiss_cpu-1.8.0.post1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8842b7fc921ca1fafdb0845f2ba029e79df04eebae72ab135239f93478a9b7a2"}, - {file = "faiss_cpu-1.8.0.post1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d5a9799634e32c3862d5436d1e78112ed9a38f319e4523f5916e55d86adda8f"}, - {file = "faiss_cpu-1.8.0.post1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:2a70923b0fbbb40f647e20bcbcbfd472277e6d84bb23ff12d2a94b6841806b55"}, - {file = "faiss_cpu-1.8.0.post1-cp38-cp38-win_amd64.whl", hash = "sha256:ce652df3c4dd50c88ac9235d072f30ce60694dc422c5f523bbbcab320e8f3097"}, - {file = "faiss_cpu-1.8.0.post1-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:83ef04b17b19189dd6601a941bdf4bfa9de0740dbcd80305aeba51a1b1955f80"}, - {file = "faiss_cpu-1.8.0.post1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c50c8697077470ede7f1939ef8dc8a846ec19cf1893b543f6b67f9af03b0a122"}, - {file = "faiss_cpu-1.8.0.post1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98ce428a7a67fe5c64047280e5e12a8dbdecf7002f9d127b26cf1db354e9fe76"}, - {file = "faiss_cpu-1.8.0.post1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f3b36b80380bae523e3198cfb4a137867055945ce7bf10d18fe9f0284f2fb47"}, - {file = "faiss_cpu-1.8.0.post1-cp39-cp39-win_amd64.whl", hash = "sha256:4fcc67a2353f08a20c1ab955de3cde14ef3b447761b26244a5aa849c15cbc9b3"}, - {file = "faiss_cpu-1.8.0.post1.tar.gz", hash = "sha256:5686af34414678c3d49c4fa8d774df7156e9cb48d7029071e56230e74b01cc13"}, -] - -[package.dependencies] -numpy = ">=1.0,<2.0" -packaging = "*" - -[[package]] -name = "fake-useragent" -version = "1.5.1" -description = "Up-to-date simple useragent faker with real world database" -optional = false -python-versions = "*" -files = [ - {file = "fake-useragent-1.5.1.tar.gz", hash = "sha256:6387269f5a2196b5ba7ed8935852f75486845a1c95c50e72460e6a8e762f5c49"}, - {file = "fake_useragent-1.5.1-py3-none-any.whl", hash = "sha256:57415096557c8a4e23b62a375c21c55af5fd4ba30549227f562d2c4f5b60e3b3"}, -] - -[[package]] -name = "fastapi" -version = "0.111.1" -description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" -optional = false -python-versions = ">=3.8" -files = [ - {file = "fastapi-0.111.1-py3-none-any.whl", hash = "sha256:4f51cfa25d72f9fbc3280832e84b32494cf186f50158d364a8765aabf22587bf"}, - {file = "fastapi-0.111.1.tar.gz", hash = "sha256:ddd1ac34cb1f76c2e2d7f8545a4bcb5463bce4834e81abf0b189e0c359ab2413"}, -] - -[package.dependencies] -email_validator = ">=2.0.0" -fastapi-cli = ">=0.0.2" -httpx = ">=0.23.0" -jinja2 = ">=2.11.2" -pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0" -python-multipart = ">=0.0.7" -starlette = ">=0.37.2,<0.38.0" -typing-extensions = ">=4.8.0" -uvicorn = {version = ">=0.12.0", extras = ["standard"]} - -[package.extras] -all = ["email_validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.7)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] - -[[package]] -name = "fastapi-cli" -version = "0.0.5" -description = "Run and manage FastAPI apps from the command line with FastAPI CLI. 
🚀" -optional = false -python-versions = ">=3.8" -files = [ - {file = "fastapi_cli-0.0.5-py3-none-any.whl", hash = "sha256:e94d847524648c748a5350673546bbf9bcaeb086b33c24f2e82e021436866a46"}, - {file = "fastapi_cli-0.0.5.tar.gz", hash = "sha256:d30e1239c6f46fcb95e606f02cdda59a1e2fa778a54b64686b3ff27f6211ff9f"}, -] - -[package.dependencies] -typer = ">=0.12.3" -uvicorn = {version = ">=0.15.0", extras = ["standard"]} - -[package.extras] -standard = ["uvicorn[standard] (>=0.15.0)"] - -[[package]] -name = "fastavro" -version = "1.9.5" -description = "Fast read/write of AVRO files" -optional = false -python-versions = ">=3.8" -files = [ - {file = "fastavro-1.9.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:61253148e95dd2b6457247b441b7555074a55de17aef85f5165bfd5facf600fc"}, - {file = "fastavro-1.9.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b604935d671ad47d888efc92a106f98e9440874108b444ac10e28d643109c937"}, - {file = "fastavro-1.9.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0adbf4956fd53bd74c41e7855bb45ccce953e0eb0e44f5836d8d54ad843f9944"}, - {file = "fastavro-1.9.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:53d838e31457db8bf44460c244543f75ed307935d5fc1d93bc631cc7caef2082"}, - {file = "fastavro-1.9.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:07b6288e8681eede16ff077632c47395d4925c2f51545cd7a60f194454db2211"}, - {file = "fastavro-1.9.5-cp310-cp310-win_amd64.whl", hash = "sha256:ef08cf247fdfd61286ac0c41854f7194f2ad05088066a756423d7299b688d975"}, - {file = "fastavro-1.9.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c52d7bb69f617c90935a3e56feb2c34d4276819a5c477c466c6c08c224a10409"}, - {file = "fastavro-1.9.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85e05969956003df8fa4491614bc62fe40cec59e94d06e8aaa8d8256ee3aab82"}, - {file = "fastavro-1.9.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06e6df8527493a9f0d9a8778df82bab8b1aa6d80d1b004e5aec0a31dc4dc501c"}, - {file = "fastavro-1.9.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:27820da3b17bc01cebb6d1687c9d7254b16d149ef458871aaa207ed8950f3ae6"}, - {file = "fastavro-1.9.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:195a5b8e33eb89a1a9b63fa9dce7a77d41b3b0cd785bac6044df619f120361a2"}, - {file = "fastavro-1.9.5-cp311-cp311-win_amd64.whl", hash = "sha256:be612c109efb727bfd36d4d7ed28eb8e0506617b7dbe746463ebbf81e85eaa6b"}, - {file = "fastavro-1.9.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b133456c8975ec7d2a99e16a7e68e896e45c821b852675eac4ee25364b999c14"}, - {file = "fastavro-1.9.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf586373c3d1748cac849395aad70c198ee39295f92e7c22c75757b5c0300fbe"}, - {file = "fastavro-1.9.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:724ef192bc9c55d5b4c7df007f56a46a21809463499856349d4580a55e2b914c"}, - {file = "fastavro-1.9.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bfd11fe355a8f9c0416803afac298960eb4c603a23b1c74ff9c1d3e673ea7185"}, - {file = "fastavro-1.9.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9827d1654d7bcb118ef5efd3e5b2c9ab2a48d44dac5e8c6a2327bc3ac3caa828"}, - {file = "fastavro-1.9.5-cp312-cp312-win_amd64.whl", hash = "sha256:d84b69dca296667e6137ae7c9a96d060123adbc0c00532cc47012b64d38b47e9"}, - {file = "fastavro-1.9.5-cp38-cp38-macosx_11_0_universal2.whl", hash = 
"sha256:fb744e9de40fb1dc75354098c8db7da7636cba50a40f7bef3b3fb20f8d189d88"}, - {file = "fastavro-1.9.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:240df8bacd13ff5487f2465604c007d686a566df5cbc01d0550684eaf8ff014a"}, - {file = "fastavro-1.9.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3bb35c25bbc3904e1c02333bc1ae0173e0a44aa37a8e95d07e681601246e1f1"}, - {file = "fastavro-1.9.5-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:b47a54a9700de3eabefd36dabfb237808acae47bc873cada6be6990ef6b165aa"}, - {file = "fastavro-1.9.5-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:48c7b5e6d2f3bf7917af301c275b05c5be3dd40bb04e80979c9e7a2ab31a00d1"}, - {file = "fastavro-1.9.5-cp38-cp38-win_amd64.whl", hash = "sha256:05d13f98d4e325be40387e27da9bd60239968862fe12769258225c62ec906f04"}, - {file = "fastavro-1.9.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5b47948eb196263f6111bf34e1cd08d55529d4ed46eb50c1bc8c7c30a8d18868"}, - {file = "fastavro-1.9.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85b7a66ad521298ad9373dfe1897a6ccfc38feab54a47b97922e213ae5ad8870"}, - {file = "fastavro-1.9.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44cb154f863ad80e41aea72a709b12e1533b8728c89b9b1348af91a6154ab2f5"}, - {file = "fastavro-1.9.5-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b5f7f2b1fe21231fd01f1a2a90e714ae267fe633cd7ce930c0aea33d1c9f4901"}, - {file = "fastavro-1.9.5-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:88fbbe16c61d90a89d78baeb5a34dc1c63a27b115adccdbd6b1fb6f787deacf2"}, - {file = "fastavro-1.9.5-cp39-cp39-win_amd64.whl", hash = "sha256:753f5eedeb5ca86004e23a9ce9b41c5f25eb64a876f95edcc33558090a7f3e4b"}, - {file = "fastavro-1.9.5.tar.gz", hash = "sha256:6419ebf45f88132a9945c51fe555d4f10bb97c236288ed01894f957c6f914553"}, -] - -[package.extras] -codecs = ["cramjam", "lz4", "zstandard"] -lz4 = ["lz4"] -snappy = ["cramjam"] -zstandard = ["zstandard"] - -[[package]] -name = "filelock" -version = "3.15.4" -description = "A platform independent file lock." -optional = false -python-versions = ">=3.8" -files = [ - {file = "filelock-3.15.4-py3-none-any.whl", hash = "sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7"}, - {file = "filelock-3.15.4.tar.gz", hash = "sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb"}, -] - -[package.extras] -docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)", "virtualenv (>=20.26.2)"] -typing = ["typing-extensions (>=4.8)"] - -[[package]] -name = "filetype" -version = "1.2.0" -description = "Infer file type and MIME type of any file/buffer. No external dependencies." 
-optional = false -python-versions = "*" -files = [ - {file = "filetype-1.2.0-py2.py3-none-any.whl", hash = "sha256:7ce71b6880181241cf7ac8697a2f1eb6a8bd9b429f7ad6d27b8db9ba5f1c2d25"}, - {file = "filetype-1.2.0.tar.gz", hash = "sha256:66b56cd6474bf41d8c54660347d37afcc3f7d1970648de365c102ef77548aadb"}, -] - -[[package]] -name = "firecrawl-py" -version = "0.0.16" -description = "Python SDK for Firecrawl API" -optional = false -python-versions = ">=3.8" -files = [ - {file = "firecrawl_py-0.0.16-py3-none-any.whl", hash = "sha256:9024f483b501852a6b9c4e6cdfc9e8dde452d922afac357080bb278a0c9c2a26"}, - {file = "firecrawl_py-0.0.16.tar.gz", hash = "sha256:6c662fa0a549bc7f5c0acb704baba6731869ca0451094034264dfc1b4eb086e4"}, -] - -[package.dependencies] -requests = "*" - -[[package]] -name = "flaml" -version = "2.2.0" -description = "A fast library for automated machine learning and tuning" -optional = false -python-versions = ">=3.6" -files = [ - {file = "FLAML-2.2.0-py3-none-any.whl", hash = "sha256:eb7429801879f66901ec13892ea21a914e3a5a094151b621a924e554637ec4a4"}, - {file = "flaml-2.2.0.tar.gz", hash = "sha256:edf6bc2b5dda66e035d26048e8e181af8f5a827feffa138e67e712989c939359"}, -] - -[package.dependencies] -NumPy = ">=1.17" - -[package.extras] -autogen = ["diskcache", "openai (==0.27.8)", "termcolor"] -automl = ["lightgbm (>=2.3.1)", "pandas (>=1.1.4)", "scikit-learn (>=1.0.0)", "scipy (>=1.4.1)", "xgboost (>=0.90,<3.0.0)"] -autozero = ["packaging", "pandas", "scikit-learn"] -azureml = ["azureml-mlflow"] -benchmark = ["catboost (>=0.26)", "pandas (==1.1.4)", "psutil (==5.8.0)", "xgboost (==1.3.3)"] -blendsearch = ["optuna (>=2.8.0,<=3.6.1)", "packaging"] -catboost = ["catboost (>=0.26,<1.2)", "catboost (>=0.26,<=1.2.5)"] -forecast = ["hcrystalball (==0.1.10)", "holidays (<0.14)", "prophet (>=1.0.1)", "pytorch-forecasting (>=0.9.0)", "pytorch-lightning (==1.9.0)", "statsmodels (>=0.12.2)", "tensorboardX (==2.6)"] -hf = ["datasets", "nltk", "rouge-score", "seqeval", "transformers[torch] (==4.26)"] -mathchat = ["diskcache", "openai (==0.27.8)", "pydantic (==1.10.9)", "sympy", "termcolor", "wolframalpha"] -nlp = ["datasets", "nltk", "rouge-score", "seqeval", "transformers[torch] (==4.26)"] -nni = ["nni"] -notebook = ["jupyter"] -openai = ["diskcache", "openai (==0.27.8)"] -ray = ["ray[tune] (>=1.13,<2.0)"] -retrievechat = ["chromadb", "diskcache", "openai (==0.27.8)", "sentence-transformers", "termcolor", "tiktoken"] -spark = ["joblib (<=1.3.2)", "joblibspark (>=0.5.0)", "pyspark (>=3.2.0)"] -synapse = ["joblibspark (>=0.5.0)", "optuna (>=2.8.0,<=3.6.1)", "pyspark (>=3.2.0)"] -test = ["catboost (>=0.26)", "catboost (>=0.26,<1.2)", "coverage (>=5.3)", "dataclasses", "datasets", "hcrystalball (==0.1.10)", "ipykernel", "joblib (<=1.3.2)", "joblibspark (>=0.5.0)", "jupyter", "lightgbm (>=2.3.1)", "mlflow", "nbconvert", "nbformat", "nltk", "openml", "optuna (>=2.8.0,<=3.6.1)", "packaging", "pandas (>=1.1.4)", "pre-commit", "psutil (==5.8.0)", "pydantic (==1.10.9)", "pyspark (>=3.2.0)", "pytest (>=6.1.1)", "pytorch-forecasting (>=0.9.0,<=0.10.1)", "pytorch-lightning (<1.9.1)", "requests (<2.29.0)", "rgf-python", "rouge-score", "scikit-learn (>=1.0.0)", "scipy (>=1.4.1)", "seqeval", "statsmodels (>=0.12.2)", "sympy", "tensorboardX (==2.6)", "thop", "torch", "torchvision", "transformers[torch] (==4.26)", "wolframalpha", "xgboost (>=0.90,<2.0.0)"] -ts-forecast = ["hcrystalball (==0.1.10)", "holidays (<0.14)", "prophet (>=1.0.1)", "statsmodels (>=0.12.2)"] -vw = ["scikit-learn", "vowpalwabbit (>=8.10.0,<9.0.0)"] - 
-[[package]] -name = "flatbuffers" -version = "24.3.25" -description = "The FlatBuffers serialization format for Python" -optional = false -python-versions = "*" -files = [ - {file = "flatbuffers-24.3.25-py2.py3-none-any.whl", hash = "sha256:8dbdec58f935f3765e4f7f3cf635ac3a77f83568138d6a2311f524ec96364812"}, - {file = "flatbuffers-24.3.25.tar.gz", hash = "sha256:de2ec5b203f21441716617f38443e0a8ebf3d25bf0d9c0bb0ce68fa00ad546a4"}, -] - -[[package]] -name = "flower" -version = "2.0.1" -description = "Celery Flower" -optional = true -python-versions = ">=3.7" -files = [ - {file = "flower-2.0.1-py2.py3-none-any.whl", hash = "sha256:9db2c621eeefbc844c8dd88be64aef61e84e2deb29b271e02ab2b5b9f01068e2"}, - {file = "flower-2.0.1.tar.gz", hash = "sha256:5ab717b979530770c16afb48b50d2a98d23c3e9fe39851dcf6bc4d01845a02a0"}, -] - -[package.dependencies] -celery = ">=5.0.5" -humanize = "*" -prometheus-client = ">=0.8.0" -pytz = "*" -tornado = ">=5.0.0,<7.0.0" - -[[package]] -name = "frozendict" -version = "2.4.4" -description = "A simple immutable dictionary" -optional = false -python-versions = ">=3.6" -files = [ - {file = "frozendict-2.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4a59578d47b3949437519b5c39a016a6116b9e787bb19289e333faae81462e59"}, - {file = "frozendict-2.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12a342e439aef28ccec533f0253ea53d75fe9102bd6ea928ff530e76eac38906"}, - {file = "frozendict-2.4.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f79c26dff10ce11dad3b3627c89bb2e87b9dd5958c2b24325f16a23019b8b94"}, - {file = "frozendict-2.4.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2bd009cf4fc47972838a91e9b83654dc9a095dc4f2bb3a37c3f3124c8a364543"}, - {file = "frozendict-2.4.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:87ebcde21565a14fe039672c25550060d6f6d88cf1f339beac094c3b10004eb0"}, - {file = "frozendict-2.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:fefeb700bc7eb8b4c2dc48704e4221860d254c8989fb53488540bc44e44a1ac2"}, - {file = "frozendict-2.4.4-cp310-cp310-win_arm64.whl", hash = "sha256:4297d694eb600efa429769125a6f910ec02b85606f22f178bafbee309e7d3ec7"}, - {file = "frozendict-2.4.4-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:812ab17522ba13637826e65454115a914c2da538356e85f43ecea069813e4b33"}, - {file = "frozendict-2.4.4-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7fee9420475bb6ff357000092aa9990c2f6182b2bab15764330f4ad7de2eae49"}, - {file = "frozendict-2.4.4-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:3148062675536724502c6344d7c485dd4667fdf7980ca9bd05e338ccc0c4471e"}, - {file = "frozendict-2.4.4-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:78c94991944dd33c5376f720228e5b252ee67faf3bac50ef381adc9e51e90d9d"}, - {file = "frozendict-2.4.4-cp36-cp36m-win_amd64.whl", hash = "sha256:1697793b5f62b416c0fc1d94638ec91ed3aa4ab277f6affa3a95216ecb3af170"}, - {file = "frozendict-2.4.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:199a4d32194f3afed6258de7e317054155bc9519252b568d9cfffde7e4d834e5"}, - {file = "frozendict-2.4.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85375ec6e979e6373bffb4f54576a68bf7497c350861d20686ccae38aab69c0a"}, - {file = "frozendict-2.4.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:2d8536e068d6bf281f23fa835ac07747fb0f8851879dd189e9709f9567408b4d"}, - {file = "frozendict-2.4.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:259528ba6b56fa051bc996f1c4d8b57e30d6dd3bc2f27441891b04babc4b5e73"}, - {file = "frozendict-2.4.4-cp37-cp37m-win_amd64.whl", hash = "sha256:07c3a5dee8bbb84cba770e273cdbf2c87c8e035903af8f781292d72583416801"}, - {file = "frozendict-2.4.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6874fec816b37b6eb5795b00e0574cba261bf59723e2de607a195d5edaff0786"}, - {file = "frozendict-2.4.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c8f92425686323a950337da4b75b4c17a3327b831df8c881df24038d560640d4"}, - {file = "frozendict-2.4.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d58d9a8d9e49662c6dafbea5e641f97decdb3d6ccd76e55e79818415362ba25"}, - {file = "frozendict-2.4.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:93a7b19afb429cbf99d56faf436b45ef2fa8fe9aca89c49eb1610c3bd85f1760"}, - {file = "frozendict-2.4.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2b70b431e3a72d410a2cdf1497b3aba2f553635e0c0f657ce311d841bf8273b6"}, - {file = "frozendict-2.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:e1b941132d79ce72d562a13341d38fc217bc1ee24d8c35a20d754e79ff99e038"}, - {file = "frozendict-2.4.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dc2228874eacae390e63fd4f2bb513b3144066a977dc192163c9f6c7f6de6474"}, - {file = "frozendict-2.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63aa49f1919af7d45fb8fd5dec4c0859bc09f46880bd6297c79bb2db2969b63d"}, - {file = "frozendict-2.4.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c6bf9260018d653f3cab9bd147bd8592bf98a5c6e338be0491ced3c196c034a3"}, - {file = "frozendict-2.4.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6eb716e6a6d693c03b1d53280a1947716129f5ef9bcdd061db5c17dea44b80fe"}, - {file = "frozendict-2.4.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d13b4310db337f4d2103867c5a05090b22bc4d50ca842093779ef541ea9c9eea"}, - {file = "frozendict-2.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:b3b967d5065872e27b06f785a80c0ed0a45d1f7c9b85223da05358e734d858ca"}, - {file = "frozendict-2.4.4-cp39-cp39-win_arm64.whl", hash = "sha256:4ae8d05c8d0b6134bfb6bfb369d5fa0c4df21eabb5ca7f645af95fdc6689678e"}, - {file = "frozendict-2.4.4-py311-none-any.whl", hash = "sha256:705efca8d74d3facbb6ace80ab3afdd28eb8a237bfb4063ed89996b024bc443d"}, - {file = "frozendict-2.4.4-py312-none-any.whl", hash = "sha256:d9647563e76adb05b7cde2172403123380871360a114f546b4ae1704510801e5"}, - {file = "frozendict-2.4.4.tar.gz", hash = "sha256:3f7c031b26e4ee6a3f786ceb5e3abf1181c4ade92dce1f847da26ea2c96008c7"}, -] - -[[package]] -name = "frozenlist" -version = "1.4.1" -description = "A list-like structure which implements collections.abc.MutableSequence" -optional = false -python-versions = ">=3.8" -files = [ - {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"}, - {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868"}, - {file = "frozenlist-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74fb4bee6880b529a0c6560885fce4dc95936920f9f20f53d99a213f7bf66776"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:590344787a90ae57d62511dd7c736ed56b428f04cd8c161fcc5e7232c130c69a"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:068b63f23b17df8569b7fdca5517edef76171cf3897eb68beb01341131fbd2ad"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c849d495bf5154cd8da18a9eb15db127d4dba2968d88831aff6f0331ea9bd4c"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9750cc7fe1ae3b1611bb8cfc3f9ec11d532244235d75901fb6b8e42ce9229dfe"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9b2de4cf0cdd5bd2dee4c4f63a653c61d2408055ab77b151c1957f221cabf2a"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0633c8d5337cb5c77acbccc6357ac49a1770b8c487e5b3505c57b949b4b82e98"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:27657df69e8801be6c3638054e202a135c7f299267f1a55ed3a598934f6c0d75"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f9a3ea26252bd92f570600098783d1371354d89d5f6b7dfd87359d669f2109b5"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4f57dab5fe3407b6c0c1cc907ac98e8a189f9e418f3b6e54d65a718aaafe3950"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e02a0e11cf6597299b9f3bbd3f93d79217cb90cfd1411aec33848b13f5c656cc"}, - {file = "frozenlist-1.4.1-cp310-cp310-win32.whl", hash = "sha256:a828c57f00f729620a442881cc60e57cfcec6842ba38e1b19fd3e47ac0ff8dc1"}, - {file = "frozenlist-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:f56e2333dda1fe0f909e7cc59f021eba0d2307bc6f012a1ccf2beca6ba362439"}, - {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0"}, - {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49"}, - {file = "frozenlist-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a"}, - {file = 
"frozenlist-1.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2"}, - {file = "frozenlist-1.4.1-cp311-cp311-win32.whl", hash = "sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17"}, - {file = "frozenlist-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825"}, - {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1979bc0aeb89b33b588c51c54ab0161791149f2461ea7c7c946d95d5f93b56ae"}, - {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cc7b01b3754ea68a62bd77ce6020afaffb44a590c2289089289363472d13aedb"}, - {file = "frozenlist-1.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9c92be9fd329ac801cc420e08452b70e7aeab94ea4233a4804f0915c14eba9b"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3894db91f5a489fc8fa6a9991820f368f0b3cbdb9cd8849547ccfab3392d86"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba60bb19387e13597fb059f32cd4d59445d7b18b69a745b8f8e5db0346f33480"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aefbba5f69d42246543407ed2461db31006b0f76c4e32dfd6f42215a2c41d09"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780d3a35680ced9ce682fbcf4cb9c2bad3136eeff760ab33707b71db84664e3a"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9acbb16f06fe7f52f441bb6f413ebae6c37baa6ef9edd49cdd567216da8600cd"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:23b701e65c7b36e4bf15546a89279bd4d8675faabc287d06bbcfac7d3c33e1e6"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3e0153a805a98f5ada7e09826255ba99fb4f7524bb81bf6b47fb702666484ae1"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:dd9b1baec094d91bf36ec729445f7769d0d0cf6b64d04d86e45baf89e2b9059b"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:1a4471094e146b6790f61b98616ab8e44f72661879cc63fa1049d13ef711e71e"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5667ed53d68d91920defdf4035d1cdaa3c3121dc0b113255124bcfada1cfa1b8"}, - {file = "frozenlist-1.4.1-cp312-cp312-win32.whl", hash = "sha256:beee944ae828747fd7cb216a70f120767fc9f4f00bacae8543c14a6831673f89"}, - {file = "frozenlist-1.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:64536573d0a2cb6e625cf309984e2d873979709f2cf22839bf2d61790b448ad5"}, - {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:20b51fa3f588ff2fe658663db52a41a4f7aa6c04f6201449c6c7c476bd255c0d"}, - {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:410478a0c562d1a5bcc2f7ea448359fcb050ed48b3c6f6f4f18c313a9bdb1826"}, - {file = "frozenlist-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6321c9efe29975232da3bd0af0ad216800a47e93d763ce64f291917a381b8eb"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:48f6a4533887e189dae092f1cf981f2e3885175f7a0f33c91fb5b7b682b6bab6"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6eb73fa5426ea69ee0e012fb59cdc76a15b1283d6e32e4f8dc4482ec67d1194d"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbeb989b5cc29e8daf7f976b421c220f1b8c731cbf22b9130d8815418ea45887"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32453c1de775c889eb4e22f1197fe3bdfe457d16476ea407472b9442e6295f7a"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693945278a31f2086d9bf3df0fe8254bbeaef1fe71e1351c3bd730aa7d31c41b"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1d0ce09d36d53bbbe566fe296965b23b961764c0bcf3ce2fa45f463745c04701"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3a670dc61eb0d0eb7080890c13de3066790f9049b47b0de04007090807c776b0"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:dca69045298ce5c11fd539682cff879cc1e664c245d1c64da929813e54241d11"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a06339f38e9ed3a64e4c4e43aec7f59084033647f908e4259d279a52d3757d09"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b7f2f9f912dca3934c1baec2e4585a674ef16fe00218d833856408c48d5beee7"}, - {file = "frozenlist-1.4.1-cp38-cp38-win32.whl", hash = "sha256:e7004be74cbb7d9f34553a5ce5fb08be14fb33bc86f332fb71cbe5216362a497"}, - {file = "frozenlist-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:5a7d70357e7cee13f470c7883a063aae5fe209a493c57d86eb7f5a6f910fae09"}, - {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfa4a17e17ce9abf47a74ae02f32d014c5e9404b6d9ac7f729e01562bbee601e"}, - {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b7e3ed87d4138356775346e6845cccbe66cd9e207f3cd11d2f0b9fd13681359d"}, - {file = "frozenlist-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c99169d4ff810155ca50b4da3b075cbde79752443117d89429595c2e8e37fed8"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edb678da49d9f72c9f6c609fbe41a5dfb9a9282f9e6a2253d5a91e0fc382d7c0"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6db4667b187a6742b33afbbaf05a7bc551ffcf1ced0000a571aedbb4aa42fc7b"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55fdc093b5a3cb41d420884cdaf37a1e74c3c37a31f46e66286d9145d2063bd0"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82e8211d69a4f4bc360ea22cd6555f8e61a1bd211d1d5d39d3d228b48c83a897"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89aa2c2eeb20957be2d950b85974b30a01a762f3308cd02bb15e1ad632e22dc7"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d3e0c25a2350080e9319724dede4f31f43a6c9779be48021a7f4ebde8b2d742"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7268252af60904bf52c26173cbadc3a071cece75f873705419c8681f24d3edea"}, - {file = 
"frozenlist-1.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:0c250a29735d4f15321007fb02865f0e6b6a41a6b88f1f523ca1596ab5f50bd5"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:96ec70beabbd3b10e8bfe52616a13561e58fe84c0101dd031dc78f250d5128b9"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:23b2d7679b73fe0e5a4560b672a39f98dfc6f60df63823b0a9970525325b95f6"}, - {file = "frozenlist-1.4.1-cp39-cp39-win32.whl", hash = "sha256:a7496bfe1da7fb1a4e1cc23bb67c58fab69311cc7d32b5a99c2007b4b2a0e932"}, - {file = "frozenlist-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e6a20a581f9ce92d389a8c7d7c3dd47c81fd5d6e655c8dddf341e14aa48659d0"}, - {file = "frozenlist-1.4.1-py3-none-any.whl", hash = "sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7"}, - {file = "frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"}, -] - -[[package]] -name = "fsspec" -version = "2024.3.1" -description = "File-system specification" -optional = false -python-versions = ">=3.8" -files = [ - {file = "fsspec-2024.3.1-py3-none-any.whl", hash = "sha256:918d18d41bf73f0e2b261824baeb1b124bcf771767e3a26425cd7dec3332f512"}, - {file = "fsspec-2024.3.1.tar.gz", hash = "sha256:f39780e282d7d117ffb42bb96992f8a90795e4d0fb0f661a70ca39fe9c43ded9"}, -] - -[package.dependencies] -aiohttp = {version = "<4.0.0a0 || >4.0.0a0,<4.0.0a1 || >4.0.0a1", optional = true, markers = "extra == \"http\""} - -[package.extras] -abfs = ["adlfs"] -adl = ["adlfs"] -arrow = ["pyarrow (>=1)"] -dask = ["dask", "distributed"] -devel = ["pytest", "pytest-cov"] -dropbox = ["dropbox", "dropboxdrivefs", "requests"] -full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "dask", "distributed", "dropbox", "dropboxdrivefs", "fusepy", "gcsfs", "libarchive-c", "ocifs", "panel", "paramiko", "pyarrow (>=1)", "pygit2", "requests", "s3fs", "smbprotocol", "tqdm"] -fuse = ["fusepy"] -gcs = ["gcsfs"] -git = ["pygit2"] -github = ["requests"] -gs = ["gcsfs"] -gui = ["panel"] -hdfs = ["pyarrow (>=1)"] -http = ["aiohttp (!=4.0.0a0,!=4.0.0a1)"] -libarchive = ["libarchive-c"] -oci = ["ocifs"] -s3 = ["s3fs"] -sftp = ["paramiko"] -smb = ["smbprotocol"] -ssh = ["paramiko"] -tqdm = ["tqdm"] - -[[package]] -name = "future" -version = "1.0.0" -description = "Clean single-source support for Python 3 and 2" -optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "future-1.0.0-py3-none-any.whl", hash = "sha256:929292d34f5872e70396626ef385ec22355a1fae8ad29e1a734c3e43f9fbc216"}, - {file = "future-1.0.0.tar.gz", hash = "sha256:bd2968309307861edae1458a4f8a4f3598c03be43b97521076aebf5d94c07b05"}, -] - -[[package]] -name = "geomet" -version = "0.2.1.post1" -description = "GeoJSON <-> WKT/WKB conversion utilities" -optional = false -python-versions = ">2.6, !=3.3.*, <4" -files = [ - {file = "geomet-0.2.1.post1-py3-none-any.whl", hash = "sha256:a41a1e336b381416d6cbed7f1745c848e91defaa4d4c1bdc1312732e46ffad2b"}, - {file = "geomet-0.2.1.post1.tar.gz", hash = "sha256:91d754f7c298cbfcabd3befdb69c641c27fe75e808b27aa55028605761d17e95"}, -] - -[package.dependencies] -click = "*" -six = "*" - -[[package]] -name = "gitdb" -version = "4.0.11" -description = "Git Object Database" -optional = false -python-versions = ">=3.7" -files = [ - {file = "gitdb-4.0.11-py3-none-any.whl", hash = "sha256:81a3407ddd2ee8df444cbacea00e2d038e40150acfa3001696fe0dcf1d3adfa4"}, - {file = "gitdb-4.0.11.tar.gz", hash = 
"sha256:bf5421126136d6d0af55bc1e7c1af1c397a34f5b7bd79e776cd3e89785c2b04b"}, -] - -[package.dependencies] -smmap = ">=3.0.1,<6" - -[[package]] -name = "gitpython" -version = "3.1.43" -description = "GitPython is a Python library used to interact with Git repositories" -optional = false -python-versions = ">=3.7" -files = [ - {file = "GitPython-3.1.43-py3-none-any.whl", hash = "sha256:eec7ec56b92aad751f9912a73404bc02ba212a23adb2c7098ee668417051a1ff"}, - {file = "GitPython-3.1.43.tar.gz", hash = "sha256:35f314a9f878467f5453cc1fee295c3e18e52f1b99f10f6cf5b1682e968a9e7c"}, -] - -[package.dependencies] -gitdb = ">=4.0.1,<5" - -[package.extras] -doc = ["sphinx (==4.3.2)", "sphinx-autodoc-typehints", "sphinx-rtd-theme", "sphinxcontrib-applehelp (>=1.0.2,<=1.0.4)", "sphinxcontrib-devhelp (==1.0.2)", "sphinxcontrib-htmlhelp (>=2.0.0,<=2.0.1)", "sphinxcontrib-qthelp (==1.0.3)", "sphinxcontrib-serializinghtml (==1.1.5)"] -test = ["coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-instafail", "pytest-mock", "pytest-sugar", "typing-extensions"] - -[[package]] -name = "google-ai-generativelanguage" -version = "0.6.6" -description = "Google Ai Generativelanguage API client library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google-ai-generativelanguage-0.6.6.tar.gz", hash = "sha256:1739f035caeeeca5c28f887405eec8690f3372daf79fecf26454a97a4f1733a8"}, - {file = "google_ai_generativelanguage-0.6.6-py3-none-any.whl", hash = "sha256:59297737931f073d55ce1268dcc6d95111ee62850349d2b6cde942b16a4fca5c"}, -] - -[package.dependencies] -google-api-core = {version = ">=1.34.1,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} -google-auth = ">=2.14.1,<2.24.0 || >2.24.0,<2.25.0 || >2.25.0,<3.0.0dev" -proto-plus = ">=1.22.3,<2.0.0dev" -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" - -[[package]] -name = "google-api-core" -version = "2.19.1" -description = "Google API client core library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google-api-core-2.19.1.tar.gz", hash = "sha256:f4695f1e3650b316a795108a76a1c416e6afb036199d1c1f1f110916df479ffd"}, - {file = "google_api_core-2.19.1-py3-none-any.whl", hash = "sha256:f12a9b8309b5e21d92483bbd47ce2c445861ec7d269ef6784ecc0ea8c1fa6125"}, -] - -[package.dependencies] -google-auth = ">=2.14.1,<3.0.dev0" -googleapis-common-protos = ">=1.56.2,<2.0.dev0" -grpcio = [ - {version = ">=1.33.2,<2.0dev", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, - {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, -] -grpcio-status = [ - {version = ">=1.33.2,<2.0.dev0", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, - {version = ">=1.49.1,<2.0.dev0", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, -] -proto-plus = ">=1.22.3,<2.0.0dev" -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0" -requests = ">=2.18.0,<3.0.0.dev0" - -[package.extras] -grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "grpcio-status (>=1.33.2,<2.0.dev0)", "grpcio-status (>=1.49.1,<2.0.dev0)"] -grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] -grpcio-gcp = ["grpcio-gcp 
(>=0.2.2,<1.0.dev0)"] - -[[package]] -name = "google-api-python-client" -version = "2.141.0" -description = "Google API Client Library for Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google_api_python_client-2.141.0-py2.py3-none-any.whl", hash = "sha256:43c05322b91791204465291b3852718fae38d4f84b411d8be847c4f86882652a"}, - {file = "google_api_python_client-2.141.0.tar.gz", hash = "sha256:0f225b1f45d5a6f8c2a400f48729f5d6da9a81138e81e0478d61fdd8edf6563a"}, -] - -[package.dependencies] -google-api-core = ">=1.31.5,<2.0.dev0 || >2.3.0,<3.0.0.dev0" -google-auth = ">=1.32.0,<2.24.0 || >2.24.0,<2.25.0 || >2.25.0,<3.0.0.dev0" -google-auth-httplib2 = ">=0.2.0,<1.0.0" -httplib2 = ">=0.19.0,<1.dev0" -uritemplate = ">=3.0.1,<5" - -[[package]] -name = "google-auth" -version = "2.33.0" -description = "Google Authentication Library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google_auth-2.33.0-py2.py3-none-any.whl", hash = "sha256:8eff47d0d4a34ab6265c50a106a3362de6a9975bb08998700e389f857e4d39df"}, - {file = "google_auth-2.33.0.tar.gz", hash = "sha256:d6a52342160d7290e334b4d47ba390767e4438ad0d45b7630774533e82655b95"}, -] - -[package.dependencies] -cachetools = ">=2.0.0,<6.0" -pyasn1-modules = ">=0.2.1" -rsa = ">=3.1.4,<5" - -[package.extras] -aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"] -enterprise-cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"] -pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] -reauth = ["pyu2f (>=0.1.5)"] -requests = ["requests (>=2.20.0,<3.0.0.dev0)"] - -[[package]] -name = "google-auth-httplib2" -version = "0.2.0" -description = "Google Authentication Library: httplib2 transport" -optional = false -python-versions = "*" -files = [ - {file = "google-auth-httplib2-0.2.0.tar.gz", hash = "sha256:38aa7badf48f974f1eb9861794e9c0cb2a0511a4ec0679b1f886d108f5640e05"}, - {file = "google_auth_httplib2-0.2.0-py2.py3-none-any.whl", hash = "sha256:b65a0a2123300dd71281a7bf6e64d65a0759287df52729bdd1ae2e47dc311a3d"}, -] - -[package.dependencies] -google-auth = "*" -httplib2 = ">=0.19.0" - -[[package]] -name = "google-auth-oauthlib" -version = "1.2.1" -description = "Google Authentication Library" -optional = false -python-versions = ">=3.6" -files = [ - {file = "google_auth_oauthlib-1.2.1-py2.py3-none-any.whl", hash = "sha256:2d58a27262d55aa1b87678c3ba7142a080098cbc2024f903c62355deb235d91f"}, - {file = "google_auth_oauthlib-1.2.1.tar.gz", hash = "sha256:afd0cad092a2eaa53cd8e8298557d6de1034c6cb4a740500b5357b648af97263"}, -] - -[package.dependencies] -google-auth = ">=2.15.0" -requests-oauthlib = ">=0.7.0" - -[package.extras] -tool = ["click (>=6.0.0)"] - -[[package]] -name = "google-cloud-aiplatform" -version = "1.62.0" -description = "Vertex AI API client library" -optional = false -python-versions = ">=3.8" -files = [ - {file = "google-cloud-aiplatform-1.62.0.tar.gz", hash = "sha256:e15d5b2a99e30d4a16f4c51cfb8129962e6da41a9027d2ea696abe0e2f006fe8"}, - {file = "google_cloud_aiplatform-1.62.0-py2.py3-none-any.whl", hash = "sha256:d7738e0fd4494a54ae08a51755a2143d58937cba2db826189771f45566c9ee3c"}, -] - -[package.dependencies] -docstring-parser = "<1" -google-api-core = {version = ">=1.34.1,<2.0.dev0 || >=2.8.dev0,<3.0.0dev", extras = ["grpc"]} -google-auth = ">=2.14.1,<3.0.0dev" -google-cloud-bigquery = ">=1.15.0,<3.20.0 || >3.20.0,<4.0.0dev" -google-cloud-resource-manager = ">=1.3.3,<3.0.0dev" -google-cloud-storage = ">=1.32.0,<3.0.0dev" -packaging = ">=14.3" -proto-plus = 
">=1.22.3,<2.0.0dev" -protobuf = ">=3.20.2,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0dev" -pydantic = "<3" -shapely = "<3.0.0dev" - -[package.extras] -autologging = ["mlflow (>=1.27.0,<=2.1.1)"] -cloud-profiler = ["tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "werkzeug (>=2.0.0,<2.1.0dev)"] -datasets = ["pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)"] -endpoint = ["requests (>=2.28.1)"] -full = ["cloudpickle (<3.0)", "docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<=0.109.1)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-cloud-logging (<4.0)", "google-vizier (>=0.1.6)", "httpx (>=0.23.0,<0.25.0)", "immutabledict", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.1.1)", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pyyaml (>=5.3.1,<7)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "requests (>=2.28.1)", "setuptools (<70.0.0)", "starlette (>=0.17.1)", "tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "tqdm (>=4.23.0)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)", "werkzeug (>=2.0.0,<2.1.0dev)"] -langchain = ["langchain (>=0.1.16,<0.3)", "langchain-core (<0.3)", "langchain-google-vertexai (<2)", "openinference-instrumentation-langchain (>=0.1.19,<0.2)", "orjson (<=3.10.6)", "tenacity (<=8.3)"] -langchain-testing = ["absl-py", "cloudpickle (>=3.0,<4.0)", "google-cloud-trace (<2)", "langchain (>=0.1.16,<0.3)", "langchain-core (<0.3)", "langchain-google-vertexai (<2)", "openinference-instrumentation-langchain (>=0.1.19,<0.2)", "opentelemetry-exporter-gcp-trace (<2)", "opentelemetry-sdk (<2)", "orjson (<=3.10.6)", "pydantic (>=2.6.3,<3)", "pytest-xdist", "tenacity (<=8.3)"] -lit = ["explainable-ai-sdk (>=1.0.0)", "lit-nlp (==0.4.0)", "pandas (>=1.0.0)", "tensorflow (>=2.3.0,<3.0.0dev)"] -metadata = ["numpy (>=1.15.0)", "pandas (>=1.0.0)"] -pipelines = ["pyyaml (>=5.3.1,<7)"] -prediction = ["docker (>=5.0.3)", "fastapi (>=0.71.0,<=0.109.1)", "httpx (>=0.23.0,<0.25.0)", "starlette (>=0.17.1)", "uvicorn[standard] (>=0.16.0)"] -preview = ["cloudpickle (<3.0)", "google-cloud-logging (<4.0)"] -private-endpoints = ["requests (>=2.28.1)", "urllib3 (>=1.21.1,<1.27)"] -rapid-evaluation = ["pandas (>=1.0.0,<2.2.0)", "tqdm (>=4.23.0)"] -ray = ["google-cloud-bigquery", "google-cloud-bigquery-storage", "immutabledict", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "setuptools (<70.0.0)"] -ray-testing = ["google-cloud-bigquery", "google-cloud-bigquery-storage", "immutabledict", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pytest-xdist", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "ray[train] (==2.9.3)", "scikit-learn", "setuptools (<70.0.0)", "tensorflow", "torch (>=2.0.0,<2.1.0)", "xgboost", "xgboost-ray"] -reasoningengine = ["cloudpickle (>=3.0,<4.0)", "google-cloud-trace (<2)", "opentelemetry-exporter-gcp-trace (<2)", "opentelemetry-sdk (<2)", "pydantic (>=2.6.3,<3)"] -tensorboard = ["tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", 
"tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "werkzeug (>=2.0.0,<2.1.0dev)"] -testing = ["bigframes", "cloudpickle (<3.0)", "docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<=0.109.1)", "google-api-core (>=2.11,<3.0.0)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-cloud-logging (<4.0)", "google-vizier (>=0.1.6)", "grpcio-testing", "httpx (>=0.23.0,<0.25.0)", "immutabledict", "ipython", "kfp (>=2.6.0,<3.0.0)", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.1.1)", "nltk", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pyfakefs", "pytest-asyncio", "pytest-xdist", "pyyaml (>=5.3.1,<7)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "requests (>=2.28.1)", "requests-toolbelt (<1.0.0)", "scikit-learn", "sentencepiece (>=0.2.0)", "setuptools (<70.0.0)", "starlette (>=0.17.1)", "tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (==2.13.0)", "tensorflow (==2.16.1)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "torch (>=2.0.0,<2.1.0)", "torch (>=2.2.0)", "tqdm (>=4.23.0)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)", "werkzeug (>=2.0.0,<2.1.0dev)", "xgboost"] -tokenization = ["sentencepiece (>=0.2.0)"] -vizier = ["google-vizier (>=0.1.6)"] -xai = ["tensorflow (>=2.3.0,<3.0.0dev)"] - -[[package]] -name = "google-cloud-bigquery" -version = "3.25.0" -description = "Google BigQuery API client library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google-cloud-bigquery-3.25.0.tar.gz", hash = "sha256:5b2aff3205a854481117436836ae1403f11f2594e6810a98886afd57eda28509"}, - {file = "google_cloud_bigquery-3.25.0-py2.py3-none-any.whl", hash = "sha256:7f0c371bc74d2a7fb74dacbc00ac0f90c8c2bec2289b51dd6685a275873b1ce9"}, -] - -[package.dependencies] -google-api-core = {version = ">=1.34.1,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} -google-auth = ">=2.14.1,<3.0.0dev" -google-cloud-core = ">=1.6.0,<3.0.0dev" -google-resumable-media = ">=0.6.0,<3.0dev" -packaging = ">=20.0.0" -python-dateutil = ">=2.7.2,<3.0dev" -requests = ">=2.21.0,<3.0.0dev" - -[package.extras] -all = ["Shapely (>=1.8.4,<3.0.0dev)", "db-dtypes (>=0.3.0,<2.0.0dev)", "geopandas (>=0.9.0,<1.0dev)", "google-cloud-bigquery-storage (>=2.6.0,<3.0.0dev)", "grpcio (>=1.47.0,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "importlib-metadata (>=1.0.0)", "ipykernel (>=6.0.0)", "ipython (>=7.23.1,!=8.1.0)", "ipywidgets (>=7.7.0)", "opentelemetry-api (>=1.1.0)", "opentelemetry-instrumentation (>=0.20b0)", "opentelemetry-sdk (>=1.1.0)", "pandas (>=1.1.0)", "proto-plus (>=1.15.0,<2.0.0dev)", "protobuf (>=3.19.5,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<5.0.0dev)", "pyarrow (>=3.0.0)", "tqdm (>=4.7.4,<5.0.0dev)"] -bigquery-v2 = ["proto-plus (>=1.15.0,<2.0.0dev)", "protobuf (>=3.19.5,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<5.0.0dev)"] -bqstorage = ["google-cloud-bigquery-storage (>=2.6.0,<3.0.0dev)", "grpcio (>=1.47.0,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "pyarrow (>=3.0.0)"] -geopandas = ["Shapely (>=1.8.4,<3.0.0dev)", "geopandas (>=0.9.0,<1.0dev)"] -ipython = ["ipykernel (>=6.0.0)", "ipython (>=7.23.1,!=8.1.0)"] -ipywidgets = ["ipykernel (>=6.0.0)", "ipywidgets (>=7.7.0)"] -opentelemetry = ["opentelemetry-api (>=1.1.0)", 
"opentelemetry-instrumentation (>=0.20b0)", "opentelemetry-sdk (>=1.1.0)"] -pandas = ["db-dtypes (>=0.3.0,<2.0.0dev)", "importlib-metadata (>=1.0.0)", "pandas (>=1.1.0)", "pyarrow (>=3.0.0)"] -tqdm = ["tqdm (>=4.7.4,<5.0.0dev)"] - -[[package]] -name = "google-cloud-core" -version = "2.4.1" -description = "Google Cloud API client core library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google-cloud-core-2.4.1.tar.gz", hash = "sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073"}, - {file = "google_cloud_core-2.4.1-py2.py3-none-any.whl", hash = "sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61"}, -] - -[package.dependencies] -google-api-core = ">=1.31.6,<2.0.dev0 || >2.3.0,<3.0.0dev" -google-auth = ">=1.25.0,<3.0dev" - -[package.extras] -grpc = ["grpcio (>=1.38.0,<2.0dev)", "grpcio-status (>=1.38.0,<2.0.dev0)"] - -[[package]] -name = "google-cloud-resource-manager" -version = "1.12.5" -description = "Google Cloud Resource Manager API client library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google_cloud_resource_manager-1.12.5-py2.py3-none-any.whl", hash = "sha256:2708a718b45c79464b7b21559c701b5c92e6b0b1ab2146d0a256277a623dc175"}, - {file = "google_cloud_resource_manager-1.12.5.tar.gz", hash = "sha256:b7af4254401ed4efa3aba3a929cb3ddb803fa6baf91a78485e45583597de5891"}, -] - -[package.dependencies] -google-api-core = {version = ">=1.34.1,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} -google-auth = ">=2.14.1,<2.24.0 || >2.24.0,<2.25.0 || >2.25.0,<3.0.0dev" -grpc-google-iam-v1 = ">=0.12.4,<1.0.0dev" -proto-plus = ">=1.22.3,<2.0.0dev" -protobuf = ">=3.20.2,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0dev" - -[[package]] -name = "google-cloud-storage" -version = "2.18.2" -description = "Google Cloud Storage API client library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google_cloud_storage-2.18.2-py2.py3-none-any.whl", hash = "sha256:97a4d45c368b7d401ed48c4fdfe86e1e1cb96401c9e199e419d289e2c0370166"}, - {file = "google_cloud_storage-2.18.2.tar.gz", hash = "sha256:aaf7acd70cdad9f274d29332673fcab98708d0e1f4dceb5a5356aaef06af4d99"}, -] - -[package.dependencies] -google-api-core = ">=2.15.0,<3.0.0dev" -google-auth = ">=2.26.1,<3.0dev" -google-cloud-core = ">=2.3.0,<3.0dev" -google-crc32c = ">=1.0,<2.0dev" -google-resumable-media = ">=2.7.2" -requests = ">=2.18.0,<3.0.0dev" - -[package.extras] -protobuf = ["protobuf (<6.0.0dev)"] -tracing = ["opentelemetry-api (>=1.1.0)"] - -[[package]] -name = "google-crc32c" -version = "1.5.0" -description = "A python wrapper of the C library 'Google CRC32C'" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google-crc32c-1.5.0.tar.gz", hash = "sha256:89284716bc6a5a415d4eaa11b1726d2d60a0cd12aadf5439828353662ede9dd7"}, - {file = "google_crc32c-1.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:596d1f98fc70232fcb6590c439f43b350cb762fb5d61ce7b0e9db4539654cc13"}, - {file = "google_crc32c-1.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:be82c3c8cfb15b30f36768797a640e800513793d6ae1724aaaafe5bf86f8f346"}, - {file = "google_crc32c-1.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:461665ff58895f508e2866824a47bdee72497b091c730071f2b7575d5762ab65"}, - {file = "google_crc32c-1.5.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:e2096eddb4e7c7bdae4bd69ad364e55e07b8316653234a56552d9c988bd2d61b"}, - {file = "google_crc32c-1.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:116a7c3c616dd14a3de8c64a965828b197e5f2d121fedd2f8c5585c547e87b02"}, - {file = "google_crc32c-1.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5829b792bf5822fd0a6f6eb34c5f81dd074f01d570ed7f36aa101d6fc7a0a6e4"}, - {file = "google_crc32c-1.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:64e52e2b3970bd891309c113b54cf0e4384762c934d5ae56e283f9a0afcd953e"}, - {file = "google_crc32c-1.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:02ebb8bf46c13e36998aeaad1de9b48f4caf545e91d14041270d9dca767b780c"}, - {file = "google_crc32c-1.5.0-cp310-cp310-win32.whl", hash = "sha256:2e920d506ec85eb4ba50cd4228c2bec05642894d4c73c59b3a2fe20346bd00ee"}, - {file = "google_crc32c-1.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:07eb3c611ce363c51a933bf6bd7f8e3878a51d124acfc89452a75120bc436289"}, - {file = "google_crc32c-1.5.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:cae0274952c079886567f3f4f685bcaf5708f0a23a5f5216fdab71f81a6c0273"}, - {file = "google_crc32c-1.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1034d91442ead5a95b5aaef90dbfaca8633b0247d1e41621d1e9f9db88c36298"}, - {file = "google_crc32c-1.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c42c70cd1d362284289c6273adda4c6af8039a8ae12dc451dcd61cdabb8ab57"}, - {file = "google_crc32c-1.5.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8485b340a6a9e76c62a7dce3c98e5f102c9219f4cfbf896a00cf48caf078d438"}, - {file = "google_crc32c-1.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77e2fd3057c9d78e225fa0a2160f96b64a824de17840351b26825b0848022906"}, - {file = "google_crc32c-1.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183"}, - {file = "google_crc32c-1.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:a1fd716e7a01f8e717490fbe2e431d2905ab8aa598b9b12f8d10abebb36b04dd"}, - {file = "google_crc32c-1.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:72218785ce41b9cfd2fc1d6a017dc1ff7acfc4c17d01053265c41a2c0cc39b8c"}, - {file = "google_crc32c-1.5.0-cp311-cp311-win32.whl", hash = "sha256:66741ef4ee08ea0b2cc3c86916ab66b6aef03768525627fd6a1b34968b4e3709"}, - {file = "google_crc32c-1.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:ba1eb1843304b1e5537e1fca632fa894d6f6deca8d6389636ee5b4797affb968"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:98cb4d057f285bd80d8778ebc4fde6b4d509ac3f331758fb1528b733215443ae"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19e0a019d2c4dcc5e598cd4a4bc7b008546b0358bd322537c74ad47a5386884f"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02c65b9817512edc6a4ae7c7e987fea799d2e0ee40c53ec573a692bee24de876"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6ac08d24c1f16bd2bf5eca8eaf8304812f44af5cfe5062006ec676e7e1d50afc"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3359fc442a743e870f4588fcf5dcbc1bf929df1fad8fb9905cd94e5edb02e84c"}, - 
{file = "google_crc32c-1.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e986b206dae4476f41bcec1faa057851f3889503a70e1bdb2378d406223994a"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:de06adc872bcd8c2a4e0dc51250e9e65ef2ca91be023b9d13ebd67c2ba552e1e"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-win32.whl", hash = "sha256:d3515f198eaa2f0ed49f8819d5732d70698c3fa37384146079b3799b97667a94"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:67b741654b851abafb7bc625b6d1cdd520a379074e64b6a128e3b688c3c04740"}, - {file = "google_crc32c-1.5.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c02ec1c5856179f171e032a31d6f8bf84e5a75c45c33b2e20a3de353b266ebd8"}, - {file = "google_crc32c-1.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:edfedb64740750e1a3b16152620220f51d58ff1b4abceb339ca92e934775c27a"}, - {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84e6e8cd997930fc66d5bb4fde61e2b62ba19d62b7abd7a69920406f9ecca946"}, - {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a"}, - {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:998679bf62b7fb599d2878aa3ed06b9ce688b8974893e7223c60db155f26bd8d"}, - {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:83c681c526a3439b5cf94f7420471705bbf96262f49a6fe546a6db5f687a3d4a"}, - {file = "google_crc32c-1.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4c6fdd4fccbec90cc8a01fc00773fcd5fa28db683c116ee3cb35cd5da9ef6c37"}, - {file = "google_crc32c-1.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5ae44e10a8e3407dbe138984f21e536583f2bba1be9491239f942c2464ac0894"}, - {file = "google_crc32c-1.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:37933ec6e693e51a5b07505bd05de57eee12f3e8c32b07da7e73669398e6630a"}, - {file = "google_crc32c-1.5.0-cp38-cp38-win32.whl", hash = "sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4"}, - {file = "google_crc32c-1.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:74dea7751d98034887dbd821b7aae3e1d36eda111d6ca36c206c44478035709c"}, - {file = "google_crc32c-1.5.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c6c777a480337ac14f38564ac88ae82d4cd238bf293f0a22295b66eb89ffced7"}, - {file = "google_crc32c-1.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:759ce4851a4bb15ecabae28f4d2e18983c244eddd767f560165563bf9aefbc8d"}, - {file = "google_crc32c-1.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f13cae8cc389a440def0c8c52057f37359014ccbc9dc1f0827936bcd367c6100"}, - {file = "google_crc32c-1.5.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e560628513ed34759456a416bf86b54b2476c59144a9138165c9a1575801d0d9"}, - {file = "google_crc32c-1.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1674e4307fa3024fc897ca774e9c7562c957af85df55efe2988ed9056dc4e57"}, - {file = "google_crc32c-1.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:278d2ed7c16cfc075c91378c4f47924c0625f5fc84b2d50d921b18b7975bd210"}, - {file = "google_crc32c-1.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d5280312b9af0976231f9e317c20e4a61cd2f9629b7bfea6a693d1878a264ebd"}, - {file = "google_crc32c-1.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:8b87e1a59c38f275c0e3676fc2ab6d59eccecfd460be267ac360cc31f7bcde96"}, - {file = "google_crc32c-1.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7c074fece789b5034b9b1404a1f8208fc2d4c6ce9decdd16e8220c5a793e6f61"}, - {file = "google_crc32c-1.5.0-cp39-cp39-win32.whl", hash = "sha256:7f57f14606cd1dd0f0de396e1e53824c371e9544a822648cd76c034d209b559c"}, - {file = "google_crc32c-1.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:a2355cba1f4ad8b6988a4ca3feed5bff33f6af2d7f134852cf279c2aebfde541"}, - {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f314013e7dcd5cf45ab1945d92e713eec788166262ae8deb2cfacd53def27325"}, - {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b747a674c20a67343cb61d43fdd9207ce5da6a99f629c6e2541aa0e89215bcd"}, - {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8f24ed114432de109aa9fd317278518a5af2d31ac2ea6b952b2f7782b43da091"}, - {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8667b48e7a7ef66afba2c81e1094ef526388d35b873966d8a9a447974ed9178"}, - {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:1c7abdac90433b09bad6c43a43af253e688c9cfc1c86d332aed13f9a7c7f65e2"}, - {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6f998db4e71b645350b9ac28a2167e6632c239963ca9da411523bb439c5c514d"}, - {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c99616c853bb585301df6de07ca2cadad344fd1ada6d62bb30aec05219c45d2"}, - {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ad40e31093a4af319dadf503b2467ccdc8f67c72e4bcba97f8c10cb078207b5"}, - {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd67cf24a553339d5062eff51013780a00d6f97a39ca062781d06b3a73b15462"}, - {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:398af5e3ba9cf768787eef45c803ff9614cc3e22a5b2f7d7ae116df8b11e3314"}, - {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b1f8133c9a275df5613a451e73f36c2aea4fe13c5c8997e22cf355ebd7bd0728"}, - {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ba053c5f50430a3fcfd36f75aff9caeba0440b2d076afdb79a318d6ca245f88"}, - {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:272d3892a1e1a2dbc39cc5cde96834c236d5327e2122d3aaa19f6614531bb6eb"}, - {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:635f5d4dd18758a1fbd1049a8e8d2fee4ffed124462d837d1a02a0e009c3ab31"}, - {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c672d99a345849301784604bfeaeba4db0c7aae50b95be04dd651fd2a7310b93"}, -] - -[package.extras] -testing = ["pytest"] - -[[package]] -name = "google-generativeai" -version = "0.7.2" -description = "Google Generative AI High level API client library and tools." 
-optional = false -python-versions = ">=3.9" -files = [ - {file = "google_generativeai-0.7.2-py3-none-any.whl", hash = "sha256:3117d1ebc92ee77710d4bc25ab4763492fddce9b6332eb25d124cf5d8b78b339"}, -] - -[package.dependencies] -google-ai-generativelanguage = "0.6.6" -google-api-core = "*" -google-api-python-client = "*" -google-auth = ">=2.15.0" -protobuf = "*" -pydantic = "*" -tqdm = "*" -typing-extensions = "*" - -[package.extras] -dev = ["Pillow", "absl-py", "black", "ipython", "nose2", "pandas", "pytype", "pyyaml"] - -[[package]] -name = "google-resumable-media" -version = "2.7.2" -description = "Utilities for Google Media Downloads and Resumable Uploads" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google_resumable_media-2.7.2-py2.py3-none-any.whl", hash = "sha256:3ce7551e9fe6d99e9a126101d2536612bb73486721951e9562fee0f90c6ababa"}, - {file = "google_resumable_media-2.7.2.tar.gz", hash = "sha256:5280aed4629f2b60b847b0d42f9857fd4935c11af266744df33d8074cae92fe0"}, -] - -[package.dependencies] -google-crc32c = ">=1.0,<2.0dev" - -[package.extras] -aiohttp = ["aiohttp (>=3.6.2,<4.0.0dev)", "google-auth (>=1.22.0,<2.0dev)"] -requests = ["requests (>=2.18.0,<3.0.0dev)"] - -[[package]] -name = "google-search-results" -version = "2.4.2" -description = "Scrape and search localized results from Google, Bing, Baidu, Yahoo, Yandex, Ebay, Homedepot, youtube at scale using SerpApi.com" -optional = false -python-versions = ">=3.5" -files = [ - {file = "google_search_results-2.4.2.tar.gz", hash = "sha256:603a30ecae2af8e600b22635757a6df275dad4b934f975e67878ccd640b78245"}, -] - -[package.dependencies] -requests = "*" - -[[package]] -name = "googleapis-common-protos" -version = "1.63.2" -description = "Common protobufs used in Google APIs" -optional = false -python-versions = ">=3.7" -files = [ - {file = "googleapis-common-protos-1.63.2.tar.gz", hash = "sha256:27c5abdffc4911f28101e635de1533fb4cfd2c37fbaa9174587c799fac90aa87"}, - {file = "googleapis_common_protos-1.63.2-py2.py3-none-any.whl", hash = "sha256:27a2499c7e8aff199665b22741997e485eccc8645aa9176c7c988e6fae507945"}, -] - -[package.dependencies] -grpcio = {version = ">=1.44.0,<2.0.0.dev0", optional = true, markers = "extra == \"grpc\""} -protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0" - -[package.extras] -grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] - -[[package]] -name = "gotrue" -version = "2.6.2" -description = "Python Client Library for Supabase Auth" -optional = false -python-versions = "<4.0,>=3.8" -files = [ - {file = "gotrue-2.6.2-py3-none-any.whl", hash = "sha256:7d3b45a1034e969794b671cdd8f564847a9ff51bc7863ed1e7092a4fc0e608b7"}, - {file = "gotrue-2.6.2.tar.gz", hash = "sha256:33631b4ced462df8769e490266d409182b084116b1ad97a93d879965c8156936"}, -] - -[package.dependencies] -httpx = {version = ">=0.24,<0.28", extras = ["http2"]} -pydantic = ">=1.10,<3" - -[[package]] -name = "gprof2dot" -version = "2024.6.6" -description = "Generate a dot graph from the output of several profilers." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "gprof2dot-2024.6.6-py2.py3-none-any.whl", hash = "sha256:45b14ad7ce64e299c8f526881007b9eb2c6b75505d5613e96e66ee4d5ab33696"}, - {file = "gprof2dot-2024.6.6.tar.gz", hash = "sha256:fa1420c60025a9eb7734f65225b4da02a10fc6dd741b37fa129bc6b41951e5ab"}, -] - -[[package]] -name = "gptcache" -version = "0.1.44" -description = "GPTCache, a powerful caching library that can be used to speed up and lower the cost of chat applications that rely on the LLM service. GPTCache works as a memcache for AIGC applications, similar to how Redis works for traditional applications." -optional = false -python-versions = ">=3.8.1" -files = [ - {file = "gptcache-0.1.44-py3-none-any.whl", hash = "sha256:11ddd63b173dc3822b8c2eb7588ea947c825845ed0737b043038a238286bfec4"}, - {file = "gptcache-0.1.44.tar.gz", hash = "sha256:d3d5e6a75c57594dc58212c2d6c53a7999c23ede30e0be66d213d885c0ad0be9"}, -] - -[package.dependencies] -cachetools = "*" -numpy = "*" -requests = "*" - -[[package]] -name = "gql" -version = "3.5.0" -description = "GraphQL client for Python" -optional = false -python-versions = "*" -files = [ - {file = "gql-3.5.0-py2.py3-none-any.whl", hash = "sha256:70dda5694a5b194a8441f077aa5fb70cc94e4ec08016117523f013680901ecb7"}, - {file = "gql-3.5.0.tar.gz", hash = "sha256:ccb9c5db543682b28f577069950488218ed65d4ac70bb03b6929aaadaf636de9"}, -] - -[package.dependencies] -anyio = ">=3.0,<5" -backoff = ">=1.11.1,<3.0" -graphql-core = ">=3.2,<3.3" -yarl = ">=1.6,<2.0" - -[package.extras] -aiohttp = ["aiohttp (>=3.8.0,<4)", "aiohttp (>=3.9.0b0,<4)"] -all = ["aiohttp (>=3.8.0,<4)", "aiohttp (>=3.9.0b0,<4)", "botocore (>=1.21,<2)", "httpx (>=0.23.1,<1)", "requests (>=2.26,<3)", "requests-toolbelt (>=1.0.0,<2)", "websockets (>=10,<12)"] -botocore = ["botocore (>=1.21,<2)"] -dev = ["aiofiles", "aiohttp (>=3.8.0,<4)", "aiohttp (>=3.9.0b0,<4)", "black (==22.3.0)", "botocore (>=1.21,<2)", "check-manifest (>=0.42,<1)", "flake8 (==3.8.1)", "httpx (>=0.23.1,<1)", "isort (==4.3.21)", "mock (==4.0.2)", "mypy (==0.910)", "parse (==1.15.0)", "pytest (==7.4.2)", "pytest-asyncio (==0.21.1)", "pytest-console-scripts (==1.3.1)", "pytest-cov (==3.0.0)", "requests (>=2.26,<3)", "requests-toolbelt (>=1.0.0,<2)", "sphinx (>=5.3.0,<6)", "sphinx-argparse (==0.2.5)", "sphinx-rtd-theme (>=0.4,<1)", "types-aiofiles", "types-mock", "types-requests", "vcrpy (==4.4.0)", "websockets (>=10,<12)"] -httpx = ["httpx (>=0.23.1,<1)"] -requests = ["requests (>=2.26,<3)", "requests-toolbelt (>=1.0.0,<2)"] -test = ["aiofiles", "aiohttp (>=3.8.0,<4)", "aiohttp (>=3.9.0b0,<4)", "botocore (>=1.21,<2)", "httpx (>=0.23.1,<1)", "mock (==4.0.2)", "parse (==1.15.0)", "pytest (==7.4.2)", "pytest-asyncio (==0.21.1)", "pytest-console-scripts (==1.3.1)", "pytest-cov (==3.0.0)", "requests (>=2.26,<3)", "requests-toolbelt (>=1.0.0,<2)", "vcrpy (==4.4.0)", "websockets (>=10,<12)"] -test-no-transport = ["aiofiles", "mock (==4.0.2)", "parse (==1.15.0)", "pytest (==7.4.2)", "pytest-asyncio (==0.21.1)", "pytest-console-scripts (==1.3.1)", "pytest-cov (==3.0.0)", "vcrpy (==4.4.0)"] -websockets = ["websockets (>=10,<12)"] - -[[package]] -name = "grandalf" -version = "0.8" -description = "Graph and drawing algorithms framework" -optional = false -python-versions = "*" -files = [ - {file = "grandalf-0.8-py3-none-any.whl", hash = "sha256:793ca254442f4a79252ea9ff1ab998e852c1e071b863593e5383afee906b4185"}, - {file = "grandalf-0.8.tar.gz", hash = "sha256:2813f7aab87f0d20f334a3162ccfbcbf085977134a17a5b516940a93a77ea974"}, -] - 
-[package.dependencies] -pyparsing = "*" - -[package.extras] -full = ["numpy", "ply"] - -[[package]] -name = "graphql-core" -version = "3.2.3" -description = "GraphQL implementation for Python, a port of GraphQL.js, the JavaScript reference implementation for GraphQL." -optional = false -python-versions = ">=3.6,<4" -files = [ - {file = "graphql-core-3.2.3.tar.gz", hash = "sha256:06d2aad0ac723e35b1cb47885d3e5c45e956a53bc1b209a9fc5369007fe46676"}, - {file = "graphql_core-3.2.3-py3-none-any.whl", hash = "sha256:5766780452bd5ec8ba133f8bf287dc92713e3868ddd83aee4faab9fc3e303dc3"}, -] - -[[package]] -name = "greenlet" -version = "3.0.3" -description = "Lightweight in-process concurrent programming" -optional = false -python-versions = ">=3.7" -files = [ - {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, - {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, - {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, - {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, - {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, - {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, - {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, - {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash 
= "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, - {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, - {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, - {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, - {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, - {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, - {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, - {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, - {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, - {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, - {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, - {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, - {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, - {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, - {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, - {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, - {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, - {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, - {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, - {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, - {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, -] - -[package.extras] -docs = ["Sphinx", "furo"] -test = ["objgraph", "psutil"] - -[[package]] -name = "groq" -version = "0.9.0" -description = "The official Python library for the groq API" -optional = false -python-versions = ">=3.7" -files = [ - {file = "groq-0.9.0-py3-none-any.whl", hash = "sha256:d0e46f4ad645504672bb09c8100af3ced3a7db0d5119dc13e4aca535fc455874"}, - {file = "groq-0.9.0.tar.gz", hash = "sha256:130ed5e35d3acfaab46b9e7a078eeaebf91052f4a9d71f86f87fb319b5fec332"}, -] - -[package.dependencies] -anyio = ">=3.5.0,<5" -distro = ">=1.7.0,<2" -httpx = ">=0.23.0,<1" -pydantic = ">=1.9.0,<3" -sniffio = "*" -typing-extensions = ">=4.7,<5" - -[[package]] -name = "grpc-google-iam-v1" -version = "0.13.1" -description = "IAM API 
client library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "grpc-google-iam-v1-0.13.1.tar.gz", hash = "sha256:3ff4b2fd9d990965e410965253c0da6f66205d5a8291c4c31c6ebecca18a9001"}, - {file = "grpc_google_iam_v1-0.13.1-py2.py3-none-any.whl", hash = "sha256:c3e86151a981811f30d5e7330f271cee53e73bb87755e88cc3b6f0c7b5fe374e"}, -] - -[package.dependencies] -googleapis-common-protos = {version = ">=1.56.0,<2.0.0dev", extras = ["grpc"]} -grpcio = ">=1.44.0,<2.0.0dev" -protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0dev" - -[[package]] -name = "grpcio" -version = "1.63.0" -description = "HTTP/2-based RPC framework" -optional = false -python-versions = ">=3.8" -files = [ - {file = "grpcio-1.63.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:2e93aca840c29d4ab5db93f94ed0a0ca899e241f2e8aec6334ab3575dc46125c"}, - {file = "grpcio-1.63.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:91b73d3f1340fefa1e1716c8c1ec9930c676d6b10a3513ab6c26004cb02d8b3f"}, - {file = "grpcio-1.63.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:b3afbd9d6827fa6f475a4f91db55e441113f6d3eb9b7ebb8fb806e5bb6d6bd0d"}, - {file = "grpcio-1.63.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8f3f6883ce54a7a5f47db43289a0a4c776487912de1a0e2cc83fdaec9685cc9f"}, - {file = "grpcio-1.63.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf8dae9cc0412cb86c8de5a8f3be395c5119a370f3ce2e69c8b7d46bb9872c8d"}, - {file = "grpcio-1.63.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:08e1559fd3b3b4468486b26b0af64a3904a8dbc78d8d936af9c1cf9636eb3e8b"}, - {file = "grpcio-1.63.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5c039ef01516039fa39da8a8a43a95b64e288f79f42a17e6c2904a02a319b357"}, - {file = "grpcio-1.63.0-cp310-cp310-win32.whl", hash = "sha256:ad2ac8903b2eae071055a927ef74121ed52d69468e91d9bcbd028bd0e554be6d"}, - {file = "grpcio-1.63.0-cp310-cp310-win_amd64.whl", hash = "sha256:b2e44f59316716532a993ca2966636df6fbe7be4ab6f099de6815570ebe4383a"}, - {file = "grpcio-1.63.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:f28f8b2db7b86c77916829d64ab21ff49a9d8289ea1564a2b2a3a8ed9ffcccd3"}, - {file = "grpcio-1.63.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:65bf975639a1f93bee63ca60d2e4951f1b543f498d581869922910a476ead2f5"}, - {file = "grpcio-1.63.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:b5194775fec7dc3dbd6a935102bb156cd2c35efe1685b0a46c67b927c74f0cfb"}, - {file = "grpcio-1.63.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4cbb2100ee46d024c45920d16e888ee5d3cf47c66e316210bc236d5bebc42b3"}, - {file = "grpcio-1.63.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ff737cf29b5b801619f10e59b581869e32f400159e8b12d7a97e7e3bdeee6a2"}, - {file = "grpcio-1.63.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cd1e68776262dd44dedd7381b1a0ad09d9930ffb405f737d64f505eb7f77d6c7"}, - {file = "grpcio-1.63.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:93f45f27f516548e23e4ec3fbab21b060416007dbe768a111fc4611464cc773f"}, - {file = "grpcio-1.63.0-cp311-cp311-win32.whl", hash = "sha256:878b1d88d0137df60e6b09b74cdb73db123f9579232c8456f53e9abc4f62eb3c"}, - {file = "grpcio-1.63.0-cp311-cp311-win_amd64.whl", hash = "sha256:756fed02dacd24e8f488f295a913f250b56b98fb793f41d5b2de6c44fb762434"}, - {file = "grpcio-1.63.0-cp312-cp312-linux_armv7l.whl", hash = 
"sha256:93a46794cc96c3a674cdfb59ef9ce84d46185fe9421baf2268ccb556f8f81f57"}, - {file = "grpcio-1.63.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:a7b19dfc74d0be7032ca1eda0ed545e582ee46cd65c162f9e9fc6b26ef827dc6"}, - {file = "grpcio-1.63.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:8064d986d3a64ba21e498b9a376cbc5d6ab2e8ab0e288d39f266f0fca169b90d"}, - {file = "grpcio-1.63.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:219bb1848cd2c90348c79ed0a6b0ea51866bc7e72fa6e205e459fedab5770172"}, - {file = "grpcio-1.63.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2d60cd1d58817bc5985fae6168d8b5655c4981d448d0f5b6194bbcc038090d2"}, - {file = "grpcio-1.63.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:9e350cb096e5c67832e9b6e018cf8a0d2a53b2a958f6251615173165269a91b0"}, - {file = "grpcio-1.63.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:56cdf96ff82e3cc90dbe8bac260352993f23e8e256e063c327b6cf9c88daf7a9"}, - {file = "grpcio-1.63.0-cp312-cp312-win32.whl", hash = "sha256:3a6d1f9ea965e750db7b4ee6f9fdef5fdf135abe8a249e75d84b0a3e0c668a1b"}, - {file = "grpcio-1.63.0-cp312-cp312-win_amd64.whl", hash = "sha256:d2497769895bb03efe3187fb1888fc20e98a5f18b3d14b606167dacda5789434"}, - {file = "grpcio-1.63.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:fdf348ae69c6ff484402cfdb14e18c1b0054ac2420079d575c53a60b9b2853ae"}, - {file = "grpcio-1.63.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a3abfe0b0f6798dedd2e9e92e881d9acd0fdb62ae27dcbbfa7654a57e24060c0"}, - {file = "grpcio-1.63.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:6ef0ad92873672a2a3767cb827b64741c363ebaa27e7f21659e4e31f4d750280"}, - {file = "grpcio-1.63.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b416252ac5588d9dfb8a30a191451adbf534e9ce5f56bb02cd193f12d8845b7f"}, - {file = "grpcio-1.63.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3b77eaefc74d7eb861d3ffbdf91b50a1bb1639514ebe764c47773b833fa2d91"}, - {file = "grpcio-1.63.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b005292369d9c1f80bf70c1db1c17c6c342da7576f1c689e8eee4fb0c256af85"}, - {file = "grpcio-1.63.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cdcda1156dcc41e042d1e899ba1f5c2e9f3cd7625b3d6ebfa619806a4c1aadda"}, - {file = "grpcio-1.63.0-cp38-cp38-win32.whl", hash = "sha256:01799e8649f9e94ba7db1aeb3452188048b0019dc37696b0f5ce212c87c560c3"}, - {file = "grpcio-1.63.0-cp38-cp38-win_amd64.whl", hash = "sha256:6a1a3642d76f887aa4009d92f71eb37809abceb3b7b5a1eec9c554a246f20e3a"}, - {file = "grpcio-1.63.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:75f701ff645858a2b16bc8c9fc68af215a8bb2d5a9b647448129de6e85d52bce"}, - {file = "grpcio-1.63.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:cacdef0348a08e475a721967f48206a2254a1b26ee7637638d9e081761a5ba86"}, - {file = "grpcio-1.63.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:0697563d1d84d6985e40ec5ec596ff41b52abb3fd91ec240e8cb44a63b895094"}, - {file = "grpcio-1.63.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6426e1fb92d006e47476d42b8f240c1d916a6d4423c5258ccc5b105e43438f61"}, - {file = "grpcio-1.63.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e48cee31bc5f5a31fb2f3b573764bd563aaa5472342860edcc7039525b53e46a"}, - {file = "grpcio-1.63.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:50344663068041b34a992c19c600236e7abb42d6ec32567916b87b4c8b8833b3"}, - {file = "grpcio-1.63.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:259e11932230d70ef24a21b9fb5bb947eb4703f57865a404054400ee92f42f5d"}, - {file = "grpcio-1.63.0-cp39-cp39-win32.whl", hash = "sha256:a44624aad77bf8ca198c55af811fd28f2b3eaf0a50ec5b57b06c034416ef2d0a"}, - {file = "grpcio-1.63.0-cp39-cp39-win_amd64.whl", hash = "sha256:166e5c460e5d7d4656ff9e63b13e1f6029b122104c1633d5f37eaea348d7356d"}, - {file = "grpcio-1.63.0.tar.gz", hash = "sha256:f3023e14805c61bc439fb40ca545ac3d5740ce66120a678a3c6c2c55b70343d1"}, -] - -[package.extras] -protobuf = ["grpcio-tools (>=1.63.0)"] - -[[package]] -name = "grpcio-health-checking" -version = "1.62.3" -description = "Standard Health Checking Service for gRPC" -optional = false -python-versions = ">=3.6" -files = [ - {file = "grpcio-health-checking-1.62.3.tar.gz", hash = "sha256:5074ba0ce8f0dcfe328408ec5c7551b2a835720ffd9b69dade7fa3e0dc1c7a93"}, - {file = "grpcio_health_checking-1.62.3-py3-none-any.whl", hash = "sha256:f29da7dd144d73b4465fe48f011a91453e9ff6c8af0d449254cf80021cab3e0d"}, -] - -[package.dependencies] -grpcio = ">=1.62.3" -protobuf = ">=4.21.6" - -[[package]] -name = "grpcio-status" -version = "1.62.3" -description = "Status proto mapping for gRPC" -optional = false -python-versions = ">=3.6" -files = [ - {file = "grpcio-status-1.62.3.tar.gz", hash = "sha256:289bdd7b2459794a12cf95dc0cb727bd4a1742c37bd823f760236c937e53a485"}, - {file = "grpcio_status-1.62.3-py3-none-any.whl", hash = "sha256:f9049b762ba8de6b1086789d8315846e094edac2c50beaf462338b301a8fd4b8"}, -] - -[package.dependencies] -googleapis-common-protos = ">=1.5.5" -grpcio = ">=1.62.3" -protobuf = ">=4.21.6" - -[[package]] -name = "grpcio-tools" -version = "1.62.3" -description = "Protobuf code generator for gRPC" -optional = false -python-versions = ">=3.7" -files = [ - {file = "grpcio-tools-1.62.3.tar.gz", hash = "sha256:7c7136015c3d62c3eef493efabaf9e3380e3e66d24ee8e94c01cb71377f57833"}, - {file = "grpcio_tools-1.62.3-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:2f968b049c2849540751ec2100ab05e8086c24bead769ca734fdab58698408c1"}, - {file = "grpcio_tools-1.62.3-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:0a8c0c4724ae9c2181b7dbc9b186df46e4f62cb18dc184e46d06c0ebeccf569e"}, - {file = "grpcio_tools-1.62.3-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5782883a27d3fae8c425b29a9d3dcf5f47d992848a1b76970da3b5a28d424b26"}, - {file = "grpcio_tools-1.62.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3d812daffd0c2d2794756bd45a353f89e55dc8f91eb2fc840c51b9f6be62667"}, - {file = "grpcio_tools-1.62.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b47d0dda1bdb0a0ba7a9a6de88e5a1ed61f07fad613964879954961e36d49193"}, - {file = "grpcio_tools-1.62.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ca246dffeca0498be9b4e1ee169b62e64694b0f92e6d0be2573e65522f39eea9"}, - {file = "grpcio_tools-1.62.3-cp310-cp310-win32.whl", hash = "sha256:6a56d344b0bab30bf342a67e33d386b0b3c4e65868ffe93c341c51e1a8853ca5"}, - {file = "grpcio_tools-1.62.3-cp310-cp310-win_amd64.whl", hash = "sha256:710fecf6a171dcbfa263a0a3e7070e0df65ba73158d4c539cec50978f11dad5d"}, - {file = "grpcio_tools-1.62.3-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:703f46e0012af83a36082b5f30341113474ed0d91e36640da713355cd0ea5d23"}, - {file = "grpcio_tools-1.62.3-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:7cc83023acd8bc72cf74c2edbe85b52098501d5b74d8377bfa06f3e929803492"}, - {file = "grpcio_tools-1.62.3-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:7ff7d58a45b75df67d25f8f144936a3e44aabd91afec833ee06826bd02b7fbe7"}, - {file = "grpcio_tools-1.62.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f2483ea232bd72d98a6dc6d7aefd97e5bc80b15cd909b9e356d6f3e326b6e43"}, - {file = "grpcio_tools-1.62.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:962c84b4da0f3b14b3cdb10bc3837ebc5f136b67d919aea8d7bb3fd3df39528a"}, - {file = "grpcio_tools-1.62.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8ad0473af5544f89fc5a1ece8676dd03bdf160fb3230f967e05d0f4bf89620e3"}, - {file = "grpcio_tools-1.62.3-cp311-cp311-win32.whl", hash = "sha256:db3bc9fa39afc5e4e2767da4459df82b095ef0cab2f257707be06c44a1c2c3e5"}, - {file = "grpcio_tools-1.62.3-cp311-cp311-win_amd64.whl", hash = "sha256:e0898d412a434e768a0c7e365acabe13ff1558b767e400936e26b5b6ed1ee51f"}, - {file = "grpcio_tools-1.62.3-cp312-cp312-macosx_10_10_universal2.whl", hash = "sha256:d102b9b21c4e1e40af9a2ab3c6d41afba6bd29c0aa50ca013bf85c99cdc44ac5"}, - {file = "grpcio_tools-1.62.3-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:0a52cc9444df978438b8d2332c0ca99000521895229934a59f94f37ed896b133"}, - {file = "grpcio_tools-1.62.3-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141d028bf5762d4a97f981c501da873589df3f7e02f4c1260e1921e565b376fa"}, - {file = "grpcio_tools-1.62.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47a5c093ab256dec5714a7a345f8cc89315cb57c298b276fa244f37a0ba507f0"}, - {file = "grpcio_tools-1.62.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:f6831fdec2b853c9daa3358535c55eed3694325889aa714070528cf8f92d7d6d"}, - {file = "grpcio_tools-1.62.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e02d7c1a02e3814c94ba0cfe43d93e872c758bd8fd5c2797f894d0c49b4a1dfc"}, - {file = "grpcio_tools-1.62.3-cp312-cp312-win32.whl", hash = "sha256:b881fd9505a84457e9f7e99362eeedd86497b659030cf57c6f0070df6d9c2b9b"}, - {file = "grpcio_tools-1.62.3-cp312-cp312-win_amd64.whl", hash = "sha256:11c625eebefd1fd40a228fc8bae385e448c7e32a6ae134e43cf13bbc23f902b7"}, - {file = "grpcio_tools-1.62.3-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:ec6fbded0c61afe6f84e3c2a43e6d656791d95747d6d28b73eff1af64108c434"}, - {file = "grpcio_tools-1.62.3-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:bfda6ee8990997a9df95c5606f3096dae65f09af7ca03a1e9ca28f088caca5cf"}, - {file = "grpcio_tools-1.62.3-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b77f9f9cee87cd798f0fe26b7024344d1b03a7cd2d2cba7035f8433b13986325"}, - {file = "grpcio_tools-1.62.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e02d3b96f2d0e4bab9ceaa30f37d4f75571e40c6272e95364bff3125a64d184"}, - {file = "grpcio_tools-1.62.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1da38070738da53556a4b35ab67c1b9884a5dd48fa2f243db35dc14079ea3d0c"}, - {file = "grpcio_tools-1.62.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ace43b26d88a58dcff16c20d23ff72b04d0a415f64d2820f4ff06b1166f50557"}, - {file = "grpcio_tools-1.62.3-cp37-cp37m-win_amd64.whl", hash = "sha256:350a80485e302daaa95d335a931f97b693e170e02d43767ab06552c708808950"}, - {file = "grpcio_tools-1.62.3-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:c3a1ac9d394f8e229eb28eec2e04b9a6f5433fa19c9d32f1cb6066e3c5114a1d"}, - {file = "grpcio_tools-1.62.3-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:11f363570dea661dde99e04a51bd108a5807b5df32a6f8bdf4860e34e94a4dbf"}, - {file = 
"grpcio_tools-1.62.3-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9ad9950119d8ae27634e68b7663cc8d340ae535a0f80d85a55e56a6973ab1f"}, - {file = "grpcio_tools-1.62.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c5d22b252dcef11dd1e0fbbe5bbfb9b4ae048e8880d33338215e8ccbdb03edc"}, - {file = "grpcio_tools-1.62.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:27cd9ef5c5d68d5ed104b6dcb96fe9c66b82050e546c9e255716903c3d8f0373"}, - {file = "grpcio_tools-1.62.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f4b1615adf67bd8bb71f3464146a6f9949972d06d21a4f5e87e73f6464d97f57"}, - {file = "grpcio_tools-1.62.3-cp38-cp38-win32.whl", hash = "sha256:e18e15287c31baf574fcdf8251fb7f997d64e96c6ecf467906e576da0a079af6"}, - {file = "grpcio_tools-1.62.3-cp38-cp38-win_amd64.whl", hash = "sha256:6c3064610826f50bd69410c63101954676edc703e03f9e8f978a135f1aaf97c1"}, - {file = "grpcio_tools-1.62.3-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:8e62cc7164b0b7c5128e637e394eb2ef3db0e61fc798e80c301de3b2379203ed"}, - {file = "grpcio_tools-1.62.3-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:c8ad5cce554e2fcaf8842dee5d9462583b601a3a78f8b76a153c38c963f58c10"}, - {file = "grpcio_tools-1.62.3-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ec279dcf3518201fc592c65002754f58a6b542798cd7f3ecd4af086422f33f29"}, - {file = "grpcio_tools-1.62.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c989246c2aebc13253f08be32538a4039a64e12d9c18f6d662d7aee641dc8b5"}, - {file = "grpcio_tools-1.62.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ca4f5eeadbb57cf03317d6a2857823239a63a59cc935f5bd6cf6e8b7af7a7ecc"}, - {file = "grpcio_tools-1.62.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0cb3a3436ac119cbd37a7d3331d9bdf85dad21a6ac233a3411dff716dcbf401e"}, - {file = "grpcio_tools-1.62.3-cp39-cp39-win32.whl", hash = "sha256:3eae6ea76d62fcac091e1f15c2dcedf1dc3f114f8df1a972a8a0745e89f4cf61"}, - {file = "grpcio_tools-1.62.3-cp39-cp39-win_amd64.whl", hash = "sha256:eec73a005443061f4759b71a056f745e3b000dc0dc125c9f20560232dfbcbd14"}, -] - -[package.dependencies] -grpcio = ">=1.62.3" -protobuf = ">=4.21.6,<5.0dev" -setuptools = "*" - -[[package]] -name = "gunicorn" -version = "22.0.0" -description = "WSGI HTTP Server for UNIX" -optional = false -python-versions = ">=3.7" -files = [ - {file = "gunicorn-22.0.0-py3-none-any.whl", hash = "sha256:350679f91b24062c86e386e198a15438d53a7a8207235a78ba1b53df4c4378d9"}, - {file = "gunicorn-22.0.0.tar.gz", hash = "sha256:4a0b436239ff76fb33f11c07a16482c521a7e09c1ce3cc293c2330afe01bec63"}, -] - -[package.dependencies] -packaging = "*" - -[package.extras] -eventlet = ["eventlet (>=0.24.1,!=0.36.0)"] -gevent = ["gevent (>=1.4.0)"] -setproctitle = ["setproctitle"] -testing = ["coverage", "eventlet", "gevent", "pytest", "pytest-cov"] -tornado = ["tornado (>=0.2)"] - -[[package]] -name = "h11" -version = "0.14.0" -description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" -optional = false -python-versions = ">=3.7" -files = [ - {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, - {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, -] - -[[package]] -name = "h2" -version = "4.1.0" -description = "HTTP/2 State-Machine based protocol implementation" -optional = false -python-versions = ">=3.6.1" -files = [ - {file = "h2-4.1.0-py3-none-any.whl", hash = 
"sha256:03a46bcf682256c95b5fd9e9a99c1323584c3eec6440d379b9903d709476bc6d"}, - {file = "h2-4.1.0.tar.gz", hash = "sha256:a83aca08fbe7aacb79fec788c9c0bac936343560ed9ec18b82a13a12c28d2abb"}, -] - -[package.dependencies] -hpack = ">=4.0,<5" -hyperframe = ">=6.0,<7" - -[[package]] -name = "hpack" -version = "4.0.0" -description = "Pure-Python HPACK header compression" -optional = false -python-versions = ">=3.6.1" -files = [ - {file = "hpack-4.0.0-py3-none-any.whl", hash = "sha256:84a076fad3dc9a9f8063ccb8041ef100867b1878b25ef0ee63847a5d53818a6c"}, - {file = "hpack-4.0.0.tar.gz", hash = "sha256:fc41de0c63e687ebffde81187a948221294896f6bdc0ae2312708df339430095"}, -] - -[[package]] -name = "html5lib" -version = "1.1" -description = "HTML parser based on the WHATWG HTML specification" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "html5lib-1.1-py2.py3-none-any.whl", hash = "sha256:0d78f8fde1c230e99fe37986a60526d7049ed4bf8a9fadbad5f00e22e58e041d"}, - {file = "html5lib-1.1.tar.gz", hash = "sha256:b2e5b40261e20f354d198eae92afc10d750afb487ed5e50f9c4eaf07c184146f"}, -] - -[package.dependencies] -six = ">=1.9" -webencodings = "*" - -[package.extras] -all = ["chardet (>=2.2)", "genshi", "lxml"] -chardet = ["chardet (>=2.2)"] -genshi = ["genshi"] -lxml = ["lxml"] - -[[package]] -name = "httpcore" -version = "1.0.5" -description = "A minimal low-level HTTP client." -optional = false -python-versions = ">=3.8" -files = [ - {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"}, - {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"}, -] - -[package.dependencies] -certifi = "*" -h11 = ">=0.13,<0.15" - -[package.extras] -asyncio = ["anyio (>=4.0,<5.0)"] -http2 = ["h2 (>=3,<5)"] -socks = ["socksio (==1.*)"] -trio = ["trio (>=0.22.0,<0.26.0)"] - -[[package]] -name = "httplib2" -version = "0.22.0" -description = "A comprehensive HTTP client library." -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "httplib2-0.22.0-py3-none-any.whl", hash = "sha256:14ae0a53c1ba8f3d37e9e27cf37eabb0fb9980f435ba405d546948b009dd64dc"}, - {file = "httplib2-0.22.0.tar.gz", hash = "sha256:d7a10bc5ef5ab08322488bde8c726eeee5c8618723fdb399597ec58f3d82df81"}, -] - -[package.dependencies] -pyparsing = {version = ">=2.4.2,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.0.2 || >3.0.2,<3.0.3 || >3.0.3,<4", markers = "python_version > \"3.0\""} - -[[package]] -name = "httptools" -version = "0.6.1" -description = "A collection of framework independent HTTP protocol utils." 
-optional = false -python-versions = ">=3.8.0" -files = [ - {file = "httptools-0.6.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d2f6c3c4cb1948d912538217838f6e9960bc4a521d7f9b323b3da579cd14532f"}, - {file = "httptools-0.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:00d5d4b68a717765b1fabfd9ca755bd12bf44105eeb806c03d1962acd9b8e563"}, - {file = "httptools-0.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:639dc4f381a870c9ec860ce5c45921db50205a37cc3334e756269736ff0aac58"}, - {file = "httptools-0.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e57997ac7fb7ee43140cc03664de5f268813a481dff6245e0075925adc6aa185"}, - {file = "httptools-0.6.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0ac5a0ae3d9f4fe004318d64b8a854edd85ab76cffbf7ef5e32920faef62f142"}, - {file = "httptools-0.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3f30d3ce413088a98b9db71c60a6ada2001a08945cb42dd65a9a9fe228627658"}, - {file = "httptools-0.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:1ed99a373e327f0107cb513b61820102ee4f3675656a37a50083eda05dc9541b"}, - {file = "httptools-0.6.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7a7ea483c1a4485c71cb5f38be9db078f8b0e8b4c4dc0210f531cdd2ddac1ef1"}, - {file = "httptools-0.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:85ed077c995e942b6f1b07583e4eb0a8d324d418954fc6af913d36db7c05a5a0"}, - {file = "httptools-0.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b0bb634338334385351a1600a73e558ce619af390c2b38386206ac6a27fecfc"}, - {file = "httptools-0.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d9ceb2c957320def533671fc9c715a80c47025139c8d1f3797477decbc6edd2"}, - {file = "httptools-0.6.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4f0f8271c0a4db459f9dc807acd0eadd4839934a4b9b892f6f160e94da309837"}, - {file = "httptools-0.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6a4f5ccead6d18ec072ac0b84420e95d27c1cdf5c9f1bc8fbd8daf86bd94f43d"}, - {file = "httptools-0.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:5cceac09f164bcba55c0500a18fe3c47df29b62353198e4f37bbcc5d591172c3"}, - {file = "httptools-0.6.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:75c8022dca7935cba14741a42744eee13ba05db00b27a4b940f0d646bd4d56d0"}, - {file = "httptools-0.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:48ed8129cd9a0d62cf4d1575fcf90fb37e3ff7d5654d3a5814eb3d55f36478c2"}, - {file = "httptools-0.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f58e335a1402fb5a650e271e8c2d03cfa7cea46ae124649346d17bd30d59c90"}, - {file = "httptools-0.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93ad80d7176aa5788902f207a4e79885f0576134695dfb0fefc15b7a4648d503"}, - {file = "httptools-0.6.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9bb68d3a085c2174c2477eb3ffe84ae9fb4fde8792edb7bcd09a1d8467e30a84"}, - {file = "httptools-0.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b512aa728bc02354e5ac086ce76c3ce635b62f5fbc32ab7082b5e582d27867bb"}, - {file = "httptools-0.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:97662ce7fb196c785344d00d638fc9ad69e18ee4bfb4000b35a52efe5adcc949"}, - {file = "httptools-0.6.1-cp38-cp38-macosx_10_9_universal2.whl", hash = 
"sha256:8e216a038d2d52ea13fdd9b9c9c7459fb80d78302b257828285eca1c773b99b3"}, - {file = "httptools-0.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3e802e0b2378ade99cd666b5bffb8b2a7cc8f3d28988685dc300469ea8dd86cb"}, - {file = "httptools-0.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4bd3e488b447046e386a30f07af05f9b38d3d368d1f7b4d8f7e10af85393db97"}, - {file = "httptools-0.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe467eb086d80217b7584e61313ebadc8d187a4d95bb62031b7bab4b205c3ba3"}, - {file = "httptools-0.6.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3c3b214ce057c54675b00108ac42bacf2ab8f85c58e3f324a4e963bbc46424f4"}, - {file = "httptools-0.6.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8ae5b97f690badd2ca27cbf668494ee1b6d34cf1c464271ef7bfa9ca6b83ffaf"}, - {file = "httptools-0.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:405784577ba6540fa7d6ff49e37daf104e04f4b4ff2d1ac0469eaa6a20fde084"}, - {file = "httptools-0.6.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:95fb92dd3649f9cb139e9c56604cc2d7c7bf0fc2e7c8d7fbd58f96e35eddd2a3"}, - {file = "httptools-0.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dcbab042cc3ef272adc11220517278519adf8f53fd3056d0e68f0a6f891ba94e"}, - {file = "httptools-0.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cf2372e98406efb42e93bfe10f2948e467edfd792b015f1b4ecd897903d3e8d"}, - {file = "httptools-0.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:678fcbae74477a17d103b7cae78b74800d795d702083867ce160fc202104d0da"}, - {file = "httptools-0.6.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e0b281cf5a125c35f7f6722b65d8542d2e57331be573e9e88bc8b0115c4a7a81"}, - {file = "httptools-0.6.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:95658c342529bba4e1d3d2b1a874db16c7cca435e8827422154c9da76ac4e13a"}, - {file = "httptools-0.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:7ebaec1bf683e4bf5e9fbb49b8cc36da482033596a415b3e4ebab5a4c0d7ec5e"}, - {file = "httptools-0.6.1.tar.gz", hash = "sha256:c6e26c30455600b95d94b1b836085138e82f177351454ee841c148f93a9bad5a"}, -] - -[package.extras] -test = ["Cython (>=0.29.24,<0.30.0)"] - -[[package]] -name = "httpx" -version = "0.27.0" -description = "The next generation HTTP client." -optional = false -python-versions = ">=3.8" -files = [ - {file = "httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5"}, - {file = "httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5"}, -] - -[package.dependencies] -anyio = "*" -certifi = "*" -h2 = {version = ">=3,<5", optional = true, markers = "extra == \"http2\""} -httpcore = "==1.*" -idna = "*" -sniffio = "*" - -[package.extras] -brotli = ["brotli", "brotlicffi"] -cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] -http2 = ["h2 (>=3,<5)"] -socks = ["socksio (==1.*)"] - -[[package]] -name = "httpx-sse" -version = "0.4.0" -description = "Consume Server-Sent Event (SSE) messages with HTTPX." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "httpx-sse-0.4.0.tar.gz", hash = "sha256:1e81a3a3070ce322add1d3529ed42eb5f70817f45ed6ec915ab753f961139721"}, - {file = "httpx_sse-0.4.0-py3-none-any.whl", hash = "sha256:f329af6eae57eaa2bdfd962b42524764af68075ea87370a2de920af5341e318f"}, -] - -[[package]] -name = "huggingface-hub" -version = "0.22.2" -description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "huggingface_hub-0.22.2-py3-none-any.whl", hash = "sha256:3429e25f38ccb834d310804a3b711e7e4953db5a9e420cc147a5e194ca90fd17"}, - {file = "huggingface_hub-0.22.2.tar.gz", hash = "sha256:32e9a9a6843c92f253ff9ca16b9985def4d80a93fb357af5353f770ef74a81be"}, -] - -[package.dependencies] -aiohttp = {version = "*", optional = true, markers = "extra == \"inference\""} -filelock = "*" -fsspec = ">=2023.5.0" -minijinja = {version = ">=1.0", optional = true, markers = "extra == \"inference\""} -packaging = ">=20.9" -pyyaml = ">=5.1" -requests = "*" -tqdm = ">=4.42.1" -typing-extensions = ">=3.7.4.3" - -[package.extras] -all = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "minijinja (>=1.0)", "mypy (==1.5.1)", "numpy", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.3.0)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] -cli = ["InquirerPy (==0.3.4)"] -dev = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "minijinja (>=1.0)", "mypy (==1.5.1)", "numpy", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.3.0)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] -fastai = ["fastai (>=2.4)", "fastcore (>=1.3.27)", "toml"] -hf-transfer = ["hf-transfer (>=0.1.4)"] -inference = ["aiohttp", "minijinja (>=1.0)"] -quality = ["mypy (==1.5.1)", "ruff (>=0.3.0)"] -tensorflow = ["graphviz", "pydot", "tensorflow"] -tensorflow-testing = ["keras (<3.0)", "tensorflow"] -testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "minijinja (>=1.0)", "numpy", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "soundfile", "urllib3 (<2.0)"] -torch = ["safetensors", "torch"] -typing = ["types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)"] - -[[package]] -name = "humanfriendly" -version = "10.0" -description = "Human friendly output for text interfaces using Python" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477"}, - {file = "humanfriendly-10.0.tar.gz", hash = "sha256:6b0b831ce8f15f7300721aa49829fc4e83921a9a301cc7f606be6686a2288ddc"}, -] - -[package.dependencies] -pyreadline3 = {version = "*", markers = "sys_platform == \"win32\" and python_version >= \"3.8\""} - -[[package]] -name = "humanize" -version = "4.10.0" -description = "Python humanize utilities" -optional = true -python-versions = ">=3.8" -files = [ - {file = 
"humanize-4.10.0-py3-none-any.whl", hash = "sha256:39e7ccb96923e732b5c2e27aeaa3b10a8dfeeba3eb965ba7b74a3eb0e30040a6"}, - {file = "humanize-4.10.0.tar.gz", hash = "sha256:06b6eb0293e4b85e8d385397c5868926820db32b9b654b932f57fa41c23c9978"}, -] - -[package.extras] -tests = ["freezegun", "pytest", "pytest-cov"] - -[[package]] -name = "hyperframe" -version = "6.0.1" -description = "HTTP/2 framing layer for Python" -optional = false -python-versions = ">=3.6.1" -files = [ - {file = "hyperframe-6.0.1-py3-none-any.whl", hash = "sha256:0ec6bafd80d8ad2195c4f03aacba3a8265e57bc4cff261e802bf39970ed02a15"}, - {file = "hyperframe-6.0.1.tar.gz", hash = "sha256:ae510046231dc8e9ecb1a6586f63d2347bf4c8905914aa84ba585ae85f28a914"}, -] - -[[package]] -name = "identify" -version = "2.6.0" -description = "File identification library for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "identify-2.6.0-py2.py3-none-any.whl", hash = "sha256:e79ae4406387a9d300332b5fd366d8994f1525e8414984e1a59e058b2eda2dd0"}, - {file = "identify-2.6.0.tar.gz", hash = "sha256:cb171c685bdc31bcc4c1734698736a7d5b6c8bf2e0c15117f4d469c8640ae5cf"}, -] - -[package.extras] -license = ["ukkonen"] - -[[package]] -name = "idna" -version = "3.7" -description = "Internationalized Domain Names in Applications (IDNA)" -optional = false -python-versions = ">=3.5" -files = [ - {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, - {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, -] - -[[package]] -name = "ijson" -version = "3.3.0" -description = "Iterative JSON parser with standard Python iterator interfaces" -optional = false -python-versions = "*" -files = [ - {file = "ijson-3.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7f7a5250599c366369fbf3bc4e176f5daa28eb6bc7d6130d02462ed335361675"}, - {file = "ijson-3.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f87a7e52f79059f9c58f6886c262061065eb6f7554a587be7ed3aa63e6b71b34"}, - {file = "ijson-3.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b73b493af9e947caed75d329676b1b801d673b17481962823a3e55fe529c8b8b"}, - {file = "ijson-3.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5576415f3d76290b160aa093ff968f8bf6de7d681e16e463a0134106b506f49"}, - {file = "ijson-3.3.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4e9ffe358d5fdd6b878a8a364e96e15ca7ca57b92a48f588378cef315a8b019e"}, - {file = "ijson-3.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8643c255a25824ddd0895c59f2319c019e13e949dc37162f876c41a283361527"}, - {file = "ijson-3.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:df3ab5e078cab19f7eaeef1d5f063103e1ebf8c26d059767b26a6a0ad8b250a3"}, - {file = "ijson-3.3.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3dc1fb02c6ed0bae1b4bf96971258bf88aea72051b6e4cebae97cff7090c0607"}, - {file = "ijson-3.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e9afd97339fc5a20f0542c971f90f3ca97e73d3050cdc488d540b63fae45329a"}, - {file = "ijson-3.3.0-cp310-cp310-win32.whl", hash = "sha256:844c0d1c04c40fd1b60f148dc829d3f69b2de789d0ba239c35136efe9a386529"}, - {file = "ijson-3.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:d654d045adafdcc6c100e8e911508a2eedbd2a1b5f93f930ba13ea67d7704ee9"}, - {file = "ijson-3.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:501dce8eaa537e728aa35810656aa00460a2547dcb60937c8139f36ec344d7fc"}, - 
{file = "ijson-3.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:658ba9cad0374d37b38c9893f4864f284cdcc7d32041f9808fba8c7bcaadf134"}, - {file = "ijson-3.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2636cb8c0f1023ef16173f4b9a233bcdb1df11c400c603d5f299fac143ca8d70"}, - {file = "ijson-3.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd174b90db68c3bcca273e9391934a25d76929d727dc75224bf244446b28b03b"}, - {file = "ijson-3.3.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:97a9aea46e2a8371c4cf5386d881de833ed782901ac9f67ebcb63bb3b7d115af"}, - {file = "ijson-3.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c594c0abe69d9d6099f4ece17763d53072f65ba60b372d8ba6de8695ce6ee39e"}, - {file = "ijson-3.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8e0ff16c224d9bfe4e9e6bd0395826096cda4a3ef51e6c301e1b61007ee2bd24"}, - {file = "ijson-3.3.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:0015354011303175eae7e2ef5136414e91de2298e5a2e9580ed100b728c07e51"}, - {file = "ijson-3.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:034642558afa57351a0ffe6de89e63907c4cf6849070cc10a3b2542dccda1afe"}, - {file = "ijson-3.3.0-cp311-cp311-win32.whl", hash = "sha256:192e4b65495978b0bce0c78e859d14772e841724d3269fc1667dc6d2f53cc0ea"}, - {file = "ijson-3.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:72e3488453754bdb45c878e31ce557ea87e1eb0f8b4fc610373da35e8074ce42"}, - {file = "ijson-3.3.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:988e959f2f3d59ebd9c2962ae71b97c0df58323910d0b368cc190ad07429d1bb"}, - {file = "ijson-3.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b2f73f0d0fce5300f23a1383d19b44d103bb113b57a69c36fd95b7c03099b181"}, - {file = "ijson-3.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0ee57a28c6bf523d7cb0513096e4eb4dac16cd935695049de7608ec110c2b751"}, - {file = "ijson-3.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0155a8f079c688c2ccaea05de1ad69877995c547ba3d3612c1c336edc12a3a5"}, - {file = "ijson-3.3.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ab00721304af1ae1afa4313ecfa1bf16b07f55ef91e4a5b93aeaa3e2bd7917c"}, - {file = "ijson-3.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40ee3821ee90be0f0e95dcf9862d786a7439bd1113e370736bfdf197e9765bfb"}, - {file = "ijson-3.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:da3b6987a0bc3e6d0f721b42c7a0198ef897ae50579547b0345f7f02486898f5"}, - {file = "ijson-3.3.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:63afea5f2d50d931feb20dcc50954e23cef4127606cc0ecf7a27128ed9f9a9e6"}, - {file = "ijson-3.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b5c3e285e0735fd8c5a26d177eca8b52512cdd8687ca86ec77a0c66e9c510182"}, - {file = "ijson-3.3.0-cp312-cp312-win32.whl", hash = "sha256:907f3a8674e489abdcb0206723e5560a5cb1fa42470dcc637942d7b10f28b695"}, - {file = "ijson-3.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:8f890d04ad33262d0c77ead53c85f13abfb82f2c8f078dfbf24b78f59534dfdd"}, - {file = "ijson-3.3.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b9d85a02e77ee8ea6d9e3fd5d515bcc3d798d9c1ea54817e5feb97a9bc5d52fe"}, - {file = "ijson-3.3.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6576cdc36d5a09b0c1a3d81e13a45d41a6763188f9eaae2da2839e8a4240bce"}, - {file = "ijson-3.3.0-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:e5589225c2da4bb732c9c370c5961c39a6db72cf69fb2a28868a5413ed7f39e6"}, - {file = "ijson-3.3.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad04cf38164d983e85f9cba2804566c0160b47086dcca4cf059f7e26c5ace8ca"}, - {file = "ijson-3.3.0-cp36-cp36m-musllinux_1_2_aarch64.whl", hash = "sha256:a3b730ef664b2ef0e99dec01b6573b9b085c766400af363833e08ebc1e38eb2f"}, - {file = "ijson-3.3.0-cp36-cp36m-musllinux_1_2_i686.whl", hash = "sha256:4690e3af7b134298055993fcbea161598d23b6d3ede11b12dca6815d82d101d5"}, - {file = "ijson-3.3.0-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:aaa6bfc2180c31a45fac35d40e3312a3d09954638ce0b2e9424a88e24d262a13"}, - {file = "ijson-3.3.0-cp36-cp36m-win32.whl", hash = "sha256:44367090a5a876809eb24943f31e470ba372aaa0d7396b92b953dda953a95d14"}, - {file = "ijson-3.3.0-cp36-cp36m-win_amd64.whl", hash = "sha256:7e2b3e9ca957153557d06c50a26abaf0d0d6c0ddf462271854c968277a6b5372"}, - {file = "ijson-3.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:47c144117e5c0e2babb559bc8f3f76153863b8dd90b2d550c51dab5f4b84a87f"}, - {file = "ijson-3.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29ce02af5fbf9ba6abb70765e66930aedf73311c7d840478f1ccecac53fefbf3"}, - {file = "ijson-3.3.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4ac6c3eeed25e3e2cb9b379b48196413e40ac4e2239d910bb33e4e7f6c137745"}, - {file = "ijson-3.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d92e339c69b585e7b1d857308ad3ca1636b899e4557897ccd91bb9e4a56c965b"}, - {file = "ijson-3.3.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:8c85447569041939111b8c7dbf6f8fa7a0eb5b2c4aebb3c3bec0fb50d7025121"}, - {file = "ijson-3.3.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:542c1e8fddf082159a5d759ee1412c73e944a9a2412077ed00b303ff796907dc"}, - {file = "ijson-3.3.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:30cfea40936afb33b57d24ceaf60d0a2e3d5c1f2335ba2623f21d560737cc730"}, - {file = "ijson-3.3.0-cp37-cp37m-win32.whl", hash = "sha256:6b661a959226ad0d255e49b77dba1d13782f028589a42dc3172398dd3814c797"}, - {file = "ijson-3.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:0b003501ee0301dbf07d1597482009295e16d647bb177ce52076c2d5e64113e0"}, - {file = "ijson-3.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:3e8d8de44effe2dbd0d8f3eb9840344b2d5b4cc284a14eb8678aec31d1b6bea8"}, - {file = "ijson-3.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9cd5c03c63ae06d4f876b9844c5898d0044c7940ff7460db9f4cd984ac7862b5"}, - {file = "ijson-3.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:04366e7e4a4078d410845e58a2987fd9c45e63df70773d7b6e87ceef771b51ee"}, - {file = "ijson-3.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de7c1ddb80fa7a3ab045266dca169004b93f284756ad198306533b792774f10a"}, - {file = "ijson-3.3.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8851584fb931cffc0caa395f6980525fd5116eab8f73ece9d95e6f9c2c326c4c"}, - {file = "ijson-3.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdcfc88347fd981e53c33d832ce4d3e981a0d696b712fbcb45dcc1a43fe65c65"}, - {file = "ijson-3.3.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:3917b2b3d0dbbe3296505da52b3cb0befbaf76119b2edaff30bd448af20b5400"}, - {file = "ijson-3.3.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:e10c14535abc7ddf3fd024aa36563cd8ab5d2bb6234a5d22c77c30e30fa4fb2b"}, - {file = "ijson-3.3.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = 
"sha256:3aba5c4f97f4e2ce854b5591a8b0711ca3b0c64d1b253b04ea7b004b0a197ef6"}, - {file = "ijson-3.3.0-cp38-cp38-win32.whl", hash = "sha256:b325f42e26659df1a0de66fdb5cde8dd48613da9c99c07d04e9fb9e254b7ee1c"}, - {file = "ijson-3.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:ff835906f84451e143f31c4ce8ad73d83ef4476b944c2a2da91aec8b649570e1"}, - {file = "ijson-3.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3c556f5553368dff690c11d0a1fb435d4ff1f84382d904ccc2dc53beb27ba62e"}, - {file = "ijson-3.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e4396b55a364a03ff7e71a34828c3ed0c506814dd1f50e16ebed3fc447d5188e"}, - {file = "ijson-3.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e6850ae33529d1e43791b30575070670070d5fe007c37f5d06aebc1dd152ab3f"}, - {file = "ijson-3.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36aa56d68ea8def26778eb21576ae13f27b4a47263a7a2581ab2ef58b8de4451"}, - {file = "ijson-3.3.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7ec759c4a0fc820ad5dc6a58e9c391e7b16edcb618056baedbedbb9ea3b1524"}, - {file = "ijson-3.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b51bab2c4e545dde93cb6d6bb34bf63300b7cd06716f195dd92d9255df728331"}, - {file = "ijson-3.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:92355f95a0e4da96d4c404aa3cff2ff033f9180a9515f813255e1526551298c1"}, - {file = "ijson-3.3.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:8795e88adff5aa3c248c1edce932db003d37a623b5787669ccf205c422b91e4a"}, - {file = "ijson-3.3.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:8f83f553f4cde6d3d4eaf58ec11c939c94a0ec545c5b287461cafb184f4b3a14"}, - {file = "ijson-3.3.0-cp39-cp39-win32.whl", hash = "sha256:ead50635fb56577c07eff3e557dac39533e0fe603000684eea2af3ed1ad8f941"}, - {file = "ijson-3.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:c8a9befb0c0369f0cf5c1b94178d0d78f66d9cebb9265b36be6e4f66236076b8"}, - {file = "ijson-3.3.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2af323a8aec8a50fa9effa6d640691a30a9f8c4925bd5364a1ca97f1ac6b9b5c"}, - {file = "ijson-3.3.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f64f01795119880023ba3ce43072283a393f0b90f52b66cc0ea1a89aa64a9ccb"}, - {file = "ijson-3.3.0-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a716e05547a39b788deaf22725490855337fc36613288aa8ae1601dc8c525553"}, - {file = "ijson-3.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:473f5d921fadc135d1ad698e2697025045cd8ed7e5e842258295012d8a3bc702"}, - {file = "ijson-3.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:dd26b396bc3a1e85f4acebeadbf627fa6117b97f4c10b177d5779577c6607744"}, - {file = "ijson-3.3.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:25fd49031cdf5fd5f1fd21cb45259a64dad30b67e64f745cc8926af1c8c243d3"}, - {file = "ijson-3.3.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b72178b1e565d06ab19319965022b36ef41bcea7ea153b32ec31194bec032a2"}, - {file = "ijson-3.3.0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d0b6b637d05dbdb29d0bfac2ed8425bb369e7af5271b0cc7cf8b801cb7360c2"}, - {file = "ijson-3.3.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5378d0baa59ae422905c5f182ea0fd74fe7e52a23e3821067a7d58c8306b2191"}, - {file = "ijson-3.3.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:99f5c8ab048ee4233cc4f2b461b205cbe01194f6201018174ac269bf09995749"}, 
- {file = "ijson-3.3.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:45ff05de889f3dc3d37a59d02096948ce470699f2368b32113954818b21aa74a"}, - {file = "ijson-3.3.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1efb521090dd6cefa7aafd120581947b29af1713c902ff54336b7c7130f04c47"}, - {file = "ijson-3.3.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:87c727691858fd3a1c085d9980d12395517fcbbf02c69fbb22dede8ee03422da"}, - {file = "ijson-3.3.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0420c24e50389bc251b43c8ed379ab3e3ba065ac8262d98beb6735ab14844460"}, - {file = "ijson-3.3.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:8fdf3721a2aa7d96577970f5604bd81f426969c1822d467f07b3d844fa2fecc7"}, - {file = "ijson-3.3.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:891f95c036df1bc95309951940f8eea8537f102fa65715cdc5aae20b8523813b"}, - {file = "ijson-3.3.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed1336a2a6e5c427f419da0154e775834abcbc8ddd703004108121c6dd9eba9d"}, - {file = "ijson-3.3.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f0c819f83e4f7b7f7463b2dc10d626a8be0c85fbc7b3db0edc098c2b16ac968e"}, - {file = "ijson-3.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33afc25057377a6a43c892de34d229a86f89ea6c4ca3dd3db0dcd17becae0dbb"}, - {file = "ijson-3.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7914d0cf083471856e9bc2001102a20f08e82311dfc8cf1a91aa422f9414a0d6"}, - {file = "ijson-3.3.0.tar.gz", hash = "sha256:7f172e6ba1bee0d4c8f8ebd639577bfe429dee0f3f96775a067b8bae4492d8a0"}, -] - -[[package]] -name = "importlib-metadata" -version = "7.1.0" -description = "Read metadata from Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "importlib_metadata-7.1.0-py3-none-any.whl", hash = "sha256:30962b96c0c223483ed6cc7280e7f0199feb01a0e40cfae4d4450fc6fab1f570"}, - {file = "importlib_metadata-7.1.0.tar.gz", hash = "sha256:b78938b926ee8d5f020fc4772d487045805a55ddbad2ecf21c6d60938dc7fcd2"}, -] - -[package.dependencies] -zipp = ">=0.5" - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] - -[[package]] -name = "importlib-resources" -version = "6.4.2" -description = "Read resources from Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "importlib_resources-6.4.2-py3-none-any.whl", hash = "sha256:8bba8c54a8a3afaa1419910845fa26ebd706dc716dd208d9b158b4b6966f5c5c"}, - {file = "importlib_resources-6.4.2.tar.gz", hash = "sha256:6cbfbefc449cc6e2095dd184691b7a12a04f40bc75dd4c55d31c34f174cdf57a"}, -] - -[package.extras] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -test = ["jaraco.test (>=5.4)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)", "zipp (>=3.17)"] - -[[package]] -name = "inflection" -version = "0.5.1" -description = "A port of Ruby on Rails inflector to Python" -optional = false -python-versions = 
">=3.5" -files = [ - {file = "inflection-0.5.1-py2.py3-none-any.whl", hash = "sha256:f38b2b640938a4f35ade69ac3d053042959b62a0f1076a5bbaa1b9526605a8a2"}, - {file = "inflection-0.5.1.tar.gz", hash = "sha256:1a29730d366e996aaacffb2f1f1cb9593dc38e2ddd30c91250c6dde09ea9b417"}, -] - -[[package]] -name = "iniconfig" -version = "2.0.0" -description = "brain-dead simple config-ini parsing" -optional = false -python-versions = ">=3.7" -files = [ - {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, - {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, -] - -[[package]] -name = "instructor" -version = "1.3.3" -description = "structured outputs for llm" -optional = false -python-versions = "<4.0,>=3.9" -files = [ - {file = "instructor-1.3.3-py3-none-any.whl", hash = "sha256:94b114b39a1181fa348d162e6e4ff5c4d985324736020c0233fed5d4db444dbd"}, - {file = "instructor-1.3.3.tar.gz", hash = "sha256:e27bf3c1187b0b2130ea38ecde7c2b4f571d6a5ce1397fb15c27490988b45441"}, -] - -[package.dependencies] -aiohttp = ">=3.9.1,<4.0.0" -docstring-parser = ">=0.16,<0.17" -jiter = ">=0.4.1,<0.5.0" -openai = ">=1.1.0,<2.0.0" -pydantic = ">=2.7.0,<3.0.0" -pydantic-core = ">=2.18.0,<3.0.0" -rich = ">=13.7.0,<14.0.0" -tenacity = ">=8.2.3,<9.0.0" -typer = ">=0.9.0,<1.0.0" - -[package.extras] -anthropic = ["anthropic (>=0.27.0,<0.28.0)", "xmltodict (>=0.13.0,<0.14.0)"] -cohere = ["cohere (>=5.1.8,<6.0.0)"] -google-generativeai = ["google-generativeai (>=0.5.4,<0.6.0)"] -groq = ["groq (>=0.4.2,<0.5.0)"] -litellm = ["litellm (>=1.35.31,<2.0.0)"] -mistralai = ["mistralai (>=0.1.8,<0.2.0)"] -test-docs = ["anthropic (>=0.27.0,<0.28.0)", "cohere (>=5.1.8,<6.0.0)", "diskcache (>=5.6.3,<6.0.0)", "fastapi (>=0.109.2,<0.110.0)", "groq (>=0.4.2,<0.5.0)", "litellm (>=1.35.31,<2.0.0)", "mistralai (>=0.1.8,<0.2.0)", "pandas (>=2.2.0,<3.0.0)", "pydantic_extra_types (>=2.6.0,<3.0.0)", "redis (>=5.0.1,<6.0.0)", "tabulate (>=0.9.0,<0.10.0)"] -vertexai = ["google-cloud-aiplatform (>=1.52.0,<2.0.0)", "jsonref (>=1.1.0,<2.0.0)"] - -[[package]] -name = "ipykernel" -version = "6.29.5" -description = "IPython Kernel for Jupyter" -optional = false -python-versions = ">=3.8" -files = [ - {file = "ipykernel-6.29.5-py3-none-any.whl", hash = "sha256:afdb66ba5aa354b09b91379bac28ae4afebbb30e8b39510c9690afb7a10421b5"}, - {file = "ipykernel-6.29.5.tar.gz", hash = "sha256:f093a22c4a40f8828f8e330a9c297cb93dcab13bd9678ded6de8e5cf81c56215"}, -] - -[package.dependencies] -appnope = {version = "*", markers = "platform_system == \"Darwin\""} -comm = ">=0.1.1" -debugpy = ">=1.6.5" -ipython = ">=7.23.1" -jupyter-client = ">=6.1.12" -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" -matplotlib-inline = ">=0.1" -nest-asyncio = "*" -packaging = "*" -psutil = "*" -pyzmq = ">=24" -tornado = ">=6.1" -traitlets = ">=5.4.0" - -[package.extras] -cov = ["coverage[toml]", "curio", "matplotlib", "pytest-cov", "trio"] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "trio"] -pyqt5 = ["pyqt5"] -pyside6 = ["pyside6"] -test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.23.5)", "pytest-cov", "pytest-timeout"] - -[[package]] -name = "ipython" -version = "8.26.0" -description = "IPython: Productive Interactive Computing" -optional = false -python-versions = ">=3.10" -files = [ - {file = "ipython-8.26.0-py3-none-any.whl", hash = 
"sha256:e6b347c27bdf9c32ee9d31ae85defc525755a1869f14057e900675b9e8d6e6ff"}, - {file = "ipython-8.26.0.tar.gz", hash = "sha256:1cec0fbba8404af13facebe83d04436a7434c7400e59f47acf467c64abd0956c"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "sys_platform == \"win32\""} -decorator = "*" -exceptiongroup = {version = "*", markers = "python_version < \"3.11\""} -jedi = ">=0.16" -matplotlib-inline = "*" -pexpect = {version = ">4.3", markers = "sys_platform != \"win32\" and sys_platform != \"emscripten\""} -prompt-toolkit = ">=3.0.41,<3.1.0" -pygments = ">=2.4.0" -stack-data = "*" -traitlets = ">=5.13.0" -typing-extensions = {version = ">=4.6", markers = "python_version < \"3.12\""} - -[package.extras] -all = ["ipython[black,doc,kernel,matplotlib,nbconvert,nbformat,notebook,parallel,qtconsole]", "ipython[test,test-extra]"] -black = ["black"] -doc = ["docrepr", "exceptiongroup", "intersphinx-registry", "ipykernel", "ipython[test]", "matplotlib", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "sphinxcontrib-jquery", "tomli", "typing-extensions"] -kernel = ["ipykernel"] -matplotlib = ["matplotlib"] -nbconvert = ["nbconvert"] -nbformat = ["nbformat"] -notebook = ["ipywidgets", "notebook"] -parallel = ["ipyparallel"] -qtconsole = ["qtconsole"] -test = ["packaging", "pickleshare", "pytest", "pytest-asyncio (<0.22)", "testpath"] -test-extra = ["curio", "ipython[test]", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.23)", "pandas", "trio"] - -[[package]] -name = "jaraco-context" -version = "5.3.0" -description = "Useful decorators and context managers" -optional = false -python-versions = ">=3.8" -files = [ - {file = "jaraco.context-5.3.0-py3-none-any.whl", hash = "sha256:3e16388f7da43d384a1a7cd3452e72e14732ac9fe459678773a3608a812bf266"}, - {file = "jaraco.context-5.3.0.tar.gz", hash = "sha256:c2f67165ce1f9be20f32f650f25d8edfc1646a8aeee48ae06fb35f90763576d2"}, -] - -[package.dependencies] -"backports.tarfile" = {version = "*", markers = "python_version < \"3.12\""} - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["portend", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)"] - -[[package]] -name = "jedi" -version = "0.19.1" -description = "An autocompletion tool for Python that can be used for text editors." 
-optional = false -python-versions = ">=3.6" -files = [ - {file = "jedi-0.19.1-py2.py3-none-any.whl", hash = "sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0"}, - {file = "jedi-0.19.1.tar.gz", hash = "sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd"}, -] - -[package.dependencies] -parso = ">=0.8.3,<0.9.0" - -[package.extras] -docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] -qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] -testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] - -[[package]] -name = "jinja2" -version = "3.1.4" -description = "A very fast and expressive template engine." -optional = false -python-versions = ">=3.7" -files = [ - {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, - {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, -] - -[package.dependencies] -MarkupSafe = ">=2.0" - -[package.extras] -i18n = ["Babel (>=2.7)"] - -[[package]] -name = "jiter" -version = "0.4.2" -description = "Fast iterable JSON parser." -optional = false -python-versions = ">=3.8" -files = [ - {file = "jiter-0.4.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:c2b003ff58d14f5e182b875acd5177b2367245c19a03be9a2230535d296f7550"}, - {file = "jiter-0.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b48c77c25f094707731cd5bad6b776046846b60a27ee20efc8fadfb10a89415f"}, - {file = "jiter-0.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f50ad6b172bde4d45f4d4ea10c49282a337b8bb735afc99763dfa55ea84a743"}, - {file = "jiter-0.4.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:95f6001e86f525fbbc9706db2078dc22be078b0950de55b92d37041930f5f940"}, - {file = "jiter-0.4.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:16646ef23b62b007de80460d303ebb2d81e355dac9389c787cec87cdd7ffef2f"}, - {file = "jiter-0.4.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b4e847c13b0bf1255c711a92330e7a8cb8b5cdd1e37d7db309627bcdd3367ff"}, - {file = "jiter-0.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c536589be60e4c5f2b20fadc4db7e9f55d4c9df3551f29ddf1c4a18dcc9dd54"}, - {file = "jiter-0.4.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b3b2763996167830889a854b4ded30bb90897f9b76be78069c50c3ec4540950e"}, - {file = "jiter-0.4.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:675e8ab98c99495091af6b6e9bf2b6353bcf81f25ab6ce27d36127e315b4505d"}, - {file = "jiter-0.4.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e48e43d9d999aaf55f53406b8846ff8cbe3e47ee4b9dc37e5a10a65ce760809f"}, - {file = "jiter-0.4.2-cp310-none-win32.whl", hash = "sha256:881b6e67c50bc36acb3570eda693763c8cd77d590940e06fa6d325d0da52ec1b"}, - {file = "jiter-0.4.2-cp310-none-win_amd64.whl", hash 
= "sha256:bb8f7b43259efc6add0d721ade2953e064b24e2026d26d979bc09ec080844cef"}, - {file = "jiter-0.4.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:24ad336ac47f274fa83f6fbedcabff9d3387c80f67c66b992688e6a8ba2c47e9"}, - {file = "jiter-0.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fc392a220095730afe365ce1516f2f88bb085a2fd29ea191be9c6e3c71713d9a"}, - {file = "jiter-0.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1fdc408de36c81460896de0176f2f7b9f3574dcd35693a0b2c00f4ca34c98e4"}, - {file = "jiter-0.4.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c10ad76722ee6a8c820b0db06a793c08b7d679e5201b9563015bd1e06c959a09"}, - {file = "jiter-0.4.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dbb46d1e9c82bba87f0cbda38413e49448a7df35b1e55917124bff9f38974a23"}, - {file = "jiter-0.4.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:194e28ef4b5f3b61408cb2ee6b6dcbcdb0c9063d01b92b01345b7605692849f5"}, - {file = "jiter-0.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f0a447533eccd62748a727e058efa10a8d7cf1de8ffe1a4d705ecb41dad9090"}, - {file = "jiter-0.4.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5f7704d7260bbb88cca3453951af739589132b26e896a3144fa2dae2263716d7"}, - {file = "jiter-0.4.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:01427458bc9550f2eda09d425755330e7d0eb09adce099577433bebf05d28d59"}, - {file = "jiter-0.4.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:159b8416879c0053b17c352f70b67b749ef5b2924c6154318ecf71918aab0905"}, - {file = "jiter-0.4.2-cp311-none-win32.whl", hash = "sha256:f2445234acfb79048ce1a0d5d0e181abb9afd9e4a29d8d9988fe26cc5773a81a"}, - {file = "jiter-0.4.2-cp311-none-win_amd64.whl", hash = "sha256:e15a65f233b6b0e5ac10ddf3b97ceb18aa9ffba096259961641d78b4ee321bd5"}, - {file = "jiter-0.4.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:d61d59521aea9745447ce50f74d39a16ef74ec9d6477d9350d77e75a3d774ad2"}, - {file = "jiter-0.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eef607dc0acc251923427808dbd017f1998ae3c1a0430a261527aa5cbb3a942"}, - {file = "jiter-0.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:af6bf39954646e374fc47429c656372ac731a6a26b644158a5a84bcdbed33a47"}, - {file = "jiter-0.4.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8f509d23606e476852ee46a2b65b5c4ad3905f17424d9cc19c1dffa1c94ba3c6"}, - {file = "jiter-0.4.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59672774daa44ee140aada0c781c82bee4d9ac5e522966186cfb6b3c217d8a51"}, - {file = "jiter-0.4.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:24a0458efac5afeca254cf557b8a654e17013075a69905c78f88d557f129d871"}, - {file = "jiter-0.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8860766d1c293e75c1bb4e25b74fa987e3adf199cac3f5f9e6e49c2bebf092f"}, - {file = "jiter-0.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a109f3281b72bbf4921fe43db1005c004a38559ca0b6c4985add81777dfe0a44"}, - {file = "jiter-0.4.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:faa7e667454b77ad2f0ef87db39f4944de759617aadf210ea2b73f26bb24755f"}, - {file = "jiter-0.4.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3512f8b00cafb6780b427cb6282800d2bf8277161d9c917830661bd4ed1d3528"}, - {file = "jiter-0.4.2-cp312-none-win32.whl", hash = 
"sha256:853b35d508ee5b66d06630473c1c0b7bb5e29bf4785c9d2202437116c94f7e21"}, - {file = "jiter-0.4.2-cp312-none-win_amd64.whl", hash = "sha256:4a3a8197784278eb8b24cb02c45e1cad67c2ce5b5b758adfb19b87f74bbdff9c"}, - {file = "jiter-0.4.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:ca2a4d750aed3154b89f2efb148609fc985fad8db739460797aaf9b478acedda"}, - {file = "jiter-0.4.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0e6c304b3cc6896256727e1fb8991c7179a345eca8224e201795e9cacf4683b0"}, - {file = "jiter-0.4.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7cc34ac708ae1750d077e490321761ec4b9a055b994cbdd1d6fbd37099e4aa7b"}, - {file = "jiter-0.4.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8c93383875ab8d2e4f760aaff335b4a12ff32d4f9cf49c4498d657734f611466"}, - {file = "jiter-0.4.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce197ee044add576afca0955b42142dd0312639adb6ebadbdbe4277f2855614f"}, - {file = "jiter-0.4.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a427716813ff65480ca5b5117cfa099f49b49cd38051f8609bd0d5493013ca0"}, - {file = "jiter-0.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:479990218353356234669e70fac53e5eb6f739a10db25316171aede2c97d9364"}, - {file = "jiter-0.4.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d35a91ec5ac74cf33234c431505299fa91c0a197c2dbafd47400aca7c69489d4"}, - {file = "jiter-0.4.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b27189847193708c94ad10ca0d891309342ae882725d2187cf5d2db02bde8d1b"}, - {file = "jiter-0.4.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:76c255308cd1093fb411a03756b7bb220e48d4a98c30cbc79ed448bf3978e27d"}, - {file = "jiter-0.4.2-cp38-none-win32.whl", hash = "sha256:bb77438060bad49cc251941e6701b31138365c8a0ddaf10cdded2fcc6dd30701"}, - {file = "jiter-0.4.2-cp38-none-win_amd64.whl", hash = "sha256:ce858af19f7ce0d4b51c9f6c0c9d08f1e9dcef1986c5875efd0674a7054292ca"}, - {file = "jiter-0.4.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:6128838a2f357b3921b2a3242d5dc002ae4255ecc8f9f05c20d56d7d2d79c5ad"}, - {file = "jiter-0.4.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f2420cebb9ba856cb57dcab1d2d8def949b464b0db09c22a4e4dbd52fff7b200"}, - {file = "jiter-0.4.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5d13d8128e853b320e00bb18bd4bb8b136cc0936091dc87633648fc688eb705"}, - {file = "jiter-0.4.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eba5d6e54f149c508ba88677f97d3dc7dd75e9980d234bbac8027ac6db0763a3"}, - {file = "jiter-0.4.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0fad5d64af0bc0545237419bf4150d8de56f0bd217434bdd1a59730327252bef"}, - {file = "jiter-0.4.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d179e7bca89cf5719bd761dd37a341ff0f98199ecaa9c14af09792e47e977cc"}, - {file = "jiter-0.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36353caee9f103d8ee7bda077f6400505b0f370e27eabcab33a33d21de12a2a6"}, - {file = "jiter-0.4.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dd146c25bce576ca5db64fc7eccb8862af00f1f0e30108796953f12a53660e4c"}, - {file = "jiter-0.4.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:14b7c08cadbcd703041c66dc30e24e17de2f340281cac0e69374223ecf153aa4"}, - {file = "jiter-0.4.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:a90f1a8b3d29aea198f8ea2b01148276ced8056e5103f32525266b3d880e65c9"}, - {file = "jiter-0.4.2-cp39-none-win32.whl", hash = "sha256:25b174997c780337b61ae57b1723455eecae9a17a9659044fd3c3b369190063f"}, - {file = "jiter-0.4.2-cp39-none-win_amd64.whl", hash = "sha256:bef62cea18521c5b99368147040c7e560c55098a35c93456f110678a2d34189a"}, - {file = "jiter-0.4.2.tar.gz", hash = "sha256:29b9d44f23f0c05f46d482f4ebf03213ee290d77999525d0975a17f875bf1eea"}, -] - -[[package]] -name = "jmespath" -version = "1.0.1" -description = "JSON Matching Expressions" -optional = false -python-versions = ">=3.7" -files = [ - {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, - {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, -] - -[[package]] -name = "joblib" -version = "1.4.2" -description = "Lightweight pipelining with Python functions" -optional = false -python-versions = ">=3.8" -files = [ - {file = "joblib-1.4.2-py3-none-any.whl", hash = "sha256:06d478d5674cbc267e7496a410ee875abd68e4340feff4490bcb7afb88060ae6"}, - {file = "joblib-1.4.2.tar.gz", hash = "sha256:2382c5816b2636fbd20a09e0f4e9dad4736765fdfb7dca582943b9c1366b3f0e"}, -] - -[[package]] -name = "jq" -version = "1.7.0" -description = "jq is a lightweight and flexible JSON processor." -optional = false -python-versions = ">=3.5" -files = [ - {file = "jq-1.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d8fae014fa8b2704322a5baa39c112176d9acb71e22ebdb8e21c1c864ecff654"}, - {file = "jq-1.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:40fe068d1fdf2c712885b69be90ddb3e61bca3e4346ab3994641a4fbbeb7be82"}, - {file = "jq-1.7.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8ec105a0057f2f922d195e1d75d4b0ae41c4b38655ead04d1a3a47988fcb1939"}, - {file = "jq-1.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38e2041ca578275334eff9e1d913ae386210345e5ae71cd9c16e3f208dc81deb"}, - {file = "jq-1.7.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce1df1b6fffeeeb265d4ea3397e9875ab170ba5a7af6b7997c2fd755934df065"}, - {file = "jq-1.7.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:05ebdaa868f068967d9e7cbf76e59e61fbdafa565dbc3579c387fb1f248592bb"}, - {file = "jq-1.7.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b3f916cb812fcd26bb1b006634d9c0eff240090196ca0ebb5d229b344f624e53"}, - {file = "jq-1.7.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9ad7749a16a16bafd6cebafd5e40990b641b4b6b7b661326864677effc44a500"}, - {file = "jq-1.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2e99ea17b708f55e8bed2f4f68c022119184b17eb15987b384db12e8b6702bd5"}, - {file = "jq-1.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:76735cd19de65c15964d330adbc2c84add8e55dea35ebfe17b9acf88a06a7d57"}, - {file = "jq-1.7.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6b841ddd9089429fc0621d07d1c34ff24f7d6a6245c10125b82806f61e36ae8"}, - {file = "jq-1.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d6b1fc2515b7be92195d50b68f82329cc0250c7fbca790b887d74902ba33870"}, - {file = "jq-1.7.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb6546a57a3ceeed41961be2f1417b4e7a5b3170cca7bb82f5974d2ba9acaab6"}, - {file = "jq-1.7.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:3427ad0f377f188953958e36b76167c8d11b8c8c61575c22deafa4aba58d601f"}, - {file = "jq-1.7.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:79b9603219fa5082df97d265d71c426613286bd0e5378a8739ce39056fa1e2dc"}, - {file = "jq-1.7.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a2981a24765a747163e0daa23648372b72a006e727895b95d032632aa51094bd"}, - {file = "jq-1.7.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a0cc15b2ed511a1a8784c7c7dc07781e28d84a65934062de52487578732e0514"}, - {file = "jq-1.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:90032c2c4e710157d333d166818ede8b9c8ef0f697e59c9427304edc47146f3d"}, - {file = "jq-1.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e715d5f0bdfc0be0ff33cd0a3f6f51f8bc5ad464fab737e2048a1b46b45bb582"}, - {file = "jq-1.7.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76cc5a1ca3a540a5753dbd592f701c1ec7c9cc256becba604490283c055f3f1c"}, - {file = "jq-1.7.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:293b6e8e4b652d96fdeae7dd5ffb1644199d8b6fc1f95d528c16451925c0482e"}, - {file = "jq-1.7.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f103868b8902d4ee7f643248bdd7a2de9f9396e4b262f42745b9f624c834d07a"}, - {file = "jq-1.7.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e9c5ccfa3cf65f92b60c5805ef725f7cd799f2dc16e8601c6e8f12f38a9f48f3"}, - {file = "jq-1.7.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0ca25608d51fdbf8bd5c682b433e1cb9f497155a7c1ea5901524df099f1ceff3"}, - {file = "jq-1.7.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:6a2d34d962ce2da5136dab2664fc7efad9f71024d0dc328702f2dc70b4e2735c"}, - {file = "jq-1.7.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:757e8c4cb0cb1175f0aaa227f0a26e4765ba5da04d0bc875b0bd933eff6bd0a0"}, - {file = "jq-1.7.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d097098a628171b87961fb0400117ac340b1eb40cbbee2e58208c4254c23c20"}, - {file = "jq-1.7.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:45bc842806d71bd5839c190a88fd071ac5a0a8a1dd601e83228494a19f14559c"}, - {file = "jq-1.7.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:f0629743417f8709305d1f77d3929493912efdc3fd1cce3a7fcc76b81bc6b82d"}, - {file = "jq-1.7.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:9b9a49e8b14d3a368011ed1412c8c3e193a7135d5eb4310d77ee643470112b47"}, - {file = "jq-1.7.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:a10e3f88b6d2bbb4c47b368f919ec7b648196bf9c60a5cc921d04239d68240c2"}, - {file = "jq-1.7.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:aa85b47effb4152e1cf1120607f475a1c11395d072323ff23e8bb59ce6752713"}, - {file = "jq-1.7.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9413f67ea28037e37ccf8951f9f0b380f31d79162f33e216faa6bd0d8eca0dc7"}, - {file = "jq-1.7.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3daf3b3443c4e871c23ac1e698eb70d1225b46a4ac79c73968234adcd70f3ed8"}, - {file = "jq-1.7.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbe03f95ab02dc045691c3b5c7da8d8c2128e60450fb2124ea8b49034c74f158"}, - {file = "jq-1.7.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a6b2e9f4e63644a30726c58c25d80015f9b83325b125615a46e10d4439b9dc99"}, - {file = "jq-1.7.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = 
"sha256:9fffcffc8e56585223878edd7c5d719eb8547281d64af2bac43911f1bb9e7029"}, - {file = "jq-1.7.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:95d4bcd5a999ce0aaadaadcaca967989f0efc96c1097a81746b21b6126cf7aaf"}, - {file = "jq-1.7.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0137445eb67c43eb0eb46933aff7e8afbbd6c5aaf8574efd5df536dc9d177d1d"}, - {file = "jq-1.7.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ee0e9307b6d4fe89a8556a92c1db65e0d66218bcc13fdeb92a09645a55ff87a"}, - {file = "jq-1.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e0f95cecb690df66f23a8d76c746d2ed15671de3f6101140e3fe2b98b97e0a8"}, - {file = "jq-1.7.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:95e472aa54efe418d3627dcd2a369ac0b21e1a5e352550144fd5f0c40585a5b7"}, - {file = "jq-1.7.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4be2a2b56fa139f3235cdb8422ea16eccdd48d62bf91d9fac10761cd55d26c84"}, - {file = "jq-1.7.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7db8260ecb57827bb3fb6f44d4a6f0db0570ded990eee95a5fd3ac9ba14f60d7"}, - {file = "jq-1.7.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fdbb7ff2dfce2cc0f421f498dcb64176997bd9d9e6cab474e59577e7bff3090d"}, - {file = "jq-1.7.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:396bef4b4c9c1ebe3e0e04e287bc79a861b991e12db45681c398d3906ee85468"}, - {file = "jq-1.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:18d8a81c6e241585a0bf748903082d65c4eaa6ba80248f507e5cebda36e05c6c"}, - {file = "jq-1.7.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ade00a39990fdfe0acc7d2a900e3e5e6b11a71eb5289954ff0df31ac0afae25b"}, - {file = "jq-1.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c777e88f3cce496c17f5c3bdbc7d74ff12b5cbdaea30f3a374f3cc92e5bba8d"}, - {file = "jq-1.7.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:79957008c67d8f1d9134cd0e01044bff5d795f7e94db9532a9fe9212e1f88a77"}, - {file = "jq-1.7.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2bc5cb77dd12e861296cfa69587aa6797ccfee4f5f3aa571b02f0273ab1efec1"}, - {file = "jq-1.7.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8e10a5937aab9c383632ab151f73d43dc0c4be99f62221a7044988dc8ddd4bdc"}, - {file = "jq-1.7.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1e6e13e0f8d3204aefe861159160116e822c90bae773a3ccdd4d9e79a06e086e"}, - {file = "jq-1.7.0-pp310-pypy310_pp73-macosx_10_13_x86_64.whl", hash = "sha256:0cdbd32463ef632b0b4ca6dab434e2387342bc5c895b411ec6b2a14bbf4b2c12"}, - {file = "jq-1.7.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:558a5c6b4430e05fa59c4b5631c0d3fc0f163100390c03edc1993663f59d8a9b"}, - {file = "jq-1.7.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4bbf77138cdd8d306bf335d998525a0477e4cb6f00eb6f361288f5b82274e84c"}, - {file = "jq-1.7.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2e6919481ff43754ae9b17a98c877995d5e1346be114c71cd0dfd8ff7d0cd60"}, - {file = "jq-1.7.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0b0584ff33b2a9cc021edec325af4e0fa9fbd54cce80c1f7b8e0ba4cf2d75508"}, - {file = "jq-1.7.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a6e7259880ab7e75e845fb4d56c6d18922c68789d25d7cdbb6f433d9e714613a"}, - {file = 
"jq-1.7.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d472cdd0bcb3d47c87b00ff841edff41c79fe4422523c4a7c8bf913fb950f7f"}, - {file = "jq-1.7.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a3430de179f8a7b0baf5675d5ee400f97344085d79f190a90fc0c7df990cbcc"}, - {file = "jq-1.7.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:acbb375bdb2a44f1a643123b8ec57563bb5542673f0399799ab5662ce90bf4a5"}, - {file = "jq-1.7.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:39a0c71ed2f1ec0462d54678333f1b14d9f25fd62a9f46df140d68552f79d204"}, - {file = "jq-1.7.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:306c1e3ba531d7dc3284e128689f0b75409a4e8e8a3bdac2c51cc26f2d3cca58"}, - {file = "jq-1.7.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88b8b0cc838c7387dc5e8c45b192c7504acd0510514658d2d5cd1716fcf15fe3"}, - {file = "jq-1.7.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c75e16e542f4abaae25727b9fc4eeaf69cb07122be8a2a7672d02feb3a1cc9a"}, - {file = "jq-1.7.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b4828ac689a67fd9c021796bcacd95811bab806939dd6316eb0c2d3de016c584"}, - {file = "jq-1.7.0-pp39-pypy39_pp73-macosx_10_13_x86_64.whl", hash = "sha256:c94f95b27720d2db7f1039fdd371f70bc0cac8e204cbfd0626176d7b8a3053d6"}, - {file = "jq-1.7.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:d5ff445fc9b1eb4623a914e04bea9511e654e9143cde82b039383af4f7dc36f2"}, - {file = "jq-1.7.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:07e369ff021fad38a29d6a7a3fc24f7d313e9a239b15ce4eefaffee637466400"}, - {file = "jq-1.7.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:553dfbf674069cb20533d7d74cd8a9d7982bab8e4a5b473fde105d99278df09f"}, - {file = "jq-1.7.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e9fbc76f6fec66e5e58cc84f20a5de80addd3c64ad87a748f5c5f6b4ef01bc8c"}, - {file = "jq-1.7.0.tar.gz", hash = "sha256:f460d1f2c3791617e4fb339fa24efbdbebe672b02c861f057358553642047040"}, -] - -[[package]] -name = "json-repair" -version = "0.25.3" -description = "A package to repair broken json strings" -optional = false -python-versions = ">=3.7" -files = [ - {file = "json_repair-0.25.3-py3-none-any.whl", hash = "sha256:f00b510dd21b31ebe72581bdb07e66381df2883d6f640c89605e482882c12b17"}, - {file = "json_repair-0.25.3.tar.gz", hash = "sha256:4ee970581a05b0b258b749eb8bcac21de380edda97c3717a4edfafc519ec21a4"}, -] - -[[package]] -name = "jsonpatch" -version = "1.33" -description = "Apply JSON-Patches (RFC 6902)" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" -files = [ - {file = "jsonpatch-1.33-py2.py3-none-any.whl", hash = "sha256:0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade"}, - {file = "jsonpatch-1.33.tar.gz", hash = "sha256:9fcd4009c41e6d12348b4a0ff2563ba56a2923a7dfee731d004e212e1ee5030c"}, -] - -[package.dependencies] -jsonpointer = ">=1.9" - -[[package]] -name = "jsonpath-python" -version = "1.0.6" -description = "A more powerful JSONPath implementation in modern python" -optional = false -python-versions = ">=3.6" -files = [ - {file = "jsonpath-python-1.0.6.tar.gz", hash = 
"sha256:dd5be4a72d8a2995c3f583cf82bf3cd1a9544cfdabf2d22595b67aff07349666"}, - {file = "jsonpath_python-1.0.6-py3-none-any.whl", hash = "sha256:1e3b78df579f5efc23565293612decee04214609208a2335884b3ee3f786b575"}, -] - -[[package]] -name = "jsonpointer" -version = "3.0.0" -description = "Identify specific nodes in a JSON document (RFC 6901)" -optional = false -python-versions = ">=3.7" -files = [ - {file = "jsonpointer-3.0.0-py2.py3-none-any.whl", hash = "sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942"}, - {file = "jsonpointer-3.0.0.tar.gz", hash = "sha256:2b2d729f2091522d61c3b31f82e11870f60b68f43fbc705cb76bf4b832af59ef"}, -] - -[[package]] -name = "jsonref" -version = "1.1.0" -description = "jsonref is a library for automatic dereferencing of JSON Reference objects for Python." -optional = false -python-versions = ">=3.7" -files = [ - {file = "jsonref-1.1.0-py3-none-any.whl", hash = "sha256:590dc7773df6c21cbf948b5dac07a72a251db28b0238ceecce0a2abfa8ec30a9"}, - {file = "jsonref-1.1.0.tar.gz", hash = "sha256:32fe8e1d85af0fdefbebce950af85590b22b60f9e95443176adbde4e1ecea552"}, -] - -[[package]] -name = "jsonrpcclient" -version = "4.0.3" -description = "Send JSON-RPC requests" -optional = false -python-versions = ">=3.6" -files = [ - {file = "jsonrpcclient-4.0.3-py3-none-any.whl", hash = "sha256:3cbb9e27e1be29821becf135ea183144a836215422727e1ffe5056a49a670f0d"}, -] - -[package.extras] -qa = ["pytest", "pytest-cov", "tox"] - -[[package]] -name = "jsonschema" -version = "4.23.0" -description = "An implementation of JSON Schema validation for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "jsonschema-4.23.0-py3-none-any.whl", hash = "sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566"}, - {file = "jsonschema-4.23.0.tar.gz", hash = "sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4"}, -] - -[package.dependencies] -attrs = ">=22.2.0" -jsonschema-specifications = ">=2023.03.6" -referencing = ">=0.28.4" -rpds-py = ">=0.7.1" - -[package.extras] -format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] -format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=24.6.0)"] - -[[package]] -name = "jsonschema-specifications" -version = "2023.12.1" -description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" -optional = false -python-versions = ">=3.8" -files = [ - {file = "jsonschema_specifications-2023.12.1-py3-none-any.whl", hash = "sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c"}, - {file = "jsonschema_specifications-2023.12.1.tar.gz", hash = "sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc"}, -] - -[package.dependencies] -referencing = ">=0.31.0" - -[[package]] -name = "jupyter-client" -version = "8.6.2" -description = "Jupyter protocol implementation and client libraries" -optional = false -python-versions = ">=3.8" -files = [ - {file = "jupyter_client-8.6.2-py3-none-any.whl", hash = "sha256:50cbc5c66fd1b8f65ecb66bc490ab73217993632809b6e505687de18e9dea39f"}, - {file = "jupyter_client-8.6.2.tar.gz", hash = "sha256:2bda14d55ee5ba58552a8c53ae43d215ad9868853489213f37da060ced54d8df"}, -] - -[package.dependencies] -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" -python-dateutil = ">=2.8.2" -pyzmq = ">=23.0" -tornado = ">=6.2" -traitlets = ">=5.3" - -[package.extras] -docs = 
["ipykernel", "myst-parser", "pydata-sphinx-theme", "sphinx (>=4)", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] -test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pytest (<8.2.0)", "pytest-cov", "pytest-jupyter[client] (>=0.4.1)", "pytest-timeout"] - -[[package]] -name = "jupyter-core" -version = "5.7.2" -description = "Jupyter core package. A base package on which Jupyter projects rely." -optional = false -python-versions = ">=3.8" -files = [ - {file = "jupyter_core-5.7.2-py3-none-any.whl", hash = "sha256:4f7315d2f6b4bcf2e3e7cb6e46772eba760ae459cd1f59d29eb57b0a01bd7409"}, - {file = "jupyter_core-5.7.2.tar.gz", hash = "sha256:aa5f8d32bbf6b431ac830496da7392035d6f61b4f54872f15c4bd2a9c3f536d9"}, -] - -[package.dependencies] -platformdirs = ">=2.5" -pywin32 = {version = ">=300", markers = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\""} -traitlets = ">=5.3" - -[package.extras] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "traitlets"] -test = ["ipykernel", "pre-commit", "pytest (<8)", "pytest-cov", "pytest-timeout"] - -[[package]] -name = "kombu" -version = "5.4.0" -description = "Messaging library for Python." -optional = true -python-versions = ">=3.8" -files = [ - {file = "kombu-5.4.0-py3-none-any.whl", hash = "sha256:c8dd99820467610b4febbc7a9e8a0d3d7da2d35116b67184418b51cc520ea6b6"}, - {file = "kombu-5.4.0.tar.gz", hash = "sha256:ad200a8dbdaaa2bbc5f26d2ee7d707d9a1fded353a0f4bd751ce8c7d9f449c60"}, -] - -[package.dependencies] -amqp = ">=5.1.1,<6.0.0" -vine = "5.1.0" - -[package.extras] -azureservicebus = ["azure-servicebus (>=7.10.0)"] -azurestoragequeues = ["azure-identity (>=1.12.0)", "azure-storage-queue (>=12.6.0)"] -confluentkafka = ["confluent-kafka (>=2.2.0)"] -consul = ["python-consul2 (==0.1.5)"] -librabbitmq = ["librabbitmq (>=2.0.0)"] -mongodb = ["pymongo (>=4.1.1)"] -msgpack = ["msgpack (==1.0.8)"] -pyro = ["pyro4 (==4.82)"] -qpid = ["qpid-python (>=0.26)", "qpid-tools (>=0.26)"] -redis = ["redis (>=4.5.2,!=4.5.5,!=5.0.2)"] -slmq = ["softlayer-messaging (>=1.0.3)"] -sqlalchemy = ["sqlalchemy (>=1.4.48,<2.1)"] -sqs = ["boto3 (>=1.26.143)", "pycurl (>=7.43.0.5)", "urllib3 (>=1.26.16)"] -yaml = ["PyYAML (>=3.10)"] -zookeeper = ["kazoo (>=2.8.0)"] - -[[package]] -name = "kubernetes" -version = "30.1.0" -description = "Kubernetes python client" -optional = false -python-versions = ">=3.6" -files = [ - {file = "kubernetes-30.1.0-py2.py3-none-any.whl", hash = "sha256:e212e8b7579031dd2e512168b617373bc1e03888d41ac4e04039240a292d478d"}, - {file = "kubernetes-30.1.0.tar.gz", hash = "sha256:41e4c77af9f28e7a6c314e3bd06a8c6229ddd787cad684e0ab9f69b498e98ebc"}, -] - -[package.dependencies] -certifi = ">=14.05.14" -google-auth = ">=1.0.1" -oauthlib = ">=3.2.2" -python-dateutil = ">=2.5.3" -pyyaml = ">=5.4.1" -requests = "*" -requests-oauthlib = "*" -six = ">=1.9.0" -urllib3 = ">=1.24.2" -websocket-client = ">=0.32.0,<0.40.0 || >0.40.0,<0.41.dev0 || >=0.43.dev0" - -[package.extras] -adal = ["adal (>=1.0.2)"] - -[[package]] -name = "lancedb" -version = "0.5.7" -description = "lancedb" -optional = false -python-versions = ">=3.8" -files = [ - {file = "lancedb-0.5.7-py3-none-any.whl", hash = "sha256:6169966f715ef530be545950e1aaf9f3f160967e4ba7456cd67c9f30f678095d"}, - {file = "lancedb-0.5.7.tar.gz", hash = "sha256:878914b493f91d09a77b14f1528104741f273234cbdd6671be705f447701fd51"}, -] - -[package.dependencies] -attrs = 
">=21.3.0" -cachetools = "*" -click = ">=8.1.7" -deprecation = "*" -overrides = ">=0.7" -pydantic = ">=1.10" -pylance = "0.9.18" -pyyaml = ">=6.0" -ratelimiter = ">=1.0,<2.0" -requests = ">=2.31.0" -retry = ">=0.9.2" -semver = ">=3.0" -tqdm = ">=4.27.0" - -[package.extras] -clip = ["open-clip", "pillow", "torch"] -dev = ["pre-commit", "ruff"] -docs = ["mkdocs", "mkdocs-jupyter", "mkdocs-material", "mkdocs-ultralytics-plugin (==0.0.44)", "mkdocstrings[python]"] -embeddings = ["InstructorEmbedding", "awscli (>=1.29.57)", "boto3 (>=1.28.57)", "botocore (>=1.31.57)", "cohere", "google.generativeai", "huggingface-hub", "open-clip-torch", "openai (>=1.6.1)", "pillow", "sentence-transformers", "torch"] -tests = ["aiohttp", "duckdb", "pandas (>=1.4)", "polars (>=0.19)", "pytest", "pytest-asyncio", "pytest-mock", "pytz"] - -[[package]] -name = "langchain" -version = "0.2.13" -description = "Building applications with LLMs through composability" -optional = false -python-versions = "<4.0,>=3.8.1" -files = [ - {file = "langchain-0.2.13-py3-none-any.whl", hash = "sha256:80f21e48cdada424dd2af9bbf42234fe095744cf181b31eeb63d1da7479e2783"}, - {file = "langchain-0.2.13.tar.gz", hash = "sha256:947e96ac3153a46aa6a0d8207e5f8b6794084c397f60a01bbf4bba78e6838fee"}, -] - -[package.dependencies] -aiohttp = ">=3.8.3,<4.0.0" -async-timeout = {version = ">=4.0.0,<5.0.0", markers = "python_version < \"3.11\""} -langchain-core = ">=0.2.30,<0.3.0" -langchain-text-splitters = ">=0.2.0,<0.3.0" -langsmith = ">=0.1.17,<0.2.0" -numpy = [ - {version = ">=1,<2", markers = "python_version < \"3.12\""}, - {version = ">=1.26.0,<2.0.0", markers = "python_version >= \"3.12\""}, -] -pydantic = ">=1,<3" -PyYAML = ">=5.3" -requests = ">=2,<3" -SQLAlchemy = ">=1.4,<3" -tenacity = ">=8.1.0,<8.4.0 || >8.4.0,<9.0.0" - -[[package]] -name = "langchain-anthropic" -version = "0.1.23" -description = "An integration package connecting AnthropicMessages and LangChain" -optional = false -python-versions = "<4.0,>=3.8.1" -files = [ - {file = "langchain_anthropic-0.1.23-py3-none-any.whl", hash = "sha256:89cafdaf4c9e522484b0ca8bafcceb0a5e4ffca89f7c7c9cec1e2ba411208208"}, - {file = "langchain_anthropic-0.1.23.tar.gz", hash = "sha256:f2ce045bd0ae09d5f11fed4b84a38ce306822b7bcac77232345f40115df66d51"}, -] - -[package.dependencies] -anthropic = ">=0.30.0,<1" -defusedxml = ">=0.7.1,<0.8.0" -langchain-core = ">=0.2.26,<0.3.0" - -[[package]] -name = "langchain-astradb" -version = "0.3.3" -description = "An integration package connecting Astra DB and LangChain" -optional = false -python-versions = "<4.0,>=3.8.1" -files = [ - {file = "langchain_astradb-0.3.3-py3-none-any.whl", hash = "sha256:39deef1253947ef1bfaf3c27881ecdf07621d96c2cf37814aed9e506a9bee217"}, - {file = "langchain_astradb-0.3.3.tar.gz", hash = "sha256:f9a996ec4bef134896195430adeb7f264389c368a03d2ea91356837e8ddde091"}, -] - -[package.dependencies] -astrapy = ">=1.2,<2.0" -langchain-core = ">=0.1.31,<0.3" -numpy = ">=1,<2" - -[[package]] -name = "langchain-aws" -version = "0.1.16" -description = "An integration package connecting AWS and LangChain" -optional = false -python-versions = "<4.0,>=3.8.1" -files = [ - {file = "langchain_aws-0.1.16-py3-none-any.whl", hash = "sha256:b4722a13954a6e330c4b1e7b85a9884bfe789ecc24e11f0e83aa5c294d95093e"}, - {file = "langchain_aws-0.1.16.tar.gz", hash = "sha256:cb479a565d0450c9bfba9d2336973191beb97574a519ddd376713addd5b9226c"}, -] - -[package.dependencies] -boto3 = ">=1.34.131,<1.35.0" -langchain-core = ">=0.2.29,<0.3" -numpy = ">=1,<2" - -[[package]] -name = 
"langchain-chroma" -version = "0.1.2" -description = "An integration package connecting Chroma and LangChain" -optional = false -python-versions = "<4,>=3.8.1" -files = [ - {file = "langchain_chroma-0.1.2-py3-none-any.whl", hash = "sha256:0948f2975091dfef685a7981c140b8fd8a3b0f0602abba61abbcac7959beee4c"}, - {file = "langchain_chroma-0.1.2.tar.gz", hash = "sha256:745a53b93e7ae058f9666a48e15ff211122656032ed0e8ffb7291b402f5bf23b"}, -] - -[package.dependencies] -chromadb = ">=0.4.0,<0.6.0" -fastapi = ">=0.95.2,<1" -langchain-core = ">=0.1.40,<0.3" -numpy = [ - {version = ">=1,<2", markers = "python_version < \"3.12\""}, - {version = ">=1.26.0,<2.0.0", markers = "python_version >= \"3.12\""}, -] - -[[package]] -name = "langchain-cohere" -version = "0.1.9" -description = "An integration package connecting Cohere and LangChain" -optional = false -python-versions = "<4.0,>=3.8.1" -files = [ - {file = "langchain_cohere-0.1.9-py3-none-any.whl", hash = "sha256:96d6a15125797319474ac84b54024e5024f3f5fc45032ebf228d95d6998c9b13"}, - {file = "langchain_cohere-0.1.9.tar.gz", hash = "sha256:549620d23bc3d77f62d1045787095fe2c1cfa233dba69455139f9a2f65f952fa"}, -] - -[package.dependencies] -cohere = ">=5.5.6,<6.0" -langchain-core = ">=0.2.2,<0.3" -langchain-experimental = ">=0.0.6" -pandas = ">=1.4.3" -tabulate = ">=0.9.0,<0.10.0" - -[package.extras] -langchain-community = ["langchain-community (>=0.2.4)"] - -[[package]] -name = "langchain-community" -version = "0.2.12" -description = "Community contributed LangChain integrations." -optional = false -python-versions = "<4.0,>=3.8.1" -files = [ - {file = "langchain_community-0.2.12-py3-none-any.whl", hash = "sha256:50e74473dd2309bdef561760afbbf0c5ea17ed91fc4dfa0d52279dd16d6d34e0"}, - {file = "langchain_community-0.2.12.tar.gz", hash = "sha256:d671cfc6a4f3b65f49a2e59ab420d0164f109d0a56fc4b4996518205c63b8c7e"}, -] - -[package.dependencies] -aiohttp = ">=3.8.3,<4.0.0" -dataclasses-json = ">=0.5.7,<0.7" -langchain = ">=0.2.13,<0.3.0" -langchain-core = ">=0.2.30,<0.3.0" -langsmith = ">=0.1.0,<0.2.0" -numpy = [ - {version = ">=1,<2", markers = "python_version < \"3.12\""}, - {version = ">=1.26.0,<2.0.0", markers = "python_version >= \"3.12\""}, -] -PyYAML = ">=5.3" -requests = ">=2,<3" -SQLAlchemy = ">=1.4,<3" -tenacity = ">=8.1.0,<8.4.0 || >8.4.0,<9.0.0" - -[[package]] -name = "langchain-core" -version = "0.2.33" -description = "Building applications with LLMs through composability" -optional = false -python-versions = "<4.0,>=3.8.1" -files = [ - {file = "langchain_core-0.2.33-py3-none-any.whl", hash = "sha256:c8de411336c13fa440b7a52895bfd1c064f04d315344855962988483902cc532"}, - {file = "langchain_core-0.2.33.tar.gz", hash = "sha256:dd2659e0a560fc987b210107bf989aa14a6f4b67dd214c13a2c9669036cda975"}, -] - -[package.dependencies] -jsonpatch = ">=1.33,<2.0" -langsmith = ">=0.1.75,<0.2.0" -packaging = ">=23.2,<25" -pydantic = [ - {version = ">=1,<3", markers = "python_full_version < \"3.12.4\""}, - {version = ">=2.7.4,<3.0.0", markers = "python_full_version >= \"3.12.4\""}, -] -PyYAML = ">=5.3" -tenacity = ">=8.1.0,<8.4.0 || >8.4.0,<9.0.0" -typing-extensions = ">=4.7" - -[[package]] -name = "langchain-experimental" -version = "0.0.61" -description = "Building applications with LLMs through composability" -optional = false -python-versions = "<4.0,>=3.8.1" -files = [ - {file = "langchain_experimental-0.0.61-py3-none-any.whl", hash = "sha256:f9c516f528f55919743bd56fe1689a53bf74ae7f8902d64b9d8aebc61249cbe2"}, - {file = "langchain_experimental-0.0.61.tar.gz", hash = 
"sha256:e9538efb994be5db3045cc582cddb9787c8299c86ffeee9d3779b7f58eef2226"}, -] - -[package.dependencies] -langchain-community = ">=0.2.5,<0.3.0" -langchain-core = ">=0.2.7,<0.3.0" - -[[package]] -name = "langchain-google-calendar-tools" -version = "0.0.1" -description = "This repo walks through connecting to the Google Calendar API." -optional = false -python-versions = ">=3.7" -files = [ - {file = "langchain_google_calendar_tools-0.0.1-py3-none-any.whl", hash = "sha256:ce0e62056fc62a90d2f95d4d33c9c2ef3d8a616b25ede266b67f88e8e02f9d55"}, - {file = "langchain_google_calendar_tools-0.0.1.tar.gz", hash = "sha256:2222499ded96880c573c358bb6b4c75a86f638e1f3a446fb14b126a30ee7f62e"}, -] - -[package.dependencies] -google-api-python-client = ">=2.104.0" -google-auth-oauthlib = ">=1.1.0" -langchain = ">=0.0.335" -protobuf = ">=4.25.0" -pytz = ">=2023.3.post1" - -[[package]] -name = "langchain-google-community" -version = "1.0.7" -description = "An integration package connecting miscellaneous Google's products and LangChain" -optional = false -python-versions = "<4.0,>=3.8.1" -files = [ - {file = "langchain_google_community-1.0.7-py3-none-any.whl", hash = "sha256:0ec6701d3d1934be96cf170a1058480645c6cbe8b242fedb731d8ca4bb6d72d8"}, - {file = "langchain_google_community-1.0.7.tar.gz", hash = "sha256:baa81e2d92b5cdcff7e484ec9c505db4d19569a5625e32697dbf5003941e9e1b"}, -] - -[package.dependencies] -google-api-core = ">=2.17.1,<3.0.0" -google-api-python-client = ">=2.122.0,<3.0.0" -grpcio = ">=1.62.0,<2.0.0" -langchain-community = ">=0.2.1,<0.3.0" -langchain-core = ">=0.2.9,<0.3" -tenacity = ">=8.3.0,<8.4.0" - -[package.extras] -bigquery = ["google-cloud-bigquery (>=3.21.0,<4.0.0)"] -docai = ["gapic-google-longrunning (>=0.11.2,<0.12.0)", "google-cloud-contentwarehouse (>=0.7.7,<0.8.0)", "google-cloud-documentai (>=2.26.0,<3.0.0)", "google-cloud-documentai-toolbox (>=0.13.3a0,<0.14.0)"] -drive = ["google-auth-httplib2 (>=0.2.0,<0.3.0)", "google-auth-oauthlib (>=1.2.0,<2.0.0)"] -featurestore = ["db-dtypes (>=1.2.0,<2.0.0)", "google-cloud-aiplatform (>=1.56.0,<2.0.0)", "google-cloud-bigquery-storage (>=2.6.0,<3)", "pandas (>=1.0.0)", "pandas (>=2.0.0,<3.0)", "pyarrow (>=6.0.1)", "pydantic (>=2.7.4,<3.0.0)"] -gcs = ["google-cloud-storage (>=2.16.0,<3.0.0)"] -gmail = ["beautifulsoup4 (>=4.12.3,<5.0.0)", "google-auth-httplib2 (>=0.2.0,<0.3.0)", "google-auth-oauthlib (>=1.2.0,<2.0.0)"] -places = ["googlemaps (>=4.10.0,<5.0.0)"] -speech = ["google-cloud-speech (>=2.26.0,<3.0.0)"] -texttospeech = ["google-cloud-texttospeech (>=2.16.3,<3.0.0)"] -translate = ["google-cloud-translate (>=3.15.3,<4.0.0)"] -vertexaisearch = ["google-cloud-discoveryengine (>=0.11.13,<0.12.0)"] -vision = ["google-cloud-vision (>=3.7.2,<4.0.0)"] - -[[package]] -name = "langchain-google-genai" -version = "1.0.8" -description = "An integration package connecting Google's genai package and LangChain" -optional = false -python-versions = "<4.0,>=3.9" -files = [ - {file = "langchain_google_genai-1.0.8-py3-none-any.whl", hash = "sha256:675dab1afa8f165c6169512dae984c90fd9fe2e8a54bdf7d0a0ccd826ee719d4"}, - {file = "langchain_google_genai-1.0.8.tar.gz", hash = "sha256:4b5b074476f073f4f6f0d1e74c811bb66e4caaf2d6b57b14bf6a1eab36de8c4e"}, -] - -[package.dependencies] -google-generativeai = ">=0.7.0,<0.8.0" -langchain-core = ">=0.2.17,<0.3" - -[package.extras] -images = ["pillow (>=10.1.0,<11.0.0)"] - -[[package]] -name = "langchain-google-vertexai" -version = "1.0.7" -description = "An integration package connecting Google VertexAI and LangChain" 
-optional = false -python-versions = "<4.0,>=3.8.1" -files = [ - {file = "langchain_google_vertexai-1.0.7-py3-none-any.whl", hash = "sha256:2c3516171fb0a6557ff03d073bdbf6e9bbb5f291cccbcd8febd97affa2e69574"}, - {file = "langchain_google_vertexai-1.0.7.tar.gz", hash = "sha256:ac7d8ad8e832b1d5a752cb0637082d7e2c451bc33e512eec7bf9662b1aac41db"}, -] - -[package.dependencies] -google-cloud-aiplatform = ">=1.56.0,<2.0.0" -google-cloud-storage = ">=2.17.0,<3.0.0" -langchain-core = ">=0.2.17,<0.3" - -[package.extras] -anthropic = ["anthropic[vertexai] (>=0.30.0,<1)"] - -[[package]] -name = "langchain-groq" -version = "0.1.6" -description = "An integration package connecting Groq and LangChain" -optional = false -python-versions = "<4.0,>=3.8.1" -files = [ - {file = "langchain_groq-0.1.6-py3-none-any.whl", hash = "sha256:c7e91f8a333299b25dad5e8c3bc555cc579560431247ac8e0c3d2705dd7411f8"}, - {file = "langchain_groq-0.1.6.tar.gz", hash = "sha256:ecd3aac54e5e454633f3920a11d093dcb425c18830f721cc86e69e5acfbe7c85"}, -] - -[package.dependencies] -groq = ">=0.4.1,<1" -langchain-core = ">=0.2.2,<0.3" - -[[package]] -name = "langchain-milvus" -version = "0.1.4" -description = "An integration package connecting Milvus and LangChain" -optional = false -python-versions = "<4.0,>=3.8.1" -files = [ - {file = "langchain_milvus-0.1.4-py3-none-any.whl", hash = "sha256:f5c1f2d023c6853d1acc22dc8d0b61ca4d99015c1b095b0cf84ec84a9ba2936e"}, - {file = "langchain_milvus-0.1.4.tar.gz", hash = "sha256:1cd67f127d60c73ffb07cd789705766479137630d43f8ff547c69eee4775dae8"}, -] - -[package.dependencies] -langchain-core = ">=0.2.20,<0.3.0" -pymilvus = ">=2.4.3,<3.0.0" -scipy = [ - {version = ">=1.7,<2.0", markers = "python_version < \"3.12\""}, - {version = ">=1.9,<2.0", markers = "python_version >= \"3.12\""}, -] - -[[package]] -name = "langchain-mistralai" -version = "0.1.10" -description = "An integration package connecting Mistral and LangChain" -optional = false -python-versions = "<4.0,>=3.8.1" -files = [ - {file = "langchain_mistralai-0.1.10-py3-none-any.whl", hash = "sha256:c0507f744cbb8d46b34b62170f977e3afe9c71606483a45b50c4b88f00cb0be5"}, - {file = "langchain_mistralai-0.1.10.tar.gz", hash = "sha256:fbee929b891b3b6f4aa622fbe4b23f750447a831c823d459974787d825e16eaf"}, -] - -[package.dependencies] -httpx = ">=0.25.2,<1" -httpx-sse = ">=0.3.1,<1" -langchain-core = ">=0.2.17,<0.3" -tokenizers = ">=0.15.1,<1" - -[[package]] -name = "langchain-mongodb" -version = "0.1.8" -description = "An integration package connecting MongoDB and LangChain" -optional = false -python-versions = "<4.0,>=3.8.1" -files = [ - {file = "langchain_mongodb-0.1.8-py3-none-any.whl", hash = "sha256:0c086a80813cf213b01fb9024243505243bc8892b27e9ca92b4d0753ab53bdd0"}, - {file = "langchain_mongodb-0.1.8.tar.gz", hash = "sha256:7df9cc3af8ef2d2c943188f464ac4f952cbe90626a8252ce9f8e5af4202e6ebe"}, -] - -[package.dependencies] -langchain-core = ">=0.2.21,<0.3.0" -numpy = [ - {version = ">=1,<2", markers = "python_version < \"3.12\""}, - {version = ">=1.26.0,<2.0.0", markers = "python_version >= \"3.12\""}, -] -pymongo = ">=4.6.1,<5.0" - -[[package]] -name = "langchain-nvidia-ai-endpoints" -version = "0.1.6" -description = "An integration package connecting NVIDIA AI Endpoints and LangChain" -optional = false -python-versions = "<4.0,>=3.8.1" -files = [ - {file = "langchain_nvidia_ai_endpoints-0.1.6-py3-none-any.whl", hash = "sha256:0cd4d7b24b74ad8da67295cb001cd51a498c0e8cde9ead112fa583f61b6cb0f2"}, - {file = "langchain_nvidia_ai_endpoints-0.1.6.tar.gz", hash = 
"sha256:5f8ab60b88266326f3eae6b04cbab0a75f0d7de56e60cfa8e4e240ef2a132c3e"}, -] - -[package.dependencies] -aiohttp = ">=3.9.1,<4.0.0" -langchain-core = ">=0.1.27,<0.3" -pillow = ">=10.0.0,<11.0.0" - -[[package]] -name = "langchain-openai" -version = "0.1.22" -description = "An integration package connecting OpenAI and LangChain" -optional = false -python-versions = "<4.0,>=3.8.1" -files = [ - {file = "langchain_openai-0.1.22-py3-none-any.whl", hash = "sha256:e184ab867a30f803dc210a388537186b1b670a33d910a7e0fa4e0329d3b6c654"}, - {file = "langchain_openai-0.1.22.tar.gz", hash = "sha256:0cf93133f230a893e3b0cc2a792bbf2580950e879b577f6e8d4ff9963a7de44b"}, -] - -[package.dependencies] -langchain-core = ">=0.2.33,<0.3.0" -openai = ">=1.40.0,<2.0.0" -tiktoken = ">=0.7,<1" - -[[package]] -name = "langchain-pinecone" -version = "0.1.2" -description = "An integration package connecting Pinecone and LangChain" -optional = false -python-versions = "<3.13,>=3.8.1" -files = [ - {file = "langchain_pinecone-0.1.2-py3-none-any.whl", hash = "sha256:07157885ef1e45bedadc3f7ea6bb181ec458fae5fd82dc7a6005444f67100613"}, - {file = "langchain_pinecone-0.1.2.tar.gz", hash = "sha256:654fd93df8cefe1ff0ca7ac50cf308242df69a601ee3855019b524006b528595"}, -] - -[package.dependencies] -langchain-core = ">=0.1.52,<0.3" -numpy = [ - {version = ">=1,<2", markers = "python_version < \"3.12\""}, - {version = ">=1.26.0,<2.0.0", markers = "python_version >= \"3.12\""}, -] -pinecone-client = ">=3.2.2,<5" - -[[package]] -name = "langchain-text-splitters" -version = "0.2.2" -description = "LangChain text splitting utilities" -optional = false -python-versions = "<4.0,>=3.8.1" -files = [ - {file = "langchain_text_splitters-0.2.2-py3-none-any.whl", hash = "sha256:1c80d4b11b55e2995f02d2a326c0323ee1eeff24507329bb22924e420c782dff"}, - {file = "langchain_text_splitters-0.2.2.tar.gz", hash = "sha256:a1e45de10919fa6fb080ef0525deab56557e9552083600455cb9fa4238076140"}, -] - -[package.dependencies] -langchain-core = ">=0.2.10,<0.3.0" - -[[package]] -name = "langchainhub" -version = "0.1.21" -description = "The LangChain Hub API client" -optional = false -python-versions = "<4.0,>=3.8.1" -files = [ - {file = "langchainhub-0.1.21-py3-none-any.whl", hash = "sha256:1cc002dc31e0d132a776afd044361e2b698743df5202618cf2bad399246b895f"}, - {file = "langchainhub-0.1.21.tar.gz", hash = "sha256:723383b3964a47dbaea6ad5d0ef728accefbc9d2c07480e800bdec43510a8c10"}, -] - -[package.dependencies] -packaging = ">=23.2,<25" -requests = ">=2,<3" -types-requests = ">=2.31.0.2,<3.0.0.0" - -[[package]] -name = "langdetect" -version = "1.0.9" -description = "Language detection library ported from Google's language-detection." 
-optional = false -python-versions = "*" -files = [ - {file = "langdetect-1.0.9-py2-none-any.whl", hash = "sha256:7cbc0746252f19e76f77c0b1690aadf01963be835ef0cd4b56dddf2a8f1dfc2a"}, - {file = "langdetect-1.0.9.tar.gz", hash = "sha256:cbc1fef89f8d062739774bd51eda3da3274006b3661d199c2655f6b3f6d605a0"}, -] - -[package.dependencies] -six = "*" - -[[package]] -name = "langflow-base" -version = "0.0.95" -description = "A Python package with a built-in web application" -optional = false -python-versions = ">=3.10,<3.13" -files = [] -develop = true - -[package.dependencies] -aiofiles = "^24.1.0" -alembic = "^1.13.0" -asyncer = "^0.0.5" -bcrypt = "4.0.1" -cachetools = "^5.3.1" -chardet = "^5.2.0" -crewai = "^0.36.0" -cryptography = "^42.0.5" -diskcache = "^5.6.3" -docstring-parser = "^0.16" -duckdb = "^1.0.0" -emoji = "^2.12.0" -fastapi = "^0.111.0" -filelock = "^3.15.4" -firecrawl-py = "^0.0.16" -grandalf = "^0.8.0" -gunicorn = "^22.0.0" -httpx = "*" -jq = {version = "^1.7.0", markers = "sys_platform != \"win32\""} -langchain = "~0.2.0" -langchain-core = "^0.2.32" -langchain-experimental = "^0.0.61" -langchainhub = "~0.1.15" -loguru = "^0.7.1" -multiprocess = "^0.70.14" -nanoid = "^2.0.0" -nest-asyncio = "^1.6.0" -opentelemetry-api = "^1.25.0" -opentelemetry-exporter-prometheus = "^0.46b0" -opentelemetry-instrumentation-fastapi = "^0.46b0" -opentelemetry-sdk = "^1.25.0" -orjson = "3.10.0" -pandas = "2.2.2" -passlib = "^1.7.4" -pillow = "^10.2.0" -platformdirs = "^4.2.0" -prometheus-client = "^0.20.0" -pydantic = "^2.7.0" -pydantic-settings = "^2.2.0" -pypdf = "^4.2.0" -pyperclip = "^1.8.2" -python-docx = "^1.1.0" -python-jose = "^3.3.0" -python-multipart = "^0.0.7" -rich = "^13.7.0" -sentry-sdk = {version = "^2.5.1", extras = ["fastapi", "loguru"]} -setuptools = ">=70" -spider-client = "^0.0.27" -sqlmodel = "^0.0.18" -typer = "^0.12.0" -uncurl = "^0.0.11" -uvicorn = "^0.30.0" -websockets = "*" - -[package.extras] -all = [] -deploy = [] -local = [] - -[package.source] -type = "directory" -url = "src/backend/base" - -[[package]] -name = "langfuse" -version = "2.43.3" -description = "A client library for accessing langfuse" -optional = false -python-versions = "<4.0,>=3.8.1" -files = [ - {file = "langfuse-2.43.3-py3-none-any.whl", hash = "sha256:62a368009dd26f698905321a52929ab4e75996a871f41db2892beb5257ab69d2"}, - {file = "langfuse-2.43.3.tar.gz", hash = "sha256:046d872d0d0053d02816d5e5a610be0e4ae7ebb69e65d979111fc522be965691"}, -] - -[package.dependencies] -anyio = ">=4.4.0,<5.0.0" -backoff = ">=1.10.0" -httpx = ">=0.15.4,<1.0" -idna = ">=3.7,<4.0" -packaging = ">=23.2,<24.0" -pydantic = ">=1.10.7,<3.0" -wrapt = ">=1.14,<2.0" - -[package.extras] -langchain = ["langchain (>=0.0.309)"] -llama-index = ["llama-index (>=0.10.12,<2.0.0)"] -openai = ["openai (>=0.27.8)"] - -[[package]] -name = "langsmith" -version = "0.1.99" -description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." 
-optional = false -python-versions = "<4.0,>=3.8.1" -files = [ - {file = "langsmith-0.1.99-py3-none-any.whl", hash = "sha256:ef8d1d74a2674c514aa429b0171a9fbb661207dc3835142cca0e8f1bf97b26b0"}, - {file = "langsmith-0.1.99.tar.gz", hash = "sha256:b5c6a1f158abda61600a4a445081ee848b4a28b758d91f2793dc02aeffafcaf1"}, -] - -[package.dependencies] -orjson = ">=3.9.14,<4.0.0" -pydantic = [ - {version = ">=1,<3", markers = "python_full_version < \"3.12.4\""}, - {version = ">=2.7.4,<3.0.0", markers = "python_full_version >= \"3.12.4\""}, -] -requests = ">=2,<3" - -[[package]] -name = "langwatch" -version = "0.1.20" -description = "Python SDK for LangWatch for monitoring your LLMs" -optional = false -python-versions = "<3.13,>=3.9" -files = [ - {file = "langwatch-0.1.20-py3-none-any.whl", hash = "sha256:6799ea01fff56e0daf3d8c1b93d7f8936bb479d88a3e1b60d5a2d8ff63418d23"}, - {file = "langwatch-0.1.20.tar.gz", hash = "sha256:ced7bee58119416a61d8ba139dd9595fefeccc5a02cbf35578f44ea23131fe8f"}, -] - -[package.dependencies] -coolname = ">=2.2.0,<3.0.0" -deprecated = ">=1.2.14,<2.0.0" -httpx = ">=0.27.0,<0.28.0" -nanoid = ">=2.0.0,<3.0.0" -pandas = ">=2.2.2,<3.0.0" -pydantic = ">=1,<3" -requests = ">=2.31.0,<3.0.0" -retry = ">=0.9.2,<0.10.0" -tqdm = ">=4.66.2,<5.0.0" - -[package.extras] -dspy = ["dspy-ai (>=2.4.12,<3.0.0)"] -langchain = ["langchain (>=0.2.0,<0.3.0)"] -litellm = ["litellm (>=1.40.15,<2.0.0)"] -openai = ["openai (>=1.3.7,<2.0.0)"] - -[[package]] -name = "litellm" -version = "1.44.1" -description = "Library to easily interface with LLM API providers" -optional = false -python-versions = "!=2.7.*,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,!=3.7.*,>=3.8" -files = [ - {file = "litellm-1.44.1-py3-none-any.whl", hash = "sha256:6367cfafbdd5d6a5125cefd21375df89f0b8a0d77044616cd88462be77d05564"}, - {file = "litellm-1.44.1.tar.gz", hash = "sha256:b9013df8b255335a08df1e9e7ac589dab97b7fe8adb0c614e2ef4b65adcc33fb"}, -] - -[package.dependencies] -aiohttp = "*" -click = "*" -importlib-metadata = ">=6.8.0" -jinja2 = ">=3.1.2,<4.0.0" -jsonschema = ">=4.22.0,<5.0.0" -openai = ">=1.40.0" -pydantic = ">=2.0.0,<3.0.0" -python-dotenv = ">=0.2.0" -requests = ">=2.31.0,<3.0.0" -tiktoken = ">=0.7.0" -tokenizers = "*" - -[package.extras] -extra-proxy = ["azure-identity (>=1.15.0,<2.0.0)", "azure-keyvault-secrets (>=4.8.0,<5.0.0)", "google-cloud-kms (>=2.21.3,<3.0.0)", "prisma (==0.11.0)", "pynacl (>=1.5.0,<2.0.0)", "resend (>=0.8.0,<0.9.0)"] -proxy = ["PyJWT (>=2.8.0,<3.0.0)", "apscheduler (>=3.10.4,<4.0.0)", "backoff", "cryptography (>=42.0.5,<43.0.0)", "fastapi (>=0.111.0,<0.112.0)", "fastapi-sso (>=0.10.0,<0.11.0)", "gunicorn (>=22.0.0,<23.0.0)", "orjson (>=3.9.7,<4.0.0)", "python-multipart (>=0.0.9,<0.0.10)", "pyyaml (>=6.0.1,<7.0.0)", "rq", "uvicorn (>=0.22.0,<0.23.0)"] - -[[package]] -name = "llama-cpp-python" -version = "0.2.88" -description = "Python bindings for the llama.cpp library" -optional = true -python-versions = ">=3.8" -files = [ - {file = "llama_cpp_python-0.2.88.tar.gz", hash = "sha256:b031181d069aa61b3bbec415037b1f060d6d5b36951815f438285c4c85ca693e"}, -] - -[package.dependencies] -diskcache = ">=5.6.1" -jinja2 = ">=2.11.3" -numpy = ">=1.20.0" -typing-extensions = ">=4.5.0" - -[package.extras] -all = ["llama_cpp_python[dev,server,test]"] -dev = ["black (>=23.3.0)", "httpx (>=0.24.1)", "mkdocs (>=1.4.3)", "mkdocs-material (>=9.1.18)", "mkdocstrings[python] (>=0.22.0)", "pytest (>=7.4.0)", "twine (>=4.0.2)"] -server = ["PyYAML (>=5.1)", "fastapi (>=0.100.0)", "pydantic-settings (>=2.0.1)", 
"sse-starlette (>=1.6.1)", "starlette-context (>=0.3.6,<0.4)", "uvicorn (>=0.22.0)"] -test = ["httpx (>=0.24.1)", "pytest (>=7.4.0)", "scipy (>=1.10)"] - -[[package]] -name = "loguru" -version = "0.7.2" -description = "Python logging made (stupidly) simple" -optional = false -python-versions = ">=3.5" -files = [ - {file = "loguru-0.7.2-py3-none-any.whl", hash = "sha256:003d71e3d3ed35f0f8984898359d65b79e5b21943f78af86aa5491210429b8eb"}, - {file = "loguru-0.7.2.tar.gz", hash = "sha256:e671a53522515f34fd406340ee968cb9ecafbc4b36c679da03c18fd8d0bd51ac"}, -] - -[package.dependencies] -colorama = {version = ">=0.3.4", markers = "sys_platform == \"win32\""} -win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""} - -[package.extras] -dev = ["Sphinx (==7.2.5)", "colorama (==0.4.5)", "colorama (==0.4.6)", "exceptiongroup (==1.1.3)", "freezegun (==1.1.0)", "freezegun (==1.2.2)", "mypy (==v0.910)", "mypy (==v0.971)", "mypy (==v1.4.1)", "mypy (==v1.5.1)", "pre-commit (==3.4.0)", "pytest (==6.1.2)", "pytest (==7.4.0)", "pytest-cov (==2.12.1)", "pytest-cov (==4.1.0)", "pytest-mypy-plugins (==1.9.3)", "pytest-mypy-plugins (==3.0.0)", "sphinx-autobuild (==2021.3.14)", "sphinx-rtd-theme (==1.3.0)", "tox (==3.27.1)", "tox (==4.11.0)"] - -[[package]] -name = "lxml" -version = "5.3.0" -description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." -optional = false -python-versions = ">=3.6" -files = [ - {file = "lxml-5.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:dd36439be765e2dde7660212b5275641edbc813e7b24668831a5c8ac91180656"}, - {file = "lxml-5.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ae5fe5c4b525aa82b8076c1a59d642c17b6e8739ecf852522c6321852178119d"}, - {file = "lxml-5.3.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:501d0d7e26b4d261fca8132854d845e4988097611ba2531408ec91cf3fd9d20a"}, - {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb66442c2546446944437df74379e9cf9e9db353e61301d1a0e26482f43f0dd8"}, - {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9e41506fec7a7f9405b14aa2d5c8abbb4dbbd09d88f9496958b6d00cb4d45330"}, - {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f7d4a670107d75dfe5ad080bed6c341d18c4442f9378c9f58e5851e86eb79965"}, - {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41ce1f1e2c7755abfc7e759dc34d7d05fd221723ff822947132dc934d122fe22"}, - {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:44264ecae91b30e5633013fb66f6ddd05c006d3e0e884f75ce0b4755b3e3847b"}, - {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:3c174dc350d3ec52deb77f2faf05c439331d6ed5e702fc247ccb4e6b62d884b7"}, - {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:2dfab5fa6a28a0b60a20638dc48e6343c02ea9933e3279ccb132f555a62323d8"}, - {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:b1c8c20847b9f34e98080da785bb2336ea982e7f913eed5809e5a3c872900f32"}, - {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2c86bf781b12ba417f64f3422cfc302523ac9cd1d8ae8c0f92a1c66e56ef2e86"}, - {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:c162b216070f280fa7da844531169be0baf9ccb17263cf5a8bf876fcd3117fa5"}, - {file = 
"lxml-5.3.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:36aef61a1678cb778097b4a6eeae96a69875d51d1e8f4d4b491ab3cfb54b5a03"}, - {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f65e5120863c2b266dbcc927b306c5b78e502c71edf3295dfcb9501ec96e5fc7"}, - {file = "lxml-5.3.0-cp310-cp310-win32.whl", hash = "sha256:ef0c1fe22171dd7c7c27147f2e9c3e86f8bdf473fed75f16b0c2e84a5030ce80"}, - {file = "lxml-5.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:052d99051e77a4f3e8482c65014cf6372e61b0a6f4fe9edb98503bb5364cfee3"}, - {file = "lxml-5.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:74bcb423462233bc5d6066e4e98b0264e7c1bed7541fff2f4e34fe6b21563c8b"}, - {file = "lxml-5.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a3d819eb6f9b8677f57f9664265d0a10dd6551d227afb4af2b9cd7bdc2ccbf18"}, - {file = "lxml-5.3.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b8f5db71b28b8c404956ddf79575ea77aa8b1538e8b2ef9ec877945b3f46442"}, - {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c3406b63232fc7e9b8783ab0b765d7c59e7c59ff96759d8ef9632fca27c7ee4"}, - {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ecdd78ab768f844c7a1d4a03595038c166b609f6395e25af9b0f3f26ae1230f"}, - {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:168f2dfcfdedf611eb285efac1516c8454c8c99caf271dccda8943576b67552e"}, - {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa617107a410245b8660028a7483b68e7914304a6d4882b5ff3d2d3eb5948d8c"}, - {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:69959bd3167b993e6e710b99051265654133a98f20cec1d9b493b931942e9c16"}, - {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:bd96517ef76c8654446fc3db9242d019a1bb5fe8b751ba414765d59f99210b79"}, - {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:ab6dd83b970dc97c2d10bc71aa925b84788c7c05de30241b9e96f9b6d9ea3080"}, - {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:eec1bb8cdbba2925bedc887bc0609a80e599c75b12d87ae42ac23fd199445654"}, - {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6a7095eeec6f89111d03dabfe5883a1fd54da319c94e0fb104ee8f23616b572d"}, - {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:6f651ebd0b21ec65dfca93aa629610a0dbc13dbc13554f19b0113da2e61a4763"}, - {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:f422a209d2455c56849442ae42f25dbaaba1c6c3f501d58761c619c7836642ec"}, - {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:62f7fdb0d1ed2065451f086519865b4c90aa19aed51081979ecd05a21eb4d1be"}, - {file = "lxml-5.3.0-cp311-cp311-win32.whl", hash = "sha256:c6379f35350b655fd817cd0d6cbeef7f265f3ae5fedb1caae2eb442bbeae9ab9"}, - {file = "lxml-5.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:9c52100e2c2dbb0649b90467935c4b0de5528833c76a35ea1a2691ec9f1ee7a1"}, - {file = "lxml-5.3.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:e99f5507401436fdcc85036a2e7dc2e28d962550afe1cbfc07c40e454256a859"}, - {file = "lxml-5.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:384aacddf2e5813a36495233b64cb96b1949da72bef933918ba5c84e06af8f0e"}, - {file = "lxml-5.3.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:874a216bf6afaf97c263b56371434e47e2c652d215788396f60477540298218f"}, - {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65ab5685d56914b9a2a34d67dd5488b83213d680b0c5d10b47f81da5a16b0b0e"}, - {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aac0bbd3e8dd2d9c45ceb82249e8bdd3ac99131a32b4d35c8af3cc9db1657179"}, - {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b369d3db3c22ed14c75ccd5af429086f166a19627e84a8fdade3f8f31426e52a"}, - {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c24037349665434f375645fa9d1f5304800cec574d0310f618490c871fd902b3"}, - {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:62d172f358f33a26d6b41b28c170c63886742f5b6772a42b59b4f0fa10526cb1"}, - {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:c1f794c02903c2824fccce5b20c339a1a14b114e83b306ff11b597c5f71a1c8d"}, - {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:5d6a6972b93c426ace71e0be9a6f4b2cfae9b1baed2eed2006076a746692288c"}, - {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:3879cc6ce938ff4eb4900d901ed63555c778731a96365e53fadb36437a131a99"}, - {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:74068c601baff6ff021c70f0935b0c7bc528baa8ea210c202e03757c68c5a4ff"}, - {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ecd4ad8453ac17bc7ba3868371bffb46f628161ad0eefbd0a855d2c8c32dd81a"}, - {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:7e2f58095acc211eb9d8b5771bf04df9ff37d6b87618d1cbf85f92399c98dae8"}, - {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e63601ad5cd8f860aa99d109889b5ac34de571c7ee902d6812d5d9ddcc77fa7d"}, - {file = "lxml-5.3.0-cp312-cp312-win32.whl", hash = "sha256:17e8d968d04a37c50ad9c456a286b525d78c4a1c15dd53aa46c1d8e06bf6fa30"}, - {file = "lxml-5.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:c1a69e58a6bb2de65902051d57fde951febad631a20a64572677a1052690482f"}, - {file = "lxml-5.3.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8c72e9563347c7395910de6a3100a4840a75a6f60e05af5e58566868d5eb2d6a"}, - {file = "lxml-5.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e92ce66cd919d18d14b3856906a61d3f6b6a8500e0794142338da644260595cd"}, - {file = "lxml-5.3.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d04f064bebdfef9240478f7a779e8c5dc32b8b7b0b2fc6a62e39b928d428e51"}, - {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c2fb570d7823c2bbaf8b419ba6e5662137f8166e364a8b2b91051a1fb40ab8b"}, - {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0c120f43553ec759f8de1fee2f4794452b0946773299d44c36bfe18e83caf002"}, - {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:562e7494778a69086f0312ec9689f6b6ac1c6b65670ed7d0267e49f57ffa08c4"}, - {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:423b121f7e6fa514ba0c7918e56955a1d4470ed35faa03e3d9f0e3baa4c7e492"}, - {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:c00f323cc00576df6165cc9d21a4c21285fa6b9989c5c39830c3903dc4303ef3"}, - {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_ppc64le.whl", hash = 
"sha256:1fdc9fae8dd4c763e8a31e7630afef517eab9f5d5d31a278df087f307bf601f4"}, - {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_s390x.whl", hash = "sha256:658f2aa69d31e09699705949b5fc4719cbecbd4a97f9656a232e7d6c7be1a367"}, - {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:1473427aff3d66a3fa2199004c3e601e6c4500ab86696edffdbc84954c72d832"}, - {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a87de7dd873bf9a792bf1e58b1c3887b9264036629a5bf2d2e6579fe8e73edff"}, - {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:0d7b36afa46c97875303a94e8f3ad932bf78bace9e18e603f2085b652422edcd"}, - {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:cf120cce539453ae086eacc0130a324e7026113510efa83ab42ef3fcfccac7fb"}, - {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:df5c7333167b9674aa8ae1d4008fa4bc17a313cc490b2cca27838bbdcc6bb15b"}, - {file = "lxml-5.3.0-cp313-cp313-win32.whl", hash = "sha256:c802e1c2ed9f0c06a65bc4ed0189d000ada8049312cfeab6ca635e39c9608957"}, - {file = "lxml-5.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:406246b96d552e0503e17a1006fd27edac678b3fcc9f1be71a2f94b4ff61528d"}, - {file = "lxml-5.3.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:8f0de2d390af441fe8b2c12626d103540b5d850d585b18fcada58d972b74a74e"}, - {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1afe0a8c353746e610bd9031a630a95bcfb1a720684c3f2b36c4710a0a96528f"}, - {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56b9861a71575f5795bde89256e7467ece3d339c9b43141dbdd54544566b3b94"}, - {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:9fb81d2824dff4f2e297a276297e9031f46d2682cafc484f49de182aa5e5df99"}, - {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:2c226a06ecb8cdef28845ae976da407917542c5e6e75dcac7cc33eb04aaeb237"}, - {file = "lxml-5.3.0-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:7d3d1ca42870cdb6d0d29939630dbe48fa511c203724820fc0fd507b2fb46577"}, - {file = "lxml-5.3.0-cp36-cp36m-win32.whl", hash = "sha256:094cb601ba9f55296774c2d57ad68730daa0b13dc260e1f941b4d13678239e70"}, - {file = "lxml-5.3.0-cp36-cp36m-win_amd64.whl", hash = "sha256:eafa2c8658f4e560b098fe9fc54539f86528651f61849b22111a9b107d18910c"}, - {file = "lxml-5.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cb83f8a875b3d9b458cada4f880fa498646874ba4011dc974e071a0a84a1b033"}, - {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25f1b69d41656b05885aa185f5fdf822cb01a586d1b32739633679699f220391"}, - {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23e0553b8055600b3bf4a00b255ec5c92e1e4aebf8c2c09334f8368e8bd174d6"}, - {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ada35dd21dc6c039259596b358caab6b13f4db4d4a7f8665764d616daf9cc1d"}, - {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:81b4e48da4c69313192d8c8d4311e5d818b8be1afe68ee20f6385d0e96fc9512"}, - {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:2bc9fd5ca4729af796f9f59cd8ff160fe06a474da40aca03fcc79655ddee1a8b"}, - {file = "lxml-5.3.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:07da23d7ee08577760f0a71d67a861019103e4812c87e2fab26b039054594cc5"}, - {file = 
"lxml-5.3.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:ea2e2f6f801696ad7de8aec061044d6c8c0dd4037608c7cab38a9a4d316bfb11"}, - {file = "lxml-5.3.0-cp37-cp37m-win32.whl", hash = "sha256:5c54afdcbb0182d06836cc3d1be921e540be3ebdf8b8a51ee3ef987537455f84"}, - {file = "lxml-5.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:f2901429da1e645ce548bf9171784c0f74f0718c3f6150ce166be39e4dd66c3e"}, - {file = "lxml-5.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c56a1d43b2f9ee4786e4658c7903f05da35b923fb53c11025712562d5cc02753"}, - {file = "lxml-5.3.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ee8c39582d2652dcd516d1b879451500f8db3fe3607ce45d7c5957ab2596040"}, - {file = "lxml-5.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fdf3a3059611f7585a78ee10399a15566356116a4288380921a4b598d807a22"}, - {file = "lxml-5.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:146173654d79eb1fc97498b4280c1d3e1e5d58c398fa530905c9ea50ea849b22"}, - {file = "lxml-5.3.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:0a7056921edbdd7560746f4221dca89bb7a3fe457d3d74267995253f46343f15"}, - {file = "lxml-5.3.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:9e4b47ac0f5e749cfc618efdf4726269441014ae1d5583e047b452a32e221920"}, - {file = "lxml-5.3.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:f914c03e6a31deb632e2daa881fe198461f4d06e57ac3d0e05bbcab8eae01945"}, - {file = "lxml-5.3.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:213261f168c5e1d9b7535a67e68b1f59f92398dd17a56d934550837143f79c42"}, - {file = "lxml-5.3.0-cp38-cp38-win32.whl", hash = "sha256:218c1b2e17a710e363855594230f44060e2025b05c80d1f0661258142b2add2e"}, - {file = "lxml-5.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:315f9542011b2c4e1d280e4a20ddcca1761993dda3afc7a73b01235f8641e903"}, - {file = "lxml-5.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1ffc23010330c2ab67fac02781df60998ca8fe759e8efde6f8b756a20599c5de"}, - {file = "lxml-5.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2b3778cb38212f52fac9fe913017deea2fdf4eb1a4f8e4cfc6b009a13a6d3fcc"}, - {file = "lxml-5.3.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b0c7a688944891086ba192e21c5229dea54382f4836a209ff8d0a660fac06be"}, - {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:747a3d3e98e24597981ca0be0fd922aebd471fa99d0043a3842d00cdcad7ad6a"}, - {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86a6b24b19eaebc448dc56b87c4865527855145d851f9fc3891673ff97950540"}, - {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b11a5d918a6216e521c715b02749240fb07ae5a1fefd4b7bf12f833bc8b4fe70"}, - {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68b87753c784d6acb8a25b05cb526c3406913c9d988d51f80adecc2b0775d6aa"}, - {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:109fa6fede314cc50eed29e6e56c540075e63d922455346f11e4d7a036d2b8cf"}, - {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_ppc64le.whl", hash = "sha256:02ced472497b8362c8e902ade23e3300479f4f43e45f4105c85ef43b8db85229"}, - {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_s390x.whl", hash = "sha256:6b038cc86b285e4f9fea2ba5ee76e89f21ed1ea898e287dc277a25884f3a7dfe"}, - {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = 
"sha256:7437237c6a66b7ca341e868cda48be24b8701862757426852c9b3186de1da8a2"}, - {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7f41026c1d64043a36fda21d64c5026762d53a77043e73e94b71f0521939cc71"}, - {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:482c2f67761868f0108b1743098640fbb2a28a8e15bf3f47ada9fa59d9fe08c3"}, - {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:1483fd3358963cc5c1c9b122c80606a3a79ee0875bcac0204149fa09d6ff2727"}, - {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2dec2d1130a9cda5b904696cec33b2cfb451304ba9081eeda7f90f724097300a"}, - {file = "lxml-5.3.0-cp39-cp39-win32.whl", hash = "sha256:a0eabd0a81625049c5df745209dc7fcef6e2aea7793e5f003ba363610aa0a3ff"}, - {file = "lxml-5.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:89e043f1d9d341c52bf2af6d02e6adde62e0a46e6755d5eb60dc6e4f0b8aeca2"}, - {file = "lxml-5.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7b1cd427cb0d5f7393c31b7496419da594fe600e6fdc4b105a54f82405e6626c"}, - {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:51806cfe0279e06ed8500ce19479d757db42a30fd509940b1701be9c86a5ff9a"}, - {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee70d08fd60c9565ba8190f41a46a54096afa0eeb8f76bd66f2c25d3b1b83005"}, - {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:8dc2c0395bea8254d8daebc76dcf8eb3a95ec2a46fa6fae5eaccee366bfe02ce"}, - {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6ba0d3dcac281aad8a0e5b14c7ed6f9fa89c8612b47939fc94f80b16e2e9bc83"}, - {file = "lxml-5.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:6e91cf736959057f7aac7adfc83481e03615a8e8dd5758aa1d95ea69e8931dba"}, - {file = "lxml-5.3.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:94d6c3782907b5e40e21cadf94b13b0842ac421192f26b84c45f13f3c9d5dc27"}, - {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c300306673aa0f3ed5ed9372b21867690a17dba38c68c44b287437c362ce486b"}, - {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78d9b952e07aed35fe2e1a7ad26e929595412db48535921c5013edc8aa4a35ce"}, - {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:01220dca0d066d1349bd6a1726856a78f7929f3878f7e2ee83c296c69495309e"}, - {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:2d9b8d9177afaef80c53c0a9e30fa252ff3036fb1c6494d427c066a4ce6a282f"}, - {file = "lxml-5.3.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:20094fc3f21ea0a8669dc4c61ed7fa8263bd37d97d93b90f28fc613371e7a875"}, - {file = "lxml-5.3.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ace2c2326a319a0bb8a8b0e5b570c764962e95818de9f259ce814ee666603f19"}, - {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92e67a0be1639c251d21e35fe74df6bcc40cba445c2cda7c4a967656733249e2"}, - {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd5350b55f9fecddc51385463a4f67a5da829bc741e38cf689f38ec9023f54ab"}, - {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:4c1fefd7e3d00921c44dc9ca80a775af49698bbfd92ea84498e56acffd4c5469"}, - {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = 
"sha256:71a8dd38fbd2f2319136d4ae855a7078c69c9a38ae06e0c17c73fd70fc6caad8"}, - {file = "lxml-5.3.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:97acf1e1fd66ab53dacd2c35b319d7e548380c2e9e8c54525c6e76d21b1ae3b1"}, - {file = "lxml-5.3.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:68934b242c51eb02907c5b81d138cb977b2129a0a75a8f8b60b01cb8586c7b21"}, - {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b710bc2b8292966b23a6a0121f7a6c51d45d2347edcc75f016ac123b8054d3f2"}, - {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18feb4b93302091b1541221196a2155aa296c363fd233814fa11e181adebc52f"}, - {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:3eb44520c4724c2e1a57c0af33a379eee41792595023f367ba3952a2d96c2aab"}, - {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:609251a0ca4770e5a8768ff902aa02bf636339c5a93f9349b48eb1f606f7f3e9"}, - {file = "lxml-5.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:516f491c834eb320d6c843156440fe7fc0d50b33e44387fcec5b02f0bc118a4c"}, - {file = "lxml-5.3.0.tar.gz", hash = "sha256:4e109ca30d1edec1ac60cdbe341905dc3b8f55b16855e03a54aaf59e51ec8c6f"}, -] - -[package.extras] -cssselect = ["cssselect (>=0.7)"] -html-clean = ["lxml-html-clean"] -html5 = ["html5lib"] -htmlsoup = ["BeautifulSoup4"] -source = ["Cython (>=3.0.11)"] - -[[package]] -name = "mako" -version = "1.3.5" -description = "A super-fast templating language that borrows the best ideas from the existing templating languages." -optional = false -python-versions = ">=3.8" -files = [ - {file = "Mako-1.3.5-py3-none-any.whl", hash = "sha256:260f1dbc3a519453a9c856dedfe4beb4e50bd5a26d96386cb6c80856556bb91a"}, - {file = "Mako-1.3.5.tar.gz", hash = "sha256:48dbc20568c1d276a2698b36d968fa76161bf127194907ea6fc594fa81f943bc"}, -] - -[package.dependencies] -MarkupSafe = ">=0.9.2" - -[package.extras] -babel = ["Babel"] -lingua = ["lingua"] -testing = ["pytest"] - -[[package]] -name = "markdown" -version = "3.6" -description = "Python implementation of John Gruber's Markdown." -optional = false -python-versions = ">=3.8" -files = [ - {file = "Markdown-3.6-py3-none-any.whl", hash = "sha256:48f276f4d8cfb8ce6527c8f79e2ee29708508bf4d40aa410fbc3b4ee832c850f"}, - {file = "Markdown-3.6.tar.gz", hash = "sha256:ed4f41f6daecbeeb96e576ce414c41d2d876daa9a16cb35fa8ed8c2ddfad0224"}, -] - -[package.extras] -docs = ["mdx-gh-links (>=0.2)", "mkdocs (>=1.5)", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-nature (>=0.6)", "mkdocs-section-index", "mkdocstrings[python]"] -testing = ["coverage", "pyyaml"] - -[[package]] -name = "markdown-it-py" -version = "3.0.0" -description = "Python port of markdown-it. Markdown parsing, done right!" 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, - {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, -] - -[package.dependencies] -mdurl = ">=0.1,<1.0" - -[package.extras] -benchmarking = ["psutil", "pytest", "pytest-benchmark"] -code-style = ["pre-commit (>=3.0,<4.0)"] -compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] -linkify = ["linkify-it-py (>=1,<3)"] -plugins = ["mdit-py-plugins"] -profiling = ["gprof2dot"] -rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] -testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] - -[[package]] -name = "markupsafe" -version = "2.1.5" -description = "Safely add untrusted strings to HTML/XML markup." -optional = false -python-versions = ">=3.7" -files = [ - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, - {file = 
"MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = 
"sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, - {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, -] - -[[package]] -name = "marshmallow" -version = "3.21.3" -description = "A lightweight library for converting 
complex datatypes to and from native Python datatypes." -optional = false -python-versions = ">=3.8" -files = [ - {file = "marshmallow-3.21.3-py3-none-any.whl", hash = "sha256:86ce7fb914aa865001a4b2092c4c2872d13bc347f3d42673272cabfdbad386f1"}, - {file = "marshmallow-3.21.3.tar.gz", hash = "sha256:4f57c5e050a54d66361e826f94fba213eb10b67b2fdb02c3e0343ce207ba1662"}, -] - -[package.dependencies] -packaging = ">=17.0" - -[package.extras] -dev = ["marshmallow[tests]", "pre-commit (>=3.5,<4.0)", "tox"] -docs = ["alabaster (==0.7.16)", "autodocsumm (==0.2.12)", "sphinx (==7.3.7)", "sphinx-issues (==4.1.0)", "sphinx-version-warning (==1.1.2)"] -tests = ["pytest", "pytz", "simplejson"] - -[[package]] -name = "matplotlib-inline" -version = "0.1.7" -description = "Inline Matplotlib backend for Jupyter" -optional = false -python-versions = ">=3.8" -files = [ - {file = "matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca"}, - {file = "matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90"}, -] - -[package.dependencies] -traitlets = "*" - -[[package]] -name = "mdurl" -version = "0.1.2" -description = "Markdown URL utilities" -optional = false -python-versions = ">=3.7" -files = [ - {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, - {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, -] - -[[package]] -name = "mem0ai" -version = "0.0.9" -description = "Long-term memory for AI Agents" -optional = false -python-versions = "<4.0,>=3.8" -files = [ - {file = "mem0ai-0.0.9-py3-none-any.whl", hash = "sha256:d4de435729af4fd3d597d022ffb2af89a0630d6c3b4769792bbe27d2ce816858"}, - {file = "mem0ai-0.0.9.tar.gz", hash = "sha256:e4374d5d04aa3f543cd3325f700e4b62f5358ae1c6fa5c44b2ff790c10c4e5f1"}, -] - -[package.dependencies] -openai = ">=1.33.0,<2.0.0" -posthog = ">=3.5.0,<4.0.0" -pydantic = ">=2.7.3,<3.0.0" -qdrant-client = ">=1.9.1,<2.0.0" - -[[package]] -name = "metal-sdk" -version = "2.5.1" -description = "SDK for getmetal.io" -optional = false -python-versions = ">=3.7" -files = [ - {file = "metal_sdk-2.5.1-py3-none-any.whl", hash = "sha256:22eb44609d789d772664fe6eb502bb07b5f645a370c7d7fb993ffb28ecb59bbd"}, - {file = "metal_sdk-2.5.1.tar.gz", hash = "sha256:97ac85e16669f256d2502b7c9606a5f0d4aa4cecc3938834b6064867ee47a34a"}, -] - -[package.dependencies] -httpx = "*" -typing-extensions = "*" - -[[package]] -name = "metaphor-python" -version = "0.1.23" -description = "A Python package for the Metaphor API." -optional = false -python-versions = "*" -files = [ - {file = "metaphor-python-0.1.23.tar.gz", hash = "sha256:ce3a8f9d81b2ee7201dac225095cdf2da34b003fe889c5b4629a8b480e8de7ba"}, - {file = "metaphor_python-0.1.23-py3-none-any.whl", hash = "sha256:993b594373efdd5223631f8997dfd68801241db112a214d7c56aeac03fc74b4f"}, -] - -[package.dependencies] -requests = "*" - -[[package]] -name = "milvus-lite" -version = "2.4.9" -description = "A lightweight version of Milvus wrapped with Python." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "milvus_lite-2.4.9-py3-none-macosx_10_9_x86_64.whl", hash = "sha256:d3e617b3d68c09ad656d54bc3d8cc4ef6ef56c54015e1563d4fe4bcec6b7c90a"}, - {file = "milvus_lite-2.4.9-py3-none-macosx_11_0_arm64.whl", hash = "sha256:6e7029282d6829b277ebb92f64e2370be72b938e34770e1eb649346bda5d1d7f"}, - {file = "milvus_lite-2.4.9-py3-none-manylinux2014_x86_64.whl", hash = "sha256:7f53e674602101cfbcf0a4a59d19eaa139dfd5580639f3040ad73d901f24fc0b"}, -] - -[package.dependencies] -tqdm = "*" - -[[package]] -name = "minijinja" -version = "2.0.1" -description = "An experimental Python binding of the Rust MiniJinja template engine." -optional = false -python-versions = ">=3.8" -files = [ - {file = "minijinja-2.0.1-cp38-abi3-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:063b291cb31f5c33eb77bb4cb457f67f14426ca1418232b8ae9f267155d330cc"}, - {file = "minijinja-2.0.1-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a4e9d639dd89ce7fef86e82147082ab3c248a36950fa3fbe793685ba322c1b7"}, - {file = "minijinja-2.0.1-cp38-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a20373af4ee5430356c196c7fe5f19e3261a4fa16c944542b4de7a2349bac7a6"}, - {file = "minijinja-2.0.1-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ade637bf4826258811a785ccc4e5d41cd2bdf4ec317b1ed3daa4dbbdd020f37d"}, - {file = "minijinja-2.0.1-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d5ec956d777e0fee8e214af48363334c04f098e986038a9e8cb92a0564f81943"}, - {file = "minijinja-2.0.1-cp38-abi3-win32.whl", hash = "sha256:039f4d1a1a73f90917cff1ed7c617eb56e2b2f91bbbdc551adaa448e1673e5c2"}, - {file = "minijinja-2.0.1-cp38-abi3-win_amd64.whl", hash = "sha256:dca5d7689905dce340e36e47348b505c788daf297253b85a1aff506ea63ad1b8"}, - {file = "minijinja-2.0.1.tar.gz", hash = "sha256:e774beffebfb8a1ad17e638ef70917cf5e94593f79acb8a8fff7d983169f3a4e"}, -] - -[[package]] -name = "mmh3" -version = "4.1.0" -description = "Python extension for MurmurHash (MurmurHash3), a set of fast and robust hash functions." 
-optional = false -python-versions = "*" -files = [ - {file = "mmh3-4.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:be5ac76a8b0cd8095784e51e4c1c9c318c19edcd1709a06eb14979c8d850c31a"}, - {file = "mmh3-4.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:98a49121afdfab67cd80e912b36404139d7deceb6773a83620137aaa0da5714c"}, - {file = "mmh3-4.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5259ac0535874366e7d1a5423ef746e0d36a9e3c14509ce6511614bdc5a7ef5b"}, - {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5950827ca0453a2be357696da509ab39646044e3fa15cad364eb65d78797437"}, - {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1dd0f652ae99585b9dd26de458e5f08571522f0402155809fd1dc8852a613a39"}, - {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:99d25548070942fab1e4a6f04d1626d67e66d0b81ed6571ecfca511f3edf07e6"}, - {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53db8d9bad3cb66c8f35cbc894f336273f63489ce4ac416634932e3cbe79eb5b"}, - {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75da0f615eb55295a437264cc0b736753f830b09d102aa4c2a7d719bc445ec05"}, - {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b926b07fd678ea84b3a2afc1fa22ce50aeb627839c44382f3d0291e945621e1a"}, - {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c5b053334f9b0af8559d6da9dc72cef0a65b325ebb3e630c680012323c950bb6"}, - {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:5bf33dc43cd6de2cb86e0aa73a1cc6530f557854bbbe5d59f41ef6de2e353d7b"}, - {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fa7eacd2b830727ba3dd65a365bed8a5c992ecd0c8348cf39a05cc77d22f4970"}, - {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:42dfd6742b9e3eec599f85270617debfa0bbb913c545bb980c8a4fa7b2d047da"}, - {file = "mmh3-4.1.0-cp310-cp310-win32.whl", hash = "sha256:2974ad343f0d39dcc88e93ee6afa96cedc35a9883bc067febd7ff736e207fa47"}, - {file = "mmh3-4.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:74699a8984ded645c1a24d6078351a056f5a5f1fe5838870412a68ac5e28d865"}, - {file = "mmh3-4.1.0-cp310-cp310-win_arm64.whl", hash = "sha256:f0dc874cedc23d46fc488a987faa6ad08ffa79e44fb08e3cd4d4cf2877c00a00"}, - {file = "mmh3-4.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3280a463855b0eae64b681cd5b9ddd9464b73f81151e87bb7c91a811d25619e6"}, - {file = "mmh3-4.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:97ac57c6c3301769e757d444fa7c973ceb002cb66534b39cbab5e38de61cd896"}, - {file = "mmh3-4.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a7b6502cdb4dbd880244818ab363c8770a48cdccecf6d729ade0241b736b5ec0"}, - {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52ba2da04671a9621580ddabf72f06f0e72c1c9c3b7b608849b58b11080d8f14"}, - {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5a5fef4c4ecc782e6e43fbeab09cff1bac82c998a1773d3a5ee6a3605cde343e"}, - {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5135358a7e00991f73b88cdc8eda5203bf9de22120d10a834c5761dbeb07dd13"}, - {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:cff9ae76a54f7c6fe0167c9c4028c12c1f6de52d68a31d11b6790bb2ae685560"}, - {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6f02576a4d106d7830ca90278868bf0983554dd69183b7bbe09f2fcd51cf54f"}, - {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:073d57425a23721730d3ff5485e2da489dd3c90b04e86243dd7211f889898106"}, - {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:71e32ddec7f573a1a0feb8d2cf2af474c50ec21e7a8263026e8d3b4b629805db"}, - {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7cbb20b29d57e76a58b40fd8b13a9130db495a12d678d651b459bf61c0714cea"}, - {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:a42ad267e131d7847076bb7e31050f6c4378cd38e8f1bf7a0edd32f30224d5c9"}, - {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4a013979fc9390abadc445ea2527426a0e7a4495c19b74589204f9b71bcaafeb"}, - {file = "mmh3-4.1.0-cp311-cp311-win32.whl", hash = "sha256:1d3b1cdad7c71b7b88966301789a478af142bddcb3a2bee563f7a7d40519a00f"}, - {file = "mmh3-4.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:0dc6dc32eb03727467da8e17deffe004fbb65e8b5ee2b502d36250d7a3f4e2ec"}, - {file = "mmh3-4.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:9ae3a5c1b32dda121c7dc26f9597ef7b01b4c56a98319a7fe86c35b8bc459ae6"}, - {file = "mmh3-4.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0033d60c7939168ef65ddc396611077a7268bde024f2c23bdc283a19123f9e9c"}, - {file = "mmh3-4.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d6af3e2287644b2b08b5924ed3a88c97b87b44ad08e79ca9f93d3470a54a41c5"}, - {file = "mmh3-4.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d82eb4defa245e02bb0b0dc4f1e7ee284f8d212633389c91f7fba99ba993f0a2"}, - {file = "mmh3-4.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba245e94b8d54765e14c2d7b6214e832557e7856d5183bc522e17884cab2f45d"}, - {file = "mmh3-4.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb04e2feeabaad6231e89cd43b3d01a4403579aa792c9ab6fdeef45cc58d4ec0"}, - {file = "mmh3-4.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1e3b1a27def545ce11e36158ba5d5390cdbc300cfe456a942cc89d649cf7e3b2"}, - {file = "mmh3-4.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce0ab79ff736d7044e5e9b3bfe73958a55f79a4ae672e6213e92492ad5e734d5"}, - {file = "mmh3-4.1.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b02268be6e0a8eeb8a924d7db85f28e47344f35c438c1e149878bb1c47b1cd3"}, - {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:deb887f5fcdaf57cf646b1e062d56b06ef2f23421c80885fce18b37143cba828"}, - {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:99dd564e9e2b512eb117bd0cbf0f79a50c45d961c2a02402787d581cec5448d5"}, - {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:08373082dfaa38fe97aa78753d1efd21a1969e51079056ff552e687764eafdfe"}, - {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:54b9c6a2ea571b714e4fe28d3e4e2db37abfd03c787a58074ea21ee9a8fd1740"}, - {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a7b1edf24c69e3513f879722b97ca85e52f9032f24a52284746877f6a7304086"}, - {file = "mmh3-4.1.0-cp312-cp312-win32.whl", hash = 
"sha256:411da64b951f635e1e2284b71d81a5a83580cea24994b328f8910d40bed67276"}, - {file = "mmh3-4.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:bebc3ecb6ba18292e3d40c8712482b4477abd6981c2ebf0e60869bd90f8ac3a9"}, - {file = "mmh3-4.1.0-cp312-cp312-win_arm64.whl", hash = "sha256:168473dd608ade6a8d2ba069600b35199a9af837d96177d3088ca91f2b3798e3"}, - {file = "mmh3-4.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:372f4b7e1dcde175507640679a2a8790185bb71f3640fc28a4690f73da986a3b"}, - {file = "mmh3-4.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:438584b97f6fe13e944faf590c90fc127682b57ae969f73334040d9fa1c7ffa5"}, - {file = "mmh3-4.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6e27931b232fc676675fac8641c6ec6b596daa64d82170e8597f5a5b8bdcd3b6"}, - {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:571a92bad859d7b0330e47cfd1850b76c39b615a8d8e7aa5853c1f971fd0c4b1"}, - {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a69d6afe3190fa08f9e3a58e5145549f71f1f3fff27bd0800313426929c7068"}, - {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:afb127be0be946b7630220908dbea0cee0d9d3c583fa9114a07156f98566dc28"}, - {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:940d86522f36348ef1a494cbf7248ab3f4a1638b84b59e6c9e90408bd11ad729"}, - {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3dcccc4935686619a8e3d1f7b6e97e3bd89a4a796247930ee97d35ea1a39341"}, - {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:01bb9b90d61854dfc2407c5e5192bfb47222d74f29d140cb2dd2a69f2353f7cc"}, - {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:bcb1b8b951a2c0b0fb8a5426c62a22557e2ffc52539e0a7cc46eb667b5d606a9"}, - {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6477a05d5e5ab3168e82e8b106e316210ac954134f46ec529356607900aea82a"}, - {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:da5892287e5bea6977364b15712a2573c16d134bc5fdcdd4cf460006cf849278"}, - {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:99180d7fd2327a6fffbaff270f760576839dc6ee66d045fa3a450f3490fda7f5"}, - {file = "mmh3-4.1.0-cp38-cp38-win32.whl", hash = "sha256:9b0d4f3949913a9f9a8fb1bb4cc6ecd52879730aab5ff8c5a3d8f5b593594b73"}, - {file = "mmh3-4.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:598c352da1d945108aee0c3c3cfdd0e9b3edef74108f53b49d481d3990402169"}, - {file = "mmh3-4.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:475d6d1445dd080f18f0f766277e1237fa2914e5fe3307a3b2a3044f30892103"}, - {file = "mmh3-4.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5ca07c41e6a2880991431ac717c2a049056fff497651a76e26fc22224e8b5732"}, - {file = "mmh3-4.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0ebe052fef4bbe30c0548d12ee46d09f1b69035ca5208a7075e55adfe091be44"}, - {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eaefd42e85afb70f2b855a011f7b4d8a3c7e19c3f2681fa13118e4d8627378c5"}, - {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0ae43caae5a47afe1b63a1ae3f0986dde54b5fb2d6c29786adbfb8edc9edfb"}, - {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6218666f74c8c013c221e7f5f8a693ac9cf68e5ac9a03f2373b32d77c48904de"}, - {file = 
"mmh3-4.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ac59294a536ba447b5037f62d8367d7d93b696f80671c2c45645fa9f1109413c"}, - {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:086844830fcd1e5c84fec7017ea1ee8491487cfc877847d96f86f68881569d2e"}, - {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e42b38fad664f56f77f6fbca22d08450f2464baa68acdbf24841bf900eb98e87"}, - {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d08b790a63a9a1cde3b5d7d733ed97d4eb884bfbc92f075a091652d6bfd7709a"}, - {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:73ea4cc55e8aea28c86799ecacebca09e5f86500414870a8abaedfcbaf74d288"}, - {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:f90938ff137130e47bcec8dc1f4ceb02f10178c766e2ef58a9f657ff1f62d124"}, - {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:aa1f13e94b8631c8cd53259250556edcf1de71738936b60febba95750d9632bd"}, - {file = "mmh3-4.1.0-cp39-cp39-win32.whl", hash = "sha256:a3b680b471c181490cf82da2142029edb4298e1bdfcb67c76922dedef789868d"}, - {file = "mmh3-4.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:fefef92e9c544a8dbc08f77a8d1b6d48006a750c4375bbcd5ff8199d761e263b"}, - {file = "mmh3-4.1.0-cp39-cp39-win_arm64.whl", hash = "sha256:8e2c1f6a2b41723a4f82bd5a762a777836d29d664fc0095f17910bea0adfd4a6"}, - {file = "mmh3-4.1.0.tar.gz", hash = "sha256:a1cf25348b9acd229dda464a094d6170f47d2850a1fcb762a3b6172d2ce6ca4a"}, -] - -[package.extras] -test = ["mypy (>=1.0)", "pytest (>=7.0.0)"] - -[[package]] -name = "monotonic" -version = "1.6" -description = "An implementation of time.monotonic() for Python 2 & < 3.3" -optional = false -python-versions = "*" -files = [ - {file = "monotonic-1.6-py2.py3-none-any.whl", hash = "sha256:68687e19a14f11f26d140dd5c86f3dba4bf5df58003000ed467e0e2a69bca96c"}, - {file = "monotonic-1.6.tar.gz", hash = "sha256:3a55207bcfed53ddd5c5bae174524062935efed17792e9de2ad0205ce9ad63f7"}, -] - -[[package]] -name = "more-itertools" -version = "10.4.0" -description = "More routines for operating on iterables, beyond itertools" -optional = false -python-versions = ">=3.8" -files = [ - {file = "more-itertools-10.4.0.tar.gz", hash = "sha256:fe0e63c4ab068eac62410ab05cccca2dc71ec44ba8ef29916a0090df061cf923"}, - {file = "more_itertools-10.4.0-py3-none-any.whl", hash = "sha256:0f7d9f83a0a8dcfa8a2694a770590d98a67ea943e3d9f5298309a484758c4e27"}, -] - -[[package]] -name = "mpmath" -version = "1.3.0" -description = "Python library for arbitrary-precision floating-point arithmetic" -optional = false -python-versions = "*" -files = [ - {file = "mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c"}, - {file = "mpmath-1.3.0.tar.gz", hash = "sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f"}, -] - -[package.extras] -develop = ["codecov", "pycodestyle", "pytest (>=4.6)", "pytest-cov", "wheel"] -docs = ["sphinx"] -gmpy = ["gmpy2 (>=2.1.0a4)"] -tests = ["pytest (>=4.6)"] - -[[package]] -name = "multidict" -version = "6.0.5" -description = "multidict implementation" -optional = false -python-versions = ">=3.7" -files = [ - {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"}, - {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"}, - {file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"}, - {file = "multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"}, - {file = "multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e"}, - {file = "multidict-6.0.5-cp311-cp311-win32.whl", hash = "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c"}, - {file = "multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea"}, - {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e"}, - {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b"}, - {file = "multidict-6.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda"}, - {file = "multidict-6.0.5-cp312-cp312-win32.whl", hash = "sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5"}, - {file = "multidict-6.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556"}, - {file = "multidict-6.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:19fe01cea168585ba0f678cad6f58133db2aa14eccaf22f88e4a6dccadfad8b3"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:6bf7a982604375a8d49b6cc1b781c1747f243d91b81035a9b43a2126c04766f5"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:107c0cdefe028703fb5dafe640a409cb146d44a6ae201e55b35a4af8e95457dd"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:403c0911cd5d5791605808b942c88a8155c2592e05332d2bf78f18697a5fa15e"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aeaf541ddbad8311a87dd695ed9642401131ea39ad7bc8cf3ef3967fd093b626"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4972624066095e52b569e02b5ca97dbd7a7ddd4294bf4e7247d52635630dd83"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d946b0a9eb8aaa590df1fe082cee553ceab173e6cb5b03239716338629c50c7a"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b55358304d7a73d7bdf5de62494aaf70bd33015831ffd98bc498b433dfe5b10c"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:a3145cb08d8625b2d3fee1b2d596a8766352979c9bffe5d7833e0503d0f0b5e5"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d65f25da8e248202bd47445cec78e0025c0fe7582b23ec69c3b27a640dd7a8e3"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c9bf56195c6bbd293340ea82eafd0071cb3d450c703d2c93afb89f93b8386ccc"}, - {file = "multidict-6.0.5-cp37-cp37m-win32.whl", hash = "sha256:69db76c09796b313331bb7048229e3bee7928eb62bab5e071e9f7fcc4879caee"}, - {file = "multidict-6.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:fce28b3c8a81b6b36dfac9feb1de115bab619b3c13905b419ec71d03a3fc1423"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76f067f5121dcecf0d63a67f29080b26c43c71a98b10c701b0677e4a065fbd54"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b82cc8ace10ab5bd93235dfaab2021c70637005e1ac787031f4d1da63d493c1d"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cb241881eefd96b46f89b1a056187ea8e9ba14ab88ba632e68d7a2ecb7aadf7"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e94e6912639a02ce173341ff62cc1201232ab86b8a8fcc05572741a5dc7d93"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09a892e4a9fb47331da06948690ae38eaa2426de97b4ccbfafbdcbe5c8f37ff8"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55205d03e8a598cfc688c71ca8ea5f66447164efff8869517f175ea632c7cb7b"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37b15024f864916b4951adb95d3a80c9431299080341ab9544ed148091b53f50"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2a1dee728b52b33eebff5072817176c172050d44d67befd681609b4746e1c2e"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:edd08e6f2f1a390bf137080507e44ccc086353c8e98c657e666c017718561b89"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:60d698e8179a42ec85172d12f50b1668254628425a6bd611aba022257cac1386"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3d25f19500588cbc47dc19081d78131c32637c25804df8414463ec908631e453"}, - 
{file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4cc0ef8b962ac7a5e62b9e826bd0cd5040e7d401bc45a6835910ed699037a461"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:eca2e9d0cc5a889850e9bbd68e98314ada174ff6ccd1129500103df7a94a7a44"}, - {file = "multidict-6.0.5-cp38-cp38-win32.whl", hash = "sha256:4a6a4f196f08c58c59e0b8ef8ec441d12aee4125a7d4f4fef000ccb22f8d7241"}, - {file = "multidict-6.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:0275e35209c27a3f7951e1ce7aaf93ce0d163b28948444bec61dd7badc6d3f8c"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e7be68734bd8c9a513f2b0cfd508802d6609da068f40dc57d4e3494cefc92929"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d9ea7a7e779d7a3561aade7d596649fbecfa5c08a7674b11b423783217933f9"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ea1456df2a27c73ce51120fa2f519f1bea2f4a03a917f4a43c8707cf4cbbae1a"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf590b134eb70629e350691ecca88eac3e3b8b3c86992042fb82e3cb1830d5e1"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c0631926c4f58e9a5ccce555ad7747d9a9f8b10619621f22f9635f069f6233e"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dce1c6912ab9ff5f179eaf6efe7365c1f425ed690b03341911bf4939ef2f3046"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0868d64af83169e4d4152ec612637a543f7a336e4a307b119e98042e852ad9c"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141b43360bfd3bdd75f15ed811850763555a251e38b2405967f8e25fb43f7d40"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7df704ca8cf4a073334e0427ae2345323613e4df18cc224f647f251e5e75a527"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6214c5a5571802c33f80e6c84713b2c79e024995b9c5897f794b43e714daeec9"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd6c8fca38178e12c00418de737aef1261576bd1b6e8c6134d3e729a4e858b38"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e02021f87a5b6932fa6ce916ca004c4d441509d33bbdbeca70d05dff5e9d2479"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ebd8d160f91a764652d3e51ce0d2956b38efe37c9231cd82cfc0bed2e40b581c"}, - {file = "multidict-6.0.5-cp39-cp39-win32.whl", hash = "sha256:04da1bb8c8dbadf2a18a452639771951c662c5ad03aefe4884775454be322c9b"}, - {file = "multidict-6.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:d6f6d4f185481c9669b9447bf9d9cf3b95a0e9df9d169bbc17e363b7d5487755"}, - {file = "multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7"}, - {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"}, -] - -[[package]] -name = "multiprocess" -version = "0.70.16" -description = "better multiprocessing and multithreading in Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "multiprocess-0.70.16-pp310-pypy310_pp73-macosx_10_13_x86_64.whl", hash = "sha256:476887be10e2f59ff183c006af746cb6f1fd0eadcfd4ef49e605cbe2659920ee"}, - {file = "multiprocess-0.70.16-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", 
hash = "sha256:d951bed82c8f73929ac82c61f01a7b5ce8f3e5ef40f5b52553b4f547ce2b08ec"}, - {file = "multiprocess-0.70.16-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:37b55f71c07e2d741374998c043b9520b626a8dddc8b3129222ca4f1a06ef67a"}, - {file = "multiprocess-0.70.16-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba8c31889abf4511c7308a8c52bb4a30b9d590e7f58523302ba00237702ca054"}, - {file = "multiprocess-0.70.16-pp39-pypy39_pp73-macosx_10_13_x86_64.whl", hash = "sha256:0dfd078c306e08d46d7a8d06fb120313d87aa43af60d66da43ffff40b44d2f41"}, - {file = "multiprocess-0.70.16-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e7b9d0f307cd9bd50851afaac0dba2cb6c44449efff697df7c7645f7d3f2be3a"}, - {file = "multiprocess-0.70.16-py310-none-any.whl", hash = "sha256:c4a9944c67bd49f823687463660a2d6daae94c289adff97e0f9d696ba6371d02"}, - {file = "multiprocess-0.70.16-py311-none-any.whl", hash = "sha256:af4cabb0dac72abfb1e794fa7855c325fd2b55a10a44628a3c1ad3311c04127a"}, - {file = "multiprocess-0.70.16-py312-none-any.whl", hash = "sha256:fc0544c531920dde3b00c29863377f87e1632601092ea2daca74e4beb40faa2e"}, - {file = "multiprocess-0.70.16-py38-none-any.whl", hash = "sha256:a71d82033454891091a226dfc319d0cfa8019a4e888ef9ca910372a446de4435"}, - {file = "multiprocess-0.70.16-py39-none-any.whl", hash = "sha256:a0bafd3ae1b732eac64be2e72038231c1ba97724b60b09400d68f229fcc2fbf3"}, - {file = "multiprocess-0.70.16.tar.gz", hash = "sha256:161af703d4652a0e1410be6abccecde4a7ddffd19341be0a7011b94aeb171ac1"}, -] - -[package.dependencies] -dill = ">=0.3.8" - -[[package]] -name = "multitasking" -version = "0.0.11" -description = "Non-blocking Python methods using decorators" -optional = false -python-versions = "*" -files = [ - {file = "multitasking-0.0.11-py3-none-any.whl", hash = "sha256:1e5b37a5f8fc1e6cfaafd1a82b6b1cc6d2ed20037d3b89c25a84f499bd7b3dd4"}, - {file = "multitasking-0.0.11.tar.gz", hash = "sha256:4d6bc3cc65f9b2dca72fb5a787850a88dae8f620c2b36ae9b55248e51bcd6026"}, -] - -[[package]] -name = "mypy" -version = "1.11.1" -description = "Optional static typing for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "mypy-1.11.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a32fc80b63de4b5b3e65f4be82b4cfa362a46702672aa6a0f443b4689af7008c"}, - {file = "mypy-1.11.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c1952f5ea8a5a959b05ed5f16452fddadbaae48b5d39235ab4c3fc444d5fd411"}, - {file = "mypy-1.11.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1e30dc3bfa4e157e53c1d17a0dad20f89dc433393e7702b813c10e200843b03"}, - {file = "mypy-1.11.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2c63350af88f43a66d3dfeeeb8d77af34a4f07d760b9eb3a8697f0386c7590b4"}, - {file = "mypy-1.11.1-cp310-cp310-win_amd64.whl", hash = "sha256:a831671bad47186603872a3abc19634f3011d7f83b083762c942442d51c58d58"}, - {file = "mypy-1.11.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7b6343d338390bb946d449677726edf60102a1c96079b4f002dedff375953fc5"}, - {file = "mypy-1.11.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e4fe9f4e5e521b458d8feb52547f4bade7ef8c93238dfb5bbc790d9ff2d770ca"}, - {file = "mypy-1.11.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:886c9dbecc87b9516eff294541bf7f3655722bf22bb898ee06985cd7269898de"}, - {file = "mypy-1.11.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fca4a60e1dd9fd0193ae0067eaeeb962f2d79e0d9f0f66223a0682f26ffcc809"}, - {file = 
"mypy-1.11.1-cp311-cp311-win_amd64.whl", hash = "sha256:0bd53faf56de9643336aeea1c925012837432b5faf1701ccca7fde70166ccf72"}, - {file = "mypy-1.11.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f39918a50f74dc5969807dcfaecafa804fa7f90c9d60506835036cc1bc891dc8"}, - {file = "mypy-1.11.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0bc71d1fb27a428139dd78621953effe0d208aed9857cb08d002280b0422003a"}, - {file = "mypy-1.11.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b868d3bcff720dd7217c383474008ddabaf048fad8d78ed948bb4b624870a417"}, - {file = "mypy-1.11.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a707ec1527ffcdd1c784d0924bf5cb15cd7f22683b919668a04d2b9c34549d2e"}, - {file = "mypy-1.11.1-cp312-cp312-win_amd64.whl", hash = "sha256:64f4a90e3ea07f590c5bcf9029035cf0efeae5ba8be511a8caada1a4893f5525"}, - {file = "mypy-1.11.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:749fd3213916f1751fff995fccf20c6195cae941dc968f3aaadf9bb4e430e5a2"}, - {file = "mypy-1.11.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b639dce63a0b19085213ec5fdd8cffd1d81988f47a2dec7100e93564f3e8fb3b"}, - {file = "mypy-1.11.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c956b49c5d865394d62941b109728c5c596a415e9c5b2be663dd26a1ff07bc0"}, - {file = "mypy-1.11.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45df906e8b6804ef4b666af29a87ad9f5921aad091c79cc38e12198e220beabd"}, - {file = "mypy-1.11.1-cp38-cp38-win_amd64.whl", hash = "sha256:d44be7551689d9d47b7abc27c71257adfdb53f03880841a5db15ddb22dc63edb"}, - {file = "mypy-1.11.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2684d3f693073ab89d76da8e3921883019ea8a3ec20fa5d8ecca6a2db4c54bbe"}, - {file = "mypy-1.11.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:79c07eb282cb457473add5052b63925e5cc97dfab9812ee65a7c7ab5e3cb551c"}, - {file = "mypy-1.11.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11965c2f571ded6239977b14deebd3f4c3abd9a92398712d6da3a772974fad69"}, - {file = "mypy-1.11.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a2b43895a0f8154df6519706d9bca8280cda52d3d9d1514b2d9c3e26792a0b74"}, - {file = "mypy-1.11.1-cp39-cp39-win_amd64.whl", hash = "sha256:1a81cf05975fd61aec5ae16501a091cfb9f605dc3e3c878c0da32f250b74760b"}, - {file = "mypy-1.11.1-py3-none-any.whl", hash = "sha256:0624bdb940255d2dd24e829d99a13cfeb72e4e9031f9492148f410ed30bcab54"}, - {file = "mypy-1.11.1.tar.gz", hash = "sha256:f404a0b069709f18bbdb702eb3dcfe51910602995de00bd39cea3050b5772d08"}, -] - -[package.dependencies] -mypy-extensions = ">=1.0.0" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = ">=4.6.0" - -[package.extras] -dmypy = ["psutil (>=4.0)"] -install-types = ["pip"] -mypyc = ["setuptools (>=50)"] -reports = ["lxml"] - -[[package]] -name = "mypy-extensions" -version = "1.0.0" -description = "Type system extensions for programs checked with the mypy type checker." 
-optional = false -python-versions = ">=3.5" -files = [ - {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, - {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, -] - -[[package]] -name = "nanoid" -version = "2.0.0" -description = "A tiny, secure, URL-friendly, unique string ID generator for Python" -optional = false -python-versions = "*" -files = [ - {file = "nanoid-2.0.0-py3-none-any.whl", hash = "sha256:90aefa650e328cffb0893bbd4c236cfd44c48bc1f2d0b525ecc53c3187b653bb"}, - {file = "nanoid-2.0.0.tar.gz", hash = "sha256:5a80cad5e9c6e9ae3a41fa2fb34ae189f7cb420b2a5d8f82bd9d23466e4efa68"}, -] - -[[package]] -name = "nest-asyncio" -version = "1.6.0" -description = "Patch asyncio to allow nested event loops" -optional = false -python-versions = ">=3.5" -files = [ - {file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"}, - {file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"}, -] - -[[package]] -name = "networkx" -version = "3.3" -description = "Python package for creating and manipulating graphs and networks" -optional = false -python-versions = ">=3.10" -files = [ - {file = "networkx-3.3-py3-none-any.whl", hash = "sha256:28575580c6ebdaf4505b22c6256a2b9de86b316dc63ba9e93abde3d78dfdbcf2"}, - {file = "networkx-3.3.tar.gz", hash = "sha256:0c127d8b2f4865f59ae9cb8aafcd60b5c70f3241ebd66f7defad7c4ab90126c9"}, -] - -[package.extras] -default = ["matplotlib (>=3.6)", "numpy (>=1.23)", "pandas (>=1.4)", "scipy (>=1.9,!=1.11.0,!=1.11.1)"] -developer = ["changelist (==0.5)", "mypy (>=1.1)", "pre-commit (>=3.2)", "rtoml"] -doc = ["myst-nb (>=1.0)", "numpydoc (>=1.7)", "pillow (>=9.4)", "pydata-sphinx-theme (>=0.14)", "sphinx (>=7)", "sphinx-gallery (>=0.14)", "texext (>=0.6.7)"] -extra = ["lxml (>=4.6)", "pydot (>=2.0)", "pygraphviz (>=1.12)", "sympy (>=1.10)"] -test = ["pytest (>=7.2)", "pytest-cov (>=4.0)"] - -[[package]] -name = "nltk" -version = "3.9" -description = "Natural Language Toolkit" -optional = false -python-versions = ">=3.8" -files = [ - {file = "nltk-3.9-py3-none-any.whl", hash = "sha256:d17863e861bb33ac617893329d71d06a3dfb7e3eb9ee0b8105281c53944a45a1"}, - {file = "nltk-3.9.tar.gz", hash = "sha256:e98acac454407fa38b76cccb29208d377731cf7fab68f323754a3681f104531f"}, -] - -[package.dependencies] -click = "*" -joblib = "*" -regex = ">=2021.8.3" -tqdm = "*" - -[package.extras] -all = ["matplotlib", "numpy", "pyparsing", "python-crfsuite", "requests", "scikit-learn", "scipy", "twython"] -corenlp = ["requests"] -machine-learning = ["numpy", "python-crfsuite", "scikit-learn", "scipy"] -plot = ["matplotlib"] -tgrep = ["pyparsing"] -twitter = ["twython"] - -[[package]] -name = "nodeenv" -version = "1.9.1" -description = "Node.js virtual environment builder" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -files = [ - {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, - {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, -] - -[[package]] -name = "numexpr" -version = "2.10.1" -description = "Fast numerical expression evaluator for NumPy" -optional = false -python-versions = ">=3.9" -files = [ - {file = 
"numexpr-2.10.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bbd35f17f6efc00ebd4a480192af1ee30996094a0d5343b131b0e90e61e8b554"}, - {file = "numexpr-2.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fecdf4bf3c1250e56583db0a4a80382a259ba4c2e1efa13e04ed43f0938071f5"}, - {file = "numexpr-2.10.1-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b2efa499f460124538a5b4f1bf2e77b28eb443ee244cc5573ed0f6a069ebc635"}, - {file = "numexpr-2.10.1-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ac23a72eff10f928f23b147bdeb0f1b774e862abe332fc9bf4837e9f1bc0bbf9"}, - {file = "numexpr-2.10.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b28eaf45f1cc1048aad9e90e3a8ada1aef58c5f8155a85267dc781b37998c046"}, - {file = "numexpr-2.10.1-cp310-cp310-win32.whl", hash = "sha256:4f0985bd1c493b23b5aad7d81fa174798f3812efb78d14844194834c9fee38b8"}, - {file = "numexpr-2.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:44f6d12a8c44be90199bbb10d3abf467f88951f48a3d1fbbd3c219d121f39c9d"}, - {file = "numexpr-2.10.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a3c0b0bf165b2d886eb981afa4e77873ca076f5d51c491c4d7b8fc10f17c876f"}, - {file = "numexpr-2.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:56648a04679063175681195670ad53e5c8ca19668166ed13875199b5600089c7"}, - {file = "numexpr-2.10.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ce04ae6efe2a9d0be1a0e114115c3ae70c68b8b8fbc615c5c55c15704b01e6a4"}, - {file = "numexpr-2.10.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:45f598182b4f5c153222e47d5163c3bee8d5ebcaee7e56dd2a5898d4d97e4473"}, - {file = "numexpr-2.10.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6a50370bea77ba94c3734a44781c716751354c6bfda2d369af3aed3d67d42871"}, - {file = "numexpr-2.10.1-cp311-cp311-win32.whl", hash = "sha256:fa4009d84a8e6e21790e718a80a22d57fe7f215283576ef2adc4183f7247f3c7"}, - {file = "numexpr-2.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:fcbf013bb8494e8ef1d11fa3457827c1571c6a3153982d709e5d17594999d4dd"}, - {file = "numexpr-2.10.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:82fc95c301b15ff4823f98989ee363a2d5555d16a7cfd3710e98ddee726eaaaa"}, - {file = "numexpr-2.10.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cbf79fef834f88607f977ab9867061dcd9b40ccb08bb28547c6dc6c73e560895"}, - {file = "numexpr-2.10.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:552c8d4b2e3b87cdb2abb40a781b9a61a9090a9f66ac7357fc5a0b93aff76be3"}, - {file = "numexpr-2.10.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:22cc65e9121aeb3187a2b50827715b2b087ea70e8ab21416ea52662322087b43"}, - {file = "numexpr-2.10.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:00204e5853713b5eba5f3d0bc586a5d8d07f76011b597c8b4087592cc2ec2928"}, - {file = "numexpr-2.10.1-cp312-cp312-win32.whl", hash = "sha256:82bf04a1495ac475de4ab49fbe0a3a2710ed3fd1a00bc03847316b5d7602402d"}, - {file = "numexpr-2.10.1-cp312-cp312-win_amd64.whl", hash = "sha256:300e577b3c006dd7a8270f1bb2e8a00ee15bf235b1650fe2a6febec2954bc2c3"}, - {file = "numexpr-2.10.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fb704620657a1c99d64933e8a982148d8bfb2b738a1943e107a2bfdee887ce56"}, - {file = "numexpr-2.10.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:368a1972c3186355160f6ee330a7eea146d8443da75a38a30083289ae251ef5a"}, - {file = "numexpr-2.10.1-cp39-cp39-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:ca8ae46481d0b0689ca0d00a8670bc464ce375e349599fe674a6d4957e7b7eb6"}, - {file = "numexpr-2.10.1-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5a4db4456e0779d5e024220b7b6a7477ac900679bfa74836b06fa526aaed4e3c"}, - {file = "numexpr-2.10.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:926dd426c68f1d927412a2ad843831c1eb9a95871e7bb0bd8b20d547c12238d2"}, - {file = "numexpr-2.10.1-cp39-cp39-win32.whl", hash = "sha256:37598cca41f8f50dc889b0b72be1616a288758c16ab7d48c9ac8719e1a39d835"}, - {file = "numexpr-2.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:78b14c19c403df7498954468385768c86b0d2c52ad03dffb74e45d44ae5a9c77"}, - {file = "numexpr-2.10.1.tar.gz", hash = "sha256:9bba99d354a65f1a008ab8b87f07d84404c668e66bab624df5b6b5373403cf81"}, -] - -[package.dependencies] -numpy = ">=1.23.0" - -[[package]] -name = "numpy" -version = "1.26.4" -description = "Fundamental package for array computing in Python" -optional = false -python-versions = ">=3.9" -files = [ - {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, - {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, - {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, - {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, - {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, - {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, - {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, - {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, - {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, - {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, - {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, - {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, - {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, - {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, - {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, - {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, - {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, - {file = 
"numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, - {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, - {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, - {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, - {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, - {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, - {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, - {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, - {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, - {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, - {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, - {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, - {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, - {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, - {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, - {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, -] - -[[package]] -name = "nvidia-cublas-cu12" -version = "12.1.3.1" -description = "CUBLAS native runtime libraries" -optional = true -python-versions = ">=3" -files = [ - {file = "nvidia_cublas_cu12-12.1.3.1-py3-none-manylinux1_x86_64.whl", hash = "sha256:ee53ccca76a6fc08fb9701aa95b6ceb242cdaab118c3bb152af4e579af792728"}, - {file = "nvidia_cublas_cu12-12.1.3.1-py3-none-win_amd64.whl", hash = "sha256:2b964d60e8cf11b5e1073d179d85fa340c120e99b3067558f3cf98dd69d02906"}, -] - -[[package]] -name = "nvidia-cuda-cupti-cu12" -version = "12.1.105" -description = "CUDA profiling tools runtime libs." 
-optional = true -python-versions = ">=3" -files = [ - {file = "nvidia_cuda_cupti_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:e54fde3983165c624cb79254ae9818a456eb6e87a7fd4d56a2352c24ee542d7e"}, - {file = "nvidia_cuda_cupti_cu12-12.1.105-py3-none-win_amd64.whl", hash = "sha256:bea8236d13a0ac7190bd2919c3e8e6ce1e402104276e6f9694479e48bb0eb2a4"}, -] - -[[package]] -name = "nvidia-cuda-nvrtc-cu12" -version = "12.1.105" -description = "NVRTC native runtime libraries" -optional = true -python-versions = ">=3" -files = [ - {file = "nvidia_cuda_nvrtc_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:339b385f50c309763ca65456ec75e17bbefcbbf2893f462cb8b90584cd27a1c2"}, - {file = "nvidia_cuda_nvrtc_cu12-12.1.105-py3-none-win_amd64.whl", hash = "sha256:0a98a522d9ff138b96c010a65e145dc1b4850e9ecb75a0172371793752fd46ed"}, -] - -[[package]] -name = "nvidia-cuda-runtime-cu12" -version = "12.1.105" -description = "CUDA Runtime native Libraries" -optional = true -python-versions = ">=3" -files = [ - {file = "nvidia_cuda_runtime_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:6e258468ddf5796e25f1dc591a31029fa317d97a0a94ed93468fc86301d61e40"}, - {file = "nvidia_cuda_runtime_cu12-12.1.105-py3-none-win_amd64.whl", hash = "sha256:dfb46ef84d73fababab44cf03e3b83f80700d27ca300e537f85f636fac474344"}, -] - -[[package]] -name = "nvidia-cudnn-cu12" -version = "9.1.0.70" -description = "cuDNN runtime libraries" -optional = true -python-versions = ">=3" -files = [ - {file = "nvidia_cudnn_cu12-9.1.0.70-py3-none-manylinux2014_x86_64.whl", hash = "sha256:165764f44ef8c61fcdfdfdbe769d687e06374059fbb388b6c89ecb0e28793a6f"}, - {file = "nvidia_cudnn_cu12-9.1.0.70-py3-none-win_amd64.whl", hash = "sha256:6278562929433d68365a07a4a1546c237ba2849852c0d4b2262a486e805b977a"}, -] - -[package.dependencies] -nvidia-cublas-cu12 = "*" - -[[package]] -name = "nvidia-cufft-cu12" -version = "11.0.2.54" -description = "CUFFT native runtime libraries" -optional = true -python-versions = ">=3" -files = [ - {file = "nvidia_cufft_cu12-11.0.2.54-py3-none-manylinux1_x86_64.whl", hash = "sha256:794e3948a1aa71fd817c3775866943936774d1c14e7628c74f6f7417224cdf56"}, - {file = "nvidia_cufft_cu12-11.0.2.54-py3-none-win_amd64.whl", hash = "sha256:d9ac353f78ff89951da4af698f80870b1534ed69993f10a4cf1d96f21357e253"}, -] - -[[package]] -name = "nvidia-curand-cu12" -version = "10.3.2.106" -description = "CURAND native runtime libraries" -optional = true -python-versions = ">=3" -files = [ - {file = "nvidia_curand_cu12-10.3.2.106-py3-none-manylinux1_x86_64.whl", hash = "sha256:9d264c5036dde4e64f1de8c50ae753237c12e0b1348738169cd0f8a536c0e1e0"}, - {file = "nvidia_curand_cu12-10.3.2.106-py3-none-win_amd64.whl", hash = "sha256:75b6b0c574c0037839121317e17fd01f8a69fd2ef8e25853d826fec30bdba74a"}, -] - -[[package]] -name = "nvidia-cusolver-cu12" -version = "11.4.5.107" -description = "CUDA solver native runtime libraries" -optional = true -python-versions = ">=3" -files = [ - {file = "nvidia_cusolver_cu12-11.4.5.107-py3-none-manylinux1_x86_64.whl", hash = "sha256:8a7ec542f0412294b15072fa7dab71d31334014a69f953004ea7a118206fe0dd"}, - {file = "nvidia_cusolver_cu12-11.4.5.107-py3-none-win_amd64.whl", hash = "sha256:74e0c3a24c78612192a74fcd90dd117f1cf21dea4822e66d89e8ea80e3cd2da5"}, -] - -[package.dependencies] -nvidia-cublas-cu12 = "*" -nvidia-cusparse-cu12 = "*" -nvidia-nvjitlink-cu12 = "*" - -[[package]] -name = "nvidia-cusparse-cu12" -version = "12.1.0.106" -description = "CUSPARSE native runtime libraries" -optional = true 
-python-versions = ">=3" -files = [ - {file = "nvidia_cusparse_cu12-12.1.0.106-py3-none-manylinux1_x86_64.whl", hash = "sha256:f3b50f42cf363f86ab21f720998517a659a48131e8d538dc02f8768237bd884c"}, - {file = "nvidia_cusparse_cu12-12.1.0.106-py3-none-win_amd64.whl", hash = "sha256:b798237e81b9719373e8fae8d4f091b70a0cf09d9d85c95a557e11df2d8e9a5a"}, -] - -[package.dependencies] -nvidia-nvjitlink-cu12 = "*" - -[[package]] -name = "nvidia-nccl-cu12" -version = "2.20.5" -description = "NVIDIA Collective Communication Library (NCCL) Runtime" -optional = true -python-versions = ">=3" -files = [ - {file = "nvidia_nccl_cu12-2.20.5-py3-none-manylinux2014_aarch64.whl", hash = "sha256:1fc150d5c3250b170b29410ba682384b14581db722b2531b0d8d33c595f33d01"}, - {file = "nvidia_nccl_cu12-2.20.5-py3-none-manylinux2014_x86_64.whl", hash = "sha256:057f6bf9685f75215d0c53bf3ac4a10b3e6578351de307abad9e18a99182af56"}, -] - -[[package]] -name = "nvidia-nvjitlink-cu12" -version = "12.6.20" -description = "Nvidia JIT LTO Library" -optional = true -python-versions = ">=3" -files = [ - {file = "nvidia_nvjitlink_cu12-12.6.20-py3-none-manylinux2014_aarch64.whl", hash = "sha256:84fb38465a5bc7c70cbc320cfd0963eb302ee25a5e939e9f512bbba55b6072fb"}, - {file = "nvidia_nvjitlink_cu12-12.6.20-py3-none-manylinux2014_x86_64.whl", hash = "sha256:562ab97ea2c23164823b2a89cb328d01d45cb99634b8c65fe7cd60d14562bd79"}, - {file = "nvidia_nvjitlink_cu12-12.6.20-py3-none-win_amd64.whl", hash = "sha256:ed3c43a17f37b0c922a919203d2d36cbef24d41cc3e6b625182f8b58203644f6"}, -] - -[[package]] -name = "nvidia-nvtx-cu12" -version = "12.1.105" -description = "NVIDIA Tools Extension" -optional = true -python-versions = ">=3" -files = [ - {file = "nvidia_nvtx_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:dc21cf308ca5691e7c04d962e213f8a4aa9bbfa23d95412f452254c2caeb09e5"}, - {file = "nvidia_nvtx_cu12-12.1.105-py3-none-win_amd64.whl", hash = "sha256:65f4d98982b31b60026e0e6de73fbdfc09d08a96f4656dd3665ca616a11e1e82"}, -] - -[[package]] -name = "oauthlib" -version = "3.2.2" -description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" -optional = false -python-versions = ">=3.6" -files = [ - {file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"}, - {file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"}, -] - -[package.extras] -rsa = ["cryptography (>=3.0.0)"] -signals = ["blinker (>=1.4.0)"] -signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] - -[[package]] -name = "onnxruntime" -version = "1.18.1" -description = "ONNX Runtime is a runtime accelerator for Machine Learning models" -optional = false -python-versions = "*" -files = [ - {file = "onnxruntime-1.18.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:29ef7683312393d4ba04252f1b287d964bd67d5e6048b94d2da3643986c74d80"}, - {file = "onnxruntime-1.18.1-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fc706eb1df06ddf55776e15a30519fb15dda7697f987a2bbda4962845e3cec05"}, - {file = "onnxruntime-1.18.1-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7de69f5ced2a263531923fa68bbec52a56e793b802fcd81a03487b5e292bc3a"}, - {file = "onnxruntime-1.18.1-cp310-cp310-win32.whl", hash = "sha256:221e5b16173926e6c7de2cd437764492aa12b6811f45abd37024e7cf2ae5d7e3"}, - {file = "onnxruntime-1.18.1-cp310-cp310-win_amd64.whl", hash = 
"sha256:75211b619275199c861ee94d317243b8a0fcde6032e5a80e1aa9ded8ab4c6060"}, - {file = "onnxruntime-1.18.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:f26582882f2dc581b809cfa41a125ba71ad9e715738ec6402418df356969774a"}, - {file = "onnxruntime-1.18.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ef36f3a8b768506d02be349ac303fd95d92813ba3ba70304d40c3cd5c25d6a4c"}, - {file = "onnxruntime-1.18.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:170e711393e0618efa8ed27b59b9de0ee2383bd2a1f93622a97006a5ad48e434"}, - {file = "onnxruntime-1.18.1-cp311-cp311-win32.whl", hash = "sha256:9b6a33419b6949ea34e0dc009bc4470e550155b6da644571ecace4b198b0d88f"}, - {file = "onnxruntime-1.18.1-cp311-cp311-win_amd64.whl", hash = "sha256:5c1380a9f1b7788da742c759b6a02ba771fe1ce620519b2b07309decbd1a2fe1"}, - {file = "onnxruntime-1.18.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:31bd57a55e3f983b598675dfc7e5d6f0877b70ec9864b3cc3c3e1923d0a01919"}, - {file = "onnxruntime-1.18.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b9e03c4ba9f734500691a4d7d5b381cd71ee2f3ce80a1154ac8f7aed99d1ecaa"}, - {file = "onnxruntime-1.18.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:781aa9873640f5df24524f96f6070b8c550c66cb6af35710fd9f92a20b4bfbf6"}, - {file = "onnxruntime-1.18.1-cp312-cp312-win32.whl", hash = "sha256:3a2d9ab6254ca62adbb448222e630dc6883210f718065063518c8f93a32432be"}, - {file = "onnxruntime-1.18.1-cp312-cp312-win_amd64.whl", hash = "sha256:ad93c560b1c38c27c0275ffd15cd7f45b3ad3fc96653c09ce2931179982ff204"}, - {file = "onnxruntime-1.18.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:3b55dc9d3c67626388958a3eb7ad87eb7c70f75cb0f7ff4908d27b8b42f2475c"}, - {file = "onnxruntime-1.18.1-cp38-cp38-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f80dbcfb6763cc0177a31168b29b4bd7662545b99a19e211de8c734b657e0669"}, - {file = "onnxruntime-1.18.1-cp38-cp38-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f1ff2c61a16d6c8631796c54139bafea41ee7736077a0fc64ee8ae59432f5c58"}, - {file = "onnxruntime-1.18.1-cp38-cp38-win32.whl", hash = "sha256:219855bd272fe0c667b850bf1a1a5a02499269a70d59c48e6f27f9c8bcb25d02"}, - {file = "onnxruntime-1.18.1-cp38-cp38-win_amd64.whl", hash = "sha256:afdf16aa607eb9a2c60d5ca2d5abf9f448e90c345b6b94c3ed14f4fb7e6a2d07"}, - {file = "onnxruntime-1.18.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:128df253ade673e60cea0955ec9d0e89617443a6d9ce47c2d79eb3f72a3be3de"}, - {file = "onnxruntime-1.18.1-cp39-cp39-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9839491e77e5c5a175cab3621e184d5a88925ee297ff4c311b68897197f4cde9"}, - {file = "onnxruntime-1.18.1-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ad3187c1faff3ac15f7f0e7373ef4788c582cafa655a80fdbb33eaec88976c66"}, - {file = "onnxruntime-1.18.1-cp39-cp39-win32.whl", hash = "sha256:34657c78aa4e0b5145f9188b550ded3af626651b15017bf43d280d7e23dbf195"}, - {file = "onnxruntime-1.18.1-cp39-cp39-win_amd64.whl", hash = "sha256:9c14fd97c3ddfa97da5feef595e2c73f14c2d0ec1d4ecbea99c8d96603c89589"}, -] - -[package.dependencies] -coloredlogs = "*" -flatbuffers = "*" -numpy = ">=1.21.6,<2.0" -packaging = "*" -protobuf = "*" -sympy = "*" - -[[package]] -name = "openai" -version = "1.40.6" -description = "The official Python library for the openai API" -optional = false -python-versions = ">=3.7.1" -files = [ - {file = "openai-1.40.6-py3-none-any.whl", hash = 
"sha256:b36372124a779381a420a34dd96f762baa748b6bdfaf83a6b9f2745f72ccc1c5"}, - {file = "openai-1.40.6.tar.gz", hash = "sha256:2239232bcb7f4bd4ce8e02544b5769618582411cf399816d96686d1b6c1e5c8d"}, -] - -[package.dependencies] -anyio = ">=3.5.0,<5" -distro = ">=1.7.0,<2" -httpx = ">=0.23.0,<1" -jiter = ">=0.4.0,<1" -pydantic = ">=1.9.0,<3" -sniffio = "*" -tqdm = ">4" -typing-extensions = ">=4.11,<5" - -[package.extras] -datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"] - -[[package]] -name = "opentelemetry-api" -version = "1.25.0" -description = "OpenTelemetry Python API" -optional = false -python-versions = ">=3.8" -files = [ - {file = "opentelemetry_api-1.25.0-py3-none-any.whl", hash = "sha256:757fa1aa020a0f8fa139f8959e53dec2051cc26b832e76fa839a6d76ecefd737"}, - {file = "opentelemetry_api-1.25.0.tar.gz", hash = "sha256:77c4985f62f2614e42ce77ee4c9da5fa5f0bc1e1821085e9a47533a9323ae869"}, -] - -[package.dependencies] -deprecated = ">=1.2.6" -importlib-metadata = ">=6.0,<=7.1" - -[[package]] -name = "opentelemetry-exporter-otlp-proto-common" -version = "1.25.0" -description = "OpenTelemetry Protobuf encoding" -optional = false -python-versions = ">=3.8" -files = [ - {file = "opentelemetry_exporter_otlp_proto_common-1.25.0-py3-none-any.whl", hash = "sha256:15637b7d580c2675f70246563363775b4e6de947871e01d0f4e3881d1848d693"}, - {file = "opentelemetry_exporter_otlp_proto_common-1.25.0.tar.gz", hash = "sha256:c93f4e30da4eee02bacd1e004eb82ce4da143a2f8e15b987a9f603e0a85407d3"}, -] - -[package.dependencies] -opentelemetry-proto = "1.25.0" - -[[package]] -name = "opentelemetry-exporter-otlp-proto-grpc" -version = "1.25.0" -description = "OpenTelemetry Collector Protobuf over gRPC Exporter" -optional = false -python-versions = ">=3.8" -files = [ - {file = "opentelemetry_exporter_otlp_proto_grpc-1.25.0-py3-none-any.whl", hash = "sha256:3131028f0c0a155a64c430ca600fd658e8e37043cb13209f0109db5c1a3e4eb4"}, - {file = "opentelemetry_exporter_otlp_proto_grpc-1.25.0.tar.gz", hash = "sha256:c0b1661415acec5af87625587efa1ccab68b873745ca0ee96b69bb1042087eac"}, -] - -[package.dependencies] -deprecated = ">=1.2.6" -googleapis-common-protos = ">=1.52,<2.0" -grpcio = ">=1.0.0,<2.0.0" -opentelemetry-api = ">=1.15,<2.0" -opentelemetry-exporter-otlp-proto-common = "1.25.0" -opentelemetry-proto = "1.25.0" -opentelemetry-sdk = ">=1.25.0,<1.26.0" - -[[package]] -name = "opentelemetry-exporter-otlp-proto-http" -version = "1.25.0" -description = "OpenTelemetry Collector Protobuf over HTTP Exporter" -optional = false -python-versions = ">=3.8" -files = [ - {file = "opentelemetry_exporter_otlp_proto_http-1.25.0-py3-none-any.whl", hash = "sha256:2eca686ee11b27acd28198b3ea5e5863a53d1266b91cda47c839d95d5e0541a6"}, - {file = "opentelemetry_exporter_otlp_proto_http-1.25.0.tar.gz", hash = "sha256:9f8723859e37c75183ea7afa73a3542f01d0fd274a5b97487ea24cb683d7d684"}, -] - -[package.dependencies] -deprecated = ">=1.2.6" -googleapis-common-protos = ">=1.52,<2.0" -opentelemetry-api = ">=1.15,<2.0" -opentelemetry-exporter-otlp-proto-common = "1.25.0" -opentelemetry-proto = "1.25.0" -opentelemetry-sdk = ">=1.25.0,<1.26.0" -requests = ">=2.7,<3.0" - -[[package]] -name = "opentelemetry-exporter-prometheus" -version = "0.46b0" -description = "Prometheus Metric Exporter for OpenTelemetry" -optional = false -python-versions = ">=3.8" -files = [ - {file = "opentelemetry_exporter_prometheus-0.46b0-py3-none-any.whl", hash = "sha256:caefdeea5c4d52b72479710d22cc4c469d42fa1dba2f4a2e46ae0ebeaf51cd96"}, - {file = 
"opentelemetry_exporter_prometheus-0.46b0.tar.gz", hash = "sha256:28cc6456a5d5bf49c34be2f1d22bbc761c36af9b32d909ea5b4c13fe6deac47b"}, -] - -[package.dependencies] -opentelemetry-api = ">=1.12,<2.0" -opentelemetry-sdk = ">=1.25.0,<1.26.0" -prometheus-client = ">=0.5.0,<1.0.0" - -[[package]] -name = "opentelemetry-instrumentation" -version = "0.46b0" -description = "Instrumentation Tools & Auto Instrumentation for OpenTelemetry Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "opentelemetry_instrumentation-0.46b0-py3-none-any.whl", hash = "sha256:89cd721b9c18c014ca848ccd11181e6b3fd3f6c7669e35d59c48dc527408c18b"}, - {file = "opentelemetry_instrumentation-0.46b0.tar.gz", hash = "sha256:974e0888fb2a1e01c38fbacc9483d024bb1132aad92d6d24e2e5543887a7adda"}, -] - -[package.dependencies] -opentelemetry-api = ">=1.4,<2.0" -setuptools = ">=16.0" -wrapt = ">=1.0.0,<2.0.0" - -[[package]] -name = "opentelemetry-instrumentation-asgi" -version = "0.46b0" -description = "ASGI instrumentation for OpenTelemetry" -optional = false -python-versions = ">=3.8" -files = [ - {file = "opentelemetry_instrumentation_asgi-0.46b0-py3-none-any.whl", hash = "sha256:f13c55c852689573057837a9500aeeffc010c4ba59933c322e8f866573374759"}, - {file = "opentelemetry_instrumentation_asgi-0.46b0.tar.gz", hash = "sha256:02559f30cf4b7e2a737ab17eb52aa0779bcf4cc06573064f3e2cb4dcc7d3040a"}, -] - -[package.dependencies] -asgiref = ">=3.0,<4.0" -opentelemetry-api = ">=1.12,<2.0" -opentelemetry-instrumentation = "0.46b0" -opentelemetry-semantic-conventions = "0.46b0" -opentelemetry-util-http = "0.46b0" - -[package.extras] -instruments = ["asgiref (>=3.0,<4.0)"] - -[[package]] -name = "opentelemetry-instrumentation-fastapi" -version = "0.46b0" -description = "OpenTelemetry FastAPI Instrumentation" -optional = false -python-versions = ">=3.8" -files = [ - {file = "opentelemetry_instrumentation_fastapi-0.46b0-py3-none-any.whl", hash = "sha256:e0f5d150c6c36833dd011f0e6ef5ede6d7406c1aed0c7c98b2d3b38a018d1b33"}, - {file = "opentelemetry_instrumentation_fastapi-0.46b0.tar.gz", hash = "sha256:928a883a36fc89f9702f15edce43d1a7104da93d740281e32d50ffd03dbb4365"}, -] - -[package.dependencies] -opentelemetry-api = ">=1.12,<2.0" -opentelemetry-instrumentation = "0.46b0" -opentelemetry-instrumentation-asgi = "0.46b0" -opentelemetry-semantic-conventions = "0.46b0" -opentelemetry-util-http = "0.46b0" - -[package.extras] -instruments = ["fastapi (>=0.58,<1.0)"] - -[[package]] -name = "opentelemetry-proto" -version = "1.25.0" -description = "OpenTelemetry Python Proto" -optional = false -python-versions = ">=3.8" -files = [ - {file = "opentelemetry_proto-1.25.0-py3-none-any.whl", hash = "sha256:f07e3341c78d835d9b86665903b199893befa5e98866f63d22b00d0b7ca4972f"}, - {file = "opentelemetry_proto-1.25.0.tar.gz", hash = "sha256:35b6ef9dc4a9f7853ecc5006738ad40443701e52c26099e197895cbda8b815a3"}, -] - -[package.dependencies] -protobuf = ">=3.19,<5.0" - -[[package]] -name = "opentelemetry-sdk" -version = "1.25.0" -description = "OpenTelemetry Python SDK" -optional = false -python-versions = ">=3.8" -files = [ - {file = "opentelemetry_sdk-1.25.0-py3-none-any.whl", hash = "sha256:d97ff7ec4b351692e9d5a15af570c693b8715ad78b8aafbec5c7100fe966b4c9"}, - {file = "opentelemetry_sdk-1.25.0.tar.gz", hash = "sha256:ce7fc319c57707ef5bf8b74fb9f8ebdb8bfafbe11898410e0d2a761d08a98ec7"}, -] - -[package.dependencies] -opentelemetry-api = "1.25.0" -opentelemetry-semantic-conventions = "0.46b0" -typing-extensions = ">=3.7.4" - -[[package]] -name = 
"opentelemetry-semantic-conventions" -version = "0.46b0" -description = "OpenTelemetry Semantic Conventions" -optional = false -python-versions = ">=3.8" -files = [ - {file = "opentelemetry_semantic_conventions-0.46b0-py3-none-any.whl", hash = "sha256:6daef4ef9fa51d51855d9f8e0ccd3a1bd59e0e545abe99ac6203804e36ab3e07"}, - {file = "opentelemetry_semantic_conventions-0.46b0.tar.gz", hash = "sha256:fbc982ecbb6a6e90869b15c1673be90bd18c8a56ff1cffc0864e38e2edffaefa"}, -] - -[package.dependencies] -opentelemetry-api = "1.25.0" - -[[package]] -name = "opentelemetry-util-http" -version = "0.46b0" -description = "Web util for OpenTelemetry" -optional = false -python-versions = ">=3.8" -files = [ - {file = "opentelemetry_util_http-0.46b0-py3-none-any.whl", hash = "sha256:8dc1949ce63caef08db84ae977fdc1848fe6dc38e6bbaad0ae3e6ecd0d451629"}, - {file = "opentelemetry_util_http-0.46b0.tar.gz", hash = "sha256:03b6e222642f9c7eae58d9132343e045b50aca9761fcb53709bd2b663571fdf6"}, -] - -[[package]] -name = "optuna" -version = "3.6.1" -description = "A hyperparameter optimization framework" -optional = false -python-versions = ">=3.7" -files = [ - {file = "optuna-3.6.1-py3-none-any.whl", hash = "sha256:b32e0490bd6552790b70ec94de77dd2855057c9e229cd9f4da48fe8a31c7f1cc"}, - {file = "optuna-3.6.1.tar.gz", hash = "sha256:146e530b57b4b9afd7526b3e642fbe65491f7e292b405913355f8e438e361ecf"}, -] - -[package.dependencies] -alembic = ">=1.5.0" -colorlog = "*" -numpy = "*" -packaging = ">=20.0" -PyYAML = "*" -sqlalchemy = ">=1.3.0" -tqdm = "*" - -[package.extras] -benchmark = ["asv (>=0.5.0)", "botorch", "cma", "virtualenv"] -checking = ["black", "blackdoc", "flake8", "isort", "mypy", "mypy-boto3-s3", "types-PyYAML", "types-redis", "types-setuptools", "types-tqdm", "typing-extensions (>=3.10.0.0)"] -document = ["ase", "cmaes (>=0.10.0)", "fvcore", "lightgbm", "matplotlib (!=3.6.0)", "pandas", "pillow", "plotly (>=4.9.0)", "scikit-learn", "sphinx", "sphinx-copybutton", "sphinx-gallery", "sphinx-plotly-directive", "sphinx-rtd-theme (>=1.2.0)", "torch", "torchvision"] -optional = ["boto3", "cmaes (>=0.10.0)", "google-cloud-storage", "matplotlib (!=3.6.0)", "pandas", "plotly (>=4.9.0)", "redis", "scikit-learn (>=0.24.2)", "scipy", "torch"] -test = ["coverage", "fakeredis[lua]", "kaleido", "moto", "pytest", "scipy (>=1.9.2)", "torch"] - -[[package]] -name = "ordered-set" -version = "4.1.0" -description = "An OrderedSet is a custom MutableSet that remembers its order, so that every" -optional = false -python-versions = ">=3.7" -files = [ - {file = "ordered-set-4.1.0.tar.gz", hash = "sha256:694a8e44c87657c59292ede72891eb91d34131f6531463aab3009191c77364a8"}, - {file = "ordered_set-4.1.0-py3-none-any.whl", hash = "sha256:046e1132c71fcf3330438a539928932caf51ddbc582496833e23de611de14562"}, -] - -[package.extras] -dev = ["black", "mypy", "pytest"] - -[[package]] -name = "orjson" -version = "3.10.0" -description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" -optional = false -python-versions = ">=3.8" -files = [ - {file = "orjson-3.10.0-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:47af5d4b850a2d1328660661f0881b67fdbe712aea905dadd413bdea6f792c33"}, - {file = "orjson-3.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c90681333619d78360d13840c7235fdaf01b2b129cb3a4f1647783b1971542b6"}, - {file = "orjson-3.10.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:400c5b7c4222cb27b5059adf1fb12302eebcabf1978f33d0824aa5277ca899bd"}, - {file = "orjson-3.10.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5dcb32e949eae80fb335e63b90e5808b4b0f64e31476b3777707416b41682db5"}, - {file = "orjson-3.10.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa7d507c7493252c0a0264b5cc7e20fa2f8622b8a83b04d819b5ce32c97cf57b"}, - {file = "orjson-3.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e286a51def6626f1e0cc134ba2067dcf14f7f4b9550f6dd4535fd9d79000040b"}, - {file = "orjson-3.10.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:8acd4b82a5f3a3ec8b1dc83452941d22b4711964c34727eb1e65449eead353ca"}, - {file = "orjson-3.10.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:30707e646080dd3c791f22ce7e4a2fc2438765408547c10510f1f690bd336217"}, - {file = "orjson-3.10.0-cp310-none-win32.whl", hash = "sha256:115498c4ad34188dcb73464e8dc80e490a3e5e88a925907b6fedcf20e545001a"}, - {file = "orjson-3.10.0-cp310-none-win_amd64.whl", hash = "sha256:6735dd4a5a7b6df00a87d1d7a02b84b54d215fb7adac50dd24da5997ffb4798d"}, - {file = "orjson-3.10.0-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9587053e0cefc284e4d1cd113c34468b7d3f17666d22b185ea654f0775316a26"}, - {file = "orjson-3.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bef1050b1bdc9ea6c0d08468e3e61c9386723633b397e50b82fda37b3563d72"}, - {file = "orjson-3.10.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d16c6963ddf3b28c0d461641517cd312ad6b3cf303d8b87d5ef3fa59d6844337"}, - {file = "orjson-3.10.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4251964db47ef090c462a2d909f16c7c7d5fe68e341dabce6702879ec26d1134"}, - {file = "orjson-3.10.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:73bbbdc43d520204d9ef0817ac03fa49c103c7f9ea94f410d2950755be2c349c"}, - {file = "orjson-3.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:414e5293b82373606acf0d66313aecb52d9c8c2404b1900683eb32c3d042dbd7"}, - {file = "orjson-3.10.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:feaed5bb09877dc27ed0d37f037ddef6cb76d19aa34b108db270d27d3d2ef747"}, - {file = "orjson-3.10.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5127478260db640323cea131ee88541cb1a9fbce051f0b22fa2f0892f44da302"}, - {file = "orjson-3.10.0-cp311-none-win32.whl", hash = "sha256:b98345529bafe3c06c09996b303fc0a21961820d634409b8639bc16bd4f21b63"}, - {file = "orjson-3.10.0-cp311-none-win_amd64.whl", hash = "sha256:658ca5cee3379dd3d37dbacd43d42c1b4feee99a29d847ef27a1cb18abdfb23f"}, - {file = "orjson-3.10.0-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:4329c1d24fd130ee377e32a72dc54a3c251e6706fccd9a2ecb91b3606fddd998"}, - {file = "orjson-3.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef0f19fdfb6553342b1882f438afd53c7cb7aea57894c4490c43e4431739c700"}, - {file = "orjson-3.10.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c4f60db24161534764277f798ef53b9d3063092f6d23f8f962b4a97edfa997a0"}, - {file = "orjson-3.10.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1de3fd5c7b208d836f8ecb4526995f0d5877153a4f6f12f3e9bf11e49357de98"}, - {file = "orjson-3.10.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:f93e33f67729d460a177ba285002035d3f11425ed3cebac5f6ded4ef36b28344"}, - {file = "orjson-3.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:237ba922aef472761acd697eef77fef4831ab769a42e83c04ac91e9f9e08fa0e"}, - {file = "orjson-3.10.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:98c1bfc6a9bec52bc8f0ab9b86cc0874b0299fccef3562b793c1576cf3abb570"}, - {file = "orjson-3.10.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:30d795a24be16c03dca0c35ca8f9c8eaaa51e3342f2c162d327bd0225118794a"}, - {file = "orjson-3.10.0-cp312-none-win32.whl", hash = "sha256:6a3f53dc650bc860eb26ec293dfb489b2f6ae1cbfc409a127b01229980e372f7"}, - {file = "orjson-3.10.0-cp312-none-win_amd64.whl", hash = "sha256:983db1f87c371dc6ffc52931eb75f9fe17dc621273e43ce67bee407d3e5476e9"}, - {file = "orjson-3.10.0-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9a667769a96a72ca67237224a36faf57db0c82ab07d09c3aafc6f956196cfa1b"}, - {file = "orjson-3.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ade1e21dfde1d37feee8cf6464c20a2f41fa46c8bcd5251e761903e46102dc6b"}, - {file = "orjson-3.10.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:23c12bb4ced1c3308eff7ba5c63ef8f0edb3e4c43c026440247dd6c1c61cea4b"}, - {file = "orjson-3.10.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2d014cf8d4dc9f03fc9f870de191a49a03b1bcda51f2a957943fb9fafe55aac"}, - {file = "orjson-3.10.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eadecaa16d9783affca33597781328e4981b048615c2ddc31c47a51b833d6319"}, - {file = "orjson-3.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd583341218826f48bd7c6ebf3310b4126216920853cbc471e8dbeaf07b0b80e"}, - {file = "orjson-3.10.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:90bfc137c75c31d32308fd61951d424424426ddc39a40e367704661a9ee97095"}, - {file = "orjson-3.10.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:13b5d3c795b09a466ec9fcf0bd3ad7b85467d91a60113885df7b8d639a9d374b"}, - {file = "orjson-3.10.0-cp38-none-win32.whl", hash = "sha256:5d42768db6f2ce0162544845facb7c081e9364a5eb6d2ef06cd17f6050b048d8"}, - {file = "orjson-3.10.0-cp38-none-win_amd64.whl", hash = "sha256:33e6655a2542195d6fd9f850b428926559dee382f7a862dae92ca97fea03a5ad"}, - {file = "orjson-3.10.0-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:4050920e831a49d8782a1720d3ca2f1c49b150953667eed6e5d63a62e80f46a2"}, - {file = "orjson-3.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1897aa25a944cec774ce4a0e1c8e98fb50523e97366c637b7d0cddabc42e6643"}, - {file = "orjson-3.10.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9bf565a69e0082ea348c5657401acec3cbbb31564d89afebaee884614fba36b4"}, - {file = "orjson-3.10.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b6ebc17cfbbf741f5c1a888d1854354536f63d84bee537c9a7c0335791bb9009"}, - {file = "orjson-3.10.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d2817877d0b69f78f146ab305c5975d0618df41acf8811249ee64231f5953fee"}, - {file = "orjson-3.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57d017863ec8aa4589be30a328dacd13c2dc49de1c170bc8d8c8a98ece0f2925"}, - {file = "orjson-3.10.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:22c2f7e377ac757bd3476ecb7480c8ed79d98ef89648f0176deb1da5cd014eb7"}, - 
{file = "orjson-3.10.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e62ba42bfe64c60c1bc84799944f80704e996592c6b9e14789c8e2a303279912"}, - {file = "orjson-3.10.0-cp39-none-win32.whl", hash = "sha256:60c0b1bdbccd959ebd1575bd0147bd5e10fc76f26216188be4a36b691c937077"}, - {file = "orjson-3.10.0-cp39-none-win_amd64.whl", hash = "sha256:175a41500ebb2fdf320bf78e8b9a75a1279525b62ba400b2b2444e274c2c8bee"}, - {file = "orjson-3.10.0.tar.gz", hash = "sha256:ba4d8cac5f2e2cff36bea6b6481cdb92b38c202bcec603d6f5ff91960595a1ed"}, -] - -[[package]] -name = "outcome" -version = "1.3.0.post0" -description = "Capture the outcome of Python function calls." -optional = false -python-versions = ">=3.7" -files = [ - {file = "outcome-1.3.0.post0-py2.py3-none-any.whl", hash = "sha256:e771c5ce06d1415e356078d3bdd68523f284b4ce5419828922b6871e65eda82b"}, - {file = "outcome-1.3.0.post0.tar.gz", hash = "sha256:9dcf02e65f2971b80047b377468e72a268e15c0af3cf1238e6ff14f7f91143b8"}, -] - -[package.dependencies] -attrs = ">=19.2.0" - -[[package]] -name = "overrides" -version = "7.7.0" -description = "A decorator to automatically detect mismatch when overriding a method." -optional = false -python-versions = ">=3.6" -files = [ - {file = "overrides-7.7.0-py3-none-any.whl", hash = "sha256:c7ed9d062f78b8e4c1a7b70bd8796b35ead4d9f510227ef9c5dc7626c60d7e49"}, - {file = "overrides-7.7.0.tar.gz", hash = "sha256:55158fa3d93b98cc75299b1e67078ad9003ca27945c76162c1c0766d6f91820a"}, -] - -[[package]] -name = "packaging" -version = "23.2" -description = "Core utilities for Python packages" -optional = false -python-versions = ">=3.7" -files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, -] - -[[package]] -name = "pandas" -version = "2.2.2" -description = "Powerful data structures for data analysis, time series, and statistics" -optional = false -python-versions = ">=3.9" -files = [ - {file = "pandas-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90c6fca2acf139569e74e8781709dccb6fe25940488755716d1d354d6bc58bce"}, - {file = "pandas-2.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7adfc142dac335d8c1e0dcbd37eb8617eac386596eb9e1a1b77791cf2498238"}, - {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4abfe0be0d7221be4f12552995e58723c7422c80a659da13ca382697de830c08"}, - {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8635c16bf3d99040fdf3ca3db669a7250ddf49c55dc4aa8fe0ae0fa8d6dcc1f0"}, - {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:40ae1dffb3967a52203105a077415a86044a2bea011b5f321c6aa64b379a3f51"}, - {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8e5a0b00e1e56a842f922e7fae8ae4077aee4af0acb5ae3622bd4b4c30aedf99"}, - {file = "pandas-2.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:ddf818e4e6c7c6f4f7c8a12709696d193976b591cc7dc50588d3d1a6b5dc8772"}, - {file = "pandas-2.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:696039430f7a562b74fa45f540aca068ea85fa34c244d0deee539cb6d70aa288"}, - {file = "pandas-2.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8e90497254aacacbc4ea6ae5e7a8cd75629d6ad2b30025a4a8b09aa4faf55151"}, - {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:58b84b91b0b9f4bafac2a0ac55002280c094dfc6402402332c0913a59654ab2b"}, - {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2123dc9ad6a814bcdea0f099885276b31b24f7edf40f6cdbc0912672e22eee"}, - {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2925720037f06e89af896c70bca73459d7e6a4be96f9de79e2d440bd499fe0db"}, - {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0cace394b6ea70c01ca1595f839cf193df35d1575986e484ad35c4aeae7266c1"}, - {file = "pandas-2.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:873d13d177501a28b2756375d59816c365e42ed8417b41665f346289adc68d24"}, - {file = "pandas-2.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9dfde2a0ddef507a631dc9dc4af6a9489d5e2e740e226ad426a05cabfbd7c8ef"}, - {file = "pandas-2.2.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e9b79011ff7a0f4b1d6da6a61aa1aa604fb312d6647de5bad20013682d1429ce"}, - {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cb51fe389360f3b5a4d57dbd2848a5f033350336ca3b340d1c53a1fad33bcad"}, - {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eee3a87076c0756de40b05c5e9a6069c035ba43e8dd71c379e68cab2c20f16ad"}, - {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3e374f59e440d4ab45ca2fffde54b81ac3834cf5ae2cdfa69c90bc03bde04d76"}, - {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:43498c0bdb43d55cb162cdc8c06fac328ccb5d2eabe3cadeb3529ae6f0517c32"}, - {file = "pandas-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:d187d355ecec3629624fccb01d104da7d7f391db0311145817525281e2804d23"}, - {file = "pandas-2.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0ca6377b8fca51815f382bd0b697a0814c8bda55115678cbc94c30aacbb6eff2"}, - {file = "pandas-2.2.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9057e6aa78a584bc93a13f0a9bf7e753a5e9770a30b4d758b8d5f2a62a9433cd"}, - {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:001910ad31abc7bf06f49dcc903755d2f7f3a9186c0c040b827e522e9cef0863"}, - {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66b479b0bd07204e37583c191535505410daa8df638fd8e75ae1b383851fe921"}, - {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a77e9d1c386196879aa5eb712e77461aaee433e54c68cf253053a73b7e49c33a"}, - {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92fd6b027924a7e178ac202cfbe25e53368db90d56872d20ffae94b96c7acc57"}, - {file = "pandas-2.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:640cef9aa381b60e296db324337a554aeeb883ead99dc8f6c18e81a93942f5f4"}, - {file = "pandas-2.2.2.tar.gz", hash = "sha256:9e79019aba43cb4fda9e4d983f8e88ca0373adbb697ae9c6c43093218de28b54"}, -] - -[package.dependencies] -numpy = [ - {version = ">=1.22.4", markers = "python_version < \"3.11\""}, - {version = ">=1.23.2", markers = "python_version == \"3.11\""}, - {version = ">=1.26.0", markers = "python_version >= \"3.12\""}, -] -python-dateutil = ">=2.8.2" -pytz = ">=2020.1" -tzdata = ">=2022.7" - -[package.extras] -all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 
(>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] -aws = ["s3fs (>=2022.11.0)"] -clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] -compression = ["zstandard (>=0.19.0)"] -computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] -consortium-standard = ["dataframe-api-compat (>=0.1.7)"] -excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] -feather = ["pyarrow (>=10.0.1)"] -fss = ["fsspec (>=2022.11.0)"] -gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] -hdf5 = ["tables (>=3.8.0)"] -html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] -mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] -output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] -parquet = ["pyarrow (>=10.0.1)"] -performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] -plot = ["matplotlib (>=3.6.3)"] -postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] -pyarrow = ["pyarrow (>=10.0.1)"] -spss = ["pyreadstat (>=1.2.0)"] -sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] -test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] -xml = ["lxml (>=4.9.2)"] - -[[package]] -name = "pandas-stubs" -version = "2.2.2.240807" -description = "Type annotations for pandas" -optional = false -python-versions = ">=3.9" -files = [ - {file = "pandas_stubs-2.2.2.240807-py3-none-any.whl", hash = "sha256:893919ad82be4275f0d07bb47a95d08bae580d3fdea308a7acfcb3f02e76186e"}, - {file = "pandas_stubs-2.2.2.240807.tar.gz", hash = "sha256:64a559725a57a449f46225fbafc422520b7410bff9252b661a225b5559192a93"}, -] - -[package.dependencies] -numpy = ">=1.23.5" -types-pytz = ">=2022.1.1" - -[[package]] -name = "parameterized" -version = "0.9.0" -description = "Parameterized testing with any Python test framework" -optional = false -python-versions = ">=3.7" -files = [ - {file = "parameterized-0.9.0-py2.py3-none-any.whl", hash = "sha256:4e0758e3d41bea3bbd05ec14fc2c24736723f243b28d702081aef438c9372b1b"}, - {file = "parameterized-0.9.0.tar.gz", hash = "sha256:7fc905272cefa4f364c1a3429cbbe9c0f98b793988efb5bf90aac80f08db09b1"}, -] - -[package.extras] -dev = ["jinja2"] - -[[package]] -name = "paramiko" -version = "3.4.1" -description = "SSH2 protocol library" -optional = false -python-versions = ">=3.6" -files = [ - {file = "paramiko-3.4.1-py3-none-any.whl", hash = "sha256:8e49fd2f82f84acf7ffd57c64311aa2b30e575370dc23bdb375b10262f7eac32"}, - {file = "paramiko-3.4.1.tar.gz", hash = "sha256:8b15302870af7f6652f2e038975c1d2973f06046cb5d7d65355668b3ecbece0c"}, -] - -[package.dependencies] -bcrypt = ">=3.2" -cryptography = ">=3.3" -pynacl = ">=1.5" - -[package.extras] -all = ["gssapi (>=1.4.1)", "invoke (>=2.0)", "pyasn1 (>=0.1.7)", "pywin32 (>=2.1.8)"] -gssapi = ["gssapi (>=1.4.1)", "pyasn1 (>=0.1.7)", "pywin32 (>=2.1.8)"] -invoke = ["invoke (>=2.0)"] - -[[package]] -name = "parso" -version = "0.8.4" -description = "A Python 
Parser" -optional = false -python-versions = ">=3.6" -files = [ - {file = "parso-0.8.4-py2.py3-none-any.whl", hash = "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18"}, - {file = "parso-0.8.4.tar.gz", hash = "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d"}, -] - -[package.extras] -qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] -testing = ["docopt", "pytest"] - -[[package]] -name = "passlib" -version = "1.7.4" -description = "comprehensive password hashing framework supporting over 30 schemes" -optional = false -python-versions = "*" -files = [ - {file = "passlib-1.7.4-py2.py3-none-any.whl", hash = "sha256:aa6bca462b8d8bda89c70b382f0c298a20b5560af6cbfa2dce410c0a2fb669f1"}, - {file = "passlib-1.7.4.tar.gz", hash = "sha256:defd50f72b65c5402ab2c573830a6978e5f202ad0d984793c8dde2c4152ebe04"}, -] - -[package.extras] -argon2 = ["argon2-cffi (>=18.2.0)"] -bcrypt = ["bcrypt (>=3.1.0)"] -build-docs = ["cloud-sptheme (>=1.10.1)", "sphinx (>=1.6)", "sphinxcontrib-fulltoc (>=1.2.0)"] -totp = ["cryptography"] - -[[package]] -name = "peewee" -version = "3.17.6" -description = "a little orm" -optional = false -python-versions = "*" -files = [ - {file = "peewee-3.17.6.tar.gz", hash = "sha256:cea5592c6f4da1592b7cff8eaf655be6648a1f5857469e30037bf920c03fb8fb"}, -] - -[[package]] -name = "pexpect" -version = "4.9.0" -description = "Pexpect allows easy control of interactive console applications." -optional = false -python-versions = "*" -files = [ - {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"}, - {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"}, -] - -[package.dependencies] -ptyprocess = ">=0.5" - -[[package]] -name = "pgvector" -version = "0.2.5" -description = "pgvector support for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pgvector-0.2.5-py2.py3-none-any.whl", hash = "sha256:5e5e93ec4d3c45ab1fa388729d56c602f6966296e19deee8878928c6d567e41b"}, -] - -[package.dependencies] -numpy = "*" - -[[package]] -name = "pillow" -version = "10.4.0" -description = "Python Imaging Library (Fork)" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pillow-10.4.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:4d9667937cfa347525b319ae34375c37b9ee6b525440f3ef48542fcf66f2731e"}, - {file = "pillow-10.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:543f3dc61c18dafb755773efc89aae60d06b6596a63914107f75459cf984164d"}, - {file = "pillow-10.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7928ecbf1ece13956b95d9cbcfc77137652b02763ba384d9ab508099a2eca856"}, - {file = "pillow-10.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4d49b85c4348ea0b31ea63bc75a9f3857869174e2bf17e7aba02945cd218e6f"}, - {file = "pillow-10.4.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:6c762a5b0997f5659a5ef2266abc1d8851ad7749ad9a6a5506eb23d314e4f46b"}, - {file = "pillow-10.4.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a985e028fc183bf12a77a8bbf36318db4238a3ded7fa9df1b9a133f1cb79f8fc"}, - {file = "pillow-10.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:812f7342b0eee081eaec84d91423d1b4650bb9828eb53d8511bcef8ce5aecf1e"}, - {file = "pillow-10.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ac1452d2fbe4978c2eec89fb5a23b8387aba707ac72810d9490118817d9c0b46"}, - {file = 
"pillow-10.4.0-cp310-cp310-win32.whl", hash = "sha256:bcd5e41a859bf2e84fdc42f4edb7d9aba0a13d29a2abadccafad99de3feff984"}, - {file = "pillow-10.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:ecd85a8d3e79cd7158dec1c9e5808e821feea088e2f69a974db5edf84dc53141"}, - {file = "pillow-10.4.0-cp310-cp310-win_arm64.whl", hash = "sha256:ff337c552345e95702c5fde3158acb0625111017d0e5f24bf3acdb9cc16b90d1"}, - {file = "pillow-10.4.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:0a9ec697746f268507404647e531e92889890a087e03681a3606d9b920fbee3c"}, - {file = "pillow-10.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dfe91cb65544a1321e631e696759491ae04a2ea11d36715eca01ce07284738be"}, - {file = "pillow-10.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5dc6761a6efc781e6a1544206f22c80c3af4c8cf461206d46a1e6006e4429ff3"}, - {file = "pillow-10.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e84b6cc6a4a3d76c153a6b19270b3526a5a8ed6b09501d3af891daa2a9de7d6"}, - {file = "pillow-10.4.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:bbc527b519bd3aa9d7f429d152fea69f9ad37c95f0b02aebddff592688998abe"}, - {file = "pillow-10.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:76a911dfe51a36041f2e756b00f96ed84677cdeb75d25c767f296c1c1eda1319"}, - {file = "pillow-10.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:59291fb29317122398786c2d44427bbd1a6d7ff54017075b22be9d21aa59bd8d"}, - {file = "pillow-10.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:416d3a5d0e8cfe4f27f574362435bc9bae57f679a7158e0096ad2beb427b8696"}, - {file = "pillow-10.4.0-cp311-cp311-win32.whl", hash = "sha256:7086cc1d5eebb91ad24ded9f58bec6c688e9f0ed7eb3dbbf1e4800280a896496"}, - {file = "pillow-10.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cbed61494057c0f83b83eb3a310f0bf774b09513307c434d4366ed64f4128a91"}, - {file = "pillow-10.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:f5f0c3e969c8f12dd2bb7e0b15d5c468b51e5017e01e2e867335c81903046a22"}, - {file = "pillow-10.4.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:673655af3eadf4df6b5457033f086e90299fdd7a47983a13827acf7459c15d94"}, - {file = "pillow-10.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:866b6942a92f56300012f5fbac71f2d610312ee65e22f1aa2609e491284e5597"}, - {file = "pillow-10.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29dbdc4207642ea6aad70fbde1a9338753d33fb23ed6956e706936706f52dd80"}, - {file = "pillow-10.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf2342ac639c4cf38799a44950bbc2dfcb685f052b9e262f446482afaf4bffca"}, - {file = "pillow-10.4.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:f5b92f4d70791b4a67157321c4e8225d60b119c5cc9aee8ecf153aace4aad4ef"}, - {file = "pillow-10.4.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:86dcb5a1eb778d8b25659d5e4341269e8590ad6b4e8b44d9f4b07f8d136c414a"}, - {file = "pillow-10.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:780c072c2e11c9b2c7ca37f9a2ee8ba66f44367ac3e5c7832afcfe5104fd6d1b"}, - {file = "pillow-10.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:37fb69d905be665f68f28a8bba3c6d3223c8efe1edf14cc4cfa06c241f8c81d9"}, - {file = "pillow-10.4.0-cp312-cp312-win32.whl", hash = "sha256:7dfecdbad5c301d7b5bde160150b4db4c659cee2b69589705b6f8a0c509d9f42"}, - {file = "pillow-10.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:1d846aea995ad352d4bdcc847535bd56e0fd88d36829d2c90be880ef1ee4668a"}, - {file = 
"pillow-10.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:e553cad5179a66ba15bb18b353a19020e73a7921296a7979c4a2b7f6a5cd57f9"}, - {file = "pillow-10.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8bc1a764ed8c957a2e9cacf97c8b2b053b70307cf2996aafd70e91a082e70df3"}, - {file = "pillow-10.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6209bb41dc692ddfee4942517c19ee81b86c864b626dbfca272ec0f7cff5d9fb"}, - {file = "pillow-10.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bee197b30783295d2eb680b311af15a20a8b24024a19c3a26431ff83eb8d1f70"}, - {file = "pillow-10.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ef61f5dd14c300786318482456481463b9d6b91ebe5ef12f405afbba77ed0be"}, - {file = "pillow-10.4.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:297e388da6e248c98bc4a02e018966af0c5f92dfacf5a5ca22fa01cb3179bca0"}, - {file = "pillow-10.4.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:e4db64794ccdf6cb83a59d73405f63adbe2a1887012e308828596100a0b2f6cc"}, - {file = "pillow-10.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd2880a07482090a3bcb01f4265f1936a903d70bc740bfcb1fd4e8a2ffe5cf5a"}, - {file = "pillow-10.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b35b21b819ac1dbd1233317adeecd63495f6babf21b7b2512d244ff6c6ce309"}, - {file = "pillow-10.4.0-cp313-cp313-win32.whl", hash = "sha256:551d3fd6e9dc15e4c1eb6fc4ba2b39c0c7933fa113b220057a34f4bb3268a060"}, - {file = "pillow-10.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:030abdbe43ee02e0de642aee345efa443740aa4d828bfe8e2eb11922ea6a21ea"}, - {file = "pillow-10.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:5b001114dd152cfd6b23befeb28d7aee43553e2402c9f159807bf55f33af8a8d"}, - {file = "pillow-10.4.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:8d4d5063501b6dd4024b8ac2f04962d661222d120381272deea52e3fc52d3736"}, - {file = "pillow-10.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7c1ee6f42250df403c5f103cbd2768a28fe1a0ea1f0f03fe151c8741e1469c8b"}, - {file = "pillow-10.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b15e02e9bb4c21e39876698abf233c8c579127986f8207200bc8a8f6bb27acf2"}, - {file = "pillow-10.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a8d4bade9952ea9a77d0c3e49cbd8b2890a399422258a77f357b9cc9be8d680"}, - {file = "pillow-10.4.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:43efea75eb06b95d1631cb784aa40156177bf9dd5b4b03ff38979e048258bc6b"}, - {file = "pillow-10.4.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:950be4d8ba92aca4b2bb0741285a46bfae3ca699ef913ec8416c1b78eadd64cd"}, - {file = "pillow-10.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d7480af14364494365e89d6fddc510a13e5a2c3584cb19ef65415ca57252fb84"}, - {file = "pillow-10.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:73664fe514b34c8f02452ffb73b7a92c6774e39a647087f83d67f010eb9a0cf0"}, - {file = "pillow-10.4.0-cp38-cp38-win32.whl", hash = "sha256:e88d5e6ad0d026fba7bdab8c3f225a69f063f116462c49892b0149e21b6c0a0e"}, - {file = "pillow-10.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:5161eef006d335e46895297f642341111945e2c1c899eb406882a6c61a4357ab"}, - {file = "pillow-10.4.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:0ae24a547e8b711ccaaf99c9ae3cd975470e1a30caa80a6aaee9a2f19c05701d"}, - {file = "pillow-10.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:298478fe4f77a4408895605f3482b6cc6222c018b2ce565c2b6b9c354ac3229b"}, - {file = 
"pillow-10.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:134ace6dc392116566980ee7436477d844520a26a4b1bd4053f6f47d096997fd"}, - {file = "pillow-10.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:930044bb7679ab003b14023138b50181899da3f25de50e9dbee23b61b4de2126"}, - {file = "pillow-10.4.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:c76e5786951e72ed3686e122d14c5d7012f16c8303a674d18cdcd6d89557fc5b"}, - {file = "pillow-10.4.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b2724fdb354a868ddf9a880cb84d102da914e99119211ef7ecbdc613b8c96b3c"}, - {file = "pillow-10.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dbc6ae66518ab3c5847659e9988c3b60dc94ffb48ef9168656e0019a93dbf8a1"}, - {file = "pillow-10.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:06b2f7898047ae93fad74467ec3d28fe84f7831370e3c258afa533f81ef7f3df"}, - {file = "pillow-10.4.0-cp39-cp39-win32.whl", hash = "sha256:7970285ab628a3779aecc35823296a7869f889b8329c16ad5a71e4901a3dc4ef"}, - {file = "pillow-10.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:961a7293b2457b405967af9c77dcaa43cc1a8cd50d23c532e62d48ab6cdd56f5"}, - {file = "pillow-10.4.0-cp39-cp39-win_arm64.whl", hash = "sha256:32cda9e3d601a52baccb2856b8ea1fc213c90b340c542dcef77140dfa3278a9e"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:5b4815f2e65b30f5fbae9dfffa8636d992d49705723fe86a3661806e069352d4"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8f0aef4ef59694b12cadee839e2ba6afeab89c0f39a3adc02ed51d109117b8da"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f4727572e2918acaa9077c919cbbeb73bd2b3ebcfe033b72f858fc9fbef0026"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff25afb18123cea58a591ea0244b92eb1e61a1fd497bf6d6384f09bc3262ec3e"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dc3e2db6ba09ffd7d02ae9141cfa0ae23393ee7687248d46a7507b75d610f4f5"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:02a2be69f9c9b8c1e97cf2713e789d4e398c751ecfd9967c18d0ce304efbf885"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0755ffd4a0c6f267cccbae2e9903d95477ca2f77c4fcf3a3a09570001856c8a5"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:a02364621fe369e06200d4a16558e056fe2805d3468350df3aef21e00d26214b"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:1b5dea9831a90e9d0721ec417a80d4cbd7022093ac38a568db2dd78363b00908"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b885f89040bb8c4a1573566bbb2f44f5c505ef6e74cec7ab9068c900047f04b"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87dd88ded2e6d74d31e1e0a99a726a6765cda32d00ba72dc37f0651f306daaa8"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:2db98790afc70118bd0255c2eeb465e9767ecf1f3c25f9a1abb8ffc8cfd1fe0a"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f7baece4ce06bade126fb84b8af1c33439a76d8a6fd818970215e0560ca28c27"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:cfdd747216947628af7b259d274771d84db2268ca062dd5faf373639d00113a3"}, - {file = "pillow-10.4.0.tar.gz", 
hash = "sha256:166c1cd4d24309b30d61f79f4a9114b7b2313d7450912277855ff5dfd7cd4a06"}, -] - -[package.extras] -docs = ["furo", "olefile", "sphinx (>=7.3)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinxext-opengraph"] -fpx = ["olefile"] -mic = ["olefile"] -tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] -typing = ["typing-extensions"] -xmp = ["defusedxml"] - -[[package]] -name = "pinecone-client" -version = "3.2.2" -description = "Pinecone client and SDK" -optional = false -python-versions = "<4.0,>=3.8" -files = [ - {file = "pinecone_client-3.2.2-py3-none-any.whl", hash = "sha256:7e492fdda23c73726bc0cb94c689bb950d06fb94e82b701a0c610c2e830db327"}, - {file = "pinecone_client-3.2.2.tar.gz", hash = "sha256:887a12405f90ac11c396490f605fc479f31cf282361034d1ae0fccc02ac75bee"}, -] - -[package.dependencies] -certifi = ">=2019.11.17" -tqdm = ">=4.64.1" -typing-extensions = ">=3.7.4" -urllib3 = [ - {version = ">=1.26.0", markers = "python_version >= \"3.8\" and python_version < \"3.12\""}, - {version = ">=1.26.5", markers = "python_version >= \"3.12\" and python_version < \"4.0\""}, -] - -[package.extras] -grpc = ["googleapis-common-protos (>=1.53.0)", "grpc-gateway-protoc-gen-openapiv2 (==0.1.0)", "grpcio (>=1.44.0)", "grpcio (>=1.59.0)", "lz4 (>=3.1.3)", "protobuf (>=3.20.0,<3.21.0)"] - -[[package]] -name = "platformdirs" -version = "4.2.2" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." -optional = false -python-versions = ">=3.8" -files = [ - {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, - {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, -] - -[package.extras] -docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] -type = ["mypy (>=1.8)"] - -[[package]] -name = "pluggy" -version = "1.5.0" -description = "plugin and hook calling mechanisms for python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, - {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, -] - -[package.extras] -dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] - -[[package]] -name = "portalocker" -version = "2.10.1" -description = "Wraps the portalocker recipe for easy usage" -optional = false -python-versions = ">=3.8" -files = [ - {file = "portalocker-2.10.1-py3-none-any.whl", hash = "sha256:53a5984ebc86a025552264b459b46a2086e269b21823cb572f8f28ee759e45bf"}, - {file = "portalocker-2.10.1.tar.gz", hash = "sha256:ef1bf844e878ab08aee7e40184156e1151f228f103aa5c6bd0724cc330960f8f"}, -] - -[package.dependencies] -pywin32 = {version = ">=226", markers = "platform_system == \"Windows\""} - -[package.extras] -docs = ["sphinx (>=1.7.1)"] -redis = ["redis"] -tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "pytest-mypy (>=0.8.0)", "pytest-timeout (>=2.1.0)", "redis", "sphinx (>=6.0.0)", "types-redis"] - -[[package]] -name = "postgrest" -version = "0.16.10" -description = "PostgREST client for Python. 
This library provides an ORM interface to PostgREST." -optional = false -python-versions = "<4.0,>=3.8" -files = [ - {file = "postgrest-0.16.10-py3-none-any.whl", hash = "sha256:71a9d31a834488ab5f42e6b7a83ab7fbd941f8ac20c3b28d8aa72201cf826aca"}, - {file = "postgrest-0.16.10.tar.gz", hash = "sha256:5518bf65ad5439f91d2019a25d51d9563ad7cdd8b5c71823559dd1b0e8d9e7a2"}, -] - -[package.dependencies] -deprecation = ">=2.1.0,<3.0.0" -httpx = {version = ">=0.24,<0.28", extras = ["http2"]} -pydantic = ">=1.9,<3.0" -strenum = ">=0.4.9,<0.5.0" - -[[package]] -name = "posthog" -version = "3.5.0" -description = "Integrate PostHog into any python application." -optional = false -python-versions = "*" -files = [ - {file = "posthog-3.5.0-py2.py3-none-any.whl", hash = "sha256:3c672be7ba6f95d555ea207d4486c171d06657eb34b3ce25eb043bfe7b6b5b76"}, - {file = "posthog-3.5.0.tar.gz", hash = "sha256:8f7e3b2c6e8714d0c0c542a2109b83a7549f63b7113a133ab2763a89245ef2ef"}, -] - -[package.dependencies] -backoff = ">=1.10.0" -monotonic = ">=1.5" -python-dateutil = ">2.1" -requests = ">=2.7,<3.0" -six = ">=1.5" - -[package.extras] -dev = ["black", "flake8", "flake8-print", "isort", "pre-commit"] -sentry = ["django", "sentry-sdk"] -test = ["coverage", "flake8", "freezegun (==0.3.15)", "mock (>=2.0.0)", "pylint", "pytest", "pytest-timeout"] - -[[package]] -name = "pre-commit" -version = "3.8.0" -description = "A framework for managing and maintaining multi-language pre-commit hooks." -optional = false -python-versions = ">=3.9" -files = [ - {file = "pre_commit-3.8.0-py2.py3-none-any.whl", hash = "sha256:9a90a53bf82fdd8778d58085faf8d83df56e40dfe18f45b19446e26bf1b3a63f"}, - {file = "pre_commit-3.8.0.tar.gz", hash = "sha256:8bb6494d4a20423842e198980c9ecf9f96607a07ea29549e180eef9ae80fe7af"}, -] - -[package.dependencies] -cfgv = ">=2.0.0" -identify = ">=1.0.0" -nodeenv = ">=0.11.1" -pyyaml = ">=5.1" -virtualenv = ">=20.10.0" - -[[package]] -name = "prometheus-client" -version = "0.20.0" -description = "Python client for the Prometheus monitoring system." -optional = false -python-versions = ">=3.8" -files = [ - {file = "prometheus_client-0.20.0-py3-none-any.whl", hash = "sha256:cde524a85bce83ca359cc837f28b8c0db5cac7aa653a588fd7e84ba061c329e7"}, - {file = "prometheus_client-0.20.0.tar.gz", hash = "sha256:287629d00b147a32dcb2be0b9df905da599b2d82f80377083ec8463309a4bb89"}, -] - -[package.extras] -twisted = ["twisted"] - -[[package]] -name = "prompt-toolkit" -version = "3.0.47" -description = "Library for building powerful interactive command lines in Python" -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "prompt_toolkit-3.0.47-py3-none-any.whl", hash = "sha256:0d7bfa67001d5e39d02c224b663abc33687405033a8c422d0d675a5a13361d10"}, - {file = "prompt_toolkit-3.0.47.tar.gz", hash = "sha256:1e1b29cb58080b1e69f207c893a1a7bf16d127a5c30c9d17a25a5d77792e5360"}, -] - -[package.dependencies] -wcwidth = "*" - -[[package]] -name = "proto-plus" -version = "1.24.0" -description = "Beautiful, Pythonic protocol buffers." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "proto-plus-1.24.0.tar.gz", hash = "sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445"}, - {file = "proto_plus-1.24.0-py3-none-any.whl", hash = "sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12"}, -] - -[package.dependencies] -protobuf = ">=3.19.0,<6.0.0dev" - -[package.extras] -testing = ["google-api-core (>=1.31.5)"] - -[[package]] -name = "protobuf" -version = "4.25.4" -description = "" -optional = false -python-versions = ">=3.8" -files = [ - {file = "protobuf-4.25.4-cp310-abi3-win32.whl", hash = "sha256:db9fd45183e1a67722cafa5c1da3e85c6492a5383f127c86c4c4aa4845867dc4"}, - {file = "protobuf-4.25.4-cp310-abi3-win_amd64.whl", hash = "sha256:ba3d8504116a921af46499471c63a85260c1a5fc23333154a427a310e015d26d"}, - {file = "protobuf-4.25.4-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:eecd41bfc0e4b1bd3fa7909ed93dd14dd5567b98c941d6c1ad08fdcab3d6884b"}, - {file = "protobuf-4.25.4-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:4c8a70fdcb995dcf6c8966cfa3a29101916f7225e9afe3ced4395359955d3835"}, - {file = "protobuf-4.25.4-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:3319e073562e2515c6ddc643eb92ce20809f5d8f10fead3332f71c63be6a7040"}, - {file = "protobuf-4.25.4-cp38-cp38-win32.whl", hash = "sha256:7e372cbbda66a63ebca18f8ffaa6948455dfecc4e9c1029312f6c2edcd86c4e1"}, - {file = "protobuf-4.25.4-cp38-cp38-win_amd64.whl", hash = "sha256:051e97ce9fa6067a4546e75cb14f90cf0232dcb3e3d508c448b8d0e4265b61c1"}, - {file = "protobuf-4.25.4-cp39-cp39-win32.whl", hash = "sha256:90bf6fd378494eb698805bbbe7afe6c5d12c8e17fca817a646cd6a1818c696ca"}, - {file = "protobuf-4.25.4-cp39-cp39-win_amd64.whl", hash = "sha256:ac79a48d6b99dfed2729ccccee547b34a1d3d63289c71cef056653a846a2240f"}, - {file = "protobuf-4.25.4-py3-none-any.whl", hash = "sha256:bfbebc1c8e4793cfd58589acfb8a1026be0003e852b9da7db5a4285bde996978"}, - {file = "protobuf-4.25.4.tar.gz", hash = "sha256:0dc4a62cc4052a036ee2204d26fe4d835c62827c855c8a03f29fe6da146b380d"}, -] - -[[package]] -name = "psutil" -version = "6.0.0" -description = "Cross-platform lib for process and system monitoring in Python." 
-optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" -files = [ - {file = "psutil-6.0.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a021da3e881cd935e64a3d0a20983bda0bb4cf80e4f74fa9bfcb1bc5785360c6"}, - {file = "psutil-6.0.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:1287c2b95f1c0a364d23bc6f2ea2365a8d4d9b726a3be7294296ff7ba97c17f0"}, - {file = "psutil-6.0.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:a9a3dbfb4de4f18174528d87cc352d1f788b7496991cca33c6996f40c9e3c92c"}, - {file = "psutil-6.0.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:6ec7588fb3ddaec7344a825afe298db83fe01bfaaab39155fa84cf1c0d6b13c3"}, - {file = "psutil-6.0.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:1e7c870afcb7d91fdea2b37c24aeb08f98b6d67257a5cb0a8bc3ac68d0f1a68c"}, - {file = "psutil-6.0.0-cp27-none-win32.whl", hash = "sha256:02b69001f44cc73c1c5279d02b30a817e339ceb258ad75997325e0e6169d8b35"}, - {file = "psutil-6.0.0-cp27-none-win_amd64.whl", hash = "sha256:21f1fb635deccd510f69f485b87433460a603919b45e2a324ad65b0cc74f8fb1"}, - {file = "psutil-6.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:c588a7e9b1173b6e866756dde596fd4cad94f9399daf99ad8c3258b3cb2b47a0"}, - {file = "psutil-6.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ed2440ada7ef7d0d608f20ad89a04ec47d2d3ab7190896cd62ca5fc4fe08bf0"}, - {file = "psutil-6.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fd9a97c8e94059b0ef54a7d4baf13b405011176c3b6ff257c247cae0d560ecd"}, - {file = "psutil-6.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2e8d0054fc88153ca0544f5c4d554d42e33df2e009c4ff42284ac9ebdef4132"}, - {file = "psutil-6.0.0-cp36-cp36m-win32.whl", hash = "sha256:fc8c9510cde0146432bbdb433322861ee8c3efbf8589865c8bf8d21cb30c4d14"}, - {file = "psutil-6.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:34859b8d8f423b86e4385ff3665d3f4d94be3cdf48221fbe476e883514fdb71c"}, - {file = "psutil-6.0.0-cp37-abi3-win32.whl", hash = "sha256:a495580d6bae27291324fe60cea0b5a7c23fa36a7cd35035a16d93bdcf076b9d"}, - {file = "psutil-6.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:33ea5e1c975250a720b3a6609c490db40dae5d83a4eb315170c4fe0d8b1f34b3"}, - {file = "psutil-6.0.0-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:ffe7fc9b6b36beadc8c322f84e1caff51e8703b88eee1da46d1e3a6ae11b4fd0"}, - {file = "psutil-6.0.0.tar.gz", hash = "sha256:8faae4f310b6d969fa26ca0545338b21f73c6b15db7c4a8d934a5482faa818f2"}, -] - -[package.extras] -test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] - -[[package]] -name = "psycopg" -version = "3.1.9" -description = "PostgreSQL database adapter for Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "psycopg-3.1.9-py3-none-any.whl", hash = "sha256:fbbac339274d8733ee70ba9822297af3e8871790a26e967b5ea53e30a4b74dcc"}, - {file = "psycopg-3.1.9.tar.gz", hash = "sha256:ab400f207a8c120bafdd8077916d8f6c0106e809401378708485b016508c30c9"}, -] - -[package.dependencies] -typing-extensions = ">=4.1" -tzdata = {version = "*", markers = "sys_platform == \"win32\""} - -[package.extras] -binary = ["psycopg-binary (==3.1.9)"] -c = ["psycopg-c (==3.1.9)"] -dev = ["black (>=23.1.0)", "dnspython (>=2.1)", "flake8 (>=4.0)", "mypy (>=1.2)", "types-setuptools (>=57.4)", "wheel (>=0.37)"] -docs = ["Sphinx (>=5.0)", "furo (==2022.6.21)", "sphinx-autobuild (>=2021.3.14)", "sphinx-autodoc-typehints (>=1.12)"] -pool = 
["psycopg-pool"] -test = ["anyio (>=3.6.2)", "mypy (>=1.2)", "pproxy (>=2.7)", "pytest (>=6.2.5)", "pytest-cov (>=3.0)", "pytest-randomly (>=3.5)"] - -[[package]] -name = "psycopg2-binary" -version = "2.9.9" -description = "psycopg2 - Python-PostgreSQL Database Adapter" -optional = false -python-versions = ">=3.7" -files = [ - {file = "psycopg2-binary-2.9.9.tar.gz", hash = "sha256:7f01846810177d829c7692f1f5ada8096762d9172af1b1a28d4ab5b77c923c1c"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c2470da5418b76232f02a2fcd2229537bb2d5a7096674ce61859c3229f2eb202"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c6af2a6d4b7ee9615cbb162b0738f6e1fd1f5c3eda7e5da17861eacf4c717ea7"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:75723c3c0fbbf34350b46a3199eb50638ab22a0228f93fb472ef4d9becc2382b"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83791a65b51ad6ee6cf0845634859d69a038ea9b03d7b26e703f94c7e93dbcf9"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0ef4854e82c09e84cc63084a9e4ccd6d9b154f1dbdd283efb92ecd0b5e2b8c84"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed1184ab8f113e8d660ce49a56390ca181f2981066acc27cf637d5c1e10ce46e"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d2997c458c690ec2bc6b0b7ecbafd02b029b7b4283078d3b32a852a7ce3ddd98"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b58b4710c7f4161b5e9dcbe73bb7c62d65670a87df7bcce9e1faaad43e715245"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0c009475ee389757e6e34611d75f6e4f05f0cf5ebb76c6037508318e1a1e0d7e"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8dbf6d1bc73f1d04ec1734bae3b4fb0ee3cb2a493d35ede9badbeb901fb40f6f"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-win32.whl", hash = "sha256:3f78fd71c4f43a13d342be74ebbc0666fe1f555b8837eb113cb7416856c79682"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-win_amd64.whl", hash = "sha256:876801744b0dee379e4e3c38b76fc89f88834bb15bf92ee07d94acd06ec890a0"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ee825e70b1a209475622f7f7b776785bd68f34af6e7a46e2e42f27b659b5bc26"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1ea665f8ce695bcc37a90ee52de7a7980be5161375d42a0b6c6abedbf0d81f0f"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:143072318f793f53819048fdfe30c321890af0c3ec7cb1dfc9cc87aa88241de2"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c332c8d69fb64979ebf76613c66b985414927a40f8defa16cf1bc028b7b0a7b0"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7fc5a5acafb7d6ccca13bfa8c90f8c51f13d8fb87d95656d3950f0158d3ce53"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:977646e05232579d2e7b9c59e21dbe5261f403a88417f6a6512e70d3f8a046be"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b6356793b84728d9d50ead16ab43c187673831e9d4019013f1402c41b1db9b27"}, - {file = 
"psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bc7bb56d04601d443f24094e9e31ae6deec9ccb23581f75343feebaf30423359"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:77853062a2c45be16fd6b8d6de2a99278ee1d985a7bd8b103e97e41c034006d2"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:78151aa3ec21dccd5cdef6c74c3e73386dcdfaf19bced944169697d7ac7482fc"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-win32.whl", hash = "sha256:dc4926288b2a3e9fd7b50dc6a1909a13bbdadfc67d93f3374d984e56f885579d"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:b76bedd166805480ab069612119ea636f5ab8f8771e640ae103e05a4aae3e417"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:8532fd6e6e2dc57bcb3bc90b079c60de896d2128c5d9d6f24a63875a95a088cf"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b0605eaed3eb239e87df0d5e3c6489daae3f7388d455d0c0b4df899519c6a38d"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f8544b092a29a6ddd72f3556a9fcf249ec412e10ad28be6a0c0d948924f2212"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2d423c8d8a3c82d08fe8af900ad5b613ce3632a1249fd6a223941d0735fce493"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e5afae772c00980525f6d6ecf7cbca55676296b580c0e6abb407f15f3706996"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e6f98446430fdf41bd36d4faa6cb409f5140c1c2cf58ce0bbdaf16af7d3f119"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c77e3d1862452565875eb31bdb45ac62502feabbd53429fdc39a1cc341d681ba"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:cb16c65dcb648d0a43a2521f2f0a2300f40639f6f8c1ecbc662141e4e3e1ee07"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:911dda9c487075abd54e644ccdf5e5c16773470a6a5d3826fda76699410066fb"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:57fede879f08d23c85140a360c6a77709113efd1c993923c59fde17aa27599fe"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-win32.whl", hash = "sha256:64cf30263844fa208851ebb13b0732ce674d8ec6a0c86a4e160495d299ba3c93"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:81ff62668af011f9a48787564ab7eded4e9fb17a4a6a74af5ffa6a457400d2ab"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2293b001e319ab0d869d660a704942c9e2cce19745262a8aba2115ef41a0a42a"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03ef7df18daf2c4c07e2695e8cfd5ee7f748a1d54d802330985a78d2a5a6dca9"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a602ea5aff39bb9fac6308e9c9d82b9a35c2bf288e184a816002c9fae930b77"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8359bf4791968c5a78c56103702000105501adb557f3cf772b2c207284273984"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:275ff571376626195ab95a746e6a04c7df8ea34638b99fc11160de91f2fef503"}, - {file = 
"psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f9b5571d33660d5009a8b3c25dc1db560206e2d2f89d3df1cb32d72c0d117d52"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:420f9bbf47a02616e8554e825208cb947969451978dceb77f95ad09c37791dae"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:4154ad09dac630a0f13f37b583eae260c6aa885d67dfbccb5b02c33f31a6d420"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a148c5d507bb9b4f2030a2025c545fccb0e1ef317393eaba42e7eabd28eb6041"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-win32.whl", hash = "sha256:68fc1f1ba168724771e38bee37d940d2865cb0f562380a1fb1ffb428b75cb692"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-win_amd64.whl", hash = "sha256:281309265596e388ef483250db3640e5f414168c5a67e9c665cafce9492eda2f"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:60989127da422b74a04345096c10d416c2b41bd7bf2a380eb541059e4e999980"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:246b123cc54bb5361588acc54218c8c9fb73068bf227a4a531d8ed56fa3ca7d6"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34eccd14566f8fe14b2b95bb13b11572f7c7d5c36da61caf414d23b91fcc5d94"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18d0ef97766055fec15b5de2c06dd8e7654705ce3e5e5eed3b6651a1d2a9a152"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d3f82c171b4ccd83bbaf35aa05e44e690113bd4f3b7b6cc54d2219b132f3ae55"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ead20f7913a9c1e894aebe47cccf9dc834e1618b7aa96155d2091a626e59c972"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ca49a8119c6cbd77375ae303b0cfd8c11f011abbbd64601167ecca18a87e7cdd"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:323ba25b92454adb36fa425dc5cf6f8f19f78948cbad2e7bc6cdf7b0d7982e59"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:1236ed0952fbd919c100bc839eaa4a39ebc397ed1c08a97fc45fee2a595aa1b3"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:729177eaf0aefca0994ce4cffe96ad3c75e377c7b6f4efa59ebf003b6d398716"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-win32.whl", hash = "sha256:804d99b24ad523a1fe18cc707bf741670332f7c7412e9d49cb5eab67e886b9b5"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-win_amd64.whl", hash = "sha256:a6cdcc3ede532f4a4b96000b6362099591ab4a3e913d70bcbac2b56c872446f7"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:72dffbd8b4194858d0941062a9766f8297e8868e1dd07a7b36212aaa90f49472"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:30dcc86377618a4c8f3b72418df92e77be4254d8f89f14b8e8f57d6d43603c0f"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:31a34c508c003a4347d389a9e6fcc2307cc2150eb516462a7a17512130de109e"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:15208be1c50b99203fe88d15695f22a5bed95ab3f84354c494bcb1d08557df67"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:1873aade94b74715be2246321c8650cabf5a0d098a95bab81145ffffa4c13876"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a58c98a7e9c021f357348867f537017057c2ed7f77337fd914d0bedb35dace7"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4686818798f9194d03c9129a4d9a702d9e113a89cb03bffe08c6cf799e053291"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ebdc36bea43063116f0486869652cb2ed7032dbc59fbcb4445c4862b5c1ecf7f"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:ca08decd2697fdea0aea364b370b1249d47336aec935f87b8bbfd7da5b2ee9c1"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ac05fb791acf5e1a3e39402641827780fe44d27e72567a000412c648a85ba860"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-win32.whl", hash = "sha256:9dba73be7305b399924709b91682299794887cbbd88e38226ed9f6712eabee90"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-win_amd64.whl", hash = "sha256:f7ae5d65ccfbebdfa761585228eb4d0df3a8b15cfb53bd953e713e09fbb12957"}, -] - -[[package]] -name = "ptyprocess" -version = "0.7.0" -description = "Run a subprocess in a pseudo terminal" -optional = false -python-versions = "*" -files = [ - {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, - {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, -] - -[[package]] -name = "pulsar-client" -version = "3.5.0" -description = "Apache Pulsar Python client library" -optional = false -python-versions = "*" -files = [ - {file = "pulsar_client-3.5.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:c18552edb2f785de85280fe624bc507467152bff810fc81d7660fa2dfa861f38"}, - {file = "pulsar_client-3.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18d438e456c146f01be41ef146f649dedc8f7bc714d9eaef94cff2e34099812b"}, - {file = "pulsar_client-3.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18a26a0719841103c7a89eb1492c4a8fedf89adaa386375baecbb4fa2707e88f"}, - {file = "pulsar_client-3.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ab0e1605dc5f44a126163fd06cd0a768494ad05123f6e0de89a2c71d6e2d2319"}, - {file = "pulsar_client-3.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cdef720891b97656fdce3bf5913ea7729b2156b84ba64314f432c1e72c6117fa"}, - {file = "pulsar_client-3.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:a42544e38773191fe550644a90e8050579476bb2dcf17ac69a4aed62a6cb70e7"}, - {file = "pulsar_client-3.5.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:fd94432ea5d398ea78f8f2e09a217ec5058d26330c137a22690478c031e116da"}, - {file = "pulsar_client-3.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6252ae462e07ece4071213fdd9c76eab82ca522a749f2dc678037d4cbacd40b"}, - {file = "pulsar_client-3.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03b4d440b2d74323784328b082872ee2f206c440b5d224d7941eb3c083ec06c6"}, - {file = "pulsar_client-3.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f60af840b8d64a2fac5a0c1ce6ae0ddffec5f42267c6ded2c5e74bad8345f2a1"}, - {file = "pulsar_client-3.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2277a447c3b7f6571cb1eb9fc5c25da3fdd43d0b2fb91cf52054adfadc7d6842"}, - {file = 
"pulsar_client-3.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:f20f3e9dd50db2a37059abccad42078b7a4754b8bc1d3ae6502e71c1ad2209f0"}, - {file = "pulsar_client-3.5.0-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:d61f663d85308e12f44033ba95af88730f581a7e8da44f7a5c080a3aaea4878d"}, - {file = "pulsar_client-3.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a1ba0be25b6f747bcb28102b7d906ec1de48dc9f1a2d9eacdcc6f44ab2c9e17"}, - {file = "pulsar_client-3.5.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a181e3e60ac39df72ccb3c415d7aeac61ad0286497a6e02739a560d5af28393a"}, - {file = "pulsar_client-3.5.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3c72895ff7f51347e4f78b0375b2213fa70dd4790bbb78177b4002846f1fd290"}, - {file = "pulsar_client-3.5.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:547dba1b185a17eba915e51d0a3aca27c80747b6187e5cd7a71a3ca33921decc"}, - {file = "pulsar_client-3.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:443b786eed96bc86d2297a6a42e79f39d1abf217ec603e0bd303f3488c0234af"}, - {file = "pulsar_client-3.5.0-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:15b58f5d759dd6166db8a2d90ed05a38063b05cda76c36d190d86ef5c9249397"}, - {file = "pulsar_client-3.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:af34bfe813dddf772a8a298117fa0a036ee963595d8bc8f00d969a0329ae6ed9"}, - {file = "pulsar_client-3.5.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27a0fec1dd74e1367d3742ce16679c1807994df60f5e666f440cf39323938fad"}, - {file = "pulsar_client-3.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dbcd26ef9c03f96fb9cd91baec3bbd3c4b997834eb3556670d31f41cc25b5f64"}, - {file = "pulsar_client-3.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:afea1d0b6e793fd56e56463145751ff3aa79fdcd5b26e90d0da802a1bbabe07e"}, - {file = "pulsar_client-3.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:da1ab2fb1bef64b966e9403a0a186ebc90368d99e054ce2cae5b1128478f4ef4"}, - {file = "pulsar_client-3.5.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:9ad5dcc0eb8d2a7c0fb8e1fa146a0c6d4bdaf934f1169080b2c64b2f0573e086"}, - {file = "pulsar_client-3.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5870c6805b1a57962ed908d1173e97e13470415998393925c86a43694420389"}, - {file = "pulsar_client-3.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:29cb5fedb969895b78301dc00a979133e69940812b8332e4de948bb0ad3db7cb"}, - {file = "pulsar_client-3.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e53c74bfa59b20c66adea95023169060f5048dd8d843e6ef9cd3b8ee2d23e93b"}, - {file = "pulsar_client-3.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:99dbadb13967f1add57010971ed36b5a77d24afcdaea01960d0e55e56cf4ba6f"}, - {file = "pulsar_client-3.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:058887661d438796f42307dcc8054c84dea88a37683dae36498b95d7e1c39b37"}, -] - -[package.dependencies] -certifi = "*" - -[package.extras] -all = ["apache-bookkeeper-client (>=4.16.1)", "fastavro (>=1.9.2)", "grpcio (>=1.60.0)", "prometheus-client", "protobuf (>=3.6.1,<=3.20.3)", "ratelimit"] -avro = ["fastavro (>=1.9.2)"] -functions = ["apache-bookkeeper-client (>=4.16.1)", "grpcio (>=1.60.0)", "prometheus-client", "protobuf (>=3.6.1,<=3.20.3)", "ratelimit"] - -[[package]] -name = "pure-eval" -version = "0.2.3" -description = "Safely evaluate AST nodes without side effects" -optional = false -python-versions = "*" -files = [ - {file = 
"pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0"}, - {file = "pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42"}, -] - -[package.extras] -tests = ["pytest"] - -[[package]] -name = "py" -version = "1.11.0" -description = "library with cross-python path, ini-parsing, io, code, log facilities" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, - {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, -] - -[[package]] -name = "py-cpuinfo" -version = "9.0.0" -description = "Get CPU info with pure Python" -optional = true -python-versions = "*" -files = [ - {file = "py-cpuinfo-9.0.0.tar.gz", hash = "sha256:3cdbbf3fac90dc6f118bfd64384f309edeadd902d7c8fb17f02ffa1fc3f49690"}, - {file = "py_cpuinfo-9.0.0-py3-none-any.whl", hash = "sha256:859625bc251f64e21f077d099d4162689c762b5d6a4c3c97553d56241c9674d5"}, -] - -[[package]] -name = "pyarrow" -version = "14.0.2" -description = "Python library for Apache Arrow" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pyarrow-14.0.2-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:ba9fe808596c5dbd08b3aeffe901e5f81095baaa28e7d5118e01354c64f22807"}, - {file = "pyarrow-14.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:22a768987a16bb46220cef490c56c671993fbee8fd0475febac0b3e16b00a10e"}, - {file = "pyarrow-14.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2dbba05e98f247f17e64303eb876f4a80fcd32f73c7e9ad975a83834d81f3fda"}, - {file = "pyarrow-14.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a898d134d00b1eca04998e9d286e19653f9d0fcb99587310cd10270907452a6b"}, - {file = "pyarrow-14.0.2-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:87e879323f256cb04267bb365add7208f302df942eb943c93a9dfeb8f44840b1"}, - {file = "pyarrow-14.0.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:76fc257559404ea5f1306ea9a3ff0541bf996ff3f7b9209fc517b5e83811fa8e"}, - {file = "pyarrow-14.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0c4a18e00f3a32398a7f31da47fefcd7a927545b396e1f15d0c85c2f2c778cd"}, - {file = "pyarrow-14.0.2-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:87482af32e5a0c0cce2d12eb3c039dd1d853bd905b04f3f953f147c7a196915b"}, - {file = "pyarrow-14.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:059bd8f12a70519e46cd64e1ba40e97eae55e0cbe1695edd95384653d7626b23"}, - {file = "pyarrow-14.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f16111f9ab27e60b391c5f6d197510e3ad6654e73857b4e394861fc79c37200"}, - {file = "pyarrow-14.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06ff1264fe4448e8d02073f5ce45a9f934c0f3db0a04460d0b01ff28befc3696"}, - {file = "pyarrow-14.0.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:6dd4f4b472ccf4042f1eab77e6c8bce574543f54d2135c7e396f413046397d5a"}, - {file = "pyarrow-14.0.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:32356bfb58b36059773f49e4e214996888eeea3a08893e7dbde44753799b2a02"}, - {file = "pyarrow-14.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:52809ee69d4dbf2241c0e4366d949ba035cbcf48409bf404f071f624ed313a2b"}, - {file = "pyarrow-14.0.2-cp312-cp312-macosx_10_14_x86_64.whl", hash = 
"sha256:c87824a5ac52be210d32906c715f4ed7053d0180c1060ae3ff9b7e560f53f944"}, - {file = "pyarrow-14.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a25eb2421a58e861f6ca91f43339d215476f4fe159eca603c55950c14f378cc5"}, - {file = "pyarrow-14.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c1da70d668af5620b8ba0a23f229030a4cd6c5f24a616a146f30d2386fec422"}, - {file = "pyarrow-14.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2cc61593c8e66194c7cdfae594503e91b926a228fba40b5cf25cc593563bcd07"}, - {file = "pyarrow-14.0.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:78ea56f62fb7c0ae8ecb9afdd7893e3a7dbeb0b04106f5c08dbb23f9c0157591"}, - {file = "pyarrow-14.0.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:37c233ddbce0c67a76c0985612fef27c0c92aef9413cf5aa56952f359fcb7379"}, - {file = "pyarrow-14.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:e4b123ad0f6add92de898214d404e488167b87b5dd86e9a434126bc2b7a5578d"}, - {file = "pyarrow-14.0.2-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:e354fba8490de258be7687f341bc04aba181fc8aa1f71e4584f9890d9cb2dec2"}, - {file = "pyarrow-14.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:20e003a23a13da963f43e2b432483fdd8c38dc8882cd145f09f21792e1cf22a1"}, - {file = "pyarrow-14.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc0de7575e841f1595ac07e5bc631084fd06ca8b03c0f2ecece733d23cd5102a"}, - {file = "pyarrow-14.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66e986dc859712acb0bd45601229021f3ffcdfc49044b64c6d071aaf4fa49e98"}, - {file = "pyarrow-14.0.2-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:f7d029f20ef56673a9730766023459ece397a05001f4e4d13805111d7c2108c0"}, - {file = "pyarrow-14.0.2-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:209bac546942b0d8edc8debda248364f7f668e4aad4741bae58e67d40e5fcf75"}, - {file = "pyarrow-14.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:1e6987c5274fb87d66bb36816afb6f65707546b3c45c44c28e3c4133c010a881"}, - {file = "pyarrow-14.0.2-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:a01d0052d2a294a5f56cc1862933014e696aa08cc7b620e8c0cce5a5d362e976"}, - {file = "pyarrow-14.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a51fee3a7db4d37f8cda3ea96f32530620d43b0489d169b285d774da48ca9785"}, - {file = "pyarrow-14.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64df2bf1ef2ef14cee531e2dfe03dd924017650ffaa6f9513d7a1bb291e59c15"}, - {file = "pyarrow-14.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c0fa3bfdb0305ffe09810f9d3e2e50a2787e3a07063001dcd7adae0cee3601a"}, - {file = "pyarrow-14.0.2-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:c65bf4fd06584f058420238bc47a316e80dda01ec0dfb3044594128a6c2db794"}, - {file = "pyarrow-14.0.2-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:63ac901baec9369d6aae1cbe6cca11178fb018a8d45068aaf5bb54f94804a866"}, - {file = "pyarrow-14.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:75ee0efe7a87a687ae303d63037d08a48ef9ea0127064df18267252cfe2e9541"}, - {file = "pyarrow-14.0.2.tar.gz", hash = "sha256:36cef6ba12b499d864d1def3e990f97949e0b79400d08b7cf74504ffbd3eb025"}, -] - -[package.dependencies] -numpy = ">=1.16.6" - -[[package]] -name = "pyarrow-hotfix" -version = "0.6" -description = "" -optional = false -python-versions = ">=3.5" -files = [ - {file = "pyarrow_hotfix-0.6-py3-none-any.whl", hash = "sha256:dcc9ae2d220dff0083be6a9aa8e0cdee5182ad358d4931fce825c545e5c89178"}, - {file = 
"pyarrow_hotfix-0.6.tar.gz", hash = "sha256:79d3e030f7ff890d408a100ac16d6f00b14d44a502d7897cd9fc3e3a534e9945"}, -] - -[[package]] -name = "pyasn1" -version = "0.6.0" -description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pyasn1-0.6.0-py2.py3-none-any.whl", hash = "sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473"}, - {file = "pyasn1-0.6.0.tar.gz", hash = "sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c"}, -] - -[[package]] -name = "pyasn1-modules" -version = "0.4.0" -description = "A collection of ASN.1-based protocols modules" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pyasn1_modules-0.4.0-py3-none-any.whl", hash = "sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b"}, - {file = "pyasn1_modules-0.4.0.tar.gz", hash = "sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6"}, -] - -[package.dependencies] -pyasn1 = ">=0.4.6,<0.7.0" - -[[package]] -name = "pyautogen" -version = "0.2.34" -description = "Enabling Next-Gen LLM Applications via Multi-Agent Conversation Framework" -optional = false -python-versions = "<3.13,>=3.8" -files = [ - {file = "pyautogen-0.2.34-py3-none-any.whl", hash = "sha256:13218d2d50561249f08e75952e1aa0902982d146154f8285310977183fc829c0"}, - {file = "pyautogen-0.2.34.tar.gz", hash = "sha256:1809bbfe2676968ccc27ee0d2312a9ba7cda50ecd8a12d4ce59fc06f618a6771"}, -] - -[package.dependencies] -diskcache = "*" -docker = "*" -flaml = "*" -numpy = ">=1.17.0,<2" -openai = ">=1.3" -packaging = "*" -pydantic = ">=1.10,<2.6.0 || >2.6.0,<3" -python-dotenv = "*" -termcolor = "*" -tiktoken = "*" - -[package.extras] -anthropic = ["anthropic (>=0.23.1)"] -autobuild = ["chromadb", "huggingface-hub", "pysqlite3", "sentence-transformers"] -blendsearch = ["flaml[blendsearch]"] -cohere = ["cohere (>=5.5.8)"] -cosmosdb = ["azure-cosmos (>=4.2.0)"] -gemini = ["google-auth", "google-cloud-aiplatform", "google-generativeai (>=0.5,<1)", "pillow", "pydantic"] -graph = ["matplotlib", "networkx"] -groq = ["groq (>=0.9.0)"] -jupyter-executor = ["ipykernel (>=6.29.0)", "jupyter-client (>=8.6.0)", "jupyter-kernel-gateway", "requests", "websocket-client"] -lmm = ["pillow", "replicate"] -long-context = ["llmlingua (<0.3)"] -mathchat = ["pydantic (==1.10.9)", "sympy", "wolframalpha"] -mistral = ["mistralai (>=0.2.0)"] -redis = ["redis"] -retrievechat = ["beautifulsoup4", "chromadb", "ipython", "markdownify", "protobuf (==4.25.3)", "pypdf", "sentence-transformers"] -retrievechat-mongodb = ["beautifulsoup4", "chromadb", "ipython", "markdownify", "protobuf (==4.25.3)", "pymongo (>=4.0.0)", "pypdf", "sentence-transformers"] -retrievechat-pgvector = ["beautifulsoup4", "chromadb", "ipython", "markdownify", "pgvector (>=0.2.5)", "protobuf (==4.25.3)", "psycopg (>=3.1.18)", "pypdf", "sentence-transformers"] -retrievechat-qdrant = ["beautifulsoup4", "chromadb", "fastembed (>=0.3.1)", "ipython", "markdownify", "protobuf (==4.25.3)", "pypdf", "qdrant-client", "sentence-transformers"] -teachable = ["chromadb"] -test = ["ipykernel", "nbconvert", "nbformat", "pandas", "pre-commit", "pytest (>=6.1.1,<8)", "pytest-asyncio", "pytest-cov (>=5)"] -together = ["together (>=1.2)"] -types = ["ipykernel (>=6.29.0)", "jupyter-client (>=8.6.0)", "jupyter-kernel-gateway", "mypy (==1.9.0)", "pytest (>=6.1.1,<8)", "requests", "websocket-client"] -websockets = ["websockets (>=12.0,<13)"] -websurfer = ["beautifulsoup4", 
"markdownify", "pathvalidate", "pdfminer.six"] - -[[package]] -name = "pycparser" -version = "2.22" -description = "C parser in Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, - {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, -] - -[[package]] -name = "pycryptodome" -version = "3.20.0" -description = "Cryptographic library for Python" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "pycryptodome-3.20.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:f0e6d631bae3f231d3634f91ae4da7a960f7ff87f2865b2d2b831af1dfb04e9a"}, - {file = "pycryptodome-3.20.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:baee115a9ba6c5d2709a1e88ffe62b73ecc044852a925dcb67713a288c4ec70f"}, - {file = "pycryptodome-3.20.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:417a276aaa9cb3be91f9014e9d18d10e840a7a9b9a9be64a42f553c5b50b4d1d"}, - {file = "pycryptodome-3.20.0-cp27-cp27m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a1250b7ea809f752b68e3e6f3fd946b5939a52eaeea18c73bdab53e9ba3c2dd"}, - {file = "pycryptodome-3.20.0-cp27-cp27m-musllinux_1_1_aarch64.whl", hash = "sha256:d5954acfe9e00bc83ed9f5cb082ed22c592fbbef86dc48b907238be64ead5c33"}, - {file = "pycryptodome-3.20.0-cp27-cp27m-win32.whl", hash = "sha256:06d6de87c19f967f03b4cf9b34e538ef46e99a337e9a61a77dbe44b2cbcf0690"}, - {file = "pycryptodome-3.20.0-cp27-cp27m-win_amd64.whl", hash = "sha256:ec0bb1188c1d13426039af8ffcb4dbe3aad1d7680c35a62d8eaf2a529b5d3d4f"}, - {file = "pycryptodome-3.20.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:5601c934c498cd267640b57569e73793cb9a83506f7c73a8ec57a516f5b0b091"}, - {file = "pycryptodome-3.20.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d29daa681517f4bc318cd8a23af87e1f2a7bad2fe361e8aa29c77d652a065de4"}, - {file = "pycryptodome-3.20.0-cp27-cp27mu-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3427d9e5310af6680678f4cce149f54e0bb4af60101c7f2c16fdf878b39ccccc"}, - {file = "pycryptodome-3.20.0-cp27-cp27mu-musllinux_1_1_aarch64.whl", hash = "sha256:3cd3ef3aee1079ae44afaeee13393cf68b1058f70576b11439483e34f93cf818"}, - {file = "pycryptodome-3.20.0-cp35-abi3-macosx_10_9_universal2.whl", hash = "sha256:ac1c7c0624a862f2e53438a15c9259d1655325fc2ec4392e66dc46cdae24d044"}, - {file = "pycryptodome-3.20.0-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:76658f0d942051d12a9bd08ca1b6b34fd762a8ee4240984f7c06ddfb55eaf15a"}, - {file = "pycryptodome-3.20.0-cp35-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f35d6cee81fa145333137009d9c8ba90951d7d77b67c79cbe5f03c7eb74d8fe2"}, - {file = "pycryptodome-3.20.0-cp35-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76cb39afede7055127e35a444c1c041d2e8d2f1f9c121ecef573757ba4cd2c3c"}, - {file = "pycryptodome-3.20.0-cp35-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49a4c4dc60b78ec41d2afa392491d788c2e06edf48580fbfb0dd0f828af49d25"}, - {file = "pycryptodome-3.20.0-cp35-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fb3b87461fa35afa19c971b0a2b7456a7b1db7b4eba9a8424666104925b78128"}, - {file = "pycryptodome-3.20.0-cp35-abi3-musllinux_1_1_i686.whl", hash = "sha256:acc2614e2e5346a4a4eab6e199203034924313626f9620b7b4b38e9ad74b7e0c"}, - {file = 
"pycryptodome-3.20.0-cp35-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:210ba1b647837bfc42dd5a813cdecb5b86193ae11a3f5d972b9a0ae2c7e9e4b4"}, - {file = "pycryptodome-3.20.0-cp35-abi3-win32.whl", hash = "sha256:8d6b98d0d83d21fb757a182d52940d028564efe8147baa9ce0f38d057104ae72"}, - {file = "pycryptodome-3.20.0-cp35-abi3-win_amd64.whl", hash = "sha256:9b3ae153c89a480a0ec402e23db8d8d84a3833b65fa4b15b81b83be9d637aab9"}, - {file = "pycryptodome-3.20.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:4401564ebf37dfde45d096974c7a159b52eeabd9969135f0426907db367a652a"}, - {file = "pycryptodome-3.20.0-pp27-pypy_73-win32.whl", hash = "sha256:ec1f93feb3bb93380ab0ebf8b859e8e5678c0f010d2d78367cf6bc30bfeb148e"}, - {file = "pycryptodome-3.20.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:acae12b9ede49f38eb0ef76fdec2df2e94aad85ae46ec85be3648a57f0a7db04"}, - {file = "pycryptodome-3.20.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f47888542a0633baff535a04726948e876bf1ed880fddb7c10a736fa99146ab3"}, - {file = "pycryptodome-3.20.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e0e4a987d38cfc2e71b4a1b591bae4891eeabe5fa0f56154f576e26287bfdea"}, - {file = "pycryptodome-3.20.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c18b381553638414b38705f07d1ef0a7cf301bc78a5f9bc17a957eb19446834b"}, - {file = "pycryptodome-3.20.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a60fedd2b37b4cb11ccb5d0399efe26db9e0dd149016c1cc6c8161974ceac2d6"}, - {file = "pycryptodome-3.20.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:405002eafad114a2f9a930f5db65feef7b53c4784495dd8758069b89baf68eab"}, - {file = "pycryptodome-3.20.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ab6ab0cb755154ad14e507d1df72de9897e99fd2d4922851a276ccc14f4f1a5"}, - {file = "pycryptodome-3.20.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:acf6e43fa75aca2d33e93409f2dafe386fe051818ee79ee8a3e21de9caa2ac9e"}, - {file = "pycryptodome-3.20.0.tar.gz", hash = "sha256:09609209ed7de61c2b560cc5c8c4fbf892f8b15b1faf7e4cbffac97db1fffda7"}, -] - -[[package]] -name = "pydantic" -version = "2.8.2" -description = "Data validation using Python type hints" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pydantic-2.8.2-py3-none-any.whl", hash = "sha256:73ee9fddd406dc318b885c7a2eab8a6472b68b8fb5ba8150949fc3db939f23c8"}, - {file = "pydantic-2.8.2.tar.gz", hash = "sha256:6f62c13d067b0755ad1c21a34bdd06c0c12625a22b0fc09c6b149816604f7c2a"}, -] - -[package.dependencies] -annotated-types = ">=0.4.0" -pydantic-core = "2.20.1" -typing-extensions = {version = ">=4.6.1", markers = "python_version < \"3.13\""} - -[package.extras] -email = ["email-validator (>=2.0.0)"] - -[[package]] -name = "pydantic-core" -version = "2.20.1" -description = "Core functionality for Pydantic validation and serialization" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pydantic_core-2.20.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3acae97ffd19bf091c72df4d726d552c473f3576409b2a7ca36b2f535ffff4a3"}, - {file = "pydantic_core-2.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:41f4c96227a67a013e7de5ff8f20fb496ce573893b7f4f2707d065907bffdbd6"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f239eb799a2081495ea659d8d4a43a8f42cd1fe9ff2e7e436295c38a10c286a"}, - {file = 
"pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53e431da3fc53360db73eedf6f7124d1076e1b4ee4276b36fb25514544ceb4a3"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1f62b2413c3a0e846c3b838b2ecd6c7a19ec6793b2a522745b0869e37ab5bc1"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d41e6daee2813ecceea8eda38062d69e280b39df793f5a942fa515b8ed67953"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d482efec8b7dc6bfaedc0f166b2ce349df0011f5d2f1f25537ced4cfc34fd98"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e93e1a4b4b33daed65d781a57a522ff153dcf748dee70b40c7258c5861e1768a"}, - {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7c4ea22b6739b162c9ecaaa41d718dfad48a244909fe7ef4b54c0b530effc5a"}, - {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4f2790949cf385d985a31984907fecb3896999329103df4e4983a4a41e13e840"}, - {file = "pydantic_core-2.20.1-cp310-none-win32.whl", hash = "sha256:5e999ba8dd90e93d57410c5e67ebb67ffcaadcea0ad973240fdfd3a135506250"}, - {file = "pydantic_core-2.20.1-cp310-none-win_amd64.whl", hash = "sha256:512ecfbefef6dac7bc5eaaf46177b2de58cdf7acac8793fe033b24ece0b9566c"}, - {file = "pydantic_core-2.20.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d2a8fa9d6d6f891f3deec72f5cc668e6f66b188ab14bb1ab52422fe8e644f312"}, - {file = "pydantic_core-2.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:175873691124f3d0da55aeea1d90660a6ea7a3cfea137c38afa0a5ffabe37b88"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37eee5b638f0e0dcd18d21f59b679686bbd18917b87db0193ae36f9c23c355fc"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25e9185e2d06c16ee438ed39bf62935ec436474a6ac4f9358524220f1b236e43"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:150906b40ff188a3260cbee25380e7494ee85048584998c1e66df0c7a11c17a6"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ad4aeb3e9a97286573c03df758fc7627aecdd02f1da04516a86dc159bf70121"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3f3ed29cd9f978c604708511a1f9c2fdcb6c38b9aae36a51905b8811ee5cbf1"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0dae11d8f5ded51699c74d9548dcc5938e0804cc8298ec0aa0da95c21fff57b"}, - {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:faa6b09ee09433b87992fb5a2859efd1c264ddc37280d2dd5db502126d0e7f27"}, - {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9dc1b507c12eb0481d071f3c1808f0529ad41dc415d0ca11f7ebfc666e66a18b"}, - {file = "pydantic_core-2.20.1-cp311-none-win32.whl", hash = "sha256:fa2fddcb7107e0d1808086ca306dcade7df60a13a6c347a7acf1ec139aa6789a"}, - {file = "pydantic_core-2.20.1-cp311-none-win_amd64.whl", hash = "sha256:40a783fb7ee353c50bd3853e626f15677ea527ae556429453685ae32280c19c2"}, - {file = "pydantic_core-2.20.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:595ba5be69b35777474fa07f80fc260ea71255656191adb22a8c53aba4479231"}, - {file 
= "pydantic_core-2.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a4f55095ad087474999ee28d3398bae183a66be4823f753cd7d67dd0153427c9"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9aa05d09ecf4c75157197f27cdc9cfaeb7c5f15021c6373932bf3e124af029f"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e97fdf088d4b31ff4ba35db26d9cc472ac7ef4a2ff2badeabf8d727b3377fc52"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bc633a9fe1eb87e250b5c57d389cf28998e4292336926b0b6cdaee353f89a237"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d573faf8eb7e6b1cbbcb4f5b247c60ca8be39fe2c674495df0eb4318303137fe"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26dc97754b57d2fd00ac2b24dfa341abffc380b823211994c4efac7f13b9e90e"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:33499e85e739a4b60c9dac710c20a08dc73cb3240c9a0e22325e671b27b70d24"}, - {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bebb4d6715c814597f85297c332297c6ce81e29436125ca59d1159b07f423eb1"}, - {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:516d9227919612425c8ef1c9b869bbbee249bc91912c8aaffb66116c0b447ebd"}, - {file = "pydantic_core-2.20.1-cp312-none-win32.whl", hash = "sha256:469f29f9093c9d834432034d33f5fe45699e664f12a13bf38c04967ce233d688"}, - {file = "pydantic_core-2.20.1-cp312-none-win_amd64.whl", hash = "sha256:035ede2e16da7281041f0e626459bcae33ed998cca6a0a007a5ebb73414ac72d"}, - {file = "pydantic_core-2.20.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:0827505a5c87e8aa285dc31e9ec7f4a17c81a813d45f70b1d9164e03a813a686"}, - {file = "pydantic_core-2.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:19c0fa39fa154e7e0b7f82f88ef85faa2a4c23cc65aae2f5aea625e3c13c735a"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa223cd1e36b642092c326d694d8bf59b71ddddc94cdb752bbbb1c5c91d833b"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c336a6d235522a62fef872c6295a42ecb0c4e1d0f1a3e500fe949415761b8a19"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7eb6a0587eded33aeefea9f916899d42b1799b7b14b8f8ff2753c0ac1741edac"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:70c8daf4faca8da5a6d655f9af86faf6ec2e1768f4b8b9d0226c02f3d6209703"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9fa4c9bf273ca41f940bceb86922a7667cd5bf90e95dbb157cbb8441008482c"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:11b71d67b4725e7e2a9f6e9c0ac1239bbc0c48cce3dc59f98635efc57d6dac83"}, - {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:270755f15174fb983890c49881e93f8f1b80f0b5e3a3cc1394a255706cabd203"}, - {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c81131869240e3e568916ef4c307f8b99583efaa60a8112ef27a366eefba8ef0"}, - {file = "pydantic_core-2.20.1-cp313-none-win32.whl", hash = 
"sha256:b91ced227c41aa29c672814f50dbb05ec93536abf8f43cd14ec9521ea09afe4e"}, - {file = "pydantic_core-2.20.1-cp313-none-win_amd64.whl", hash = "sha256:65db0f2eefcaad1a3950f498aabb4875c8890438bc80b19362cf633b87a8ab20"}, - {file = "pydantic_core-2.20.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:4745f4ac52cc6686390c40eaa01d48b18997cb130833154801a442323cc78f91"}, - {file = "pydantic_core-2.20.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a8ad4c766d3f33ba8fd692f9aa297c9058970530a32c728a2c4bfd2616d3358b"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41e81317dd6a0127cabce83c0c9c3fbecceae981c8391e6f1dec88a77c8a569a"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04024d270cf63f586ad41fff13fde4311c4fc13ea74676962c876d9577bcc78f"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eaad4ff2de1c3823fddf82f41121bdf453d922e9a238642b1dedb33c4e4f98ad"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:26ab812fa0c845df815e506be30337e2df27e88399b985d0bb4e3ecfe72df31c"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c5ebac750d9d5f2706654c638c041635c385596caf68f81342011ddfa1e5598"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2aafc5a503855ea5885559eae883978c9b6d8c8993d67766ee73d82e841300dd"}, - {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4868f6bd7c9d98904b748a2653031fc9c2f85b6237009d475b1008bfaeb0a5aa"}, - {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa2f457b4af386254372dfa78a2eda2563680d982422641a85f271c859df1987"}, - {file = "pydantic_core-2.20.1-cp38-none-win32.whl", hash = "sha256:225b67a1f6d602de0ce7f6c1c3ae89a4aa25d3de9be857999e9124f15dab486a"}, - {file = "pydantic_core-2.20.1-cp38-none-win_amd64.whl", hash = "sha256:6b507132dcfc0dea440cce23ee2182c0ce7aba7054576efc65634f080dbe9434"}, - {file = "pydantic_core-2.20.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b03f7941783b4c4a26051846dea594628b38f6940a2fdc0df00b221aed39314c"}, - {file = "pydantic_core-2.20.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1eedfeb6089ed3fad42e81a67755846ad4dcc14d73698c120a82e4ccf0f1f9f6"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:635fee4e041ab9c479e31edda27fcf966ea9614fff1317e280d99eb3e5ab6fe2"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:77bf3ac639c1ff567ae3b47f8d4cc3dc20f9966a2a6dd2311dcc055d3d04fb8a"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ed1b0132f24beeec5a78b67d9388656d03e6a7c837394f99257e2d55b461611"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6514f963b023aeee506678a1cf821fe31159b925c4b76fe2afa94cc70b3222b"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10d4204d8ca33146e761c79f83cc861df20e7ae9f6487ca290a97702daf56006"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2d036c7187b9422ae5b262badb87a20a49eb6c5238b2004e96d4da1231badef1"}, - {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:9ebfef07dbe1d93efb94b4700f2d278494e9162565a54f124c404a5656d7ff09"}, - {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6b9d9bb600328a1ce523ab4f454859e9d439150abb0906c5a1983c146580ebab"}, - {file = "pydantic_core-2.20.1-cp39-none-win32.whl", hash = "sha256:784c1214cb6dd1e3b15dd8b91b9a53852aed16671cc3fbe4786f4f1db07089e2"}, - {file = "pydantic_core-2.20.1-cp39-none-win_amd64.whl", hash = "sha256:d2fe69c5434391727efa54b47a1e7986bb0186e72a41b203df8f5b0a19a4f669"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a45f84b09ac9c3d35dfcf6a27fd0634d30d183205230a0ebe8373a0e8cfa0906"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d02a72df14dfdbaf228424573a07af10637bd490f0901cee872c4f434a735b94"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2b27e6af28f07e2f195552b37d7d66b150adbaa39a6d327766ffd695799780f"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:084659fac3c83fd674596612aeff6041a18402f1e1bc19ca39e417d554468482"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:242b8feb3c493ab78be289c034a1f659e8826e2233786e36f2893a950a719bb6"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:38cf1c40a921d05c5edc61a785c0ddb4bed67827069f535d794ce6bcded919fc"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e0bbdd76ce9aa5d4209d65f2b27fc6e5ef1312ae6c5333c26db3f5ade53a1e99"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:254ec27fdb5b1ee60684f91683be95e5133c994cc54e86a0b0963afa25c8f8a6"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:407653af5617f0757261ae249d3fba09504d7a71ab36ac057c938572d1bc9331"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:c693e916709c2465b02ca0ad7b387c4f8423d1db7b4649c551f27a529181c5ad"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b5ff4911aea936a47d9376fd3ab17e970cc543d1b68921886e7f64bd28308d1"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177f55a886d74f1808763976ac4efd29b7ed15c69f4d838bbd74d9d09cf6fa86"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:964faa8a861d2664f0c7ab0c181af0bea66098b1919439815ca8803ef136fc4e"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4dd484681c15e6b9a977c785a345d3e378d72678fd5f1f3c0509608da24f2ac0"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f6d6cff3538391e8486a431569b77921adfcdef14eb18fbf19b7c0a5294d4e6a"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a6d511cc297ff0883bc3708b465ff82d7560193169a8b93260f74ecb0a5e08a7"}, - {file = "pydantic_core-2.20.1.tar.gz", hash = "sha256:26ca695eeee5f9f1aeeb211ffc12f10bcb6f71e2989988fda61dabd65db878d4"}, -] - -[package.dependencies] -typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" - -[[package]] -name = "pydantic-settings" -version = "2.4.0" -description = "Settings management using Pydantic" -optional = false -python-versions = ">=3.8" -files = [ - {file = 
"pydantic_settings-2.4.0-py3-none-any.whl", hash = "sha256:bb6849dc067f1687574c12a639e231f3a6feeed0a12d710c1382045c5db1c315"}, - {file = "pydantic_settings-2.4.0.tar.gz", hash = "sha256:ed81c3a0f46392b4d7c0a565c05884e6e54b3456e6f0fe4d8814981172dc9a88"}, -] - -[package.dependencies] -pydantic = ">=2.7.0" -python-dotenv = ">=0.21.0" - -[package.extras] -azure-key-vault = ["azure-identity (>=1.16.0)", "azure-keyvault-secrets (>=4.8.0)"] -toml = ["tomli (>=2.0.1)"] -yaml = ["pyyaml (>=6.0.1)"] - -[[package]] -name = "pygments" -version = "2.18.0" -description = "Pygments is a syntax highlighting package written in Python." -optional = false -python-versions = ">=3.8" -files = [ - {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, - {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, -] - -[package.extras] -windows-terminal = ["colorama (>=0.4.6)"] - -[[package]] -name = "pylance" -version = "0.9.18" -description = "python wrapper for Lance columnar format" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pylance-0.9.18-cp38-abi3-macosx_10_15_x86_64.whl", hash = "sha256:fe2445d922c594d90e89111385106f6b152caab27996217db7bb4b8947eb0bea"}, - {file = "pylance-0.9.18-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:a2c424c50f5186edbbcc5a26f34063ed09d9a7390e28033395728ce02b5658f0"}, - {file = "pylance-0.9.18-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10af06edfde3e8451bf2251381d3980a0a164eab9d4c3d4dc8b6318969e958a6"}, - {file = "pylance-0.9.18-cp38-abi3-manylinux_2_24_aarch64.whl", hash = "sha256:d8bb9045d7163cc966b9fe34a917044192be37a90915475b77461e5b7d89e442"}, - {file = "pylance-0.9.18-cp38-abi3-win_amd64.whl", hash = "sha256:5ea80b7bf70d992f3fe63bce2d2f064f742124c04eaedeb76baca408ded85a2c"}, -] - -[package.dependencies] -numpy = ">=1.22" -pyarrow = ">=12" - -[package.extras] -benchmarks = ["pytest-benchmark"] -dev = ["ruff (==0.2.2)"] -tests = ["datasets", "duckdb", "ml_dtypes", "pandas", "pillow", "polars[pandas,pyarrow]", "pytest", "tensorflow", "tqdm"] -torch = ["torch"] - -[[package]] -name = "pymilvus" -version = "2.4.5" -description = "Python Sdk for Milvus" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pymilvus-2.4.5-py3-none-any.whl", hash = "sha256:dc4f2d1eac8db9cf3951de39566a1a244695760bb94d8310fbfc73d6d62bb267"}, - {file = "pymilvus-2.4.5.tar.gz", hash = "sha256:1a497fe9b41d6bf62b1d5e1c412960922dde1598576fcbb8818040c8af11149f"}, -] - -[package.dependencies] -environs = "<=9.5.0" -grpcio = ">=1.49.1,<=1.63.0" -milvus-lite = {version = ">=2.4.0,<2.5.0", markers = "sys_platform != \"win32\""} -pandas = ">=1.2.4" -protobuf = ">=3.20.0" -setuptools = ">69" -ujson = ">=2.0.0" - -[package.extras] -bulk-writer = ["azure-storage-blob", "minio (>=7.0.0)", "pyarrow (>=12.0.0)", "requests"] -dev = ["black", "grpcio (==1.62.2)", "grpcio-testing (==1.62.2)", "grpcio-tools (==1.62.2)", "pytest (>=5.3.4)", "pytest-cov (>=2.8.1)", "pytest-timeout (>=1.3.4)", "ruff (>0.4.0)"] -model = ["milvus-model (>=0.1.0)"] - -[[package]] -name = "pymongo" -version = "4.8.0" -description = "Python driver for MongoDB " -optional = false -python-versions = ">=3.8" -files = [ - {file = "pymongo-4.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f2b7bec27e047e84947fbd41c782f07c54c30c76d14f3b8bf0c89f7413fac67a"}, - {file = "pymongo-4.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:3c68fe128a171493018ca5c8020fc08675be130d012b7ab3efe9e22698c612a1"}, - {file = "pymongo-4.8.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:920d4f8f157a71b3cb3f39bc09ce070693d6e9648fb0e30d00e2657d1dca4e49"}, - {file = "pymongo-4.8.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52b4108ac9469febba18cea50db972605cc43978bedaa9fea413378877560ef8"}, - {file = "pymongo-4.8.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:180d5eb1dc28b62853e2f88017775c4500b07548ed28c0bd9c005c3d7bc52526"}, - {file = "pymongo-4.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aec2b9088cdbceb87e6ca9c639d0ff9b9d083594dda5ca5d3c4f6774f4c81b33"}, - {file = "pymongo-4.8.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d0cf61450feadca81deb1a1489cb1a3ae1e4266efd51adafecec0e503a8dcd84"}, - {file = "pymongo-4.8.0-cp310-cp310-win32.whl", hash = "sha256:8b18c8324809539c79bd6544d00e0607e98ff833ca21953df001510ca25915d1"}, - {file = "pymongo-4.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:e5df28f74002e37bcbdfdc5109799f670e4dfef0fb527c391ff84f078050e7b5"}, - {file = "pymongo-4.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6b50040d9767197b77ed420ada29b3bf18a638f9552d80f2da817b7c4a4c9c68"}, - {file = "pymongo-4.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:417369ce39af2b7c2a9c7152c1ed2393edfd1cbaf2a356ba31eb8bcbd5c98dd7"}, - {file = "pymongo-4.8.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf821bd3befb993a6db17229a2c60c1550e957de02a6ff4dd0af9476637b2e4d"}, - {file = "pymongo-4.8.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9365166aa801c63dff1a3cb96e650be270da06e3464ab106727223123405510f"}, - {file = "pymongo-4.8.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc8b8582f4209c2459b04b049ac03c72c618e011d3caa5391ff86d1bda0cc486"}, - {file = "pymongo-4.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16e5019f75f6827bb5354b6fef8dfc9d6c7446894a27346e03134d290eb9e758"}, - {file = "pymongo-4.8.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b5802151fc2b51cd45492c80ed22b441d20090fb76d1fd53cd7760b340ff554"}, - {file = "pymongo-4.8.0-cp311-cp311-win32.whl", hash = "sha256:4bf58e6825b93da63e499d1a58de7de563c31e575908d4e24876234ccb910eba"}, - {file = "pymongo-4.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:b747c0e257b9d3e6495a018309b9e0c93b7f0d65271d1d62e572747f4ffafc88"}, - {file = "pymongo-4.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e6a720a3d22b54183352dc65f08cd1547204d263e0651b213a0a2e577e838526"}, - {file = "pymongo-4.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:31e4d21201bdf15064cf47ce7b74722d3e1aea2597c6785882244a3bb58c7eab"}, - {file = "pymongo-4.8.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6b804bb4f2d9dc389cc9e827d579fa327272cdb0629a99bfe5b83cb3e269ebf"}, - {file = "pymongo-4.8.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f2fbdb87fe5075c8beb17a5c16348a1ea3c8b282a5cb72d173330be2fecf22f5"}, - {file = "pymongo-4.8.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd39455b7ee70aabee46f7399b32ab38b86b236c069ae559e22be6b46b2bbfc4"}, - {file = "pymongo-4.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:940d456774b17814bac5ea7fc28188c7a1338d4a233efbb6ba01de957bded2e8"}, - {file = "pymongo-4.8.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:236bbd7d0aef62e64caf4b24ca200f8c8670d1a6f5ea828c39eccdae423bc2b2"}, - {file = "pymongo-4.8.0-cp312-cp312-win32.whl", hash = "sha256:47ec8c3f0a7b2212dbc9be08d3bf17bc89abd211901093e3ef3f2adea7de7a69"}, - {file = "pymongo-4.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:e84bc7707492f06fbc37a9f215374d2977d21b72e10a67f1b31893ec5a140ad8"}, - {file = "pymongo-4.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:519d1bab2b5e5218c64340b57d555d89c3f6c9d717cecbf826fb9d42415e7750"}, - {file = "pymongo-4.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:87075a1feb1e602e539bdb1ef8f4324a3427eb0d64208c3182e677d2c0718b6f"}, - {file = "pymongo-4.8.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f53429515d2b3e86dcc83dadecf7ff881e538c168d575f3688698a8707b80a"}, - {file = "pymongo-4.8.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fdc20cd1e1141b04696ffcdb7c71e8a4a665db31fe72e51ec706b3bdd2d09f36"}, - {file = "pymongo-4.8.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:284d0717d1a7707744018b0b6ee7801b1b1ff044c42f7be7a01bb013de639470"}, - {file = "pymongo-4.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5bf0eb8b6ef40fa22479f09375468c33bebb7fe49d14d9c96c8fd50355188b0"}, - {file = "pymongo-4.8.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ecd71b9226bd1d49416dc9f999772038e56f415a713be51bf18d8676a0841c8"}, - {file = "pymongo-4.8.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e0061af6e8c5e68b13f1ec9ad5251247726653c5af3c0bbdfbca6cf931e99216"}, - {file = "pymongo-4.8.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:658d0170f27984e0d89c09fe5c42296613b711a3ffd847eb373b0dbb5b648d5f"}, - {file = "pymongo-4.8.0-cp38-cp38-win32.whl", hash = "sha256:3ed1c316718a2836f7efc3d75b4b0ffdd47894090bc697de8385acd13c513a70"}, - {file = "pymongo-4.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:7148419eedfea9ecb940961cfe465efaba90595568a1fb97585fb535ea63fe2b"}, - {file = "pymongo-4.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e8400587d594761e5136a3423111f499574be5fd53cf0aefa0d0f05b180710b0"}, - {file = "pymongo-4.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:af3e98dd9702b73e4e6fd780f6925352237f5dce8d99405ff1543f3771201704"}, - {file = "pymongo-4.8.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de3a860f037bb51f968de320baef85090ff0bbb42ec4f28ec6a5ddf88be61871"}, - {file = "pymongo-4.8.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0fc18b3a093f3db008c5fea0e980dbd3b743449eee29b5718bc2dc15ab5088bb"}, - {file = "pymongo-4.8.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18c9d8f975dd7194c37193583fd7d1eb9aea0c21ee58955ecf35362239ff31ac"}, - {file = "pymongo-4.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:408b2f8fdbeca3c19e4156f28fff1ab11c3efb0407b60687162d49f68075e63c"}, - {file = "pymongo-4.8.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b6564780cafd6abeea49759fe661792bd5a67e4f51bca62b88faab497ab5fe89"}, - {file = "pymongo-4.8.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:d18d86bc9e103f4d3d4f18b85a0471c0e13ce5b79194e4a0389a224bb70edd53"}, - {file = "pymongo-4.8.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9097c331577cecf8034422956daaba7ec74c26f7b255d718c584faddd7fa2e3c"}, - {file = "pymongo-4.8.0-cp39-cp39-win32.whl", hash = "sha256:d5428dbcd43d02f6306e1c3c95f692f68b284e6ee5390292242f509004c9e3a8"}, - {file = "pymongo-4.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:ef7225755ed27bfdb18730c68f6cb023d06c28f2b734597480fb4c0e500feb6f"}, - {file = "pymongo-4.8.0.tar.gz", hash = "sha256:454f2295875744dc70f1881e4b2eb99cdad008a33574bc8aaf120530f66c0cde"}, -] - -[package.dependencies] -dnspython = ">=1.16.0,<3.0.0" - -[package.extras] -aws = ["pymongo-auth-aws (>=1.1.0,<2.0.0)"] -docs = ["furo (==2023.9.10)", "readthedocs-sphinx-search (>=0.3,<1.0)", "sphinx (>=5.3,<8)", "sphinx-rtd-theme (>=2,<3)", "sphinxcontrib-shellcheck (>=1,<2)"] -encryption = ["certifi", "pymongo-auth-aws (>=1.1.0,<2.0.0)", "pymongocrypt (>=1.6.0,<2.0.0)"] -gssapi = ["pykerberos", "winkerberos (>=0.5.0)"] -ocsp = ["certifi", "cryptography (>=2.5)", "pyopenssl (>=17.2.0)", "requests (<3.0.0)", "service-identity (>=18.1.0)"] -snappy = ["python-snappy"] -test = ["pytest (>=7)"] -zstd = ["zstandard"] - -[[package]] -name = "pynacl" -version = "1.5.0" -description = "Python binding to the Networking and Cryptography (NaCl) library" -optional = false -python-versions = ">=3.6" -files = [ - {file = "PyNaCl-1.5.0-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:401002a4aaa07c9414132aaed7f6836ff98f59277a234704ff66878c2ee4a0d1"}, - {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:52cb72a79269189d4e0dc537556f4740f7f0a9ec41c1322598799b0bdad4ef92"}, - {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a36d4a9dda1f19ce6e03c9a784a2921a4b726b02e1c736600ca9c22029474394"}, - {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0c84947a22519e013607c9be43706dd42513f9e6ae5d39d3613ca1e142fba44d"}, - {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06b8f6fa7f5de8d5d2f7573fe8c863c051225a27b61e6860fd047b1775807858"}, - {file = "PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a422368fc821589c228f4c49438a368831cb5bbc0eab5ebe1d7fac9dded6567b"}, - {file = "PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:61f642bf2378713e2c2e1de73444a3778e5f0a38be6fee0fe532fe30060282ff"}, - {file = "PyNaCl-1.5.0-cp36-abi3-win32.whl", hash = "sha256:e46dae94e34b085175f8abb3b0aaa7da40767865ac82c928eeb9e57e1ea8a543"}, - {file = "PyNaCl-1.5.0-cp36-abi3-win_amd64.whl", hash = "sha256:20f42270d27e1b6a29f54032090b972d97f0a1b0948cc52392041ef7831fee93"}, - {file = "PyNaCl-1.5.0.tar.gz", hash = "sha256:8ac7448f09ab85811607bdd21ec2464495ac8b7c66d146bf545b0f08fb9220ba"}, -] - -[package.dependencies] -cffi = ">=1.4.1" - -[package.extras] -docs = ["sphinx (>=1.6.5)", "sphinx-rtd-theme"] -tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] - -[[package]] -name = "pyparsing" -version = "3.1.2" -description = "pyparsing module - Classes and methods to define and execute parsing grammars" -optional = false -python-versions = ">=3.6.8" -files = [ - {file = "pyparsing-3.1.2-py3-none-any.whl", hash = "sha256:f9db75911801ed778fe61bb643079ff86601aca99fcae6345aa67292038fb742"}, - {file = "pyparsing-3.1.2.tar.gz", hash = 
"sha256:a1bac0ce561155ecc3ed78ca94d3c9378656ad4c94c1270de543f621420f94ad"}, -] - -[package.extras] -diagrams = ["jinja2", "railroad-diagrams"] - -[[package]] -name = "pypdf" -version = "4.3.1" -description = "A pure-python PDF library capable of splitting, merging, cropping, and transforming PDF files" -optional = false -python-versions = ">=3.6" -files = [ - {file = "pypdf-4.3.1-py3-none-any.whl", hash = "sha256:64b31da97eda0771ef22edb1bfecd5deee4b72c3d1736b7df2689805076d6418"}, - {file = "pypdf-4.3.1.tar.gz", hash = "sha256:b2f37fe9a3030aa97ca86067a56ba3f9d3565f9a791b305c7355d8392c30d91b"}, -] - -[package.dependencies] -typing_extensions = {version = ">=4.0", markers = "python_version < \"3.11\""} - -[package.extras] -crypto = ["PyCryptodome", "cryptography"] -dev = ["black", "flit", "pip-tools", "pre-commit (<2.18.0)", "pytest-cov", "pytest-socket", "pytest-timeout", "pytest-xdist", "wheel"] -docs = ["myst_parser", "sphinx", "sphinx_rtd_theme"] -full = ["Pillow (>=8.0.0)", "PyCryptodome", "cryptography"] -image = ["Pillow (>=8.0.0)"] - -[[package]] -name = "pyperclip" -version = "1.9.0" -description = "A cross-platform clipboard module for Python. (Only handles plain text for now.)" -optional = false -python-versions = "*" -files = [ - {file = "pyperclip-1.9.0.tar.gz", hash = "sha256:b7de0142ddc81bfc5c7507eea19da920b92252b548b96186caf94a5e2527d310"}, -] - -[[package]] -name = "pypika" -version = "0.48.9" -description = "A SQL query builder API for Python" -optional = false -python-versions = "*" -files = [ - {file = "PyPika-0.48.9.tar.gz", hash = "sha256:838836a61747e7c8380cd1b7ff638694b7a7335345d0f559b04b2cd832ad5378"}, -] - -[[package]] -name = "pyproject-hooks" -version = "1.1.0" -description = "Wrappers to call pyproject.toml-based build backend hooks." -optional = false -python-versions = ">=3.7" -files = [ - {file = "pyproject_hooks-1.1.0-py3-none-any.whl", hash = "sha256:7ceeefe9aec63a1064c18d939bdc3adf2d8aa1988a510afec15151578b232aa2"}, - {file = "pyproject_hooks-1.1.0.tar.gz", hash = "sha256:4b37730834edbd6bd37f26ece6b44802fb1c1ee2ece0e54ddff8bfc06db86965"}, -] - -[[package]] -name = "pyreadline3" -version = "3.4.1" -description = "A python implementation of GNU readline." -optional = false -python-versions = "*" -files = [ - {file = "pyreadline3-3.4.1-py3-none-any.whl", hash = "sha256:b0efb6516fd4fb07b45949053826a62fa4cb353db5be2bbb4a7aa1fdd1e345fb"}, - {file = "pyreadline3-3.4.1.tar.gz", hash = "sha256:6f3d1f7b8a31ba32b73917cefc1f28cc660562f39aea8646d30bd6eff21f7bae"}, -] - -[[package]] -name = "pyright" -version = "1.1.376" -description = "Command line wrapper for pyright" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pyright-1.1.376-py3-none-any.whl", hash = "sha256:0f2473b12c15c46b3207f0eec224c3cea2bdc07cd45dd4a037687cbbca0fbeff"}, - {file = "pyright-1.1.376.tar.gz", hash = "sha256:bffd63b197cd0810395bb3245c06b01f95a85ddf6bfa0e5644ed69c841e954dd"}, -] - -[package.dependencies] -nodeenv = ">=1.6.0" - -[package.extras] -all = ["twine (>=3.4.1)"] -dev = ["twine (>=3.4.1)"] - -[[package]] -name = "pysbd" -version = "0.3.4" -description = "pysbd (Python Sentence Boundary Disambiguation) is a rule-based sentence boundary detection that works out-of-the-box across many languages." 
-optional = false -python-versions = ">=3" -files = [ - {file = "pysbd-0.3.4-py3-none-any.whl", hash = "sha256:cd838939b7b0b185fcf86b0baf6636667dfb6e474743beeff878e9f42e022953"}, -] - -[[package]] -name = "pysher" -version = "1.0.8" -description = "Pusher websocket client for python, based on Erik Kulyk's PythonPusherClient" -optional = false -python-versions = "*" -files = [ - {file = "Pysher-1.0.8.tar.gz", hash = "sha256:7849c56032b208e49df67d7bd8d49029a69042ab0bb45b2ed59fa08f11ac5988"}, -] - -[package.dependencies] -requests = ">=2.26.0" -websocket-client = "!=0.49" - -[[package]] -name = "pysocks" -version = "1.7.1" -description = "A Python SOCKS client module. See https://github.com/Anorov/PySocks for more information." -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "PySocks-1.7.1-py27-none-any.whl", hash = "sha256:08e69f092cc6dbe92a0fdd16eeb9b9ffbc13cadfe5ca4c7bd92ffb078b293299"}, - {file = "PySocks-1.7.1-py3-none-any.whl", hash = "sha256:2725bd0a9925919b9b51739eea5f9e2bae91e83288108a9ad338b2e3a4435ee5"}, - {file = "PySocks-1.7.1.tar.gz", hash = "sha256:3f8804571ebe159c380ac6de37643bb4685970655d3bba243530d6558b799aa0"}, -] - -[[package]] -name = "pytest" -version = "8.3.2" -description = "pytest: simple powerful testing with Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pytest-8.3.2-py3-none-any.whl", hash = "sha256:4ba08f9ae7dcf84ded419494d229b48d0903ea6407b030eaec46df5e6a73bba5"}, - {file = "pytest-8.3.2.tar.gz", hash = "sha256:c132345d12ce551242c87269de812483f5bcc87cdbb4722e48487ba194f9fdce"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "sys_platform == \"win32\""} -exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} -iniconfig = "*" -packaging = "*" -pluggy = ">=1.5,<2" -tomli = {version = ">=1", markers = "python_version < \"3.11\""} - -[package.extras] -dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] - -[[package]] -name = "pytest-asyncio" -version = "0.23.8" -description = "Pytest support for asyncio" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pytest_asyncio-0.23.8-py3-none-any.whl", hash = "sha256:50265d892689a5faefb84df80819d1ecef566eb3549cf915dfb33569359d1ce2"}, - {file = "pytest_asyncio-0.23.8.tar.gz", hash = "sha256:759b10b33a6dc61cce40a8bd5205e302978bbbcc00e279a8b61d9a6a3c82e4d3"}, -] - -[package.dependencies] -pytest = ">=7.0.0,<9" - -[package.extras] -docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] -testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] - -[[package]] -name = "pytest-cov" -version = "5.0.0" -description = "Pytest plugin for measuring coverage." -optional = false -python-versions = ">=3.8" -files = [ - {file = "pytest-cov-5.0.0.tar.gz", hash = "sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857"}, - {file = "pytest_cov-5.0.0-py3-none-any.whl", hash = "sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652"}, -] - -[package.dependencies] -coverage = {version = ">=5.2.1", extras = ["toml"]} -pytest = ">=4.6" - -[package.extras] -testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"] - -[[package]] -name = "pytest-flakefinder" -version = "1.1.0" -description = "Runs tests multiple times to expose flakiness." 
-optional = false -python-versions = ">=3.5" -files = [ - {file = "pytest-flakefinder-1.1.0.tar.gz", hash = "sha256:e2412a1920bdb8e7908783b20b3d57e9dad590cc39a93e8596ffdd493b403e0e"}, - {file = "pytest_flakefinder-1.1.0-py2.py3-none-any.whl", hash = "sha256:741e0e8eea427052f5b8c89c2b3c3019a50c39a59ce4df6a305a2c2d9ba2bd13"}, -] - -[package.dependencies] -pytest = ">=2.7.1" - -[[package]] -name = "pytest-instafail" -version = "0.5.0" -description = "pytest plugin to show failures instantly" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pytest-instafail-0.5.0.tar.gz", hash = "sha256:33a606f7e0c8e646dc3bfee0d5e3a4b7b78ef7c36168cfa1f3d93af7ca706c9e"}, - {file = "pytest_instafail-0.5.0-py3-none-any.whl", hash = "sha256:6855414487e9e4bb76a118ce952c3c27d3866af15487506c4ded92eb72387819"}, -] - -[package.dependencies] -pytest = ">=5" - -[[package]] -name = "pytest-mock" -version = "3.14.0" -description = "Thin-wrapper around the mock package for easier use with pytest" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, - {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, -] - -[package.dependencies] -pytest = ">=6.2.5" - -[package.extras] -dev = ["pre-commit", "pytest-asyncio", "tox"] - -[[package]] -name = "pytest-profiling" -version = "1.7.0" -description = "Profiling plugin for py.test" -optional = false -python-versions = "*" -files = [ - {file = "pytest-profiling-1.7.0.tar.gz", hash = "sha256:93938f147662225d2b8bd5af89587b979652426a8a6ffd7e73ec4a23e24b7f29"}, - {file = "pytest_profiling-1.7.0-py2.py3-none-any.whl", hash = "sha256:999cc9ac94f2e528e3f5d43465da277429984a1c237ae9818f8cfd0b06acb019"}, -] - -[package.dependencies] -gprof2dot = "*" -pytest = "*" -six = "*" - -[package.extras] -tests = ["pytest-virtualenv"] - -[[package]] -name = "pytest-split" -version = "0.9.0" -description = "Pytest plugin which splits the test suite to equally sized sub suites based on test execution time." -optional = false -python-versions = "<4.0,>=3.8.1" -files = [ - {file = "pytest_split-0.9.0-py3-none-any.whl", hash = "sha256:9e197df601828d76a1ab615158d9c6253ec9f96e46c1d3ea27187aa5ac0ef9de"}, - {file = "pytest_split-0.9.0.tar.gz", hash = "sha256:ca52527e4d9024f6ec3aba723527bd276d12096024999b1f5b8445a38da1e81c"}, -] - -[package.dependencies] -pytest = ">=5,<9" - -[[package]] -name = "pytest-sugar" -version = "1.0.0" -description = "pytest-sugar is a plugin for pytest that changes the default look and feel of pytest (e.g. progressbar, show tests that fail instantly)." 
-optional = false -python-versions = "*" -files = [ - {file = "pytest-sugar-1.0.0.tar.gz", hash = "sha256:6422e83258f5b0c04ce7c632176c7732cab5fdb909cb39cca5c9139f81276c0a"}, - {file = "pytest_sugar-1.0.0-py3-none-any.whl", hash = "sha256:70ebcd8fc5795dc457ff8b69d266a4e2e8a74ae0c3edc749381c64b5246c8dfd"}, -] - -[package.dependencies] -packaging = ">=21.3" -pytest = ">=6.2.0" -termcolor = ">=2.1.0" - -[package.extras] -dev = ["black", "flake8", "pre-commit"] - -[[package]] -name = "pytest-xdist" -version = "3.6.1" -description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pytest_xdist-3.6.1-py3-none-any.whl", hash = "sha256:9ed4adfb68a016610848639bb7e02c9352d5d9f03d04809919e2dafc3be4cca7"}, - {file = "pytest_xdist-3.6.1.tar.gz", hash = "sha256:ead156a4db231eec769737f57668ef58a2084a34b2e55c4a8fa20d861107300d"}, -] - -[package.dependencies] -execnet = ">=2.1" -pytest = ">=7.0.0" - -[package.extras] -psutil = ["psutil (>=3.0)"] -setproctitle = ["setproctitle"] -testing = ["filelock"] - -[[package]] -name = "python-dateutil" -version = "2.9.0.post0" -description = "Extensions to the standard Python datetime module" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -files = [ - {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, - {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, -] - -[package.dependencies] -six = ">=1.5" - -[[package]] -name = "python-docx" -version = "1.1.2" -description = "Create, read, and update Microsoft Word .docx files." -optional = false -python-versions = ">=3.7" -files = [ - {file = "python_docx-1.1.2-py3-none-any.whl", hash = "sha256:08c20d6058916fb19853fcf080f7f42b6270d89eac9fa5f8c15f691c0017fabe"}, - {file = "python_docx-1.1.2.tar.gz", hash = "sha256:0cf1f22e95b9002addca7948e16f2cd7acdfd498047f1941ca5d293db7762efd"}, -] - -[package.dependencies] -lxml = ">=3.1.0" -typing-extensions = ">=4.9.0" - -[[package]] -name = "python-dotenv" -version = "1.0.1" -description = "Read key-value pairs from a .env file and set them as environment variables" -optional = false -python-versions = ">=3.8" -files = [ - {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, - {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, -] - -[package.extras] -cli = ["click (>=5.0)"] - -[[package]] -name = "python-iso639" -version = "2024.4.27" -description = "ISO 639 language codes, names, and other associated information" -optional = false -python-versions = ">=3.8" -files = [ - {file = "python_iso639-2024.4.27-py3-none-any.whl", hash = "sha256:27526a84cebc4c4d53fea9d1ebbc7209c8d279bebaa343e6765a1fc8780565ab"}, - {file = "python_iso639-2024.4.27.tar.gz", hash = "sha256:97e63b5603e085c6a56a12a95740010e75d9134e0aab767e0978b53fd8824f13"}, -] - -[package.extras] -dev = ["black (==24.4.2)", "build (==1.2.1)", "flake8 (==7.0.0)", "pytest (==8.1.2)", "requests (==2.31.0)", "twine (==5.0.0)"] - -[[package]] -name = "python-jose" -version = "3.3.0" -description = "JOSE implementation in Python" -optional = false -python-versions = "*" -files = [ - {file = "python-jose-3.3.0.tar.gz", hash = "sha256:55779b5e6ad599c6336191246e95eb2293a9ddebd555f796a65f838f07e5d78a"}, - 
{file = "python_jose-3.3.0-py2.py3-none-any.whl", hash = "sha256:9b1376b023f8b298536eedd47ae1089bcdb848f1535ab30555cd92002d78923a"}, -] - -[package.dependencies] -ecdsa = "!=0.15" -pyasn1 = "*" -rsa = "*" - -[package.extras] -cryptography = ["cryptography (>=3.4.0)"] -pycrypto = ["pyasn1", "pycrypto (>=2.6.0,<2.7.0)"] -pycryptodome = ["pyasn1", "pycryptodome (>=3.3.1,<4.0.0)"] - -[[package]] -name = "python-magic" -version = "0.4.27" -description = "File type identification using libmagic" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "python-magic-0.4.27.tar.gz", hash = "sha256:c1ba14b08e4a5f5c31a302b7721239695b2f0f058d125bd5ce1ee36b9d9d3c3b"}, - {file = "python_magic-0.4.27-py2.py3-none-any.whl", hash = "sha256:c212960ad306f700aa0d01e5d7a325d20548ff97eb9920dcd29513174f0294d3"}, -] - -[[package]] -name = "python-multipart" -version = "0.0.7" -description = "A streaming multipart parser for Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "python_multipart-0.0.7-py3-none-any.whl", hash = "sha256:b1fef9a53b74c795e2347daac8c54b252d9e0df9c619712691c1cc8021bd3c49"}, - {file = "python_multipart-0.0.7.tar.gz", hash = "sha256:288a6c39b06596c1b988bb6794c6fbc80e6c369e35e5062637df256bee0c9af9"}, -] - -[package.extras] -dev = ["atomicwrites (==1.2.1)", "attrs (==19.2.0)", "coverage (==6.5.0)", "hatch", "invoke (==2.2.0)", "more-itertools (==4.3.0)", "pbr (==4.3.0)", "pluggy (==1.0.0)", "py (==1.11.0)", "pytest (==7.2.0)", "pytest-cov (==4.0.0)", "pytest-timeout (==2.1.0)", "pyyaml (==5.1)"] - -[[package]] -name = "python-pptx" -version = "0.6.23" -description = "Generate and manipulate Open XML PowerPoint (.pptx) files" -optional = false -python-versions = "*" -files = [ - {file = "python-pptx-0.6.23.tar.gz", hash = "sha256:587497ff28e779ab18dbb074f6d4052893c85dedc95ed75df319364f331fedee"}, - {file = "python_pptx-0.6.23-py3-none-any.whl", hash = "sha256:dd0527194627a2b7cc05f3ba23ecaa2d9a0d5ac9b6193a28ed1b7a716f4217d4"}, -] - -[package.dependencies] -lxml = ">=3.1.0" -Pillow = ">=3.3.2" -XlsxWriter = ">=0.5.7" - -[[package]] -name = "pytube" -version = "15.0.0" -description = "Python 3 library for downloading YouTube Videos." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "pytube-15.0.0-py3-none-any.whl", hash = "sha256:07b9904749e213485780d7eb606e5e5b8e4341aa4dccf699160876da00e12d78"}, - {file = "pytube-15.0.0.tar.gz", hash = "sha256:076052efe76f390dfa24b1194ff821d4e86c17d41cb5562f3a276a8bcbfc9d1d"}, -] - -[[package]] -name = "pytz" -version = "2024.1" -description = "World timezone definitions, modern and historical" -optional = false -python-versions = "*" -files = [ - {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, - {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, -] - -[[package]] -name = "pywin32" -version = "306" -description = "Python for Window Extensions" -optional = false -python-versions = "*" -files = [ - {file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"}, - {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"}, - {file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"}, - {file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"}, - {file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"}, - {file = "pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"}, - {file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"}, - {file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"}, - {file = "pywin32-306-cp37-cp37m-win32.whl", hash = "sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"}, - {file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"}, - {file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"}, - {file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"}, - {file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"}, - {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, -] - -[[package]] -name = "pyyaml" -version = "6.0.2" -description = "YAML parser and emitter for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, - {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, - {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, - {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, - {file = 
"PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, - {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, - {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, - {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, - {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, - {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, - {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, - {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, - {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, - {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, - {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, - {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, - {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, - {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, - {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, - {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, - {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, - {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, - {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, - {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, - {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, - {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, - {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, - {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, - {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, - {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, - {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, - {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, - {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, - {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, - {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, - {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, - {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, - {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, - {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, - {file = 
"PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, - {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, -] - -[[package]] -name = "pyzmq" -version = "26.1.0" -description = "Python bindings for 0MQ" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pyzmq-26.1.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:263cf1e36862310bf5becfbc488e18d5d698941858860c5a8c079d1511b3b18e"}, - {file = "pyzmq-26.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d5c8b17f6e8f29138678834cf8518049e740385eb2dbf736e8f07fc6587ec682"}, - {file = "pyzmq-26.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:75a95c2358fcfdef3374cb8baf57f1064d73246d55e41683aaffb6cfe6862917"}, - {file = "pyzmq-26.1.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f99de52b8fbdb2a8f5301ae5fc0f9e6b3ba30d1d5fc0421956967edcc6914242"}, - {file = "pyzmq-26.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bcbfbab4e1895d58ab7da1b5ce9a327764f0366911ba5b95406c9104bceacb0"}, - {file = "pyzmq-26.1.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:77ce6a332c7e362cb59b63f5edf730e83590d0ab4e59c2aa5bd79419a42e3449"}, - {file = "pyzmq-26.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ba0a31d00e8616149a5ab440d058ec2da621e05d744914774c4dde6837e1f545"}, - {file = "pyzmq-26.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8b88641384e84a258b740801cd4dbc45c75f148ee674bec3149999adda4a8598"}, - {file = "pyzmq-26.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2fa76ebcebe555cce90f16246edc3ad83ab65bb7b3d4ce408cf6bc67740c4f88"}, - {file = "pyzmq-26.1.0-cp310-cp310-win32.whl", hash = "sha256:fbf558551cf415586e91160d69ca6416f3fce0b86175b64e4293644a7416b81b"}, - {file = "pyzmq-26.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:a7b8aab50e5a288c9724d260feae25eda69582be84e97c012c80e1a5e7e03fb2"}, - {file = "pyzmq-26.1.0-cp310-cp310-win_arm64.whl", hash = "sha256:08f74904cb066e1178c1ec706dfdb5c6c680cd7a8ed9efebeac923d84c1f13b1"}, - {file = "pyzmq-26.1.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:46d6800b45015f96b9d92ece229d92f2aef137d82906577d55fadeb9cf5fcb71"}, - {file = "pyzmq-26.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5bc2431167adc50ba42ea3e5e5f5cd70d93e18ab7b2f95e724dd8e1bd2c38120"}, - {file = "pyzmq-26.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b3bb34bebaa1b78e562931a1687ff663d298013f78f972a534f36c523311a84d"}, - {file = "pyzmq-26.1.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd3f6329340cef1c7ba9611bd038f2d523cea79f09f9c8f6b0553caba59ec562"}, - {file = "pyzmq-26.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:471880c4c14e5a056a96cd224f5e71211997d40b4bf5e9fdded55dafab1f98f2"}, - {file = "pyzmq-26.1.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:ce6f2b66799971cbae5d6547acefa7231458289e0ad481d0be0740535da38d8b"}, - {file = "pyzmq-26.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0a1f6ea5b1d6cdbb8cfa0536f0d470f12b4b41ad83625012e575f0e3ecfe97f0"}, - {file = "pyzmq-26.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b45e6445ac95ecb7d728604bae6538f40ccf4449b132b5428c09918523abc96d"}, - {file = "pyzmq-26.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:94c4262626424683feea0f3c34951d39d49d354722db2745c42aa6bb50ecd93b"}, - 
{file = "pyzmq-26.1.0-cp311-cp311-win32.whl", hash = "sha256:a0f0ab9df66eb34d58205913f4540e2ad17a175b05d81b0b7197bc57d000e829"}, - {file = "pyzmq-26.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:8efb782f5a6c450589dbab4cb0f66f3a9026286333fe8f3a084399149af52f29"}, - {file = "pyzmq-26.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:f133d05aaf623519f45e16ab77526e1e70d4e1308e084c2fb4cedb1a0c764bbb"}, - {file = "pyzmq-26.1.0-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:3d3146b1c3dcc8a1539e7cc094700b2be1e605a76f7c8f0979b6d3bde5ad4072"}, - {file = "pyzmq-26.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d9270fbf038bf34ffca4855bcda6e082e2c7f906b9eb8d9a8ce82691166060f7"}, - {file = "pyzmq-26.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:995301f6740a421afc863a713fe62c0aaf564708d4aa057dfdf0f0f56525294b"}, - {file = "pyzmq-26.1.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7eca8b89e56fb8c6c26dd3e09bd41b24789022acf1cf13358e96f1cafd8cae3"}, - {file = "pyzmq-26.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d4feb2e83dfe9ace6374a847e98ee9d1246ebadcc0cb765482e272c34e5820"}, - {file = "pyzmq-26.1.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:d4fafc2eb5d83f4647331267808c7e0c5722c25a729a614dc2b90479cafa78bd"}, - {file = "pyzmq-26.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:58c33dc0e185dd97a9ac0288b3188d1be12b756eda67490e6ed6a75cf9491d79"}, - {file = "pyzmq-26.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:68a0a1d83d33d8367ddddb3e6bb4afbb0f92bd1dac2c72cd5e5ddc86bdafd3eb"}, - {file = "pyzmq-26.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2ae7c57e22ad881af78075e0cea10a4c778e67234adc65c404391b417a4dda83"}, - {file = "pyzmq-26.1.0-cp312-cp312-win32.whl", hash = "sha256:347e84fc88cc4cb646597f6d3a7ea0998f887ee8dc31c08587e9c3fd7b5ccef3"}, - {file = "pyzmq-26.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:9f136a6e964830230912f75b5a116a21fe8e34128dcfd82285aa0ef07cb2c7bd"}, - {file = "pyzmq-26.1.0-cp312-cp312-win_arm64.whl", hash = "sha256:a4b7a989c8f5a72ab1b2bbfa58105578753ae77b71ba33e7383a31ff75a504c4"}, - {file = "pyzmq-26.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d416f2088ac8f12daacffbc2e8918ef4d6be8568e9d7155c83b7cebed49d2322"}, - {file = "pyzmq-26.1.0-cp313-cp313-macosx_10_15_universal2.whl", hash = "sha256:ecb6c88d7946166d783a635efc89f9a1ff11c33d680a20df9657b6902a1d133b"}, - {file = "pyzmq-26.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:471312a7375571857a089342beccc1a63584315188560c7c0da7e0a23afd8a5c"}, - {file = "pyzmq-26.1.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e6cea102ffa16b737d11932c426f1dc14b5938cf7bc12e17269559c458ac334"}, - {file = "pyzmq-26.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec7248673ffc7104b54e4957cee38b2f3075a13442348c8d651777bf41aa45ee"}, - {file = "pyzmq-26.1.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:0614aed6f87d550b5cecb03d795f4ddbb1544b78d02a4bd5eecf644ec98a39f6"}, - {file = "pyzmq-26.1.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:e8746ce968be22a8a1801bf4a23e565f9687088580c3ed07af5846580dd97f76"}, - {file = "pyzmq-26.1.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:7688653574392d2eaeef75ddcd0b2de5b232d8730af29af56c5adf1df9ef8d6f"}, - {file = "pyzmq-26.1.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = 
"sha256:8d4dac7d97f15c653a5fedcafa82626bd6cee1450ccdaf84ffed7ea14f2b07a4"}, - {file = "pyzmq-26.1.0-cp313-cp313-win32.whl", hash = "sha256:ccb42ca0a4a46232d716779421bbebbcad23c08d37c980f02cc3a6bd115ad277"}, - {file = "pyzmq-26.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:e1e5d0a25aea8b691a00d6b54b28ac514c8cc0d8646d05f7ca6cb64b97358250"}, - {file = "pyzmq-26.1.0-cp313-cp313-win_arm64.whl", hash = "sha256:fc82269d24860cfa859b676d18850cbb8e312dcd7eada09e7d5b007e2f3d9eb1"}, - {file = "pyzmq-26.1.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:416ac51cabd54f587995c2b05421324700b22e98d3d0aa2cfaec985524d16f1d"}, - {file = "pyzmq-26.1.0-cp313-cp313t-macosx_10_15_universal2.whl", hash = "sha256:ff832cce719edd11266ca32bc74a626b814fff236824aa1aeaad399b69fe6eae"}, - {file = "pyzmq-26.1.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:393daac1bcf81b2a23e696b7b638eedc965e9e3d2112961a072b6cd8179ad2eb"}, - {file = "pyzmq-26.1.0-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9869fa984c8670c8ab899a719eb7b516860a29bc26300a84d24d8c1b71eae3ec"}, - {file = "pyzmq-26.1.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b3b8e36fd4c32c0825b4461372949ecd1585d326802b1321f8b6dc1d7e9318c"}, - {file = "pyzmq-26.1.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:3ee647d84b83509b7271457bb428cc347037f437ead4b0b6e43b5eba35fec0aa"}, - {file = "pyzmq-26.1.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:45cb1a70eb00405ce3893041099655265fabcd9c4e1e50c330026e82257892c1"}, - {file = "pyzmq-26.1.0-cp313-cp313t-musllinux_1_1_i686.whl", hash = "sha256:5cca7b4adb86d7470e0fc96037771981d740f0b4cb99776d5cb59cd0e6684a73"}, - {file = "pyzmq-26.1.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:91d1a20bdaf3b25f3173ff44e54b1cfbc05f94c9e8133314eb2962a89e05d6e3"}, - {file = "pyzmq-26.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c0665d85535192098420428c779361b8823d3d7ec4848c6af3abb93bc5c915bf"}, - {file = "pyzmq-26.1.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:96d7c1d35ee4a495df56c50c83df7af1c9688cce2e9e0edffdbf50889c167595"}, - {file = "pyzmq-26.1.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b281b5ff5fcc9dcbfe941ac5c7fcd4b6c065adad12d850f95c9d6f23c2652384"}, - {file = "pyzmq-26.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5384c527a9a004445c5074f1e20db83086c8ff1682a626676229aafd9cf9f7d1"}, - {file = "pyzmq-26.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:754c99a9840839375ee251b38ac5964c0f369306eddb56804a073b6efdc0cd88"}, - {file = "pyzmq-26.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9bdfcb74b469b592972ed881bad57d22e2c0acc89f5e8c146782d0d90fb9f4bf"}, - {file = "pyzmq-26.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bd13f0231f4788db619347b971ca5f319c5b7ebee151afc7c14632068c6261d3"}, - {file = "pyzmq-26.1.0-cp37-cp37m-win32.whl", hash = "sha256:c5668dac86a869349828db5fc928ee3f58d450dce2c85607067d581f745e4fb1"}, - {file = "pyzmq-26.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:ad875277844cfaeca7fe299ddf8c8d8bfe271c3dc1caf14d454faa5cdbf2fa7a"}, - {file = "pyzmq-26.1.0-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:65c6e03cc0222eaf6aad57ff4ecc0a070451e23232bb48db4322cc45602cede0"}, - {file = "pyzmq-26.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:038ae4ffb63e3991f386e7fda85a9baab7d6617fe85b74a8f9cab190d73adb2b"}, - {file = 
"pyzmq-26.1.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:bdeb2c61611293f64ac1073f4bf6723b67d291905308a7de9bb2ca87464e3273"}, - {file = "pyzmq-26.1.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:61dfa5ee9d7df297c859ac82b1226d8fefaf9c5113dc25c2c00ecad6feeeb04f"}, - {file = "pyzmq-26.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3292d384537b9918010769b82ab3e79fca8b23d74f56fc69a679106a3e2c2cf"}, - {file = "pyzmq-26.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f9499c70c19ff0fbe1007043acb5ad15c1dec7d8e84ab429bca8c87138e8f85c"}, - {file = "pyzmq-26.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d3dd5523ed258ad58fed7e364c92a9360d1af8a9371e0822bd0146bdf017ef4c"}, - {file = "pyzmq-26.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:baba2fd199b098c5544ef2536b2499d2e2155392973ad32687024bd8572a7d1c"}, - {file = "pyzmq-26.1.0-cp38-cp38-win32.whl", hash = "sha256:ddbb2b386128d8eca92bd9ca74e80f73fe263bcca7aa419f5b4cbc1661e19741"}, - {file = "pyzmq-26.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:79e45a4096ec8388cdeb04a9fa5e9371583bcb826964d55b8b66cbffe7b33c86"}, - {file = "pyzmq-26.1.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:add52c78a12196bc0fda2de087ba6c876ea677cbda2e3eba63546b26e8bf177b"}, - {file = "pyzmq-26.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:98c03bd7f3339ff47de7ea9ac94a2b34580a8d4df69b50128bb6669e1191a895"}, - {file = "pyzmq-26.1.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:dcc37d9d708784726fafc9c5e1232de655a009dbf97946f117aefa38d5985a0f"}, - {file = "pyzmq-26.1.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5a6ed52f0b9bf8dcc64cc82cce0607a3dfed1dbb7e8c6f282adfccc7be9781de"}, - {file = "pyzmq-26.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:451e16ae8bea3d95649317b463c9f95cd9022641ec884e3d63fc67841ae86dfe"}, - {file = "pyzmq-26.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:906e532c814e1d579138177a00ae835cd6becbf104d45ed9093a3aaf658f6a6a"}, - {file = "pyzmq-26.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:05bacc4f94af468cc82808ae3293390278d5f3375bb20fef21e2034bb9a505b6"}, - {file = "pyzmq-26.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:57bb2acba798dc3740e913ffadd56b1fcef96f111e66f09e2a8db3050f1f12c8"}, - {file = "pyzmq-26.1.0-cp39-cp39-win32.whl", hash = "sha256:f774841bb0e8588505002962c02da420bcfb4c5056e87a139c6e45e745c0e2e2"}, - {file = "pyzmq-26.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:359c533bedc62c56415a1f5fcfd8279bc93453afdb0803307375ecf81c962402"}, - {file = "pyzmq-26.1.0-cp39-cp39-win_arm64.whl", hash = "sha256:7907419d150b19962138ecec81a17d4892ea440c184949dc29b358bc730caf69"}, - {file = "pyzmq-26.1.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:b24079a14c9596846bf7516fe75d1e2188d4a528364494859106a33d8b48be38"}, - {file = "pyzmq-26.1.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59d0acd2976e1064f1b398a00e2c3e77ed0a157529779e23087d4c2fb8aaa416"}, - {file = "pyzmq-26.1.0-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:911c43a4117915203c4cc8755e0f888e16c4676a82f61caee2f21b0c00e5b894"}, - {file = "pyzmq-26.1.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b10163e586cc609f5f85c9b233195554d77b1e9a0801388907441aaeb22841c5"}, - {file = "pyzmq-26.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = 
"sha256:28a8b2abb76042f5fd7bd720f7fea48c0fd3e82e9de0a1bf2c0de3812ce44a42"}, - {file = "pyzmq-26.1.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:bef24d3e4ae2c985034439f449e3f9e06bf579974ce0e53d8a507a1577d5b2ab"}, - {file = "pyzmq-26.1.0-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2cd0f4d314f4a2518e8970b6f299ae18cff7c44d4a1fc06fc713f791c3a9e3ea"}, - {file = "pyzmq-26.1.0-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fa25a620eed2a419acc2cf10135b995f8f0ce78ad00534d729aa761e4adcef8a"}, - {file = "pyzmq-26.1.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef3b048822dca6d231d8a8ba21069844ae38f5d83889b9b690bf17d2acc7d099"}, - {file = "pyzmq-26.1.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:9a6847c92d9851b59b9f33f968c68e9e441f9a0f8fc972c5580c5cd7cbc6ee24"}, - {file = "pyzmq-26.1.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c9b9305004d7e4e6a824f4f19b6d8f32b3578aad6f19fc1122aaf320cbe3dc83"}, - {file = "pyzmq-26.1.0-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:63c1d3a65acb2f9c92dce03c4e1758cc552f1ae5c78d79a44e3bb88d2fa71f3a"}, - {file = "pyzmq-26.1.0-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d36b8fffe8b248a1b961c86fbdfa0129dfce878731d169ede7fa2631447331be"}, - {file = "pyzmq-26.1.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67976d12ebfd61a3bc7d77b71a9589b4d61d0422282596cf58c62c3866916544"}, - {file = "pyzmq-26.1.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:998444debc8816b5d8d15f966e42751032d0f4c55300c48cc337f2b3e4f17d03"}, - {file = "pyzmq-26.1.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:e5c88b2f13bcf55fee78ea83567b9fe079ba1a4bef8b35c376043440040f7edb"}, - {file = "pyzmq-26.1.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d906d43e1592be4b25a587b7d96527cb67277542a5611e8ea9e996182fae410"}, - {file = "pyzmq-26.1.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80b0c9942430d731c786545da6be96d824a41a51742e3e374fedd9018ea43106"}, - {file = "pyzmq-26.1.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:314d11564c00b77f6224d12eb3ddebe926c301e86b648a1835c5b28176c83eab"}, - {file = "pyzmq-26.1.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:093a1a3cae2496233f14b57f4b485da01b4ff764582c854c0f42c6dd2be37f3d"}, - {file = "pyzmq-26.1.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3c397b1b450f749a7e974d74c06d69bd22dd362142f370ef2bd32a684d6b480c"}, - {file = "pyzmq-26.1.0.tar.gz", hash = "sha256:6c5aeea71f018ebd3b9115c7cb13863dd850e98ca6b9258509de1246461a7e7f"}, -] - -[package.dependencies] -cffi = {version = "*", markers = "implementation_name == \"pypy\""} - -[[package]] -name = "qdrant-client" -version = "1.11.0" -description = "Client library for the Qdrant vector search engine" -optional = false -python-versions = ">=3.8" -files = [ - {file = "qdrant_client-1.11.0-py3-none-any.whl", hash = "sha256:1f574ccebb91c0bc8a620c9a41a5a010084fbc4d8c6f1cd0ab7b2eeb97336fc0"}, - {file = "qdrant_client-1.11.0.tar.gz", hash = "sha256:7c1d4d7a96cfd1ee0cde2a21c607e9df86bcca795ad8d1fd274d295ab64b8458"}, -] - -[package.dependencies] -grpcio = ">=1.41.0" -grpcio-tools = ">=1.41.0" -httpx = {version = ">=0.20.0", extras = ["http2"]} -numpy = [ - {version = ">=1.21", markers = "python_version >= \"3.8\" and python_version < \"3.12\""}, - {version = 
">=1.26", markers = "python_version >= \"3.12\""}, -] -portalocker = ">=2.7.0,<3.0.0" -pydantic = ">=1.10.8" -urllib3 = ">=1.26.14,<3" - -[package.extras] -fastembed = ["fastembed (==0.3.4)"] -fastembed-gpu = ["fastembed-gpu (==0.3.4)"] - -[[package]] -name = "qianfan" -version = "0.3.5" -description = "文心千帆大模型平台 Python SDK" -optional = false -python-versions = ">=3.7,<4" -files = [ - {file = "qianfan-0.3.5-py3-none-any.whl", hash = "sha256:0d5712c93ec6877c4176aae21ff58b41ccfef7ba661c6e19c07209c353353a16"}, - {file = "qianfan-0.3.5.tar.gz", hash = "sha256:b71847888bd99d61cee5f84f614f431204f3d656d71dd7ae1d0f9bc9ae51b42b"}, -] - -[package.dependencies] -aiohttp = ">=3.7.0" -aiolimiter = ">=1.1.0" -bce-python-sdk = ">=0.8.79" -clevercsv = {version = "*", markers = "python_version >= \"3.8\""} -ijson = "*" -multiprocess = "*" -numpy = {version = ">=1.22.0", markers = "python_version >= \"3.8\""} -prompt-toolkit = ">=3.0.38" -pyarrow = {version = ">=14.0.1", markers = "python_version >= \"3.8\""} -pydantic = "*" -python-dateutil = ">=2.8.2,<3.0.0" -python-dotenv = {version = ">=1.0", markers = "python_version >= \"3.8\""} -pyyaml = ">=6.0.1,<7.0.0" -requests = ">=2.24" -rich = ">=13.0.0" -tenacity = ">=8.2.3,<9.0.0" -typer = ">=0.9.0" -typing-extensions = {version = ">=4.0.0", markers = "python_full_version <= \"3.10.0\""} - -[package.extras] -all = ["emoji", "langchain (>=0.0.321)", "ltp", "sentencepiece", "torch", "torch (<=1.13.1)"] -data-clean = ["emoji", "ltp", "sentencepiece", "torch", "torch (<=1.13.1)"] -langchain = ["langchain (>=0.0.321)"] - -[[package]] -name = "rapidfuzz" -version = "3.9.6" -description = "rapid fuzzy string matching" -optional = false -python-versions = ">=3.8" -files = [ - {file = "rapidfuzz-3.9.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a7ed0d0b9c85720f0ae33ac5efc8dc3f60c1489dad5c29d735fbdf2f66f0431f"}, - {file = "rapidfuzz-3.9.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f3deff6ab7017ed21b9aec5874a07ad13e6b2a688af055837f88b743c7bfd947"}, - {file = "rapidfuzz-3.9.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3f9fc060160507b2704f7d1491bd58453d69689b580cbc85289335b14fe8ca"}, - {file = "rapidfuzz-3.9.6-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c4e86c2b3827fa6169ad6e7d4b790ce02a20acefb8b78d92fa4249589bbc7a2c"}, - {file = "rapidfuzz-3.9.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f982e1aafb4bd8207a5e073b1efef9e68a984e91330e1bbf364f9ed157ed83f0"}, - {file = "rapidfuzz-3.9.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9196a51d0ec5eaaaf5bca54a85b7b1e666fc944c332f68e6427503af9fb8c49e"}, - {file = "rapidfuzz-3.9.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb5a514064e02585b1cc09da2fe406a6dc1a7e5f3e92dd4f27c53e5f1465ec81"}, - {file = "rapidfuzz-3.9.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e3a4244f65dbc3580b1275480118c3763f9dc29fc3dd96610560cb5e140a4d4a"}, - {file = "rapidfuzz-3.9.6-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:f6ebb910a702e41641e1e1dada3843bc11ba9107a33c98daef6945a885a40a07"}, - {file = "rapidfuzz-3.9.6-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:624fbe96115fb39addafa288d583b5493bc76dab1d34d0ebba9987d6871afdf9"}, - {file = "rapidfuzz-3.9.6-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:1c59f1c1507b7a557cf3c410c76e91f097460da7d97e51c985343798e9df7a3c"}, - {file = "rapidfuzz-3.9.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = 
"sha256:f6f0256cb27b6a0fb2e1918477d1b56473cd04acfa245376a342e7c15806a396"}, - {file = "rapidfuzz-3.9.6-cp310-cp310-win32.whl", hash = "sha256:24d473d00d23a30a85802b502b417a7f5126019c3beec91a6739fe7b95388b24"}, - {file = "rapidfuzz-3.9.6-cp310-cp310-win_amd64.whl", hash = "sha256:248f6d2612e661e2b5f9a22bbd5862a1600e720da7bb6ad8a55bb1548cdfa423"}, - {file = "rapidfuzz-3.9.6-cp310-cp310-win_arm64.whl", hash = "sha256:e03fdf0e74f346ed7e798135df5f2a0fb8d6b96582b00ebef202dcf2171e1d1d"}, - {file = "rapidfuzz-3.9.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:52e4675f642fbc85632f691b67115a243cd4d2a47bdcc4a3d9a79e784518ff97"}, - {file = "rapidfuzz-3.9.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1f93a2f13038700bd245b927c46a2017db3dcd4d4ff94687d74b5123689b873b"}, - {file = "rapidfuzz-3.9.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42b70500bca460264b8141d8040caee22e9cf0418c5388104ff0c73fb69ee28f"}, - {file = "rapidfuzz-3.9.6-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a1e037fb89f714a220f68f902fc6300ab7a33349f3ce8ffae668c3b3a40b0b06"}, - {file = "rapidfuzz-3.9.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6792f66d59b86ccfad5e247f2912e255c85c575789acdbad8e7f561412ffed8a"}, - {file = "rapidfuzz-3.9.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:68d9cffe710b67f1969cf996983608cee4490521d96ea91d16bd7ea5dc80ea98"}, - {file = "rapidfuzz-3.9.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63daaeeea76da17fa0bbe7fb05cba8ed8064bb1a0edf8360636557f8b6511961"}, - {file = "rapidfuzz-3.9.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d214e063bffa13e3b771520b74f674b22d309b5720d4df9918ff3e0c0f037720"}, - {file = "rapidfuzz-3.9.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ed443a2062460f44c0346cb9d269b586496b808c2419bbd6057f54061c9b9c75"}, - {file = "rapidfuzz-3.9.6-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:5b0c9b227ee0076fb2d58301c505bb837a290ae99ee628beacdb719f0626d749"}, - {file = "rapidfuzz-3.9.6-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:82c9722b7dfaa71e8b61f8c89fed0482567fb69178e139fe4151fc71ed7df782"}, - {file = "rapidfuzz-3.9.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c18897c95c0a288347e29537b63608a8f63a5c3cb6da258ac46fcf89155e723e"}, - {file = "rapidfuzz-3.9.6-cp311-cp311-win32.whl", hash = "sha256:3e910cf08944da381159587709daaad9e59d8ff7bca1f788d15928f3c3d49c2a"}, - {file = "rapidfuzz-3.9.6-cp311-cp311-win_amd64.whl", hash = "sha256:59c4a61fab676d37329fc3a671618a461bfeef53a4d0b8b12e3bc24a14e166f8"}, - {file = "rapidfuzz-3.9.6-cp311-cp311-win_arm64.whl", hash = "sha256:8b4afea244102332973377fddbe54ce844d0916e1c67a5123432291717f32ffa"}, - {file = "rapidfuzz-3.9.6-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:70591b28b218fff351b88cdd7f2359a01a71f9f7f5a2e465ce3715ed4b3c422b"}, - {file = "rapidfuzz-3.9.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ee2d8355c7343c631a03e57540ea06e8717c19ecf5ff64ea07e0498f7f161457"}, - {file = "rapidfuzz-3.9.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:708fb675de0f47b9635d1cc6fbbf80d52cb710d0a1abbfae5c84c46e3abbddc3"}, - {file = "rapidfuzz-3.9.6-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d66c247c2d3bb7a9b60567c395a15a929d0ebcc5f4ceedb55bfa202c38c6e0c"}, - {file = "rapidfuzz-3.9.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:15146301b32e6e3d2b7e8146db1a26747919d8b13690c7f83a4cb5dc111b3a08"}, - {file = "rapidfuzz-3.9.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7a03da59b6c7c97e657dd5cd4bcaab5fe4a2affd8193958d6f4d938bee36679"}, - {file = "rapidfuzz-3.9.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d2c2fe19e392dbc22695b6c3b2510527e2b774647e79936bbde49db7742d6f1"}, - {file = "rapidfuzz-3.9.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:91aaee4c94cb45930684f583ffc4e7c01a52b46610971cede33586cf8a04a12e"}, - {file = "rapidfuzz-3.9.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3f5702828c10768f9281180a7ff8597da1e5002803e1304e9519dd0f06d79a85"}, - {file = "rapidfuzz-3.9.6-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ccd1763b608fb4629a0b08f00b3c099d6395e67c14e619f6341b2c8429c2f310"}, - {file = "rapidfuzz-3.9.6-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cc7a0d4b2cb166bc46d02c8c9f7551cde8e2f3c9789df3827309433ee9771163"}, - {file = "rapidfuzz-3.9.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7496f53d40560a58964207b52586783633f371683834a8f719d6d965d223a2eb"}, - {file = "rapidfuzz-3.9.6-cp312-cp312-win32.whl", hash = "sha256:5eb1a9272ca71bc72be5415c2fa8448a6302ea4578e181bb7da9db855b367df0"}, - {file = "rapidfuzz-3.9.6-cp312-cp312-win_amd64.whl", hash = "sha256:0d21fc3c0ca507a1180152a6dbd129ebaef48facde3f943db5c1055b6e6be56a"}, - {file = "rapidfuzz-3.9.6-cp312-cp312-win_arm64.whl", hash = "sha256:43bb27a57c29dc5fa754496ba6a1a508480d21ae99ac0d19597646c16407e9f3"}, - {file = "rapidfuzz-3.9.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:83a5ac6547a9d6eedaa212975cb8f2ce2aa07e6e30833b40e54a52b9f9999aa4"}, - {file = "rapidfuzz-3.9.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:10f06139142ecde67078ebc9a745965446132b998f9feebffd71acdf218acfcc"}, - {file = "rapidfuzz-3.9.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74720c3f24597f76c7c3e2c4abdff55f1664f4766ff5b28aeaa689f8ffba5fab"}, - {file = "rapidfuzz-3.9.6-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce2bce52b5c150878e558a0418c2b637fb3dbb6eb38e4eb27d24aa839920483e"}, - {file = "rapidfuzz-3.9.6-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1611199f178793ca9a060c99b284e11f6d7d124998191f1cace9a0245334d219"}, - {file = "rapidfuzz-3.9.6-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0308b2ad161daf502908a6e21a57c78ded0258eba9a8f5e2545e2dafca312507"}, - {file = "rapidfuzz-3.9.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3eda91832201b86e3b70835f91522587725bec329ec68f2f7faf5124091e5ca7"}, - {file = "rapidfuzz-3.9.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ece873c093aedd87fc07c2a7e333d52e458dc177016afa1edaf157e82b6914d8"}, - {file = "rapidfuzz-3.9.6-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d97d3c9d209d5c30172baea5966f2129e8a198fec4a1aeb2f92abb6e82a2edb1"}, - {file = "rapidfuzz-3.9.6-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:6c4550d0db4931f5ebe9f0678916d1b06f06f5a99ba0b8a48b9457fd8959a7d4"}, - {file = "rapidfuzz-3.9.6-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:b6b8dd4af6324fc325d9483bec75ecf9be33e590928c9202d408e4eafff6a0a6"}, - {file = "rapidfuzz-3.9.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:16122ae448bc89e2bea9d81ce6cb0f751e4e07da39bd1e70b95cae2493857853"}, - {file = "rapidfuzz-3.9.6-cp313-cp313-win32.whl", hash = 
"sha256:71cc168c305a4445109cd0d4925406f6e66bcb48fde99a1835387c58af4ecfe9"}, - {file = "rapidfuzz-3.9.6-cp313-cp313-win_amd64.whl", hash = "sha256:59ee78f2ecd53fef8454909cda7400fe2cfcd820f62b8a5d4dfe930102268054"}, - {file = "rapidfuzz-3.9.6-cp313-cp313-win_arm64.whl", hash = "sha256:58b4ce83f223605c358ae37e7a2d19a41b96aa65b1fede99cc664c9053af89ac"}, - {file = "rapidfuzz-3.9.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9f469dbc9c4aeaac7dd005992af74b7dff94aa56a3ea063ce64e4b3e6736dd2f"}, - {file = "rapidfuzz-3.9.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a9ed7ad9adb68d0fe63a156fe752bbf5f1403ed66961551e749641af2874da92"}, - {file = "rapidfuzz-3.9.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39ffe48ffbeedf78d120ddfb9d583f2ca906712159a4e9c3c743c9f33e7b1775"}, - {file = "rapidfuzz-3.9.6-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8502ccdea9084d54b6f737d96a3b60a84e3afed9d016686dc979b49cdac71613"}, - {file = "rapidfuzz-3.9.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6a4bec4956e06b170ca896ba055d08d4c457dac745548172443982956a80e118"}, - {file = "rapidfuzz-3.9.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2c0488b1c273be39e109ff885ccac0448b2fa74dea4c4dc676bcf756c15f16d6"}, - {file = "rapidfuzz-3.9.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0542c036cb6acf24edd2c9e0411a67d7ba71e29e4d3001a082466b86fc34ff30"}, - {file = "rapidfuzz-3.9.6-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:0a96b52c9f26857bf009e270dcd829381e7a634f7ddd585fa29b87d4c82146d9"}, - {file = "rapidfuzz-3.9.6-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:6edd3cd7c4aa8c68c716d349f531bd5011f2ca49ddade216bb4429460151559f"}, - {file = "rapidfuzz-3.9.6-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:50b2fb55d7ed58c66d49c9f954acd8fc4a3f0e9fd0ff708299bd8abb68238d0e"}, - {file = "rapidfuzz-3.9.6-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:32848dfe54391636b84cda1823fd23e5a6b1dbb8be0e9a1d80e4ee9903820994"}, - {file = "rapidfuzz-3.9.6-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:29146cb7a1bf69c87e928b31bffa54f066cb65639d073b36e1425f98cccdebc6"}, - {file = "rapidfuzz-3.9.6-cp38-cp38-win32.whl", hash = "sha256:aed13e5edacb0ecadcc304cc66e93e7e77ff24f059c9792ee602c0381808e10c"}, - {file = "rapidfuzz-3.9.6-cp38-cp38-win_amd64.whl", hash = "sha256:af440e36b828922256d0b4d79443bf2cbe5515fc4b0e9e96017ec789b36bb9fc"}, - {file = "rapidfuzz-3.9.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:efa674b407424553024522159296690d99d6e6b1192cafe99ca84592faff16b4"}, - {file = "rapidfuzz-3.9.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0b40ff76ee19b03ebf10a0a87938f86814996a822786c41c3312d251b7927849"}, - {file = "rapidfuzz-3.9.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16a6c7997cb5927ced6f617122eb116ba514ec6b6f60f4803e7925ef55158891"}, - {file = "rapidfuzz-3.9.6-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3f42504bdc8d770987fc3d99964766d42b2a03e4d5b0f891decdd256236bae0"}, - {file = "rapidfuzz-3.9.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9462aa2be9f60b540c19a083471fdf28e7cf6434f068b631525b5e6251b35e"}, - {file = "rapidfuzz-3.9.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1629698e68f47609a73bf9e73a6da3a4cac20bc710529215cbdf111ab603665b"}, - {file = "rapidfuzz-3.9.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:68bc7621843d8e9a7fd1b1a32729465bf94b47b6fb307d906da168413331f8d6"}, - {file = "rapidfuzz-3.9.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c6254c50f15bc2fcc33cb93a95a81b702d9e6590f432a7f7822b8c7aba9ae288"}, - {file = "rapidfuzz-3.9.6-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:7e535a114fa575bc143e175e4ca386a467ec8c42909eff500f5f0f13dc84e3e0"}, - {file = "rapidfuzz-3.9.6-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:d50acc0e9d67e4ba7a004a14c42d1b1e8b6ca1c515692746f4f8e7948c673167"}, - {file = "rapidfuzz-3.9.6-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:fa742ec60bec53c5a211632cf1d31b9eb5a3c80f1371a46a23ac25a1fa2ab209"}, - {file = "rapidfuzz-3.9.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c256fa95d29cbe5aa717db790b231a9a5b49e5983d50dc9df29d364a1db5e35b"}, - {file = "rapidfuzz-3.9.6-cp39-cp39-win32.whl", hash = "sha256:89acbf728b764421036c173a10ada436ecca22999851cdc01d0aa904c70d362d"}, - {file = "rapidfuzz-3.9.6-cp39-cp39-win_amd64.whl", hash = "sha256:c608fcba8b14d86c04cb56b203fed31a96e8a1ebb4ce99e7b70313c5bf8cf497"}, - {file = "rapidfuzz-3.9.6-cp39-cp39-win_arm64.whl", hash = "sha256:d41c00ded0e22e9dba88ff23ebe0dc9d2a5f21ba2f88e185ea7374461e61daa9"}, - {file = "rapidfuzz-3.9.6-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:a65c2f63218ea2dedd56fc56361035e189ca123bd9c9ce63a9bef6f99540d681"}, - {file = "rapidfuzz-3.9.6-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:680dc78a5f889d3b89f74824b89fe357f49f88ad10d2c121e9c3ad37bac1e4eb"}, - {file = "rapidfuzz-3.9.6-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8ca862927a0b05bd825e46ddf82d0724ea44b07d898ef639386530bf9b40f15"}, - {file = "rapidfuzz-3.9.6-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2116fa1fbff21fa52cd46f3cfcb1e193ba1d65d81f8b6e123193451cd3d6c15e"}, - {file = "rapidfuzz-3.9.6-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4dcb7d9afd740370a897c15da61d3d57a8d54738d7c764a99cedb5f746d6a003"}, - {file = "rapidfuzz-3.9.6-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:1a5bd6401bb489e14cbb5981c378d53ede850b7cc84b2464cad606149cc4e17d"}, - {file = "rapidfuzz-3.9.6-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:29fda70b9d03e29df6fc45cc27cbcc235534b1b0b2900e0a3ae0b43022aaeef5"}, - {file = "rapidfuzz-3.9.6-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:88144f5f52ae977df9352029488326afadd7a7f42c6779d486d1f82d43b2b1f2"}, - {file = "rapidfuzz-3.9.6-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:715aeaabafba2709b9dd91acb2a44bad59d60b4616ef90c08f4d4402a3bbca60"}, - {file = "rapidfuzz-3.9.6-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:af26ebd3714224fbf9bebbc27bdbac14f334c15f5d7043699cd694635050d6ca"}, - {file = "rapidfuzz-3.9.6-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101bd2df438861a005ed47c032631b7857dfcdb17b82beeeb410307983aac61d"}, - {file = "rapidfuzz-3.9.6-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:2185e8e29809b97ad22a7f99281d1669a89bdf5fa1ef4ef1feca36924e675367"}, - {file = "rapidfuzz-3.9.6-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:9e53c72d08f0e9c6e4a369e52df5971f311305b4487690c62e8dd0846770260c"}, - {file = "rapidfuzz-3.9.6-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a0cb157162f0cdd62e538c7bd298ff669847fc43a96422811d5ab933f4c16c3a"}, - {file = 
"rapidfuzz-3.9.6-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4bb5ff2bd48132ed5e7fbb8f619885facb2e023759f2519a448b2c18afe07e5d"}, - {file = "rapidfuzz-3.9.6-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6dc37f601865e8407e3a8037ffbc3afe0b0f837b2146f7632bd29d087385babe"}, - {file = "rapidfuzz-3.9.6-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a657eee4b94668faf1fa2703bdd803654303f7e468eb9ba10a664d867ed9e779"}, - {file = "rapidfuzz-3.9.6-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:51be6ab5b1d5bb32abd39718f2a5e3835502e026a8272d139ead295c224a6f5e"}, - {file = "rapidfuzz-3.9.6.tar.gz", hash = "sha256:5cf2a7d621e4515fee84722e93563bf77ff2cbe832a77a48b81f88f9e23b9e8d"}, -] - -[package.extras] -full = ["numpy"] - -[[package]] -name = "ratelimiter" -version = "1.2.0.post0" -description = "Simple python rate limiting object" -optional = false -python-versions = "*" -files = [ - {file = "ratelimiter-1.2.0.post0-py3-none-any.whl", hash = "sha256:a52be07bc0bb0b3674b4b304550f10c769bbb00fead3072e035904474259809f"}, - {file = "ratelimiter-1.2.0.post0.tar.gz", hash = "sha256:5c395dcabdbbde2e5178ef3f89b568a3066454a6ddc223b76473dac22f89b4f7"}, -] - -[package.extras] -test = ["pytest (>=3.0)", "pytest-asyncio"] - -[[package]] -name = "realtime" -version = "1.0.6" -description = "" -optional = false -python-versions = "<4.0,>=3.8" -files = [ - {file = "realtime-1.0.6-py3-none-any.whl", hash = "sha256:c66918a106d8ef348d1821f2dbf6683d8833825580d95b2fdea9995406b42838"}, - {file = "realtime-1.0.6.tar.gz", hash = "sha256:2be0d8a6305513d423604ee319216108fc20105cb7438922d5c8958c48f40a47"}, -] - -[package.dependencies] -python-dateutil = ">=2.8.1,<3.0.0" -typing-extensions = ">=4.12.2,<5.0.0" -websockets = ">=11,<13" - -[[package]] -name = "redis" -version = "5.0.8" -description = "Python client for Redis database and key-value store" -optional = false -python-versions = ">=3.7" -files = [ - {file = "redis-5.0.8-py3-none-any.whl", hash = "sha256:56134ee08ea909106090934adc36f65c9bcbbaecea5b21ba704ba6fb561f8eb4"}, - {file = "redis-5.0.8.tar.gz", hash = "sha256:0c5b10d387568dfe0698c6fad6615750c24170e548ca2deac10c649d463e9870"}, -] - -[package.dependencies] -async-timeout = {version = ">=4.0.3", markers = "python_full_version < \"3.11.3\""} - -[package.extras] -hiredis = ["hiredis (>1.0.0)"] -ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)"] - -[[package]] -name = "referencing" -version = "0.35.1" -description = "JSON Referencing + Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "referencing-0.35.1-py3-none-any.whl", hash = "sha256:eda6d3234d62814d1c64e305c1331c9a3a6132da475ab6382eaa997b21ee75de"}, - {file = "referencing-0.35.1.tar.gz", hash = "sha256:25b42124a6c8b632a425174f24087783efb348a6f1e0008e63cd4466fedf703c"}, -] - -[package.dependencies] -attrs = ">=22.2.0" -rpds-py = ">=0.7.0" - -[[package]] -name = "regex" -version = "2023.12.25" -description = "Alternative regular expression module, to replace re." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0694219a1d54336fd0445ea382d49d36882415c0134ee1e8332afd1529f0baa5"}, - {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b014333bd0217ad3d54c143de9d4b9a3ca1c5a29a6d0d554952ea071cff0f1f8"}, - {file = "regex-2023.12.25-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d865984b3f71f6d0af64d0d88f5733521698f6c16f445bb09ce746c92c97c586"}, - {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e0eabac536b4cc7f57a5f3d095bfa557860ab912f25965e08fe1545e2ed8b4c"}, - {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c25a8ad70e716f96e13a637802813f65d8a6760ef48672aa3502f4c24ea8b400"}, - {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9b6d73353f777630626f403b0652055ebfe8ff142a44ec2cf18ae470395766e"}, - {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9cc99d6946d750eb75827cb53c4371b8b0fe89c733a94b1573c9dd16ea6c9e4"}, - {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88d1f7bef20c721359d8675f7d9f8e414ec5003d8f642fdfd8087777ff7f94b5"}, - {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cb3fe77aec8f1995611f966d0c656fdce398317f850d0e6e7aebdfe61f40e1cd"}, - {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7aa47c2e9ea33a4a2a05f40fcd3ea36d73853a2aae7b4feab6fc85f8bf2c9704"}, - {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:df26481f0c7a3f8739fecb3e81bc9da3fcfae34d6c094563b9d4670b047312e1"}, - {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c40281f7d70baf6e0db0c2f7472b31609f5bc2748fe7275ea65a0b4601d9b392"}, - {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:d94a1db462d5690ebf6ae86d11c5e420042b9898af5dcf278bd97d6bda065423"}, - {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ba1b30765a55acf15dce3f364e4928b80858fa8f979ad41f862358939bdd1f2f"}, - {file = "regex-2023.12.25-cp310-cp310-win32.whl", hash = "sha256:150c39f5b964e4d7dba46a7962a088fbc91f06e606f023ce57bb347a3b2d4630"}, - {file = "regex-2023.12.25-cp310-cp310-win_amd64.whl", hash = "sha256:09da66917262d9481c719599116c7dc0c321ffcec4b1f510c4f8a066f8768105"}, - {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1b9d811f72210fa9306aeb88385b8f8bcef0dfbf3873410413c00aa94c56c2b6"}, - {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d902a43085a308cef32c0d3aea962524b725403fd9373dea18110904003bac97"}, - {file = "regex-2023.12.25-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d166eafc19f4718df38887b2bbe1467a4f74a9830e8605089ea7a30dd4da8887"}, - {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7ad32824b7f02bb3c9f80306d405a1d9b7bb89362d68b3c5a9be53836caebdb"}, - {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:636ba0a77de609d6510235b7f0e77ec494d2657108f777e8765efc060094c98c"}, - {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:0fda75704357805eb953a3ee15a2b240694a9a514548cd49b3c5124b4e2ad01b"}, - {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f72cbae7f6b01591f90814250e636065850c5926751af02bb48da94dfced7baa"}, - {file = "regex-2023.12.25-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db2a0b1857f18b11e3b0e54ddfefc96af46b0896fb678c85f63fb8c37518b3e7"}, - {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7502534e55c7c36c0978c91ba6f61703faf7ce733715ca48f499d3dbbd7657e0"}, - {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e8c7e08bb566de4faaf11984af13f6bcf6a08f327b13631d41d62592681d24fe"}, - {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:283fc8eed679758de38fe493b7d7d84a198b558942b03f017b1f94dda8efae80"}, - {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:f44dd4d68697559d007462b0a3a1d9acd61d97072b71f6d1968daef26bc744bd"}, - {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:67d3ccfc590e5e7197750fcb3a2915b416a53e2de847a728cfa60141054123d4"}, - {file = "regex-2023.12.25-cp311-cp311-win32.whl", hash = "sha256:68191f80a9bad283432385961d9efe09d783bcd36ed35a60fb1ff3f1ec2efe87"}, - {file = "regex-2023.12.25-cp311-cp311-win_amd64.whl", hash = "sha256:7d2af3f6b8419661a0c421584cfe8aaec1c0e435ce7e47ee2a97e344b98f794f"}, - {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8a0ccf52bb37d1a700375a6b395bff5dd15c50acb745f7db30415bae3c2b0715"}, - {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c3c4a78615b7762740531c27cf46e2f388d8d727d0c0c739e72048beb26c8a9d"}, - {file = "regex-2023.12.25-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ad83e7545b4ab69216cef4cc47e344d19622e28aabec61574b20257c65466d6a"}, - {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7a635871143661feccce3979e1727c4e094f2bdfd3ec4b90dfd4f16f571a87a"}, - {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d498eea3f581fbe1b34b59c697512a8baef88212f92e4c7830fcc1499f5b45a5"}, - {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:43f7cd5754d02a56ae4ebb91b33461dc67be8e3e0153f593c509e21d219c5060"}, - {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51f4b32f793812714fd5307222a7f77e739b9bc566dc94a18126aba3b92b98a3"}, - {file = "regex-2023.12.25-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba99d8077424501b9616b43a2d208095746fb1284fc5ba490139651f971d39d9"}, - {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4bfc2b16e3ba8850e0e262467275dd4d62f0d045e0e9eda2bc65078c0110a11f"}, - {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8c2c19dae8a3eb0ea45a8448356ed561be843b13cbc34b840922ddf565498c1c"}, - {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:60080bb3d8617d96f0fb7e19796384cc2467447ef1c491694850ebd3670bc457"}, - {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b77e27b79448e34c2c51c09836033056a0547aa360c45eeeb67803da7b0eedaf"}, - {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:518440c991f514331f4850a63560321f833979d145d7d81186dbe2f19e27ae3d"}, - {file = "regex-2023.12.25-cp312-cp312-win32.whl", hash = "sha256:e2610e9406d3b0073636a3a2e80db05a02f0c3169b5632022b4e81c0364bcda5"}, - {file = "regex-2023.12.25-cp312-cp312-win_amd64.whl", hash = "sha256:cc37b9aeebab425f11f27e5e9e6cf580be7206c6582a64467a14dda211abc232"}, - {file = "regex-2023.12.25-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:da695d75ac97cb1cd725adac136d25ca687da4536154cdc2815f576e4da11c69"}, - {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d126361607b33c4eb7b36debc173bf25d7805847346dd4d99b5499e1fef52bc7"}, - {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4719bb05094d7d8563a450cf8738d2e1061420f79cfcc1fa7f0a44744c4d8f73"}, - {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5dd58946bce44b53b06d94aa95560d0b243eb2fe64227cba50017a8d8b3cd3e2"}, - {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22a86d9fff2009302c440b9d799ef2fe322416d2d58fc124b926aa89365ec482"}, - {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2aae8101919e8aa05ecfe6322b278f41ce2994c4a430303c4cd163fef746e04f"}, - {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e692296c4cc2873967771345a876bcfc1c547e8dd695c6b89342488b0ea55cd8"}, - {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:263ef5cc10979837f243950637fffb06e8daed7f1ac1e39d5910fd29929e489a"}, - {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d6f7e255e5fa94642a0724e35406e6cb7001c09d476ab5fce002f652b36d0c39"}, - {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:88ad44e220e22b63b0f8f81f007e8abbb92874d8ced66f32571ef8beb0643b2b"}, - {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:3a17d3ede18f9cedcbe23d2daa8a2cd6f59fe2bf082c567e43083bba3fb00347"}, - {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d15b274f9e15b1a0b7a45d2ac86d1f634d983ca40d6b886721626c47a400bf39"}, - {file = "regex-2023.12.25-cp37-cp37m-win32.whl", hash = "sha256:ed19b3a05ae0c97dd8f75a5d8f21f7723a8c33bbc555da6bbe1f96c470139d3c"}, - {file = "regex-2023.12.25-cp37-cp37m-win_amd64.whl", hash = "sha256:a6d1047952c0b8104a1d371f88f4ab62e6275567d4458c1e26e9627ad489b445"}, - {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b43523d7bc2abd757119dbfb38af91b5735eea45537ec6ec3a5ec3f9562a1c53"}, - {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:efb2d82f33b2212898f1659fb1c2e9ac30493ac41e4d53123da374c3b5541e64"}, - {file = "regex-2023.12.25-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b7fca9205b59c1a3d5031f7e64ed627a1074730a51c2a80e97653e3e9fa0d415"}, - {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086dd15e9435b393ae06f96ab69ab2d333f5d65cbe65ca5a3ef0ec9564dfe770"}, - {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e81469f7d01efed9b53740aedd26085f20d49da65f9c1f41e822a33992cb1590"}, - {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:34e4af5b27232f68042aa40a91c3b9bb4da0eeb31b7632e0091afc4310afe6cb"}, - {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9852b76ab558e45b20bf1893b59af64a28bd3820b0c2efc80e0a70a4a3ea51c1"}, - {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff100b203092af77d1a5a7abe085b3506b7eaaf9abf65b73b7d6905b6cb76988"}, - {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cc038b2d8b1470364b1888a98fd22d616fba2b6309c5b5f181ad4483e0017861"}, - {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:094ba386bb5c01e54e14434d4caabf6583334090865b23ef58e0424a6286d3dc"}, - {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5cd05d0f57846d8ba4b71d9c00f6f37d6b97d5e5ef8b3c3840426a475c8f70f4"}, - {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:9aa1a67bbf0f957bbe096375887b2505f5d8ae16bf04488e8b0f334c36e31360"}, - {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:98a2636994f943b871786c9e82bfe7883ecdaba2ef5df54e1450fa9869d1f756"}, - {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:37f8e93a81fc5e5bd8db7e10e62dc64261bcd88f8d7e6640aaebe9bc180d9ce2"}, - {file = "regex-2023.12.25-cp38-cp38-win32.whl", hash = "sha256:d78bd484930c1da2b9679290a41cdb25cc127d783768a0369d6b449e72f88beb"}, - {file = "regex-2023.12.25-cp38-cp38-win_amd64.whl", hash = "sha256:b521dcecebc5b978b447f0f69b5b7f3840eac454862270406a39837ffae4e697"}, - {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f7bc09bc9c29ebead055bcba136a67378f03d66bf359e87d0f7c759d6d4ffa31"}, - {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e14b73607d6231f3cc4622809c196b540a6a44e903bcfad940779c80dffa7be7"}, - {file = "regex-2023.12.25-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9eda5f7a50141291beda3edd00abc2d4a5b16c29c92daf8d5bd76934150f3edc"}, - {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc6bb9aa69aacf0f6032c307da718f61a40cf970849e471254e0e91c56ffca95"}, - {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:298dc6354d414bc921581be85695d18912bea163a8b23cac9a2562bbcd5088b1"}, - {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f4e475a80ecbd15896a976aa0b386c5525d0ed34d5c600b6d3ebac0a67c7ddf"}, - {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531ac6cf22b53e0696f8e1d56ce2396311254eb806111ddd3922c9d937151dae"}, - {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22f3470f7524b6da61e2020672df2f3063676aff444db1daa283c2ea4ed259d6"}, - {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:89723d2112697feaa320c9d351e5f5e7b841e83f8b143dba8e2d2b5f04e10923"}, - {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0ecf44ddf9171cd7566ef1768047f6e66975788258b1c6c6ca78098b95cf9a3d"}, - {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:905466ad1702ed4acfd67a902af50b8db1feeb9781436372261808df7a2a7bca"}, - {file = 
"regex-2023.12.25-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:4558410b7a5607a645e9804a3e9dd509af12fb72b9825b13791a37cd417d73a5"}, - {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:7e316026cc1095f2a3e8cc012822c99f413b702eaa2ca5408a513609488cb62f"}, - {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3b1de218d5375cd6ac4b5493e0b9f3df2be331e86520f23382f216c137913d20"}, - {file = "regex-2023.12.25-cp39-cp39-win32.whl", hash = "sha256:11a963f8e25ab5c61348d090bf1b07f1953929c13bd2309a0662e9ff680763c9"}, - {file = "regex-2023.12.25-cp39-cp39-win_amd64.whl", hash = "sha256:e693e233ac92ba83a87024e1d32b5f9ab15ca55ddd916d878146f4e3406b5c91"}, - {file = "regex-2023.12.25.tar.gz", hash = "sha256:29171aa128da69afdf4bde412d5bedc335f2ca8fcfe4489038577d05f16181e5"}, -] - -[[package]] -name = "requests" -version = "2.32.3" -description = "Python HTTP for Humans." -optional = false -python-versions = ">=3.8" -files = [ - {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, - {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, -] - -[package.dependencies] -certifi = ">=2017.4.17" -charset-normalizer = ">=2,<4" -idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<3" - -[package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] - -[[package]] -name = "requests-oauthlib" -version = "2.0.0" -description = "OAuthlib authentication support for Requests." -optional = false -python-versions = ">=3.4" -files = [ - {file = "requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9"}, - {file = "requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36"}, -] - -[package.dependencies] -oauthlib = ">=3.0.0" -requests = ">=2.0.0" - -[package.extras] -rsa = ["oauthlib[signedtoken] (>=3.0.0)"] - -[[package]] -name = "requests-toolbelt" -version = "1.0.0" -description = "A utility belt for advanced users of python-requests" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6"}, - {file = "requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06"}, -] - -[package.dependencies] -requests = ">=2.0.1,<3.0.0" - -[[package]] -name = "respx" -version = "0.21.1" -description = "A utility for mocking out the Python HTTPX and HTTP Core libraries." -optional = false -python-versions = ">=3.7" -files = [ - {file = "respx-0.21.1-py2.py3-none-any.whl", hash = "sha256:05f45de23f0c785862a2c92a3e173916e8ca88e4caad715dd5f68584d6053c20"}, - {file = "respx-0.21.1.tar.gz", hash = "sha256:0bd7fe21bfaa52106caa1223ce61224cf30786985f17c63c5d71eff0307ee8af"}, -] - -[package.dependencies] -httpx = ">=0.21.0" - -[[package]] -name = "retry" -version = "0.9.2" -description = "Easy to use retry decorator." 
-optional = false -python-versions = "*" -files = [ - {file = "retry-0.9.2-py2.py3-none-any.whl", hash = "sha256:ccddf89761fa2c726ab29391837d4327f819ea14d244c232a1d24c67a2f98606"}, - {file = "retry-0.9.2.tar.gz", hash = "sha256:f8bfa8b99b69c4506d6f5bd3b0aabf77f98cdb17f3c9fc3f5ca820033336fba4"}, -] - -[package.dependencies] -decorator = ">=3.4.2" -py = ">=1.4.26,<2.0.0" - -[[package]] -name = "rich" -version = "13.7.1" -description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "rich-13.7.1-py3-none-any.whl", hash = "sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222"}, - {file = "rich-13.7.1.tar.gz", hash = "sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432"}, -] - -[package.dependencies] -markdown-it-py = ">=2.2.0" -pygments = ">=2.13.0,<3.0.0" - -[package.extras] -jupyter = ["ipywidgets (>=7.5.1,<9)"] - -[[package]] -name = "rpds-py" -version = "0.20.0" -description = "Python bindings to Rust's persistent data structures (rpds)" -optional = false -python-versions = ">=3.8" -files = [ - {file = "rpds_py-0.20.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3ad0fda1635f8439cde85c700f964b23ed5fc2d28016b32b9ee5fe30da5c84e2"}, - {file = "rpds_py-0.20.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9bb4a0d90fdb03437c109a17eade42dfbf6190408f29b2744114d11586611d6f"}, - {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6377e647bbfd0a0b159fe557f2c6c602c159fc752fa316572f012fc0bf67150"}, - {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb851b7df9dda52dc1415ebee12362047ce771fc36914586b2e9fcbd7d293b3e"}, - {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e0f80b739e5a8f54837be5d5c924483996b603d5502bfff79bf33da06164ee2"}, - {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a8c94dad2e45324fc74dce25e1645d4d14df9a4e54a30fa0ae8bad9a63928e3"}, - {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8e604fe73ba048c06085beaf51147eaec7df856824bfe7b98657cf436623daf"}, - {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:df3de6b7726b52966edf29663e57306b23ef775faf0ac01a3e9f4012a24a4140"}, - {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cf258ede5bc22a45c8e726b29835b9303c285ab46fc7c3a4cc770736b5304c9f"}, - {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:55fea87029cded5df854ca7e192ec7bdb7ecd1d9a3f63d5c4eb09148acf4a7ce"}, - {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ae94bd0b2f02c28e199e9bc51485d0c5601f58780636185660f86bf80c89af94"}, - {file = "rpds_py-0.20.0-cp310-none-win32.whl", hash = "sha256:28527c685f237c05445efec62426d285e47a58fb05ba0090a4340b73ecda6dee"}, - {file = "rpds_py-0.20.0-cp310-none-win_amd64.whl", hash = "sha256:238a2d5b1cad28cdc6ed15faf93a998336eb041c4e440dd7f902528b8891b399"}, - {file = "rpds_py-0.20.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ac2f4f7a98934c2ed6505aead07b979e6f999389f16b714448fb39bbaa86a489"}, - {file = "rpds_py-0.20.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:220002c1b846db9afd83371d08d239fdc865e8f8c5795bbaec20916a76db3318"}, - {file = 
"rpds_py-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d7919548df3f25374a1f5d01fbcd38dacab338ef5f33e044744b5c36729c8db"}, - {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:758406267907b3781beee0f0edfe4a179fbd97c0be2e9b1154d7f0a1279cf8e5"}, - {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3d61339e9f84a3f0767b1995adfb171a0d00a1185192718a17af6e124728e0f5"}, - {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1259c7b3705ac0a0bd38197565a5d603218591d3f6cee6e614e380b6ba61c6f6"}, - {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c1dc0f53856b9cc9a0ccca0a7cc61d3d20a7088201c0937f3f4048c1718a209"}, - {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7e60cb630f674a31f0368ed32b2a6b4331b8350d67de53c0359992444b116dd3"}, - {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:dbe982f38565bb50cb7fb061ebf762c2f254ca3d8c20d4006878766e84266272"}, - {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:514b3293b64187172bc77c8fb0cdae26981618021053b30d8371c3a902d4d5ad"}, - {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d0a26ffe9d4dd35e4dfdd1e71f46401cff0181c75ac174711ccff0459135fa58"}, - {file = "rpds_py-0.20.0-cp311-none-win32.whl", hash = "sha256:89c19a494bf3ad08c1da49445cc5d13d8fefc265f48ee7e7556839acdacf69d0"}, - {file = "rpds_py-0.20.0-cp311-none-win_amd64.whl", hash = "sha256:c638144ce971df84650d3ed0096e2ae7af8e62ecbbb7b201c8935c370df00a2c"}, - {file = "rpds_py-0.20.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a84ab91cbe7aab97f7446652d0ed37d35b68a465aeef8fc41932a9d7eee2c1a6"}, - {file = "rpds_py-0.20.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:56e27147a5a4c2c21633ff8475d185734c0e4befd1c989b5b95a5d0db699b21b"}, - {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2580b0c34583b85efec8c5c5ec9edf2dfe817330cc882ee972ae650e7b5ef739"}, - {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b80d4a7900cf6b66bb9cee5c352b2d708e29e5a37fe9bf784fa97fc11504bf6c"}, - {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:50eccbf054e62a7b2209b28dc7a22d6254860209d6753e6b78cfaeb0075d7bee"}, - {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:49a8063ea4296b3a7e81a5dfb8f7b2d73f0b1c20c2af401fb0cdf22e14711a96"}, - {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea438162a9fcbee3ecf36c23e6c68237479f89f962f82dae83dc15feeceb37e4"}, - {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:18d7585c463087bddcfa74c2ba267339f14f2515158ac4db30b1f9cbdb62c8ef"}, - {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d4c7d1a051eeb39f5c9547e82ea27cbcc28338482242e3e0b7768033cb083821"}, - {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e4df1e3b3bec320790f699890d41c59d250f6beda159ea3c44c3f5bac1976940"}, - {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2cf126d33a91ee6eedc7f3197b53e87a2acdac63602c0f03a02dd69e4b138174"}, - {file = "rpds_py-0.20.0-cp312-none-win32.whl", hash = 
"sha256:8bc7690f7caee50b04a79bf017a8d020c1f48c2a1077ffe172abec59870f1139"}, - {file = "rpds_py-0.20.0-cp312-none-win_amd64.whl", hash = "sha256:0e13e6952ef264c40587d510ad676a988df19adea20444c2b295e536457bc585"}, - {file = "rpds_py-0.20.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:aa9a0521aeca7d4941499a73ad7d4f8ffa3d1affc50b9ea11d992cd7eff18a29"}, - {file = "rpds_py-0.20.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4a1f1d51eccb7e6c32ae89243cb352389228ea62f89cd80823ea7dd1b98e0b91"}, - {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a86a9b96070674fc88b6f9f71a97d2c1d3e5165574615d1f9168ecba4cecb24"}, - {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6c8ef2ebf76df43f5750b46851ed1cdf8f109d7787ca40035fe19fbdc1acc5a7"}, - {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b74b25f024b421d5859d156750ea9a65651793d51b76a2e9238c05c9d5f203a9"}, - {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57eb94a8c16ab08fef6404301c38318e2c5a32216bf5de453e2714c964c125c8"}, - {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1940dae14e715e2e02dfd5b0f64a52e8374a517a1e531ad9412319dc3ac7879"}, - {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d20277fd62e1b992a50c43f13fbe13277a31f8c9f70d59759c88f644d66c619f"}, - {file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:06db23d43f26478303e954c34c75182356ca9aa7797d22c5345b16871ab9c45c"}, - {file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b2a5db5397d82fa847e4c624b0c98fe59d2d9b7cf0ce6de09e4d2e80f8f5b3f2"}, - {file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5a35df9f5548fd79cb2f52d27182108c3e6641a4feb0f39067911bf2adaa3e57"}, - {file = "rpds_py-0.20.0-cp313-none-win32.whl", hash = "sha256:fd2d84f40633bc475ef2d5490b9c19543fbf18596dcb1b291e3a12ea5d722f7a"}, - {file = "rpds_py-0.20.0-cp313-none-win_amd64.whl", hash = "sha256:9bc2d153989e3216b0559251b0c260cfd168ec78b1fac33dd485750a228db5a2"}, - {file = "rpds_py-0.20.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:f2fbf7db2012d4876fb0d66b5b9ba6591197b0f165db8d99371d976546472a24"}, - {file = "rpds_py-0.20.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1e5f3cd7397c8f86c8cc72d5a791071431c108edd79872cdd96e00abd8497d29"}, - {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce9845054c13696f7af7f2b353e6b4f676dab1b4b215d7fe5e05c6f8bb06f965"}, - {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c3e130fd0ec56cb76eb49ef52faead8ff09d13f4527e9b0c400307ff72b408e1"}, - {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b16aa0107ecb512b568244ef461f27697164d9a68d8b35090e9b0c1c8b27752"}, - {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa7f429242aae2947246587d2964fad750b79e8c233a2367f71b554e9447949c"}, - {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af0fc424a5842a11e28956e69395fbbeab2c97c42253169d87e90aac2886d751"}, - {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b8c00a3b1e70c1d3891f0db1b05292747f0dbcfb49c43f9244d04c70fbc40eb8"}, - {file = 
"rpds_py-0.20.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:40ce74fc86ee4645d0a225498d091d8bc61f39b709ebef8204cb8b5a464d3c0e"}, - {file = "rpds_py-0.20.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:4fe84294c7019456e56d93e8ababdad5a329cd25975be749c3f5f558abb48253"}, - {file = "rpds_py-0.20.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:338ca4539aad4ce70a656e5187a3a31c5204f261aef9f6ab50e50bcdffaf050a"}, - {file = "rpds_py-0.20.0-cp38-none-win32.whl", hash = "sha256:54b43a2b07db18314669092bb2de584524d1ef414588780261e31e85846c26a5"}, - {file = "rpds_py-0.20.0-cp38-none-win_amd64.whl", hash = "sha256:a1862d2d7ce1674cffa6d186d53ca95c6e17ed2b06b3f4c476173565c862d232"}, - {file = "rpds_py-0.20.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:3fde368e9140312b6e8b6c09fb9f8c8c2f00999d1823403ae90cc00480221b22"}, - {file = "rpds_py-0.20.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9824fb430c9cf9af743cf7aaf6707bf14323fb51ee74425c380f4c846ea70789"}, - {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:11ef6ce74616342888b69878d45e9f779b95d4bd48b382a229fe624a409b72c5"}, - {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c52d3f2f82b763a24ef52f5d24358553e8403ce05f893b5347098014f2d9eff2"}, - {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9d35cef91e59ebbeaa45214861874bc6f19eb35de96db73e467a8358d701a96c"}, - {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d72278a30111e5b5525c1dd96120d9e958464316f55adb030433ea905866f4de"}, - {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4c29cbbba378759ac5786730d1c3cb4ec6f8ababf5c42a9ce303dc4b3d08cda"}, - {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6632f2d04f15d1bd6fe0eedd3b86d9061b836ddca4c03d5cf5c7e9e6b7c14580"}, - {file = "rpds_py-0.20.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d0b67d87bb45ed1cd020e8fbf2307d449b68abc45402fe1a4ac9e46c3c8b192b"}, - {file = "rpds_py-0.20.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ec31a99ca63bf3cd7f1a5ac9fe95c5e2d060d3c768a09bc1d16e235840861420"}, - {file = "rpds_py-0.20.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:22e6c9976e38f4d8c4a63bd8a8edac5307dffd3ee7e6026d97f3cc3a2dc02a0b"}, - {file = "rpds_py-0.20.0-cp39-none-win32.whl", hash = "sha256:569b3ea770c2717b730b61998b6c54996adee3cef69fc28d444f3e7920313cf7"}, - {file = "rpds_py-0.20.0-cp39-none-win_amd64.whl", hash = "sha256:e6900ecdd50ce0facf703f7a00df12374b74bbc8ad9fe0f6559947fb20f82364"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:617c7357272c67696fd052811e352ac54ed1d9b49ab370261a80d3b6ce385045"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9426133526f69fcaba6e42146b4e12d6bc6c839b8b555097020e2b78ce908dcc"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:deb62214c42a261cb3eb04d474f7155279c1a8a8c30ac89b7dcb1721d92c3c02"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fcaeb7b57f1a1e071ebd748984359fef83ecb026325b9d4ca847c95bc7311c92"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d454b8749b4bd70dd0a79f428731ee263fa6995f83ccb8bada706e8d1d3ff89d"}, - {file = 
"rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d807dc2051abe041b6649681dce568f8e10668e3c1c6543ebae58f2d7e617855"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3c20f0ddeb6e29126d45f89206b8291352b8c5b44384e78a6499d68b52ae511"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b7f19250ceef892adf27f0399b9e5afad019288e9be756d6919cb58892129f51"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:4f1ed4749a08379555cebf4650453f14452eaa9c43d0a95c49db50c18b7da075"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:dcedf0b42bcb4cfff4101d7771a10532415a6106062f005ab97d1d0ab5681c60"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:39ed0d010457a78f54090fafb5d108501b5aa5604cc22408fc1c0c77eac14344"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:bb273176be34a746bdac0b0d7e4e2c467323d13640b736c4c477881a3220a989"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f918a1a130a6dfe1d7fe0f105064141342e7dd1611f2e6a21cd2f5c8cb1cfb3e"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:f60012a73aa396be721558caa3a6fd49b3dd0033d1675c6d59c4502e870fcf0c"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d2b1ad682a3dfda2a4e8ad8572f3100f95fad98cb99faf37ff0ddfe9cbf9d03"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:614fdafe9f5f19c63ea02817fa4861c606a59a604a77c8cdef5aa01d28b97921"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fa518bcd7600c584bf42e6617ee8132869e877db2f76bcdc281ec6a4113a53ab"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0475242f447cc6cb8a9dd486d68b2ef7fbee84427124c232bff5f63b1fe11e5"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f90a4cd061914a60bd51c68bcb4357086991bd0bb93d8aa66a6da7701370708f"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:def7400461c3a3f26e49078302e1c1b38f6752342c77e3cf72ce91ca69fb1bc1"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:65794e4048ee837494aea3c21a28ad5fc080994dfba5b036cf84de37f7ad5074"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:faefcc78f53a88f3076b7f8be0a8f8d35133a3ecf7f3770895c25f8813460f08"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:5b4f105deeffa28bbcdff6c49b34e74903139afa690e35d2d9e3c2c2fba18cec"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:fdfc3a892927458d98f3d55428ae46b921d1f7543b89382fdb483f5640daaec8"}, - {file = "rpds_py-0.20.0.tar.gz", hash = "sha256:d72a210824facfdaf8768cf2d7ca25a042c30320b3020de2fa04640920d4e121"}, -] - -[[package]] -name = "rsa" -version = "4.9" -description = "Pure-Python RSA implementation" -optional = false -python-versions = ">=3.6,<4" -files = [ - {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, - {file = "rsa-4.9.tar.gz", hash = 
"sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, -] - -[package.dependencies] -pyasn1 = ">=0.1.3" - -[[package]] -name = "ruff" -version = "0.4.10" -description = "An extremely fast Python linter and code formatter, written in Rust." -optional = false -python-versions = ">=3.7" -files = [ - {file = "ruff-0.4.10-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:5c2c4d0859305ac5a16310eec40e4e9a9dec5dcdfbe92697acd99624e8638dac"}, - {file = "ruff-0.4.10-py3-none-macosx_11_0_arm64.whl", hash = "sha256:a79489607d1495685cdd911a323a35871abfb7a95d4f98fc6f85e799227ac46e"}, - {file = "ruff-0.4.10-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1dd1681dfa90a41b8376a61af05cc4dc5ff32c8f14f5fe20dba9ff5deb80cd6"}, - {file = "ruff-0.4.10-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c75c53bb79d71310dc79fb69eb4902fba804a81f374bc86a9b117a8d077a1784"}, - {file = "ruff-0.4.10-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18238c80ee3d9100d3535d8eb15a59c4a0753b45cc55f8bf38f38d6a597b9739"}, - {file = "ruff-0.4.10-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:d8f71885bce242da344989cae08e263de29752f094233f932d4f5cfb4ef36a81"}, - {file = "ruff-0.4.10-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:330421543bd3222cdfec481e8ff3460e8702ed1e58b494cf9d9e4bf90db52b9d"}, - {file = "ruff-0.4.10-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9e9b6fb3a37b772628415b00c4fc892f97954275394ed611056a4b8a2631365e"}, - {file = "ruff-0.4.10-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f54c481b39a762d48f64d97351048e842861c6662d63ec599f67d515cb417f6"}, - {file = "ruff-0.4.10-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:67fe086b433b965c22de0b4259ddfe6fa541c95bf418499bedb9ad5fb8d1c631"}, - {file = "ruff-0.4.10-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:acfaaab59543382085f9eb51f8e87bac26bf96b164839955f244d07125a982ef"}, - {file = "ruff-0.4.10-py3-none-musllinux_1_2_i686.whl", hash = "sha256:3cea07079962b2941244191569cf3a05541477286f5cafea638cd3aa94b56815"}, - {file = "ruff-0.4.10-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:338a64ef0748f8c3a80d7f05785930f7965d71ca260904a9321d13be24b79695"}, - {file = "ruff-0.4.10-py3-none-win32.whl", hash = "sha256:ffe3cd2f89cb54561c62e5fa20e8f182c0a444934bf430515a4b422f1ab7b7ca"}, - {file = "ruff-0.4.10-py3-none-win_amd64.whl", hash = "sha256:67f67cef43c55ffc8cc59e8e0b97e9e60b4837c8f21e8ab5ffd5d66e196e25f7"}, - {file = "ruff-0.4.10-py3-none-win_arm64.whl", hash = "sha256:dd1fcee327c20addac7916ca4e2653fbbf2e8388d8a6477ce5b4e986b68ae6c0"}, - {file = "ruff-0.4.10.tar.gz", hash = "sha256:3aa4f2bc388a30d346c56524f7cacca85945ba124945fe489952aadb6b5cd804"}, -] - -[[package]] -name = "s3transfer" -version = "0.10.2" -description = "An Amazon S3 Transfer Manager" -optional = false -python-versions = ">=3.8" -files = [ - {file = "s3transfer-0.10.2-py3-none-any.whl", hash = "sha256:eca1c20de70a39daee580aef4986996620f365c4e0fda6a86100231d62f1bf69"}, - {file = "s3transfer-0.10.2.tar.gz", hash = "sha256:0711534e9356d3cc692fdde846b4a1e4b0cb6519971860796e6bc4c7aea00ef6"}, -] - -[package.dependencies] -botocore = ">=1.33.2,<2.0a.0" - -[package.extras] -crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"] - -[[package]] -name = "safetensors" -version = "0.4.4" -description = "" -optional = true -python-versions = ">=3.7" -files = [ - {file = "safetensors-0.4.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = 
"sha256:2adb497ada13097f30e386e88c959c0fda855a5f6f98845710f5bb2c57e14f12"}, - {file = "safetensors-0.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7db7fdc2d71fd1444d85ca3f3d682ba2df7d61a637dfc6d80793f439eae264ab"}, - {file = "safetensors-0.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d4f0eed76b430f009fbefca1a0028ddb112891b03cb556d7440d5cd68eb89a9"}, - {file = "safetensors-0.4.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:57d216fab0b5c432aabf7170883d7c11671622bde8bd1436c46d633163a703f6"}, - {file = "safetensors-0.4.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7d9b76322e49c056bcc819f8bdca37a2daa5a6d42c07f30927b501088db03309"}, - {file = "safetensors-0.4.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:32f0d1f6243e90ee43bc6ee3e8c30ac5b09ca63f5dd35dbc985a1fc5208c451a"}, - {file = "safetensors-0.4.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44d464bdc384874601a177375028012a5f177f1505279f9456fea84bbc575c7f"}, - {file = "safetensors-0.4.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:63144e36209ad8e4e65384dbf2d52dd5b1866986079c00a72335402a38aacdc5"}, - {file = "safetensors-0.4.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:051d5ecd490af7245258000304b812825974d5e56f14a3ff7e1b8b2ba6dc2ed4"}, - {file = "safetensors-0.4.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:51bc8429d9376224cd3cf7e8ce4f208b4c930cd10e515b6ac6a72cbc3370f0d9"}, - {file = "safetensors-0.4.4-cp310-none-win32.whl", hash = "sha256:fb7b54830cee8cf9923d969e2df87ce20e625b1af2fd194222ab902d3adcc29c"}, - {file = "safetensors-0.4.4-cp310-none-win_amd64.whl", hash = "sha256:4b3e8aa8226d6560de8c2b9d5ff8555ea482599c670610758afdc97f3e021e9c"}, - {file = "safetensors-0.4.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:bbaa31f2cb49013818bde319232ccd72da62ee40f7d2aa532083eda5664e85ff"}, - {file = "safetensors-0.4.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9fdcb80f4e9fbb33b58e9bf95e7dbbedff505d1bcd1c05f7c7ce883632710006"}, - {file = "safetensors-0.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55c14c20be247b8a1aeaf3ab4476265e3ca83096bb8e09bb1a7aa806088def4f"}, - {file = "safetensors-0.4.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:949aaa1118660f992dbf0968487b3e3cfdad67f948658ab08c6b5762e90cc8b6"}, - {file = "safetensors-0.4.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c11a4ab7debc456326a2bac67f35ee0ac792bcf812c7562a4a28559a5c795e27"}, - {file = "safetensors-0.4.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0cea44bba5c5601b297bc8307e4075535b95163402e4906b2e9b82788a2a6df"}, - {file = "safetensors-0.4.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9d752c97f6bbe327352f76e5b86442d776abc789249fc5e72eacb49e6916482"}, - {file = "safetensors-0.4.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:03f2bb92e61b055ef6cc22883ad1ae898010a95730fa988c60a23800eb742c2c"}, - {file = "safetensors-0.4.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:87bf3f91a9328a941acc44eceffd4e1f5f89b030985b2966637e582157173b98"}, - {file = "safetensors-0.4.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:20d218ec2b6899d29d6895419a58b6e44cc5ff8f0cc29fac8d236a8978ab702e"}, - {file = "safetensors-0.4.4-cp311-none-win32.whl", hash = 
"sha256:8079486118919f600c603536e2490ca37b3dbd3280e3ad6eaacfe6264605ac8a"}, - {file = "safetensors-0.4.4-cp311-none-win_amd64.whl", hash = "sha256:2f8c2eb0615e2e64ee27d478c7c13f51e5329d7972d9e15528d3e4cfc4a08f0d"}, - {file = "safetensors-0.4.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:baec5675944b4a47749c93c01c73d826ef7d42d36ba8d0dba36336fa80c76426"}, - {file = "safetensors-0.4.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f15117b96866401825f3e94543145028a2947d19974429246ce59403f49e77c6"}, - {file = "safetensors-0.4.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a13a9caea485df164c51be4eb0c87f97f790b7c3213d635eba2314d959fe929"}, - {file = "safetensors-0.4.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6b54bc4ca5f9b9bba8cd4fb91c24b2446a86b5ae7f8975cf3b7a277353c3127c"}, - {file = "safetensors-0.4.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:08332c22e03b651c8eb7bf5fc2de90044f3672f43403b3d9ac7e7e0f4f76495e"}, - {file = "safetensors-0.4.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bb62841e839ee992c37bb75e75891c7f4904e772db3691c59daaca5b4ab960e1"}, - {file = "safetensors-0.4.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e5b927acc5f2f59547270b0309a46d983edc44be64e1ca27a7fcb0474d6cd67"}, - {file = "safetensors-0.4.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2a69c71b1ae98a8021a09a0b43363b0143b0ce74e7c0e83cacba691b62655fb8"}, - {file = "safetensors-0.4.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:23654ad162c02a5636f0cd520a0310902c4421aab1d91a0b667722a4937cc445"}, - {file = "safetensors-0.4.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0677c109d949cf53756859160b955b2e75b0eefe952189c184d7be30ecf7e858"}, - {file = "safetensors-0.4.4-cp312-none-win32.whl", hash = "sha256:a51d0ddd4deb8871c6de15a772ef40b3dbd26a3c0451bb9e66bc76fc5a784e5b"}, - {file = "safetensors-0.4.4-cp312-none-win_amd64.whl", hash = "sha256:2d065059e75a798bc1933c293b68d04d79b586bb7f8c921e0ca1e82759d0dbb1"}, - {file = "safetensors-0.4.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:9d625692578dd40a112df30c02a1adf068027566abd8e6a74893bb13d441c150"}, - {file = "safetensors-0.4.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7cabcf39c81e5b988d0adefdaea2eb9b4fd9bd62d5ed6559988c62f36bfa9a89"}, - {file = "safetensors-0.4.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8359bef65f49d51476e9811d59c015f0ddae618ee0e44144f5595278c9f8268c"}, - {file = "safetensors-0.4.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1a32c662e7df9226fd850f054a3ead0e4213a96a70b5ce37b2d26ba27004e013"}, - {file = "safetensors-0.4.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c329a4dcc395364a1c0d2d1574d725fe81a840783dda64c31c5a60fc7d41472c"}, - {file = "safetensors-0.4.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:239ee093b1db877c9f8fe2d71331a97f3b9c7c0d3ab9f09c4851004a11f44b65"}, - {file = "safetensors-0.4.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd574145d930cf9405a64f9923600879a5ce51d9f315443a5f706374841327b6"}, - {file = "safetensors-0.4.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f6784eed29f9e036acb0b7769d9e78a0dc2c72c2d8ba7903005350d817e287a4"}, - {file = "safetensors-0.4.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = 
"sha256:65a4a6072436bf0a4825b1c295d248cc17e5f4651e60ee62427a5bcaa8622a7a"}, - {file = "safetensors-0.4.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:df81e3407630de060ae8313da49509c3caa33b1a9415562284eaf3d0c7705f9f"}, - {file = "safetensors-0.4.4-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:e4a0f374200e8443d9746e947ebb346c40f83a3970e75a685ade0adbba5c48d9"}, - {file = "safetensors-0.4.4-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:181fb5f3dee78dae7fd7ec57d02e58f7936498d587c6b7c1c8049ef448c8d285"}, - {file = "safetensors-0.4.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb4ac1d8f6b65ec84ddfacd275079e89d9df7c92f95675ba96c4f790a64df6e"}, - {file = "safetensors-0.4.4-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:76897944cd9239e8a70955679b531b9a0619f76e25476e57ed373322d9c2075d"}, - {file = "safetensors-0.4.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2a9e9d1a27e51a0f69e761a3d581c3af46729ec1c988fa1f839e04743026ae35"}, - {file = "safetensors-0.4.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:005ef9fc0f47cb9821c40793eb029f712e97278dae84de91cb2b4809b856685d"}, - {file = "safetensors-0.4.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26987dac3752688c696c77c3576f951dbbdb8c57f0957a41fb6f933cf84c0b62"}, - {file = "safetensors-0.4.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c05270b290acd8d249739f40d272a64dd597d5a4b90f27d830e538bc2549303c"}, - {file = "safetensors-0.4.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:068d3a33711fc4d93659c825a04480ff5a3854e1d78632cdc8f37fee917e8a60"}, - {file = "safetensors-0.4.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:063421ef08ca1021feea8b46951251b90ae91f899234dd78297cbe7c1db73b99"}, - {file = "safetensors-0.4.4-cp37-none-win32.whl", hash = "sha256:d52f5d0615ea83fd853d4e1d8acf93cc2e0223ad4568ba1e1f6ca72e94ea7b9d"}, - {file = "safetensors-0.4.4-cp37-none-win_amd64.whl", hash = "sha256:88a5ac3280232d4ed8e994cbc03b46a1807ce0aa123867b40c4a41f226c61f94"}, - {file = "safetensors-0.4.4-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:3467ab511bfe3360967d7dc53b49f272d59309e57a067dd2405b4d35e7dcf9dc"}, - {file = "safetensors-0.4.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2ab4c96d922e53670ce25fbb9b63d5ea972e244de4fa1dd97b590d9fd66aacef"}, - {file = "safetensors-0.4.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87df18fce4440477c3ef1fd7ae17c704a69a74a77e705a12be135ee0651a0c2d"}, - {file = "safetensors-0.4.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0e5fe345b2bc7d88587149ac11def1f629d2671c4c34f5df38aed0ba59dc37f8"}, - {file = "safetensors-0.4.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9f1a3e01dce3cd54060791e7e24588417c98b941baa5974700eeb0b8eb65b0a0"}, - {file = "safetensors-0.4.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c6bf35e9a8998d8339fd9a05ac4ce465a4d2a2956cc0d837b67c4642ed9e947"}, - {file = "safetensors-0.4.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:166c0c52f6488b8538b2a9f3fbc6aad61a7261e170698779b371e81b45f0440d"}, - {file = "safetensors-0.4.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:87e9903b8668a16ef02c08ba4ebc91e57a49c481e9b5866e31d798632805014b"}, - {file = "safetensors-0.4.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:a9c421153aa23c323bd8483d4155b4eee82c9a50ac11cccd83539104a8279c64"}, - {file = "safetensors-0.4.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a4b8617499b2371c7353302c5116a7e0a3a12da66389ce53140e607d3bf7b3d3"}, - {file = "safetensors-0.4.4-cp38-none-win32.whl", hash = "sha256:c6280f5aeafa1731f0a3709463ab33d8e0624321593951aefada5472f0b313fd"}, - {file = "safetensors-0.4.4-cp38-none-win_amd64.whl", hash = "sha256:6ceed6247fc2d33b2a7b7d25d8a0fe645b68798856e0bc7a9800c5fd945eb80f"}, - {file = "safetensors-0.4.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:5cf6c6f6193797372adf50c91d0171743d16299491c75acad8650107dffa9269"}, - {file = "safetensors-0.4.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:419010156b914a3e5da4e4adf992bee050924d0fe423c4b329e523e2c14c3547"}, - {file = "safetensors-0.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88f6fd5a5c1302ce79993cc5feeadcc795a70f953c762544d01fb02b2db4ea33"}, - {file = "safetensors-0.4.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d468cffb82d90789696d5b4d8b6ab8843052cba58a15296691a7a3df55143cd2"}, - {file = "safetensors-0.4.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9353c2af2dd467333d4850a16edb66855e795561cd170685178f706c80d2c71e"}, - {file = "safetensors-0.4.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:83c155b4a33368d9b9c2543e78f2452090fb030c52401ca608ef16fa58c98353"}, - {file = "safetensors-0.4.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9850754c434e636ce3dc586f534bb23bcbd78940c304775bee9005bf610e98f1"}, - {file = "safetensors-0.4.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:275f500b4d26f67b6ec05629a4600645231bd75e4ed42087a7c1801bff04f4b3"}, - {file = "safetensors-0.4.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5c2308de665b7130cd0e40a2329278226e4cf083f7400c51ca7e19ccfb3886f3"}, - {file = "safetensors-0.4.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e06a9ebc8656e030ccfe44634f2a541b4b1801cd52e390a53ad8bacbd65f8518"}, - {file = "safetensors-0.4.4-cp39-none-win32.whl", hash = "sha256:ef73df487b7c14b477016947c92708c2d929e1dee2bacdd6fff5a82ed4539537"}, - {file = "safetensors-0.4.4-cp39-none-win_amd64.whl", hash = "sha256:83d054818a8d1198d8bd8bc3ea2aac112a2c19def2bf73758321976788706398"}, - {file = "safetensors-0.4.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1d1f34c71371f0e034004a0b583284b45d233dd0b5f64a9125e16b8a01d15067"}, - {file = "safetensors-0.4.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1a8043a33d58bc9b30dfac90f75712134ca34733ec3d8267b1bd682afe7194f5"}, - {file = "safetensors-0.4.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8db8f0c59c84792c12661f8efa85de160f80efe16b87a9d5de91b93f9e0bce3c"}, - {file = "safetensors-0.4.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cfc1fc38e37630dd12d519bdec9dcd4b345aec9930bb9ce0ed04461f49e58b52"}, - {file = "safetensors-0.4.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e5c9d86d9b13b18aafa88303e2cd21e677f5da2a14c828d2c460fe513af2e9a5"}, - {file = "safetensors-0.4.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:43251d7f29a59120a26f5a0d9583b9e112999e500afabcfdcb91606d3c5c89e3"}, - {file = "safetensors-0.4.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:2c42e9b277513b81cf507e6121c7b432b3235f980cac04f39f435b7902857f91"}, - {file = 
"safetensors-0.4.4-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3daacc9a4e3f428a84dd56bf31f20b768eb0b204af891ed68e1f06db9edf546f"}, - {file = "safetensors-0.4.4-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:218bbb9b883596715fc9997bb42470bf9f21bb832c3b34c2bf744d6fa8f2bbba"}, - {file = "safetensors-0.4.4-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bd5efc26b39f7fc82d4ab1d86a7f0644c8e34f3699c33f85bfa9a717a030e1b"}, - {file = "safetensors-0.4.4-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:56ad9776b65d8743f86698a1973292c966cf3abff627efc44ed60e66cc538ddd"}, - {file = "safetensors-0.4.4-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:30f23e6253c5f43a809dea02dc28a9f5fa747735dc819f10c073fe1b605e97d4"}, - {file = "safetensors-0.4.4-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:5512078d00263de6cb04e9d26c9ae17611098f52357fea856213e38dc462f81f"}, - {file = "safetensors-0.4.4-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b96c3d9266439d17f35fc2173111d93afc1162f168e95aed122c1ca517b1f8f1"}, - {file = "safetensors-0.4.4-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:08d464aa72a9a13826946b4fb9094bb4b16554bbea2e069e20bd903289b6ced9"}, - {file = "safetensors-0.4.4-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:210160816d5a36cf41f48f38473b6f70d7bcb4b0527bedf0889cc0b4c3bb07db"}, - {file = "safetensors-0.4.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb276a53717f2bcfb6df0bcf284d8a12069002508d4c1ca715799226024ccd45"}, - {file = "safetensors-0.4.4-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a2c28c6487f17d8db0089e8b2cdc13de859366b94cc6cdc50e1b0a4147b56551"}, - {file = "safetensors-0.4.4-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:7915f0c60e4e6e65d90f136d85dd3b429ae9191c36b380e626064694563dbd9f"}, - {file = "safetensors-0.4.4-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:00eea99ae422fbfa0b46065acbc58b46bfafadfcec179d4b4a32d5c45006af6c"}, - {file = "safetensors-0.4.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:bb1ed4fcb0b3c2f3ea2c5767434622fe5d660e5752f21ac2e8d737b1e5e480bb"}, - {file = "safetensors-0.4.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:73fc9a0a4343188bdb421783e600bfaf81d0793cd4cce6bafb3c2ed567a74cd5"}, - {file = "safetensors-0.4.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c37e6b714200824c73ca6eaf007382de76f39466a46e97558b8dc4cf643cfbf"}, - {file = "safetensors-0.4.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f75698c5c5c542417ac4956acfc420f7d4a2396adca63a015fd66641ea751759"}, - {file = "safetensors-0.4.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ca1a209157f242eb183e209040097118472e169f2e069bfbd40c303e24866543"}, - {file = "safetensors-0.4.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:177f2b60a058f92a3cec7a1786c9106c29eca8987ecdfb79ee88126e5f47fa31"}, - {file = "safetensors-0.4.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ee9622e84fe6e4cd4f020e5fda70d6206feff3157731df7151d457fdae18e541"}, - {file = "safetensors-0.4.4.tar.gz", hash = "sha256:5fe3e9b705250d0172ed4e100a811543108653fb2b66b9e702a088ad03772a07"}, -] - -[package.extras] -all = ["safetensors[jax]", "safetensors[numpy]", "safetensors[paddlepaddle]", "safetensors[pinned-tf]", 
"safetensors[quality]", "safetensors[testing]", "safetensors[torch]"] -dev = ["safetensors[all]"] -jax = ["flax (>=0.6.3)", "jax (>=0.3.25)", "jaxlib (>=0.3.25)", "safetensors[numpy]"] -mlx = ["mlx (>=0.0.9)"] -numpy = ["numpy (>=1.21.6)"] -paddlepaddle = ["paddlepaddle (>=2.4.1)", "safetensors[numpy]"] -pinned-tf = ["safetensors[numpy]", "tensorflow (==2.11.0)"] -quality = ["black (==22.3)", "click (==8.0.4)", "flake8 (>=3.8.3)", "isort (>=5.5.4)"] -tensorflow = ["safetensors[numpy]", "tensorflow (>=2.11.0)"] -testing = ["h5py (>=3.7.0)", "huggingface-hub (>=0.12.1)", "hypothesis (>=6.70.2)", "pytest (>=7.2.0)", "pytest-benchmark (>=4.0.0)", "safetensors[numpy]", "setuptools-rust (>=1.5.2)"] -torch = ["safetensors[numpy]", "torch (>=1.10)"] - -[[package]] -name = "schema" -version = "0.7.7" -description = "Simple data validation library" -optional = false -python-versions = "*" -files = [ - {file = "schema-0.7.7-py2.py3-none-any.whl", hash = "sha256:5d976a5b50f36e74e2157b47097b60002bd4d42e65425fcc9c9befadb4255dde"}, - {file = "schema-0.7.7.tar.gz", hash = "sha256:7da553abd2958a19dc2547c388cde53398b39196175a9be59ea1caf5ab0a1807"}, -] - -[[package]] -name = "scikit-learn" -version = "1.5.1" -description = "A set of python modules for machine learning and data mining" -optional = true -python-versions = ">=3.9" -files = [ - {file = "scikit_learn-1.5.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:781586c414f8cc58e71da4f3d7af311e0505a683e112f2f62919e3019abd3745"}, - {file = "scikit_learn-1.5.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:f5b213bc29cc30a89a3130393b0e39c847a15d769d6e59539cd86b75d276b1a7"}, - {file = "scikit_learn-1.5.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ff4ba34c2abff5ec59c803ed1d97d61b036f659a17f55be102679e88f926fac"}, - {file = "scikit_learn-1.5.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:161808750c267b77b4a9603cf9c93579c7a74ba8486b1336034c2f1579546d21"}, - {file = "scikit_learn-1.5.1-cp310-cp310-win_amd64.whl", hash = "sha256:10e49170691514a94bb2e03787aa921b82dbc507a4ea1f20fd95557862c98dc1"}, - {file = "scikit_learn-1.5.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:154297ee43c0b83af12464adeab378dee2d0a700ccd03979e2b821e7dd7cc1c2"}, - {file = "scikit_learn-1.5.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:b5e865e9bd59396220de49cb4a57b17016256637c61b4c5cc81aaf16bc123bbe"}, - {file = "scikit_learn-1.5.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:909144d50f367a513cee6090873ae582dba019cb3fca063b38054fa42704c3a4"}, - {file = "scikit_learn-1.5.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:689b6f74b2c880276e365fe84fe4f1befd6a774f016339c65655eaff12e10cbf"}, - {file = "scikit_learn-1.5.1-cp311-cp311-win_amd64.whl", hash = "sha256:9a07f90846313a7639af6a019d849ff72baadfa4c74c778821ae0fad07b7275b"}, - {file = "scikit_learn-1.5.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5944ce1faada31c55fb2ba20a5346b88e36811aab504ccafb9f0339e9f780395"}, - {file = "scikit_learn-1.5.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:0828673c5b520e879f2af6a9e99eee0eefea69a2188be1ca68a6121b809055c1"}, - {file = "scikit_learn-1.5.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:508907e5f81390e16d754e8815f7497e52139162fd69c4fdbd2dfa5d6cc88915"}, - {file = "scikit_learn-1.5.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:97625f217c5c0c5d0505fa2af28ae424bd37949bb2f16ace3ff5f2f81fb4498b"}, - {file = "scikit_learn-1.5.1-cp312-cp312-win_amd64.whl", hash = "sha256:da3f404e9e284d2b0a157e1b56b6566a34eb2798205cba35a211df3296ab7a74"}, - {file = "scikit_learn-1.5.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:88e0672c7ac21eb149d409c74cc29f1d611d5158175846e7a9c2427bd12b3956"}, - {file = "scikit_learn-1.5.1-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:7b073a27797a283187a4ef4ee149959defc350b46cbf63a84d8514fe16b69855"}, - {file = "scikit_learn-1.5.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b59e3e62d2be870e5c74af4e793293753565c7383ae82943b83383fdcf5cc5c1"}, - {file = "scikit_learn-1.5.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bd8d3a19d4bd6dc5a7d4f358c8c3a60934dc058f363c34c0ac1e9e12a31421d"}, - {file = "scikit_learn-1.5.1-cp39-cp39-win_amd64.whl", hash = "sha256:5f57428de0c900a98389c4a433d4a3cf89de979b3aa24d1c1d251802aa15e44d"}, - {file = "scikit_learn-1.5.1.tar.gz", hash = "sha256:0ea5d40c0e3951df445721927448755d3fe1d80833b0b7308ebff5d2a45e6414"}, -] - -[package.dependencies] -joblib = ">=1.2.0" -numpy = ">=1.19.5" -scipy = ">=1.6.0" -threadpoolctl = ">=3.1.0" - -[package.extras] -benchmark = ["matplotlib (>=3.3.4)", "memory_profiler (>=0.57.0)", "pandas (>=1.1.5)"] -build = ["cython (>=3.0.10)", "meson-python (>=0.16.0)", "numpy (>=1.19.5)", "scipy (>=1.6.0)"] -docs = ["Pillow (>=7.1.2)", "matplotlib (>=3.3.4)", "memory_profiler (>=0.57.0)", "numpydoc (>=1.2.0)", "pandas (>=1.1.5)", "plotly (>=5.14.0)", "polars (>=0.20.23)", "pooch (>=1.6.0)", "pydata-sphinx-theme (>=0.15.3)", "scikit-image (>=0.17.2)", "seaborn (>=0.9.0)", "sphinx (>=7.3.7)", "sphinx-copybutton (>=0.5.2)", "sphinx-design (>=0.5.0)", "sphinx-gallery (>=0.16.0)", "sphinx-prompt (>=1.4.0)", "sphinx-remove-toctrees (>=1.0.0.post1)", "sphinxcontrib-sass (>=0.3.4)", "sphinxext-opengraph (>=0.9.1)"] -examples = ["matplotlib (>=3.3.4)", "pandas (>=1.1.5)", "plotly (>=5.14.0)", "pooch (>=1.6.0)", "scikit-image (>=0.17.2)", "seaborn (>=0.9.0)"] -install = ["joblib (>=1.2.0)", "numpy (>=1.19.5)", "scipy (>=1.6.0)", "threadpoolctl (>=3.1.0)"] -maintenance = ["conda-lock (==2.5.6)"] -tests = ["black (>=24.3.0)", "matplotlib (>=3.3.4)", "mypy (>=1.9)", "numpydoc (>=1.2.0)", "pandas (>=1.1.5)", "polars (>=0.20.23)", "pooch (>=1.6.0)", "pyamg (>=4.0.0)", "pyarrow (>=12.0.0)", "pytest (>=7.1.2)", "pytest-cov (>=2.9.0)", "ruff (>=0.2.1)", "scikit-image (>=0.17.2)"] - -[[package]] -name = "scipy" -version = "1.14.0" -description = "Fundamental algorithms for scientific computing in Python" -optional = false -python-versions = ">=3.10" -files = [ - {file = "scipy-1.14.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7e911933d54ead4d557c02402710c2396529540b81dd554fc1ba270eb7308484"}, - {file = "scipy-1.14.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:687af0a35462402dd851726295c1a5ae5f987bd6e9026f52e9505994e2f84ef6"}, - {file = "scipy-1.14.0-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:07e179dc0205a50721022344fb85074f772eadbda1e1b3eecdc483f8033709b7"}, - {file = "scipy-1.14.0-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:6a9c9a9b226d9a21e0a208bdb024c3982932e43811b62d202aaf1bb59af264b1"}, - {file = "scipy-1.14.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:076c27284c768b84a45dcf2e914d4000aac537da74236a0d45d82c6fa4b7b3c0"}, - {file = "scipy-1.14.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:42470ea0195336df319741e230626b6225a740fd9dce9642ca13e98f667047c0"}, - {file = "scipy-1.14.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:176c6f0d0470a32f1b2efaf40c3d37a24876cebf447498a4cefb947a79c21e9d"}, - {file = "scipy-1.14.0-cp310-cp310-win_amd64.whl", hash = "sha256:ad36af9626d27a4326c8e884917b7ec321d8a1841cd6dacc67d2a9e90c2f0359"}, - {file = "scipy-1.14.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6d056a8709ccda6cf36cdd2eac597d13bc03dba38360f418560a93050c76a16e"}, - {file = "scipy-1.14.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:f0a50da861a7ec4573b7c716b2ebdcdf142b66b756a0d392c236ae568b3a93fb"}, - {file = "scipy-1.14.0-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:94c164a9e2498e68308e6e148646e486d979f7fcdb8b4cf34b5441894bdb9caf"}, - {file = "scipy-1.14.0-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:a7d46c3e0aea5c064e734c3eac5cf9eb1f8c4ceee756262f2c7327c4c2691c86"}, - {file = "scipy-1.14.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9eee2989868e274aae26125345584254d97c56194c072ed96cb433f32f692ed8"}, - {file = "scipy-1.14.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e3154691b9f7ed73778d746da2df67a19d046a6c8087c8b385bc4cdb2cfca74"}, - {file = "scipy-1.14.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c40003d880f39c11c1edbae8144e3813904b10514cd3d3d00c277ae996488cdb"}, - {file = "scipy-1.14.0-cp311-cp311-win_amd64.whl", hash = "sha256:5b083c8940028bb7e0b4172acafda6df762da1927b9091f9611b0bcd8676f2bc"}, - {file = "scipy-1.14.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:bff2438ea1330e06e53c424893ec0072640dac00f29c6a43a575cbae4c99b2b9"}, - {file = "scipy-1.14.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:bbc0471b5f22c11c389075d091d3885693fd3f5e9a54ce051b46308bc787e5d4"}, - {file = "scipy-1.14.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:64b2ff514a98cf2bb734a9f90d32dc89dc6ad4a4a36a312cd0d6327170339eb0"}, - {file = "scipy-1.14.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:7d3da42fbbbb860211a811782504f38ae7aaec9de8764a9bef6b262de7a2b50f"}, - {file = "scipy-1.14.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d91db2c41dd6c20646af280355d41dfa1ec7eead235642178bd57635a3f82209"}, - {file = "scipy-1.14.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a01cc03bcdc777c9da3cfdcc74b5a75caffb48a6c39c8450a9a05f82c4250a14"}, - {file = "scipy-1.14.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:65df4da3c12a2bb9ad52b86b4dcf46813e869afb006e58be0f516bc370165159"}, - {file = "scipy-1.14.0-cp312-cp312-win_amd64.whl", hash = "sha256:4c4161597c75043f7154238ef419c29a64ac4a7c889d588ea77690ac4d0d9b20"}, - {file = "scipy-1.14.0.tar.gz", hash = "sha256:b5923f48cb840380f9854339176ef21763118a7300a88203ccd0bdd26e58527b"}, -] - -[package.dependencies] -numpy = ">=1.23.5,<2.3" - -[package.extras] -dev = ["cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy (==1.10.0)", "pycodestyle", "pydevtool", "rich-click", "ruff (>=0.0.292)", "types-psutil", "typing_extensions"] -doc = ["jupyterlite-pyodide-kernel", "jupyterlite-sphinx (>=0.13.1)", "jupytext", "matplotlib (>=3.5)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (>=0.15.2)", "sphinx (>=5.0.0)", "sphinx-design (>=0.4.0)"] -test = ["Cython", "array-api-strict", "asv", "gmpy2", "hypothesis (>=6.30)", "meson", "mpmath", "ninja", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] - -[[package]] -name = 
"selenium" -version = "4.23.1" -description = "Official Python bindings for Selenium WebDriver" -optional = false -python-versions = ">=3.8" -files = [ - {file = "selenium-4.23.1-py3-none-any.whl", hash = "sha256:3a8d9f23dc636bd3840dd56f00c2739e32ec0c1e34a821dd553e15babef24477"}, - {file = "selenium-4.23.1.tar.gz", hash = "sha256:128d099e66284437e7128d2279176ec7a06e6ec7426e167f5d34987166bd8f46"}, -] - -[package.dependencies] -certifi = ">=2021.10.8" -trio = ">=0.17,<1.0" -trio-websocket = ">=0.9,<1.0" -typing_extensions = ">=4.9,<5.0" -urllib3 = {version = ">=1.26,<3", extras = ["socks"]} -websocket-client = ">=1.8,<2.0" - -[[package]] -name = "semver" -version = "3.0.2" -description = "Python helper for Semantic Versioning (https://semver.org)" -optional = false -python-versions = ">=3.7" -files = [ - {file = "semver-3.0.2-py3-none-any.whl", hash = "sha256:b1ea4686fe70b981f85359eda33199d60c53964284e0cfb4977d243e37cf4bf4"}, - {file = "semver-3.0.2.tar.gz", hash = "sha256:6253adb39c70f6e51afed2fa7152bcd414c411286088fb4b9effb133885ab4cc"}, -] - -[[package]] -name = "sentence-transformers" -version = "2.7.0" -description = "Multilingual text embeddings" -optional = true -python-versions = ">=3.8.0" -files = [ - {file = "sentence_transformers-2.7.0-py3-none-any.whl", hash = "sha256:6a7276b05a95931581bbfa4ba49d780b2cf6904fa4a171ec7fd66c343f761c98"}, - {file = "sentence_transformers-2.7.0.tar.gz", hash = "sha256:2f7df99d1c021dded471ed2d079e9d1e4fc8e30ecb06f957be060511b36f24ea"}, -] - -[package.dependencies] -huggingface-hub = ">=0.15.1" -numpy = "*" -Pillow = "*" -scikit-learn = "*" -scipy = "*" -torch = ">=1.11.0" -tqdm = "*" -transformers = ">=4.34.0,<5.0.0" - -[package.extras] -dev = ["pre-commit", "pytest", "ruff (>=0.3.0)"] - -[[package]] -name = "sentry-sdk" -version = "2.13.0" -description = "Python client for Sentry (https://sentry.io)" -optional = false -python-versions = ">=3.6" -files = [ - {file = "sentry_sdk-2.13.0-py2.py3-none-any.whl", hash = "sha256:6beede8fc2ab4043da7f69d95534e320944690680dd9a963178a49de71d726c6"}, - {file = "sentry_sdk-2.13.0.tar.gz", hash = "sha256:8d4a576f7a98eb2fdb40e13106e41f330e5c79d72a68be1316e7852cf4995260"}, -] - -[package.dependencies] -certifi = "*" -fastapi = {version = ">=0.79.0", optional = true, markers = "extra == \"fastapi\""} -loguru = {version = ">=0.5", optional = true, markers = "extra == \"loguru\""} -urllib3 = ">=1.26.11" - -[package.extras] -aiohttp = ["aiohttp (>=3.5)"] -anthropic = ["anthropic (>=0.16)"] -arq = ["arq (>=0.23)"] -asyncpg = ["asyncpg (>=0.23)"] -beam = ["apache-beam (>=2.12)"] -bottle = ["bottle (>=0.12.13)"] -celery = ["celery (>=3)"] -celery-redbeat = ["celery-redbeat (>=2)"] -chalice = ["chalice (>=1.16.0)"] -clickhouse-driver = ["clickhouse-driver (>=0.2.0)"] -django = ["django (>=1.8)"] -falcon = ["falcon (>=1.4)"] -fastapi = ["fastapi (>=0.79.0)"] -flask = ["blinker (>=1.1)", "flask (>=0.11)", "markupsafe"] -grpcio = ["grpcio (>=1.21.1)", "protobuf (>=3.8.0)"] -httpx = ["httpx (>=0.16.0)"] -huey = ["huey (>=2)"] -huggingface-hub = ["huggingface-hub (>=0.22)"] -langchain = ["langchain (>=0.0.210)"] -litestar = ["litestar (>=2.0.0)"] -loguru = ["loguru (>=0.5)"] -openai = ["openai (>=1.0.0)", "tiktoken (>=0.3.0)"] -opentelemetry = ["opentelemetry-distro (>=0.35b0)"] -opentelemetry-experimental = ["opentelemetry-distro"] -pure-eval = ["asttokens", "executing", "pure-eval"] -pymongo = ["pymongo (>=3.1)"] -pyspark = ["pyspark (>=2.4.4)"] -quart = ["blinker (>=1.1)", "quart (>=0.16.1)"] -rq = ["rq (>=0.6)"] -sanic = 
["sanic (>=0.8)"] -sqlalchemy = ["sqlalchemy (>=1.2)"] -starlette = ["starlette (>=0.19.1)"] -starlite = ["starlite (>=1.48)"] -tornado = ["tornado (>=6)"] - -[[package]] -name = "setuptools" -version = "72.2.0" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "setuptools-72.2.0-py3-none-any.whl", hash = "sha256:f11dd94b7bae3a156a95ec151f24e4637fb4fa19c878e4d191bfb8b2d82728c4"}, - {file = "setuptools-72.2.0.tar.gz", hash = "sha256:80aacbf633704e9c8bfa1d99fa5dd4dc59573efcf9e4042c13d3bcef91ac2ef9"}, -] - -[package.extras] -core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.text (>=3.7)", "more-itertools (>=8.8)", "ordered-set (>=3.1.1)", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.11.*)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (<0.4)", "pytest-ruff (>=0.2.1)", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] - -[[package]] -name = "shapely" -version = "2.0.5" -description = "Manipulation and analysis of geometric objects" -optional = false -python-versions = ">=3.7" -files = [ - {file = "shapely-2.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:89d34787c44f77a7d37d55ae821f3a784fa33592b9d217a45053a93ade899375"}, - {file = "shapely-2.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:798090b426142df2c5258779c1d8d5734ec6942f778dab6c6c30cfe7f3bf64ff"}, - {file = "shapely-2.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45211276900c4790d6bfc6105cbf1030742da67594ea4161a9ce6812a6721e68"}, - {file = "shapely-2.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e119444bc27ca33e786772b81760f2028d930ac55dafe9bc50ef538b794a8e1"}, - {file = "shapely-2.0.5-cp310-cp310-win32.whl", hash = "sha256:9a4492a2b2ccbeaebf181e7310d2dfff4fdd505aef59d6cb0f217607cb042fb3"}, - {file = "shapely-2.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:1e5cb5ee72f1bc7ace737c9ecd30dc174a5295fae412972d3879bac2e82c8fae"}, - {file = "shapely-2.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5bbfb048a74cf273db9091ff3155d373020852805a37dfc846ab71dde4be93ec"}, - {file = "shapely-2.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93be600cbe2fbaa86c8eb70656369f2f7104cd231f0d6585c7d0aa555d6878b8"}, - {file = "shapely-2.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f8e71bb9a46814019f6644c4e2560a09d44b80100e46e371578f35eaaa9da1c"}, - {file = "shapely-2.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5251c28a29012e92de01d2e84f11637eb1d48184ee8f22e2df6c8c578d26760"}, - {file = 
"shapely-2.0.5-cp311-cp311-win32.whl", hash = "sha256:35110e80070d664781ec7955c7de557456b25727a0257b354830abb759bf8311"}, - {file = "shapely-2.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:6c6b78c0007a34ce7144f98b7418800e0a6a5d9a762f2244b00ea560525290c9"}, - {file = "shapely-2.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:03bd7b5fa5deb44795cc0a503999d10ae9d8a22df54ae8d4a4cd2e8a93466195"}, - {file = "shapely-2.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ff9521991ed9e201c2e923da014e766c1aa04771bc93e6fe97c27dcf0d40ace"}, - {file = "shapely-2.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b65365cfbf657604e50d15161ffcc68de5cdb22a601bbf7823540ab4918a98d"}, - {file = "shapely-2.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21f64e647a025b61b19585d2247137b3a38a35314ea68c66aaf507a1c03ef6fe"}, - {file = "shapely-2.0.5-cp312-cp312-win32.whl", hash = "sha256:3ac7dc1350700c139c956b03d9c3df49a5b34aaf91d024d1510a09717ea39199"}, - {file = "shapely-2.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:30e8737983c9d954cd17feb49eb169f02f1da49e24e5171122cf2c2b62d65c95"}, - {file = "shapely-2.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:ff7731fea5face9ec08a861ed351734a79475631b7540ceb0b66fb9732a5f529"}, - {file = "shapely-2.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff9e520af0c5a578e174bca3c18713cd47a6c6a15b6cf1f50ac17dc8bb8db6a2"}, - {file = "shapely-2.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49b299b91557b04acb75e9732645428470825061f871a2edc36b9417d66c1fc5"}, - {file = "shapely-2.0.5-cp37-cp37m-win32.whl", hash = "sha256:b5870633f8e684bf6d1ae4df527ddcb6f3895f7b12bced5c13266ac04f47d231"}, - {file = "shapely-2.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:401cb794c5067598f50518e5a997e270cd7642c4992645479b915c503866abed"}, - {file = "shapely-2.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e91ee179af539100eb520281ba5394919067c6b51824e6ab132ad4b3b3e76dd0"}, - {file = "shapely-2.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8af6f7260f809c0862741ad08b1b89cb60c130ae30efab62320bbf4ee9cc71fa"}, - {file = "shapely-2.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f5456dd522800306ba3faef77c5ba847ec30a0bd73ab087a25e0acdd4db2514f"}, - {file = "shapely-2.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b714a840402cde66fd7b663bb08cacb7211fa4412ea2a209688f671e0d0631fd"}, - {file = "shapely-2.0.5-cp38-cp38-win32.whl", hash = "sha256:7e8cf5c252fac1ea51b3162be2ec3faddedc82c256a1160fc0e8ddbec81b06d2"}, - {file = "shapely-2.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:4461509afdb15051e73ab178fae79974387f39c47ab635a7330d7fee02c68a3f"}, - {file = "shapely-2.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7545a39c55cad1562be302d74c74586f79e07b592df8ada56b79a209731c0219"}, - {file = "shapely-2.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4c83a36f12ec8dee2066946d98d4d841ab6512a6ed7eb742e026a64854019b5f"}, - {file = "shapely-2.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89e640c2cd37378480caf2eeda9a51be64201f01f786d127e78eaeff091ec897"}, - {file = "shapely-2.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06efe39beafde3a18a21dde169d32f315c57da962826a6d7d22630025200c5e6"}, - {file = "shapely-2.0.5-cp39-cp39-win32.whl", hash = "sha256:8203a8b2d44dcb366becbc8c3d553670320e4acf0616c39e218c9561dd738d92"}, - {file = 
"shapely-2.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:7fed9dbfbcfec2682d9a047b9699db8dcc890dfca857ecba872c42185fc9e64e"}, - {file = "shapely-2.0.5.tar.gz", hash = "sha256:bff2366bc786bfa6cb353d6b47d0443c570c32776612e527ee47b6df63fcfe32"}, -] - -[package.dependencies] -numpy = ">=1.14,<3" - -[package.extras] -docs = ["matplotlib", "numpydoc (==1.1.*)", "sphinx", "sphinx-book-theme", "sphinx-remove-toctrees"] -test = ["pytest", "pytest-cov"] - -[[package]] -name = "shellingham" -version = "1.5.4" -description = "Tool to Detect Surrounding Shell" -optional = false -python-versions = ">=3.7" -files = [ - {file = "shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686"}, - {file = "shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de"}, -] - -[[package]] -name = "six" -version = "1.16.0" -description = "Python 2 and 3 compatibility utilities" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] - -[[package]] -name = "smmap" -version = "5.0.1" -description = "A pure Python implementation of a sliding window memory map manager" -optional = false -python-versions = ">=3.7" -files = [ - {file = "smmap-5.0.1-py3-none-any.whl", hash = "sha256:e6d8668fa5f93e706934a62d7b4db19c8d9eb8cf2adbb75ef1b675aa332b69da"}, - {file = "smmap-5.0.1.tar.gz", hash = "sha256:dceeb6c0028fdb6734471eb07c0cd2aae706ccaecab45965ee83f11c8d3b1f62"}, -] - -[[package]] -name = "sniffio" -version = "1.3.1" -description = "Sniff out which async library your code is running under" -optional = false -python-versions = ">=3.7" -files = [ - {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, - {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, -] - -[[package]] -name = "sortedcontainers" -version = "2.4.0" -description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set" -optional = false -python-versions = "*" -files = [ - {file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"}, - {file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"}, -] - -[[package]] -name = "soupsieve" -version = "2.6" -description = "A modern CSS selector implementation for Beautiful Soup." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9"}, - {file = "soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb"}, -] - -[[package]] -name = "spider-client" -version = "0.0.27" -description = "Python SDK for Spider Cloud API" -optional = false -python-versions = "*" -files = [ - {file = "spider-client-0.0.27.tar.gz", hash = "sha256:c3feaf5c491bd9a6c509efa0c8789452497073d9f68e70fc90e7626a6a8365aa"}, -] - -[package.dependencies] -requests = "*" - -[[package]] -name = "sqlalchemy" -version = "2.0.32" -description = "Database Abstraction Library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "SQLAlchemy-2.0.32-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0c9045ecc2e4db59bfc97b20516dfdf8e41d910ac6fb667ebd3a79ea54084619"}, - {file = "SQLAlchemy-2.0.32-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1467940318e4a860afd546ef61fefb98a14d935cd6817ed07a228c7f7c62f389"}, - {file = "SQLAlchemy-2.0.32-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5954463675cb15db8d4b521f3566a017c8789222b8316b1e6934c811018ee08b"}, - {file = "SQLAlchemy-2.0.32-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:167e7497035c303ae50651b351c28dc22a40bb98fbdb8468cdc971821b1ae533"}, - {file = "SQLAlchemy-2.0.32-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b27dfb676ac02529fb6e343b3a482303f16e6bc3a4d868b73935b8792edb52d0"}, - {file = "SQLAlchemy-2.0.32-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:bf2360a5e0f7bd75fa80431bf8ebcfb920c9f885e7956c7efde89031695cafb8"}, - {file = "SQLAlchemy-2.0.32-cp310-cp310-win32.whl", hash = "sha256:306fe44e754a91cd9d600a6b070c1f2fadbb4a1a257b8781ccf33c7067fd3e4d"}, - {file = "SQLAlchemy-2.0.32-cp310-cp310-win_amd64.whl", hash = "sha256:99db65e6f3ab42e06c318f15c98f59a436f1c78179e6a6f40f529c8cc7100b22"}, - {file = "SQLAlchemy-2.0.32-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:21b053be28a8a414f2ddd401f1be8361e41032d2ef5884b2f31d31cb723e559f"}, - {file = "SQLAlchemy-2.0.32-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b178e875a7a25b5938b53b006598ee7645172fccafe1c291a706e93f48499ff5"}, - {file = "SQLAlchemy-2.0.32-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723a40ee2cc7ea653645bd4cf024326dea2076673fc9d3d33f20f6c81db83e1d"}, - {file = "SQLAlchemy-2.0.32-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:295ff8689544f7ee7e819529633d058bd458c1fd7f7e3eebd0f9268ebc56c2a0"}, - {file = "SQLAlchemy-2.0.32-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:49496b68cd190a147118af585173ee624114dfb2e0297558c460ad7495f9dfe2"}, - {file = "SQLAlchemy-2.0.32-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:acd9b73c5c15f0ec5ce18128b1fe9157ddd0044abc373e6ecd5ba376a7e5d961"}, - {file = "SQLAlchemy-2.0.32-cp311-cp311-win32.whl", hash = "sha256:9365a3da32dabd3e69e06b972b1ffb0c89668994c7e8e75ce21d3e5e69ddef28"}, - {file = "SQLAlchemy-2.0.32-cp311-cp311-win_amd64.whl", hash = "sha256:8bd63d051f4f313b102a2af1cbc8b80f061bf78f3d5bd0843ff70b5859e27924"}, - {file = "SQLAlchemy-2.0.32-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6bab3db192a0c35e3c9d1560eb8332463e29e5507dbd822e29a0a3c48c0a8d92"}, - {file = "SQLAlchemy-2.0.32-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:19d98f4f58b13900d8dec4ed09dd09ef292208ee44cc9c2fe01c1f0a2fe440e9"}, - {file = 
"SQLAlchemy-2.0.32-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cd33c61513cb1b7371fd40cf221256456d26a56284e7d19d1f0b9f1eb7dd7e8"}, - {file = "SQLAlchemy-2.0.32-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d6ba0497c1d066dd004e0f02a92426ca2df20fac08728d03f67f6960271feec"}, - {file = "SQLAlchemy-2.0.32-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2b6be53e4fde0065524f1a0a7929b10e9280987b320716c1509478b712a7688c"}, - {file = "SQLAlchemy-2.0.32-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:916a798f62f410c0b80b63683c8061f5ebe237b0f4ad778739304253353bc1cb"}, - {file = "SQLAlchemy-2.0.32-cp312-cp312-win32.whl", hash = "sha256:31983018b74908ebc6c996a16ad3690301a23befb643093fcfe85efd292e384d"}, - {file = "SQLAlchemy-2.0.32-cp312-cp312-win_amd64.whl", hash = "sha256:4363ed245a6231f2e2957cccdda3c776265a75851f4753c60f3004b90e69bfeb"}, - {file = "SQLAlchemy-2.0.32-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b8afd5b26570bf41c35c0121801479958b4446751a3971fb9a480c1afd85558e"}, - {file = "SQLAlchemy-2.0.32-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c750987fc876813f27b60d619b987b057eb4896b81117f73bb8d9918c14f1cad"}, - {file = "SQLAlchemy-2.0.32-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ada0102afff4890f651ed91120c1120065663506b760da4e7823913ebd3258be"}, - {file = "SQLAlchemy-2.0.32-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:78c03d0f8a5ab4f3034c0e8482cfcc415a3ec6193491cfa1c643ed707d476f16"}, - {file = "SQLAlchemy-2.0.32-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:3bd1cae7519283ff525e64645ebd7a3e0283f3c038f461ecc1c7b040a0c932a1"}, - {file = "SQLAlchemy-2.0.32-cp37-cp37m-win32.whl", hash = "sha256:01438ebcdc566d58c93af0171c74ec28efe6a29184b773e378a385e6215389da"}, - {file = "SQLAlchemy-2.0.32-cp37-cp37m-win_amd64.whl", hash = "sha256:4979dc80fbbc9d2ef569e71e0896990bc94df2b9fdbd878290bd129b65ab579c"}, - {file = "SQLAlchemy-2.0.32-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c742be912f57586ac43af38b3848f7688863a403dfb220193a882ea60e1ec3a"}, - {file = "SQLAlchemy-2.0.32-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:62e23d0ac103bcf1c5555b6c88c114089587bc64d048fef5bbdb58dfd26f96da"}, - {file = "SQLAlchemy-2.0.32-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:251f0d1108aab8ea7b9aadbd07fb47fb8e3a5838dde34aa95a3349876b5a1f1d"}, - {file = "SQLAlchemy-2.0.32-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ef18a84e5116340e38eca3e7f9eeaaef62738891422e7c2a0b80feab165905f"}, - {file = "SQLAlchemy-2.0.32-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:3eb6a97a1d39976f360b10ff208c73afb6a4de86dd2a6212ddf65c4a6a2347d5"}, - {file = "SQLAlchemy-2.0.32-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0c1c9b673d21477cec17ab10bc4decb1322843ba35b481585facd88203754fc5"}, - {file = "SQLAlchemy-2.0.32-cp38-cp38-win32.whl", hash = "sha256:c41a2b9ca80ee555decc605bd3c4520cc6fef9abde8fd66b1cf65126a6922d65"}, - {file = "SQLAlchemy-2.0.32-cp38-cp38-win_amd64.whl", hash = "sha256:8a37e4d265033c897892279e8adf505c8b6b4075f2b40d77afb31f7185cd6ecd"}, - {file = "SQLAlchemy-2.0.32-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:52fec964fba2ef46476312a03ec8c425956b05c20220a1a03703537824b5e8e1"}, - {file = "SQLAlchemy-2.0.32-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:328429aecaba2aee3d71e11f2477c14eec5990fb6d0e884107935f7fb6001632"}, - {file = 
"SQLAlchemy-2.0.32-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85a01b5599e790e76ac3fe3aa2f26e1feba56270023d6afd5550ed63c68552b3"}, - {file = "SQLAlchemy-2.0.32-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aaf04784797dcdf4c0aa952c8d234fa01974c4729db55c45732520ce12dd95b4"}, - {file = "SQLAlchemy-2.0.32-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4488120becf9b71b3ac718f4138269a6be99a42fe023ec457896ba4f80749525"}, - {file = "SQLAlchemy-2.0.32-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:14e09e083a5796d513918a66f3d6aedbc131e39e80875afe81d98a03312889e6"}, - {file = "SQLAlchemy-2.0.32-cp39-cp39-win32.whl", hash = "sha256:0d322cc9c9b2154ba7e82f7bf25ecc7c36fbe2d82e2933b3642fc095a52cfc78"}, - {file = "SQLAlchemy-2.0.32-cp39-cp39-win_amd64.whl", hash = "sha256:7dd8583df2f98dea28b5cd53a1beac963f4f9d087888d75f22fcc93a07cf8d84"}, - {file = "SQLAlchemy-2.0.32-py3-none-any.whl", hash = "sha256:e567a8793a692451f706b363ccf3c45e056b67d90ead58c3bc9471af5d212202"}, - {file = "SQLAlchemy-2.0.32.tar.gz", hash = "sha256:c1b88cc8b02b6a5f0efb0345a03672d4c897dc7d92585176f88c67346f565ea8"}, -] - -[package.dependencies] -greenlet = {version = "!=0.4.17", markers = "python_version < \"3.13\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} -typing-extensions = ">=4.6.0" - -[package.extras] -aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] -aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] -aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] -asyncio = ["greenlet (!=0.4.17)"] -asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] -mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] -mssql = ["pyodbc"] -mssql-pymssql = ["pymssql"] -mssql-pyodbc = ["pyodbc"] -mypy = ["mypy (>=0.910)"] -mysql = ["mysqlclient (>=1.4.0)"] -mysql-connector = ["mysql-connector-python"] -oracle = ["cx_oracle (>=8)"] -oracle-oracledb = ["oracledb (>=1.0.1)"] -postgresql = ["psycopg2 (>=2.7)"] -postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] -postgresql-pg8000 = ["pg8000 (>=1.29.1)"] -postgresql-psycopg = ["psycopg (>=3.0.7)"] -postgresql-psycopg2binary = ["psycopg2-binary"] -postgresql-psycopg2cffi = ["psycopg2cffi"] -postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] -pymysql = ["pymysql"] -sqlcipher = ["sqlcipher3_binary"] - -[[package]] -name = "sqlmodel" -version = "0.0.18" -description = "SQLModel, SQL databases in Python, designed for simplicity, compatibility, and robustness." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "sqlmodel-0.0.18-py3-none-any.whl", hash = "sha256:d70fdf8fe595e30a918660cf4537b9c5fc2fffdbfcba851a0135de73c3ebcbb7"}, - {file = "sqlmodel-0.0.18.tar.gz", hash = "sha256:2e520efe03810ef2c268a1004cfc5ef8f8a936312232f38d6c8e62c11af2cac3"}, -] - -[package.dependencies] -pydantic = ">=1.10.13,<3.0.0" -SQLAlchemy = ">=2.0.0,<2.1.0" - -[[package]] -name = "stack-data" -version = "0.6.3" -description = "Extract data from python stack frames and tracebacks for informative displays" -optional = false -python-versions = "*" -files = [ - {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"}, - {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"}, -] - -[package.dependencies] -asttokens = ">=2.1.0" -executing = ">=1.2.0" -pure-eval = "*" - -[package.extras] -tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] - -[[package]] -name = "starlette" -version = "0.37.2" -description = "The little ASGI library that shines." -optional = false -python-versions = ">=3.8" -files = [ - {file = "starlette-0.37.2-py3-none-any.whl", hash = "sha256:6fe59f29268538e5d0d182f2791a479a0c64638e6935d1c6989e63fb2699c6ee"}, - {file = "starlette-0.37.2.tar.gz", hash = "sha256:9af890290133b79fc3db55474ade20f6220a364a0402e0b556e7cd5e1e093823"}, -] - -[package.dependencies] -anyio = ">=3.4.0,<5" - -[package.extras] -full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7)", "pyyaml"] - -[[package]] -name = "storage3" -version = "0.7.7" -description = "Supabase Storage client for Python." -optional = false -python-versions = "<4.0,>=3.8" -files = [ - {file = "storage3-0.7.7-py3-none-any.whl", hash = "sha256:ed80a2546cd0b5c22e2c30ea71096db6c99268daf2958c603488e7d72efb8426"}, - {file = "storage3-0.7.7.tar.gz", hash = "sha256:9fba680cf761d139ad764f43f0e91c245d1ce1af2cc3afe716652f835f48f83e"}, -] - -[package.dependencies] -httpx = {version = ">=0.24,<0.28", extras = ["http2"]} -python-dateutil = ">=2.8.2,<3.0.0" -typing-extensions = ">=4.2.0,<5.0.0" - -[[package]] -name = "strenum" -version = "0.4.15" -description = "An Enum that inherits from str." 
-optional = false -python-versions = "*" -files = [ - {file = "StrEnum-0.4.15-py3-none-any.whl", hash = "sha256:a30cda4af7cc6b5bf52c8055bc4bf4b2b6b14a93b574626da33df53cf7740659"}, - {file = "StrEnum-0.4.15.tar.gz", hash = "sha256:878fb5ab705442070e4dd1929bb5e2249511c0bcf2b0eeacf3bcd80875c82eff"}, -] - -[package.extras] -docs = ["myst-parser[linkify]", "sphinx", "sphinx-rtd-theme"] -release = ["twine"] -test = ["pylint", "pytest", "pytest-black", "pytest-cov", "pytest-pylint"] - -[[package]] -name = "structlog" -version = "24.4.0" -description = "Structured Logging for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "structlog-24.4.0-py3-none-any.whl", hash = "sha256:597f61e80a91cc0749a9fd2a098ed76715a1c8a01f73e336b746504d1aad7610"}, - {file = "structlog-24.4.0.tar.gz", hash = "sha256:b27bfecede327a6d2da5fbc96bd859f114ecc398a6389d664f62085ee7ae6fc4"}, -] - -[package.extras] -dev = ["freezegun (>=0.2.8)", "mypy (>=1.4)", "pretend", "pytest (>=6.0)", "pytest-asyncio (>=0.17)", "rich", "simplejson", "twisted"] -docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-mermaid", "sphinxext-opengraph", "twisted"] -tests = ["freezegun (>=0.2.8)", "pretend", "pytest (>=6.0)", "pytest-asyncio (>=0.17)", "simplejson"] -typing = ["mypy (>=1.4)", "rich", "twisted"] - -[[package]] -name = "supabase" -version = "2.6.0" -description = "Supabase client for Python." -optional = false -python-versions = "<4.0,>=3.8" -files = [ - {file = "supabase-2.6.0-py3-none-any.whl", hash = "sha256:3981016022511e5e58d8fb15d31b24991138eb85fb4be59344db5e8f61b9f92b"}, - {file = "supabase-2.6.0.tar.gz", hash = "sha256:e8ade712b56919eb37724d4de90ee89f2d8f05393acb3e6470ab53ac95f9196e"}, -] - -[package.dependencies] -gotrue = ">=1.3,<3.0" -httpx = ">=0.24,<0.28" -postgrest = ">=0.14,<0.17.0" -realtime = ">=1.0.0,<2.0.0" -storage3 = ">=0.5.3,<0.8.0" -supafunc = ">=0.3.1,<0.6.0" - -[[package]] -name = "supafunc" -version = "0.5.1" -description = "Library for Supabase Functions" -optional = false -python-versions = "<4.0,>=3.8" -files = [ - {file = "supafunc-0.5.1-py3-none-any.whl", hash = "sha256:b05e99a2b41270211a3f90ec843c04c5f27a5618f2d2d2eb8e07f41eb962a910"}, - {file = "supafunc-0.5.1.tar.gz", hash = "sha256:1ae9dce6bd935939c561650e86abb676af9665ecf5d4ffc1c7ec3c4932c84334"}, -] - -[package.dependencies] -httpx = {version = ">=0.24,<0.28", extras = ["http2"]} - -[[package]] -name = "sympy" -version = "1.13.2" -description = "Computer algebra system (CAS) in Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "sympy-1.13.2-py3-none-any.whl", hash = "sha256:c51d75517712f1aed280d4ce58506a4a88d635d6b5dd48b39102a7ae1f3fcfe9"}, - {file = "sympy-1.13.2.tar.gz", hash = "sha256:401449d84d07be9d0c7a46a64bd54fe097667d5e7181bfe67ec777be9e01cb13"}, -] - -[package.dependencies] -mpmath = ">=1.1.0,<1.4" - -[package.extras] -dev = ["hypothesis (>=6.70.0)", "pytest (>=7.1.0)"] - -[[package]] -name = "tabulate" -version = "0.9.0" -description = "Pretty-print tabular data" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"}, - {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"}, -] - -[package.extras] -widechars = ["wcwidth"] - -[[package]] -name = "tenacity" -version = "8.3.0" -description = "Retry code until it succeeds" -optional = false -python-versions = ">=3.8" -files = 
[ - {file = "tenacity-8.3.0-py3-none-any.whl", hash = "sha256:3649f6443dbc0d9b01b9d8020a9c4ec7a1ff5f6f3c6c8a036ef371f573fe9185"}, - {file = "tenacity-8.3.0.tar.gz", hash = "sha256:953d4e6ad24357bceffbc9707bc74349aca9d245f68eb65419cf0c249a1949a2"}, -] - -[package.extras] -doc = ["reno", "sphinx"] -test = ["pytest", "tornado (>=4.5)", "typeguard"] - -[[package]] -name = "termcolor" -version = "2.4.0" -description = "ANSI color formatting for output in terminal" -optional = false -python-versions = ">=3.8" -files = [ - {file = "termcolor-2.4.0-py3-none-any.whl", hash = "sha256:9297c0df9c99445c2412e832e882a7884038a25617c60cea2ad69488d4040d63"}, - {file = "termcolor-2.4.0.tar.gz", hash = "sha256:aab9e56047c8ac41ed798fa36d892a37aca6b3e9159f3e0c24bc64a9b3ac7b7a"}, -] - -[package.extras] -tests = ["pytest", "pytest-cov"] - -[[package]] -name = "threadpoolctl" -version = "3.5.0" -description = "threadpoolctl" -optional = true -python-versions = ">=3.8" -files = [ - {file = "threadpoolctl-3.5.0-py3-none-any.whl", hash = "sha256:56c1e26c150397e58c4926da8eeee87533b1e32bef131bd4bf6a2f45f3185467"}, - {file = "threadpoolctl-3.5.0.tar.gz", hash = "sha256:082433502dd922bf738de0d8bcc4fdcbf0979ff44c42bd40f5af8a282f6fa107"}, -] - -[[package]] -name = "tiktoken" -version = "0.7.0" -description = "tiktoken is a fast BPE tokeniser for use with OpenAI's models" -optional = false -python-versions = ">=3.8" -files = [ - {file = "tiktoken-0.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:485f3cc6aba7c6b6ce388ba634fbba656d9ee27f766216f45146beb4ac18b25f"}, - {file = "tiktoken-0.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e54be9a2cd2f6d6ffa3517b064983fb695c9a9d8aa7d574d1ef3c3f931a99225"}, - {file = "tiktoken-0.7.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79383a6e2c654c6040e5f8506f3750db9ddd71b550c724e673203b4f6b4b4590"}, - {file = "tiktoken-0.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d4511c52caacf3c4981d1ae2df85908bd31853f33d30b345c8b6830763f769c"}, - {file = "tiktoken-0.7.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:13c94efacdd3de9aff824a788353aa5749c0faee1fbe3816df365ea450b82311"}, - {file = "tiktoken-0.7.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8e58c7eb29d2ab35a7a8929cbeea60216a4ccdf42efa8974d8e176d50c9a3df5"}, - {file = "tiktoken-0.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:21a20c3bd1dd3e55b91c1331bf25f4af522c525e771691adbc9a69336fa7f702"}, - {file = "tiktoken-0.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:10c7674f81e6e350fcbed7c09a65bca9356eaab27fb2dac65a1e440f2bcfe30f"}, - {file = "tiktoken-0.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:084cec29713bc9d4189a937f8a35dbdfa785bd1235a34c1124fe2323821ee93f"}, - {file = "tiktoken-0.7.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:811229fde1652fedcca7c6dfe76724d0908775b353556d8a71ed74d866f73f7b"}, - {file = "tiktoken-0.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86b6e7dc2e7ad1b3757e8a24597415bafcfb454cebf9a33a01f2e6ba2e663992"}, - {file = "tiktoken-0.7.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1063c5748be36344c7e18c7913c53e2cca116764c2080177e57d62c7ad4576d1"}, - {file = "tiktoken-0.7.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:20295d21419bfcca092644f7e2f2138ff947a6eb8cfc732c09cc7d76988d4a89"}, - {file = "tiktoken-0.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:959d993749b083acc57a317cbc643fb85c014d055b2119b739487288f4e5d1cb"}, - 
{file = "tiktoken-0.7.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:71c55d066388c55a9c00f61d2c456a6086673ab7dec22dd739c23f77195b1908"}, - {file = "tiktoken-0.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:09ed925bccaa8043e34c519fbb2f99110bd07c6fd67714793c21ac298e449410"}, - {file = "tiktoken-0.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03c6c40ff1db0f48a7b4d2dafeae73a5607aacb472fa11f125e7baf9dce73704"}, - {file = "tiktoken-0.7.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d20b5c6af30e621b4aca094ee61777a44118f52d886dbe4f02b70dfe05c15350"}, - {file = "tiktoken-0.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d427614c3e074004efa2f2411e16c826f9df427d3c70a54725cae860f09e4bf4"}, - {file = "tiktoken-0.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8c46d7af7b8c6987fac9b9f61041b452afe92eb087d29c9ce54951280f899a97"}, - {file = "tiktoken-0.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:0bc603c30b9e371e7c4c7935aba02af5994a909fc3c0fe66e7004070858d3f8f"}, - {file = "tiktoken-0.7.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2398fecd38c921bcd68418675a6d155fad5f5e14c2e92fcf5fe566fa5485a858"}, - {file = "tiktoken-0.7.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8f5f6afb52fb8a7ea1c811e435e4188f2bef81b5e0f7a8635cc79b0eef0193d6"}, - {file = "tiktoken-0.7.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:861f9ee616766d736be4147abac500732b505bf7013cfaf019b85892637f235e"}, - {file = "tiktoken-0.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54031f95c6939f6b78122c0aa03a93273a96365103793a22e1793ee86da31685"}, - {file = "tiktoken-0.7.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:fffdcb319b614cf14f04d02a52e26b1d1ae14a570f90e9b55461a72672f7b13d"}, - {file = "tiktoken-0.7.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c72baaeaefa03ff9ba9688624143c858d1f6b755bb85d456d59e529e17234769"}, - {file = "tiktoken-0.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:131b8aeb043a8f112aad9f46011dced25d62629091e51d9dc1adbf4a1cc6aa98"}, - {file = "tiktoken-0.7.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cabc6dc77460df44ec5b879e68692c63551ae4fae7460dd4ff17181df75f1db7"}, - {file = "tiktoken-0.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8d57f29171255f74c0aeacd0651e29aa47dff6f070cb9f35ebc14c82278f3b25"}, - {file = "tiktoken-0.7.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ee92776fdbb3efa02a83f968c19d4997a55c8e9ce7be821ceee04a1d1ee149c"}, - {file = "tiktoken-0.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e215292e99cb41fbc96988ef62ea63bb0ce1e15f2c147a61acc319f8b4cbe5bf"}, - {file = "tiktoken-0.7.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8a81bac94769cab437dd3ab0b8a4bc4e0f9cf6835bcaa88de71f39af1791727a"}, - {file = "tiktoken-0.7.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:d6d73ea93e91d5ca771256dfc9d1d29f5a554b83821a1dc0891987636e0ae226"}, - {file = "tiktoken-0.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:2bcb28ddf79ffa424f171dfeef9a4daff61a94c631ca6813f43967cb263b83b9"}, - {file = "tiktoken-0.7.0.tar.gz", hash = "sha256:1077266e949c24e0291f6c350433c6f0971365ece2b173a23bc3b9f9defef6b6"}, -] - -[package.dependencies] -regex = ">=2022.1.18" -requests = ">=2.26.0" - -[package.extras] -blobfile = ["blobfile (>=2)"] - -[[package]] -name = "tokenizers" -version = "0.19.1" -description = "" -optional = false -python-versions = ">=3.7" -files = [ - {file = 
"tokenizers-0.19.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:952078130b3d101e05ecfc7fc3640282d74ed26bcf691400f872563fca15ac97"}, - {file = "tokenizers-0.19.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:82c8b8063de6c0468f08e82c4e198763e7b97aabfe573fd4cf7b33930ca4df77"}, - {file = "tokenizers-0.19.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f03727225feaf340ceeb7e00604825addef622d551cbd46b7b775ac834c1e1c4"}, - {file = "tokenizers-0.19.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:453e4422efdfc9c6b6bf2eae00d5e323f263fff62b29a8c9cd526c5003f3f642"}, - {file = "tokenizers-0.19.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:02e81bf089ebf0e7f4df34fa0207519f07e66d8491d963618252f2e0729e0b46"}, - {file = "tokenizers-0.19.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b07c538ba956843833fee1190cf769c60dc62e1cf934ed50d77d5502194d63b1"}, - {file = "tokenizers-0.19.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e28cab1582e0eec38b1f38c1c1fb2e56bce5dc180acb1724574fc5f47da2a4fe"}, - {file = "tokenizers-0.19.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b01afb7193d47439f091cd8f070a1ced347ad0f9144952a30a41836902fe09e"}, - {file = "tokenizers-0.19.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7fb297edec6c6841ab2e4e8f357209519188e4a59b557ea4fafcf4691d1b4c98"}, - {file = "tokenizers-0.19.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2e8a3dd055e515df7054378dc9d6fa8c8c34e1f32777fb9a01fea81496b3f9d3"}, - {file = "tokenizers-0.19.1-cp310-none-win32.whl", hash = "sha256:7ff898780a155ea053f5d934925f3902be2ed1f4d916461e1a93019cc7250837"}, - {file = "tokenizers-0.19.1-cp310-none-win_amd64.whl", hash = "sha256:bea6f9947e9419c2fda21ae6c32871e3d398cba549b93f4a65a2d369662d9403"}, - {file = "tokenizers-0.19.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:5c88d1481f1882c2e53e6bb06491e474e420d9ac7bdff172610c4f9ad3898059"}, - {file = "tokenizers-0.19.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ddf672ed719b4ed82b51499100f5417d7d9f6fb05a65e232249268f35de5ed14"}, - {file = "tokenizers-0.19.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:dadc509cc8a9fe460bd274c0e16ac4184d0958117cf026e0ea8b32b438171594"}, - {file = "tokenizers-0.19.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfedf31824ca4915b511b03441784ff640378191918264268e6923da48104acc"}, - {file = "tokenizers-0.19.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac11016d0a04aa6487b1513a3a36e7bee7eec0e5d30057c9c0408067345c48d2"}, - {file = "tokenizers-0.19.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:76951121890fea8330d3a0df9a954b3f2a37e3ec20e5b0530e9a0044ca2e11fe"}, - {file = "tokenizers-0.19.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b342d2ce8fc8d00f376af068e3274e2e8649562e3bc6ae4a67784ded6b99428d"}, - {file = "tokenizers-0.19.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d16ff18907f4909dca9b076b9c2d899114dd6abceeb074eca0c93e2353f943aa"}, - {file = "tokenizers-0.19.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:706a37cc5332f85f26efbe2bdc9ef8a9b372b77e4645331a405073e4b3a8c1c6"}, - {file = "tokenizers-0.19.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:16baac68651701364b0289979ecec728546133e8e8fe38f66fe48ad07996b88b"}, - {file = 
"tokenizers-0.19.1-cp311-none-win32.whl", hash = "sha256:9ed240c56b4403e22b9584ee37d87b8bfa14865134e3e1c3fb4b2c42fafd3256"}, - {file = "tokenizers-0.19.1-cp311-none-win_amd64.whl", hash = "sha256:ad57d59341710b94a7d9dbea13f5c1e7d76fd8d9bcd944a7a6ab0b0da6e0cc66"}, - {file = "tokenizers-0.19.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:621d670e1b1c281a1c9698ed89451395d318802ff88d1fc1accff0867a06f153"}, - {file = "tokenizers-0.19.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d924204a3dbe50b75630bd16f821ebda6a5f729928df30f582fb5aade90c818a"}, - {file = "tokenizers-0.19.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:4f3fefdc0446b1a1e6d81cd4c07088ac015665d2e812f6dbba4a06267d1a2c95"}, - {file = "tokenizers-0.19.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9620b78e0b2d52ef07b0d428323fb34e8ea1219c5eac98c2596311f20f1f9266"}, - {file = "tokenizers-0.19.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04ce49e82d100594715ac1b2ce87d1a36e61891a91de774755f743babcd0dd52"}, - {file = "tokenizers-0.19.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5c2ff13d157afe413bf7e25789879dd463e5a4abfb529a2d8f8473d8042e28f"}, - {file = "tokenizers-0.19.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3174c76efd9d08f836bfccaca7cfec3f4d1c0a4cf3acbc7236ad577cc423c840"}, - {file = "tokenizers-0.19.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c9d5b6c0e7a1e979bec10ff960fae925e947aab95619a6fdb4c1d8ff3708ce3"}, - {file = "tokenizers-0.19.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a179856d1caee06577220ebcfa332af046d576fb73454b8f4d4b0ba8324423ea"}, - {file = "tokenizers-0.19.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:952b80dac1a6492170f8c2429bd11fcaa14377e097d12a1dbe0ef2fb2241e16c"}, - {file = "tokenizers-0.19.1-cp312-none-win32.whl", hash = "sha256:01d62812454c188306755c94755465505836fd616f75067abcae529c35edeb57"}, - {file = "tokenizers-0.19.1-cp312-none-win_amd64.whl", hash = "sha256:b70bfbe3a82d3e3fb2a5e9b22a39f8d1740c96c68b6ace0086b39074f08ab89a"}, - {file = "tokenizers-0.19.1-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:bb9dfe7dae85bc6119d705a76dc068c062b8b575abe3595e3c6276480e67e3f1"}, - {file = "tokenizers-0.19.1-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:1f0360cbea28ea99944ac089c00de7b2e3e1c58f479fb8613b6d8d511ce98267"}, - {file = "tokenizers-0.19.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:71e3ec71f0e78780851fef28c2a9babe20270404c921b756d7c532d280349214"}, - {file = "tokenizers-0.19.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b82931fa619dbad979c0ee8e54dd5278acc418209cc897e42fac041f5366d626"}, - {file = "tokenizers-0.19.1-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e8ff5b90eabdcdaa19af697885f70fe0b714ce16709cf43d4952f1f85299e73a"}, - {file = "tokenizers-0.19.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e742d76ad84acbdb1a8e4694f915fe59ff6edc381c97d6dfdd054954e3478ad4"}, - {file = "tokenizers-0.19.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d8c5d59d7b59885eab559d5bc082b2985555a54cda04dda4c65528d90ad252ad"}, - {file = "tokenizers-0.19.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b2da5c32ed869bebd990c9420df49813709e953674c0722ff471a116d97b22d"}, - {file = 
"tokenizers-0.19.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:638e43936cc8b2cbb9f9d8dde0fe5e7e30766a3318d2342999ae27f68fdc9bd6"}, - {file = "tokenizers-0.19.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:78e769eb3b2c79687d9cb0f89ef77223e8e279b75c0a968e637ca7043a84463f"}, - {file = "tokenizers-0.19.1-cp37-none-win32.whl", hash = "sha256:72791f9bb1ca78e3ae525d4782e85272c63faaef9940d92142aa3eb79f3407a3"}, - {file = "tokenizers-0.19.1-cp37-none-win_amd64.whl", hash = "sha256:f3bbb7a0c5fcb692950b041ae11067ac54826204318922da754f908d95619fbc"}, - {file = "tokenizers-0.19.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:07f9295349bbbcedae8cefdbcfa7f686aa420be8aca5d4f7d1ae6016c128c0c5"}, - {file = "tokenizers-0.19.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:10a707cc6c4b6b183ec5dbfc5c34f3064e18cf62b4a938cb41699e33a99e03c1"}, - {file = "tokenizers-0.19.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6309271f57b397aa0aff0cbbe632ca9d70430839ca3178bf0f06f825924eca22"}, - {file = "tokenizers-0.19.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ad23d37d68cf00d54af184586d79b84075ada495e7c5c0f601f051b162112dc"}, - {file = "tokenizers-0.19.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:427c4f0f3df9109314d4f75b8d1f65d9477033e67ffaec4bca53293d3aca286d"}, - {file = "tokenizers-0.19.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e83a31c9cf181a0a3ef0abad2b5f6b43399faf5da7e696196ddd110d332519ee"}, - {file = "tokenizers-0.19.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c27b99889bd58b7e301468c0838c5ed75e60c66df0d4db80c08f43462f82e0d3"}, - {file = "tokenizers-0.19.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bac0b0eb952412b0b196ca7a40e7dce4ed6f6926489313414010f2e6b9ec2adf"}, - {file = "tokenizers-0.19.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8a6298bde623725ca31c9035a04bf2ef63208d266acd2bed8c2cb7d2b7d53ce6"}, - {file = "tokenizers-0.19.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:08a44864e42fa6d7d76d7be4bec62c9982f6f6248b4aa42f7302aa01e0abfd26"}, - {file = "tokenizers-0.19.1-cp38-none-win32.whl", hash = "sha256:1de5bc8652252d9357a666e609cb1453d4f8e160eb1fb2830ee369dd658e8975"}, - {file = "tokenizers-0.19.1-cp38-none-win_amd64.whl", hash = "sha256:0bcce02bf1ad9882345b34d5bd25ed4949a480cf0e656bbd468f4d8986f7a3f1"}, - {file = "tokenizers-0.19.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:0b9394bd204842a2a1fd37fe29935353742be4a3460b6ccbaefa93f58a8df43d"}, - {file = "tokenizers-0.19.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4692ab92f91b87769d950ca14dbb61f8a9ef36a62f94bad6c82cc84a51f76f6a"}, - {file = "tokenizers-0.19.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6258c2ef6f06259f70a682491c78561d492e885adeaf9f64f5389f78aa49a051"}, - {file = "tokenizers-0.19.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c85cf76561fbd01e0d9ea2d1cbe711a65400092bc52b5242b16cfd22e51f0c58"}, - {file = "tokenizers-0.19.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:670b802d4d82bbbb832ddb0d41df7015b3e549714c0e77f9bed3e74d42400fbe"}, - {file = "tokenizers-0.19.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:85aa3ab4b03d5e99fdd31660872249df5e855334b6c333e0bc13032ff4469c4a"}, - {file = "tokenizers-0.19.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:cbf001afbbed111a79ca47d75941e9e5361297a87d186cbfc11ed45e30b5daba"}, - {file = "tokenizers-0.19.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4c89aa46c269e4e70c4d4f9d6bc644fcc39bb409cb2a81227923404dd6f5227"}, - {file = "tokenizers-0.19.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:39c1ec76ea1027438fafe16ecb0fb84795e62e9d643444c1090179e63808c69d"}, - {file = "tokenizers-0.19.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c2a0d47a89b48d7daa241e004e71fb5a50533718897a4cd6235cb846d511a478"}, - {file = "tokenizers-0.19.1-cp39-none-win32.whl", hash = "sha256:61b7fe8886f2e104d4caf9218b157b106207e0f2a4905c9c7ac98890688aabeb"}, - {file = "tokenizers-0.19.1-cp39-none-win_amd64.whl", hash = "sha256:f97660f6c43efd3e0bfd3f2e3e5615bf215680bad6ee3d469df6454b8c6e8256"}, - {file = "tokenizers-0.19.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3b11853f17b54c2fe47742c56d8a33bf49ce31caf531e87ac0d7d13d327c9334"}, - {file = "tokenizers-0.19.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d26194ef6c13302f446d39972aaa36a1dda6450bc8949f5eb4c27f51191375bd"}, - {file = "tokenizers-0.19.1-pp310-pypy310_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:e8d1ed93beda54bbd6131a2cb363a576eac746d5c26ba5b7556bc6f964425594"}, - {file = "tokenizers-0.19.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca407133536f19bdec44b3da117ef0d12e43f6d4b56ac4c765f37eca501c7bda"}, - {file = "tokenizers-0.19.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce05fde79d2bc2e46ac08aacbc142bead21614d937aac950be88dc79f9db9022"}, - {file = "tokenizers-0.19.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:35583cd46d16f07c054efd18b5d46af4a2f070a2dd0a47914e66f3ff5efb2b1e"}, - {file = "tokenizers-0.19.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:43350270bfc16b06ad3f6f07eab21f089adb835544417afda0f83256a8bf8b75"}, - {file = "tokenizers-0.19.1-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b4399b59d1af5645bcee2072a463318114c39b8547437a7c2d6a186a1b5a0e2d"}, - {file = "tokenizers-0.19.1-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6852c5b2a853b8b0ddc5993cd4f33bfffdca4fcc5d52f89dd4b8eada99379285"}, - {file = "tokenizers-0.19.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bcd266ae85c3d39df2f7e7d0e07f6c41a55e9a3123bb11f854412952deacd828"}, - {file = "tokenizers-0.19.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ecb2651956eea2aa0a2d099434134b1b68f1c31f9a5084d6d53f08ed43d45ff2"}, - {file = "tokenizers-0.19.1-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:b279ab506ec4445166ac476fb4d3cc383accde1ea152998509a94d82547c8e2a"}, - {file = "tokenizers-0.19.1-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:89183e55fb86e61d848ff83753f64cded119f5d6e1f553d14ffee3700d0a4a49"}, - {file = "tokenizers-0.19.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b2edbc75744235eea94d595a8b70fe279dd42f3296f76d5a86dde1d46e35f574"}, - {file = "tokenizers-0.19.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:0e64bfde9a723274e9a71630c3e9494ed7b4c0f76a1faacf7fe294cd26f7ae7c"}, - {file = "tokenizers-0.19.1-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0b5ca92bfa717759c052e345770792d02d1f43b06f9e790ca0a1db62838816f3"}, - {file = 
"tokenizers-0.19.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f8a20266e695ec9d7a946a019c1d5ca4eddb6613d4f466888eee04f16eedb85"}, - {file = "tokenizers-0.19.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63c38f45d8f2a2ec0f3a20073cccb335b9f99f73b3c69483cd52ebc75369d8a1"}, - {file = "tokenizers-0.19.1-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:dd26e3afe8a7b61422df3176e06664503d3f5973b94f45d5c45987e1cb711876"}, - {file = "tokenizers-0.19.1-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:eddd5783a4a6309ce23432353cdb36220e25cbb779bfa9122320666508b44b88"}, - {file = "tokenizers-0.19.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:56ae39d4036b753994476a1b935584071093b55c7a72e3b8288e68c313ca26e7"}, - {file = "tokenizers-0.19.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:f9939ca7e58c2758c01b40324a59c034ce0cebad18e0d4563a9b1beab3018243"}, - {file = "tokenizers-0.19.1-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6c330c0eb815d212893c67a032e9dc1b38a803eccb32f3e8172c19cc69fbb439"}, - {file = "tokenizers-0.19.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec11802450a2487cdf0e634b750a04cbdc1c4d066b97d94ce7dd2cb51ebb325b"}, - {file = "tokenizers-0.19.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2b718f316b596f36e1dae097a7d5b91fc5b85e90bf08b01ff139bd8953b25af"}, - {file = "tokenizers-0.19.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:ed69af290c2b65169f0ba9034d1dc39a5db9459b32f1dd8b5f3f32a3fcf06eab"}, - {file = "tokenizers-0.19.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f8a9c828277133af13f3859d1b6bf1c3cb6e9e1637df0e45312e6b7c2e622b1f"}, - {file = "tokenizers-0.19.1.tar.gz", hash = "sha256:ee59e6680ed0fdbe6b724cf38bd70400a0c1dd623b07ac729087270caeac88e3"}, -] - -[package.dependencies] -huggingface-hub = ">=0.16.4,<1.0" - -[package.extras] -dev = ["tokenizers[testing]"] -docs = ["setuptools-rust", "sphinx", "sphinx-rtd-theme"] -testing = ["black (==22.3)", "datasets", "numpy", "pytest", "requests", "ruff"] - -[[package]] -name = "toml" -version = "0.10.2" -description = "Python Library for Tom's Obvious, Minimal Language" -optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, - {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, -] - -[[package]] -name = "tomli" -version = "2.0.1" -description = "A lil' TOML parser" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, -] - -[[package]] -name = "torch" -version = "2.4.0" -description = "Tensors and Dynamic neural networks in Python with strong GPU acceleration" -optional = true -python-versions = ">=3.8.0" -files = [ - {file = "torch-2.4.0-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:4ed94583e244af51d6a8d28701ca5a9e02d1219e782f5a01dd401f90af17d8ac"}, - {file = "torch-2.4.0-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:c4ca297b7bd58b506bfd6e78ffd14eb97c0e7797dcd7965df62f50bb575d8954"}, - {file = 
"torch-2.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:2497cbc7b3c951d69b276ca51fe01c2865db67040ac67f5fc20b03e41d16ea4a"}, - {file = "torch-2.4.0-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:685418ab93730efbee71528821ff54005596970dd497bf03c89204fb7e3f71de"}, - {file = "torch-2.4.0-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:e743adadd8c8152bb8373543964551a7cb7cc20ba898dc8f9c0cdbe47c283de0"}, - {file = "torch-2.4.0-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:7334325c0292cbd5c2eac085f449bf57d3690932eac37027e193ba775703c9e6"}, - {file = "torch-2.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:97730014da4c57ffacb3c09298c6ce05400606e890bd7a05008d13dd086e46b1"}, - {file = "torch-2.4.0-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:f169b4ea6dc93b3a33319611fcc47dc1406e4dd539844dcbd2dec4c1b96e166d"}, - {file = "torch-2.4.0-cp312-cp312-manylinux1_x86_64.whl", hash = "sha256:997084a0f9784d2a89095a6dc67c7925e21bf25dea0b3d069b41195016ccfcbb"}, - {file = "torch-2.4.0-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:bc3988e8b36d1e8b998d143255d9408d8c75da4ab6dd0dcfd23b623dfb0f0f57"}, - {file = "torch-2.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:3374128bbf7e62cdaed6c237bfd39809fbcfaa576bee91e904706840c3f2195c"}, - {file = "torch-2.4.0-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:91aaf00bfe1ffa44dc5b52809d9a95129fca10212eca3ac26420eb11727c6288"}, - {file = "torch-2.4.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:cc30457ea5489c62747d3306438af00c606b509d78822a88f804202ba63111ed"}, - {file = "torch-2.4.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:a046491aaf96d1215e65e1fa85911ef2ded6d49ea34c8df4d0638879f2402eef"}, - {file = "torch-2.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:688eec9240f3ce775f22e1e1a5ab9894f3d5fe60f3f586deb7dbd23a46a83916"}, - {file = "torch-2.4.0-cp38-none-macosx_11_0_arm64.whl", hash = "sha256:3af4de2a618fb065e78404c4ba27a818a7b7957eaeff28c6c66ce7fb504b68b8"}, - {file = "torch-2.4.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:618808d3f610d5f180e47a697d4ec90b810953bb1e020f424b2ac7fb0884b545"}, - {file = "torch-2.4.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:ed765d232d23566052ba83632ec73a4fccde00b4c94ad45d63b471b09d63b7a7"}, - {file = "torch-2.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:a2feb98ac470109472fb10dfef38622a7ee08482a16c357863ebc7bc7db7c8f7"}, - {file = "torch-2.4.0-cp39-none-macosx_11_0_arm64.whl", hash = "sha256:8940fc8b97a4c61fdb5d46a368f21f4a3a562a17879e932eb51a5ec62310cb31"}, -] - -[package.dependencies] -filelock = "*" -fsspec = "*" -jinja2 = "*" -networkx = "*" -nvidia-cublas-cu12 = {version = "12.1.3.1", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cuda-cupti-cu12 = {version = "12.1.105", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cuda-nvrtc-cu12 = {version = "12.1.105", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cuda-runtime-cu12 = {version = "12.1.105", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cudnn-cu12 = {version = "9.1.0.70", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cufft-cu12 = {version = "11.0.2.54", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-curand-cu12 = {version = "10.3.2.106", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cusolver-cu12 = {version = "11.4.5.107", markers = "platform_system == \"Linux\" and 
platform_machine == \"x86_64\""} -nvidia-cusparse-cu12 = {version = "12.1.0.106", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-nccl-cu12 = {version = "2.20.5", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-nvtx-cu12 = {version = "12.1.105", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -sympy = "*" -triton = {version = "3.0.0", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and python_version < \"3.13\""} -typing-extensions = ">=4.8.0" - -[package.extras] -opt-einsum = ["opt-einsum (>=3.3)"] -optree = ["optree (>=0.11.0)"] - -[[package]] -name = "tornado" -version = "6.4.1" -description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." -optional = false -python-versions = ">=3.8" -files = [ - {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:163b0aafc8e23d8cdc3c9dfb24c5368af84a81e3364745ccb4427669bf84aec8"}, - {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6d5ce3437e18a2b66fbadb183c1d3364fb03f2be71299e7d10dbeeb69f4b2a14"}, - {file = "tornado-6.4.1-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2e20b9113cd7293f164dc46fffb13535266e713cdb87bd2d15ddb336e96cfc4"}, - {file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ae50a504a740365267b2a8d1a90c9fbc86b780a39170feca9bcc1787ff80842"}, - {file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:613bf4ddf5c7a95509218b149b555621497a6cc0d46ac341b30bd9ec19eac7f3"}, - {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:25486eb223babe3eed4b8aecbac33b37e3dd6d776bc730ca14e1bf93888b979f"}, - {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:454db8a7ecfcf2ff6042dde58404164d969b6f5d58b926da15e6b23817950fc4"}, - {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a02a08cc7a9314b006f653ce40483b9b3c12cda222d6a46d4ac63bb6c9057698"}, - {file = "tornado-6.4.1-cp38-abi3-win32.whl", hash = "sha256:d9a566c40b89757c9aa8e6f032bcdb8ca8795d7c1a9762910c722b1635c9de4d"}, - {file = "tornado-6.4.1-cp38-abi3-win_amd64.whl", hash = "sha256:b24b8982ed444378d7f21d563f4180a2de31ced9d8d84443907a0a64da2072e7"}, - {file = "tornado-6.4.1.tar.gz", hash = "sha256:92d3ab53183d8c50f8204a51e6f91d18a15d5ef261e84d452800d4ff6fc504e9"}, -] - -[[package]] -name = "tqdm" -version = "4.66.5" -description = "Fast, Extensible Progress Meter" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tqdm-4.66.5-py3-none-any.whl", hash = "sha256:90279a3770753eafc9194a0364852159802111925aa30eb3f9d85b0e805ac7cd"}, - {file = "tqdm-4.66.5.tar.gz", hash = "sha256:e1020aef2e5096702d8a025ac7d16b1577279c9d63f8375b63083e9a5f0fcbad"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[package.extras] -dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] -notebook = ["ipywidgets (>=6)"] -slack = ["slack-sdk"] -telegram = ["requests"] - -[[package]] -name = "traitlets" -version = "5.14.3" -description = "Traitlets Python configuration system" -optional = false -python-versions = ">=3.8" -files = [ - {file = "traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f"}, - {file = 
"traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7"}, -] - -[package.extras] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] -test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<8.2)", "pytest-mock", "pytest-mypy-testing"] - -[[package]] -name = "transformers" -version = "4.40.2" -description = "State-of-the-art Machine Learning for JAX, PyTorch and TensorFlow" -optional = true -python-versions = ">=3.8.0" -files = [ - {file = "transformers-4.40.2-py3-none-any.whl", hash = "sha256:71cb94301ec211a2e1d4b8c8d18dcfaa902dfa00a089dceca167a8aa265d6f2d"}, - {file = "transformers-4.40.2.tar.gz", hash = "sha256:657b6054a2097671398d976ad46e60836e7e15f9ea9551631a96e33cb9240649"}, -] - -[package.dependencies] -filelock = "*" -huggingface-hub = ">=0.19.3,<1.0" -numpy = ">=1.17" -packaging = ">=20.0" -pyyaml = ">=5.1" -regex = "!=2019.12.17" -requests = "*" -safetensors = ">=0.4.1" -tokenizers = ">=0.19,<0.20" -tqdm = ">=4.27" - -[package.extras] -accelerate = ["accelerate (>=0.21.0)"] -agents = ["Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "datasets (!=2.5.0)", "diffusers", "opencv-python", "sentencepiece (>=0.1.91,!=0.1.92)", "torch"] -all = ["Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "av (==9.2.0)", "codecarbon (==1.2.0)", "decord (==0.6.0)", "flax (>=0.4.1,<=0.7.0)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "phonemizer", "protobuf", "pyctcdecode (>=0.4.0)", "ray[tune] (>=2.7.0)", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.6,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timm", "tokenizers (>=0.19,<0.20)", "torch", "torchaudio", "torchvision"] -audio = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] -codecarbon = ["codecarbon (==1.2.0)"] -deepspeed = ["accelerate (>=0.21.0)", "deepspeed (>=0.9.3)"] -deepspeed-testing = ["GitPython (<3.1.19)", "accelerate (>=0.21.0)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "deepspeed (>=0.9.3)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder (>=0.3.0)", "nltk", "optuna", "parameterized", "protobuf", "psutil", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.1.5)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "timeout-decorator"] -dev = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "av (==9.2.0)", "beautifulsoup4", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "decord (==0.6.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "flax (>=0.4.1,<=0.7.0)", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-timeout", "pytest-xdist", "ray[tune] (>=2.7.0)", "rhoknp (>=1.1.0,<1.3.1)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.1.5)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", 
"tensorboard", "tensorflow (>=2.6,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timeout-decorator", "timm", "tokenizers (>=0.19,<0.20)", "torch", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] -dev-tensorflow = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "isort (>=5.5.4)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.1.5)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "tensorflow (>=2.6,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timeout-decorator", "tokenizers (>=0.19,<0.20)", "urllib3 (<2.0.0)"] -dev-torch = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "beautifulsoup4", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "kenlm", "librosa", "nltk", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "optuna", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-timeout", "pytest-xdist", "ray[tune] (>=2.7.0)", "rhoknp (>=1.1.0,<1.3.1)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.1.5)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorboard", "timeout-decorator", "timm", "tokenizers (>=0.19,<0.20)", "torch", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] -docs = ["Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "av (==9.2.0)", "codecarbon (==1.2.0)", "decord (==0.6.0)", "flax (>=0.4.1,<=0.7.0)", "hf-doc-builder", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "phonemizer", "protobuf", "pyctcdecode (>=0.4.0)", "ray[tune] (>=2.7.0)", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.6,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timm", "tokenizers (>=0.19,<0.20)", "torch", "torchaudio", "torchvision"] -docs-specific = ["hf-doc-builder"] -flax = ["flax (>=0.4.1,<=0.7.0)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "optax (>=0.0.8,<=0.1.4)"] -flax-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] -ftfy = ["ftfy"] -integrations = ["optuna", "ray[tune] (>=2.7.0)", "sigopt"] -ja = ["fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "rhoknp (>=1.1.0,<1.3.1)", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)"] -modelcreation = ["cookiecutter (==1.7.3)"] -natten = ["natten (>=0.14.6,<0.15.0)"] -onnx = ["onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "tf2onnx"] -onnxruntime = ["onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)"] -optuna = ["optuna"] -quality = ["GitPython (<3.1.19)", "datasets 
(!=2.5.0)", "hf-doc-builder (>=0.3.0)", "isort (>=5.5.4)", "ruff (==0.1.5)", "urllib3 (<2.0.0)"] -ray = ["ray[tune] (>=2.7.0)"] -retrieval = ["datasets (!=2.5.0)", "faiss-cpu"] -sagemaker = ["sagemaker (>=2.31.0)"] -sentencepiece = ["protobuf", "sentencepiece (>=0.1.91,!=0.1.92)"] -serving = ["fastapi", "pydantic", "starlette", "uvicorn"] -sigopt = ["sigopt"] -sklearn = ["scikit-learn"] -speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"] -testing = ["GitPython (<3.1.19)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder (>=0.3.0)", "nltk", "parameterized", "protobuf", "psutil", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.1.5)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "timeout-decorator"] -tf = ["keras-nlp (>=0.3.1)", "onnxconverter-common", "tensorflow (>=2.6,<2.16)", "tensorflow-text (<2.16)", "tf2onnx"] -tf-cpu = ["keras-nlp (>=0.3.1)", "onnxconverter-common", "tensorflow-cpu (>=2.6,<2.16)", "tensorflow-text (<2.16)", "tf2onnx"] -tf-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] -timm = ["timm"] -tokenizers = ["tokenizers (>=0.19,<0.20)"] -torch = ["accelerate (>=0.21.0)", "torch"] -torch-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"] -torch-vision = ["Pillow (>=10.0.1,<=15.0)", "torchvision"] -torchhub = ["filelock", "huggingface-hub (>=0.19.3,<1.0)", "importlib-metadata", "numpy (>=1.17)", "packaging (>=20.0)", "protobuf", "regex (!=2019.12.17)", "requests", "sentencepiece (>=0.1.91,!=0.1.92)", "tokenizers (>=0.19,<0.20)", "torch", "tqdm (>=4.27)"] -video = ["av (==9.2.0)", "decord (==0.6.0)"] -vision = ["Pillow (>=10.0.1,<=15.0)"] - -[[package]] -name = "trio" -version = "0.26.2" -description = "A friendly Python library for async concurrency and I/O" -optional = false -python-versions = ">=3.8" -files = [ - {file = "trio-0.26.2-py3-none-any.whl", hash = "sha256:c5237e8133eb0a1d72f09a971a55c28ebe69e351c783fc64bc37db8db8bbe1d0"}, - {file = "trio-0.26.2.tar.gz", hash = "sha256:0346c3852c15e5c7d40ea15972c4805689ef2cb8b5206f794c9c19450119f3a4"}, -] - -[package.dependencies] -attrs = ">=23.2.0" -cffi = {version = ">=1.14", markers = "os_name == \"nt\" and implementation_name != \"pypy\""} -exceptiongroup = {version = "*", markers = "python_version < \"3.11\""} -idna = "*" -outcome = "*" -sniffio = ">=1.3.0" -sortedcontainers = "*" - -[[package]] -name = "trio-websocket" -version = "0.11.1" -description = "WebSocket library for Trio" -optional = false -python-versions = ">=3.7" -files = [ - {file = "trio-websocket-0.11.1.tar.gz", hash = "sha256:18c11793647703c158b1f6e62de638acada927344d534e3c7628eedcb746839f"}, - {file = "trio_websocket-0.11.1-py3-none-any.whl", hash = "sha256:520d046b0d030cf970b8b2b2e00c4c2245b3807853ecd44214acd33d74581638"}, -] - -[package.dependencies] -exceptiongroup = {version = "*", markers = "python_version < \"3.11\""} -trio = ">=0.11" -wsproto = ">=0.14" - -[[package]] -name = "triton" -version = "3.0.0" -description = "A language and compiler for custom Deep Learning operations" -optional = true -python-versions = "*" -files = [ - {file = "triton-3.0.0-1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e1efef76935b2febc365bfadf74bcb65a6f959a9872e5bddf44cc9e0adce1e1a"}, - {file = 
"triton-3.0.0-1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5ce8520437c602fb633f1324cc3871c47bee3b67acf9756c1a66309b60e3216c"}, - {file = "triton-3.0.0-1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:34e509deb77f1c067d8640725ef00c5cbfcb2052a1a3cb6a6d343841f92624eb"}, - {file = "triton-3.0.0-1-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bcbf3b1c48af6a28011a5c40a5b3b9b5330530c3827716b5fbf6d7adcc1e53e9"}, - {file = "triton-3.0.0-1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6e5727202f7078c56f91ff13ad0c1abab14a0e7f2c87e91b12b6f64f3e8ae609"}, - {file = "triton-3.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39b052da883351fdf6be3d93cedae6db3b8e3988d3b09ed221bccecfa9612230"}, - {file = "triton-3.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd34f19a8582af96e6291d4afce25dac08cb2a5d218c599163761e8e0827208e"}, - {file = "triton-3.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d5e10de8c011adeb7c878c6ce0dd6073b14367749e34467f1cff2bde1b78253"}, - {file = "triton-3.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8903767951bf86ec960b4fe4e21bc970055afc65e9d57e916d79ae3c93665e3"}, - {file = "triton-3.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41004fb1ae9a53fcb3e970745feb87f0e3c94c6ce1ba86e95fa3b8537894bef7"}, -] - -[package.dependencies] -filelock = "*" - -[package.extras] -build = ["cmake (>=3.20)", "lit"] -tests = ["autopep8", "flake8", "isort", "llnl-hatchet", "numpy", "pytest", "scipy (>=1.7.1)"] -tutorials = ["matplotlib", "pandas", "tabulate"] - -[[package]] -name = "typer" -version = "0.12.3" -description = "Typer, build great CLIs. Easy to code. Based on Python type hints." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "typer-0.12.3-py3-none-any.whl", hash = "sha256:070d7ca53f785acbccba8e7d28b08dcd88f79f1fbda035ade0aecec71ca5c914"}, - {file = "typer-0.12.3.tar.gz", hash = "sha256:49e73131481d804288ef62598d97a1ceef3058905aa536a1134f90891ba35482"}, -] - -[package.dependencies] -click = ">=8.0.0" -rich = ">=10.11.0" -shellingham = ">=1.3.0" -typing-extensions = ">=3.7.4.3" - -[[package]] -name = "types-cachetools" -version = "5.4.0.20240717" -description = "Typing stubs for cachetools" -optional = false -python-versions = ">=3.8" -files = [ - {file = "types-cachetools-5.4.0.20240717.tar.gz", hash = "sha256:1eae90c48760bac44ab89108be938e8ce1d740910f2d4b68446dcdc82763f186"}, - {file = "types_cachetools-5.4.0.20240717-py3-none-any.whl", hash = "sha256:67c84c26df988039be68344b162afd2dd7cd3741dc08e7d67aa1954782fd2d2a"}, -] - -[[package]] -name = "types-cffi" -version = "1.16.0.20240331" -description = "Typing stubs for cffi" -optional = false -python-versions = ">=3.8" -files = [ - {file = "types-cffi-1.16.0.20240331.tar.gz", hash = "sha256:b8b20d23a2b89cfed5f8c5bc53b0cb8677c3aac6d970dbc771e28b9c698f5dee"}, - {file = "types_cffi-1.16.0.20240331-py3-none-any.whl", hash = "sha256:a363e5ea54a4eb6a4a105d800685fde596bc318089b025b27dee09849fe41ff0"}, -] - -[package.dependencies] -types-setuptools = "*" - -[[package]] -name = "types-google-cloud-ndb" -version = "2.3.0.20240813" -description = "Typing stubs for google-cloud-ndb" -optional = false -python-versions = ">=3.8" -files = [ - {file = "types-google-cloud-ndb-2.3.0.20240813.tar.gz", hash = "sha256:f69b4f1abc4a2c423b288ffc48d2994b59358bfc151824614abc1d3f7f19f18d"}, - {file = "types_google_cloud_ndb-2.3.0.20240813-py3-none-any.whl", hash = "sha256:79404e04e97324d0b6466f297e92e734a38fb9cd064c2f3816820311bc6c3f57"}, -] - -[[package]] -name = "types-passlib" -version = "1.7.7.20240327" -description = "Typing stubs for passlib" -optional = false -python-versions = ">=3.8" -files = [ - {file = "types-passlib-1.7.7.20240327.tar.gz", hash = "sha256:4cce6a1a3a6afee9fc4728b4d9784300764ac2be747f5bcc01646d904b85f4bb"}, - {file = "types_passlib-1.7.7.20240327-py3-none-any.whl", hash = "sha256:3a3b7f4258b71034d2e2f4f307d6810f9904f906cdf375514c8bdbdb28a4ad23"}, -] - -[[package]] -name = "types-pillow" -version = "10.2.0.20240520" -description = "Typing stubs for Pillow" -optional = false -python-versions = ">=3.8" -files = [ - {file = "types-Pillow-10.2.0.20240520.tar.gz", hash = "sha256:130b979195465fa1e1676d8e81c9c7c30319e8e95b12fae945e8f0d525213107"}, - {file = "types_Pillow-10.2.0.20240520-py3-none-any.whl", hash = "sha256:33c36494b380e2a269bb742181bea5d9b00820367822dbd3760f07210a1da23d"}, -] - -[[package]] -name = "types-pyasn1" -version = "0.6.0.20240402" -description = "Typing stubs for pyasn1" -optional = false -python-versions = ">=3.8" -files = [ - {file = "types-pyasn1-0.6.0.20240402.tar.gz", hash = "sha256:5d54dcb33f69dd269071ca098e923ac20c5f03c814631fa7f3ed9ee035a5da3a"}, - {file = "types_pyasn1-0.6.0.20240402-py3-none-any.whl", hash = "sha256:848d01e7313c200acc035a8b3d377fe7b2aecbe77f2be49eb160a7f82835aaaf"}, -] - -[[package]] -name = "types-pyopenssl" -version = "24.1.0.20240722" -description = "Typing stubs for pyOpenSSL" -optional = false -python-versions = ">=3.8" -files = [ - {file = "types-pyOpenSSL-24.1.0.20240722.tar.gz", hash = "sha256:47913b4678a01d879f503a12044468221ed8576263c1540dcb0484ca21b08c39"}, - {file = "types_pyOpenSSL-24.1.0.20240722-py3-none-any.whl", hash = 
"sha256:6a7a5d2ec042537934cfb4c9d4deb0e16c4c6250b09358df1f083682fe6fda54"}, -] - -[package.dependencies] -cryptography = ">=35.0.0" -types-cffi = "*" - -[[package]] -name = "types-python-jose" -version = "3.3.4.20240106" -description = "Typing stubs for python-jose" -optional = false -python-versions = ">=3.8" -files = [ - {file = "types-python-jose-3.3.4.20240106.tar.gz", hash = "sha256:b18cf8c5080bbfe1ef7c3b707986435d9efca3e90889acb6a06f65e06bc3405a"}, - {file = "types_python_jose-3.3.4.20240106-py3-none-any.whl", hash = "sha256:b515a6c0c61f5e2a53bc93e3a2b024cbd42563e2e19cbde9fd1c2cc2cfe77ccc"}, -] - -[package.dependencies] -types-pyasn1 = "*" - -[[package]] -name = "types-pytz" -version = "2024.1.0.20240417" -description = "Typing stubs for pytz" -optional = false -python-versions = ">=3.8" -files = [ - {file = "types-pytz-2024.1.0.20240417.tar.gz", hash = "sha256:6810c8a1f68f21fdf0f4f374a432487c77645a0ac0b31de4bf4690cf21ad3981"}, - {file = "types_pytz-2024.1.0.20240417-py3-none-any.whl", hash = "sha256:8335d443310e2db7b74e007414e74c4f53b67452c0cb0d228ca359ccfba59659"}, -] - -[[package]] -name = "types-pywin32" -version = "306.0.0.20240806" -description = "Typing stubs for pywin32" -optional = false -python-versions = ">=3.8" -files = [ - {file = "types-pywin32-306.0.0.20240806.tar.gz", hash = "sha256:aaac3c52f5ca7e9a2f79a838a51bc467babd09c0143cfcae62c1164a1ebb7964"}, - {file = "types_pywin32-306.0.0.20240806-py3-none-any.whl", hash = "sha256:70bcc3a950aa519881f6928bae6c05a2e3f46adaa5fa5110a4608dc6fb813d1e"}, -] - -[[package]] -name = "types-pyyaml" -version = "6.0.12.20240808" -description = "Typing stubs for PyYAML" -optional = false -python-versions = ">=3.8" -files = [ - {file = "types-PyYAML-6.0.12.20240808.tar.gz", hash = "sha256:b8f76ddbd7f65440a8bda5526a9607e4c7a322dc2f8e1a8c405644f9a6f4b9af"}, - {file = "types_PyYAML-6.0.12.20240808-py3-none-any.whl", hash = "sha256:deda34c5c655265fc517b546c902aa6eed2ef8d3e921e4765fe606fe2afe8d35"}, -] - -[[package]] -name = "types-redis" -version = "4.6.0.20240806" -description = "Typing stubs for redis" -optional = false -python-versions = ">=3.8" -files = [ - {file = "types-redis-4.6.0.20240806.tar.gz", hash = "sha256:60dd02c2b91ea2d42ad079ac58dedc31d71d6eedb1c21d3796811b02baac655d"}, - {file = "types_redis-4.6.0.20240806-py3-none-any.whl", hash = "sha256:9d8fbe0ce37e3660c0a06982db7812384295d10a93d637c7f8604a2f3c88b0e6"}, -] - -[package.dependencies] -cryptography = ">=35.0.0" -types-pyOpenSSL = "*" - -[[package]] -name = "types-requests" -version = "2.32.0.20240712" -description = "Typing stubs for requests" -optional = false -python-versions = ">=3.8" -files = [ - {file = "types-requests-2.32.0.20240712.tar.gz", hash = "sha256:90c079ff05e549f6bf50e02e910210b98b8ff1ebdd18e19c873cd237737c1358"}, - {file = "types_requests-2.32.0.20240712-py3-none-any.whl", hash = "sha256:f754283e152c752e46e70942fa2a146b5bc70393522257bb85bd1ef7e019dcc3"}, -] - -[package.dependencies] -urllib3 = ">=2" - -[[package]] -name = "types-setuptools" -version = "71.1.0.20240813" -description = "Typing stubs for setuptools" -optional = false -python-versions = ">=3.8" -files = [ - {file = "types-setuptools-71.1.0.20240813.tar.gz", hash = "sha256:94ff4f0af18c7c24ac88932bcb0f5655fb7187a001b7c61e53a1bfdaf9877b54"}, - {file = "types_setuptools-71.1.0.20240813-py3-none-any.whl", hash = "sha256:d9d9ba2936f5d3b47b59ae9bf65942a60063ac1d6bbee180a8a79fbb43f22ce5"}, -] - -[[package]] -name = "typing-extensions" -version = "4.12.2" -description = "Backported and Experimental Type 
Hints for Python 3.8+" -optional = false -python-versions = ">=3.8" -files = [ - {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, - {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, -] - -[[package]] -name = "typing-inspect" -version = "0.9.0" -description = "Runtime inspection utilities for typing module." -optional = false -python-versions = "*" -files = [ - {file = "typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f"}, - {file = "typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78"}, -] - -[package.dependencies] -mypy-extensions = ">=0.3.0" -typing-extensions = ">=3.7.4" - -[[package]] -name = "tzdata" -version = "2024.1" -description = "Provider of IANA time zone data" -optional = false -python-versions = ">=2" -files = [ - {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, - {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, -] - -[[package]] -name = "ujson" -version = "5.10.0" -description = "Ultra fast JSON encoder and decoder for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "ujson-5.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2601aa9ecdbee1118a1c2065323bda35e2c5a2cf0797ef4522d485f9d3ef65bd"}, - {file = "ujson-5.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:348898dd702fc1c4f1051bc3aacbf894caa0927fe2c53e68679c073375f732cf"}, - {file = "ujson-5.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22cffecf73391e8abd65ef5f4e4dd523162a3399d5e84faa6aebbf9583df86d6"}, - {file = "ujson-5.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26b0e2d2366543c1bb4fbd457446f00b0187a2bddf93148ac2da07a53fe51569"}, - {file = "ujson-5.10.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:caf270c6dba1be7a41125cd1e4fc7ba384bf564650beef0df2dd21a00b7f5770"}, - {file = "ujson-5.10.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a245d59f2ffe750446292b0094244df163c3dc96b3ce152a2c837a44e7cda9d1"}, - {file = "ujson-5.10.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:94a87f6e151c5f483d7d54ceef83b45d3a9cca7a9cb453dbdbb3f5a6f64033f5"}, - {file = "ujson-5.10.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:29b443c4c0a113bcbb792c88bea67b675c7ca3ca80c3474784e08bba01c18d51"}, - {file = "ujson-5.10.0-cp310-cp310-win32.whl", hash = "sha256:c18610b9ccd2874950faf474692deee4223a994251bc0a083c114671b64e6518"}, - {file = "ujson-5.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:924f7318c31874d6bb44d9ee1900167ca32aa9b69389b98ecbde34c1698a250f"}, - {file = "ujson-5.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a5b366812c90e69d0f379a53648be10a5db38f9d4ad212b60af00bd4048d0f00"}, - {file = "ujson-5.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:502bf475781e8167f0f9d0e41cd32879d120a524b22358e7f205294224c71126"}, - {file = "ujson-5.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b91b5d0d9d283e085e821651184a647699430705b15bf274c7896f23fe9c9d8"}, - {file = "ujson-5.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:129e39af3a6d85b9c26d5577169c21d53821d8cf68e079060602e861c6e5da1b"}, - {file = "ujson-5.10.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f77b74475c462cb8b88680471193064d3e715c7c6074b1c8c412cb526466efe9"}, - {file = "ujson-5.10.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7ec0ca8c415e81aa4123501fee7f761abf4b7f386aad348501a26940beb1860f"}, - {file = "ujson-5.10.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ab13a2a9e0b2865a6c6db9271f4b46af1c7476bfd51af1f64585e919b7c07fd4"}, - {file = "ujson-5.10.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:57aaf98b92d72fc70886b5a0e1a1ca52c2320377360341715dd3933a18e827b1"}, - {file = "ujson-5.10.0-cp311-cp311-win32.whl", hash = "sha256:2987713a490ceb27edff77fb184ed09acdc565db700ee852823c3dc3cffe455f"}, - {file = "ujson-5.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:f00ea7e00447918ee0eff2422c4add4c5752b1b60e88fcb3c067d4a21049a720"}, - {file = "ujson-5.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:98ba15d8cbc481ce55695beee9f063189dce91a4b08bc1d03e7f0152cd4bbdd5"}, - {file = "ujson-5.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a9d2edbf1556e4f56e50fab7d8ff993dbad7f54bac68eacdd27a8f55f433578e"}, - {file = "ujson-5.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6627029ae4f52d0e1a2451768c2c37c0c814ffc04f796eb36244cf16b8e57043"}, - {file = "ujson-5.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8ccb77b3e40b151e20519c6ae6d89bfe3f4c14e8e210d910287f778368bb3d1"}, - {file = "ujson-5.10.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3caf9cd64abfeb11a3b661329085c5e167abbe15256b3b68cb5d914ba7396f3"}, - {file = "ujson-5.10.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6e32abdce572e3a8c3d02c886c704a38a1b015a1fb858004e03d20ca7cecbb21"}, - {file = "ujson-5.10.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a65b6af4d903103ee7b6f4f5b85f1bfd0c90ba4eeac6421aae436c9988aa64a2"}, - {file = "ujson-5.10.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:604a046d966457b6cdcacc5aa2ec5314f0e8c42bae52842c1e6fa02ea4bda42e"}, - {file = "ujson-5.10.0-cp312-cp312-win32.whl", hash = "sha256:6dea1c8b4fc921bf78a8ff00bbd2bfe166345f5536c510671bccececb187c80e"}, - {file = "ujson-5.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:38665e7d8290188b1e0d57d584eb8110951a9591363316dd41cf8686ab1d0abc"}, - {file = "ujson-5.10.0-cp313-cp313-macosx_10_9_x86_64.whl", hash = "sha256:618efd84dc1acbd6bff8eaa736bb6c074bfa8b8a98f55b61c38d4ca2c1f7f287"}, - {file = "ujson-5.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:38d5d36b4aedfe81dfe251f76c0467399d575d1395a1755de391e58985ab1c2e"}, - {file = "ujson-5.10.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67079b1f9fb29ed9a2914acf4ef6c02844b3153913eb735d4bf287ee1db6e557"}, - {file = "ujson-5.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7d0e0ceeb8fe2468c70ec0c37b439dd554e2aa539a8a56365fd761edb418988"}, - {file = "ujson-5.10.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:59e02cd37bc7c44d587a0ba45347cc815fb7a5fe48de16bf05caa5f7d0d2e816"}, - {file = "ujson-5.10.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:2a890b706b64e0065f02577bf6d8ca3b66c11a5e81fb75d757233a38c07a1f20"}, - {file = "ujson-5.10.0-cp313-cp313-musllinux_1_2_i686.whl", hash = 
"sha256:621e34b4632c740ecb491efc7f1fcb4f74b48ddb55e65221995e74e2d00bbff0"}, - {file = "ujson-5.10.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b9500e61fce0cfc86168b248104e954fead61f9be213087153d272e817ec7b4f"}, - {file = "ujson-5.10.0-cp313-cp313-win32.whl", hash = "sha256:4c4fc16f11ac1612f05b6f5781b384716719547e142cfd67b65d035bd85af165"}, - {file = "ujson-5.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:4573fd1695932d4f619928fd09d5d03d917274381649ade4328091ceca175539"}, - {file = "ujson-5.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a984a3131da7f07563057db1c3020b1350a3e27a8ec46ccbfbf21e5928a43050"}, - {file = "ujson-5.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:73814cd1b9db6fc3270e9d8fe3b19f9f89e78ee9d71e8bd6c9a626aeaeaf16bd"}, - {file = "ujson-5.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:61e1591ed9376e5eddda202ec229eddc56c612b61ac6ad07f96b91460bb6c2fb"}, - {file = "ujson-5.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2c75269f8205b2690db4572a4a36fe47cd1338e4368bc73a7a0e48789e2e35a"}, - {file = "ujson-5.10.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7223f41e5bf1f919cd8d073e35b229295aa8e0f7b5de07ed1c8fddac63a6bc5d"}, - {file = "ujson-5.10.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d4dc2fd6b3067c0782e7002ac3b38cf48608ee6366ff176bbd02cf969c9c20fe"}, - {file = "ujson-5.10.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:232cc85f8ee3c454c115455195a205074a56ff42608fd6b942aa4c378ac14dd7"}, - {file = "ujson-5.10.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:cc6139531f13148055d691e442e4bc6601f6dba1e6d521b1585d4788ab0bfad4"}, - {file = "ujson-5.10.0-cp38-cp38-win32.whl", hash = "sha256:e7ce306a42b6b93ca47ac4a3b96683ca554f6d35dd8adc5acfcd55096c8dfcb8"}, - {file = "ujson-5.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:e82d4bb2138ab05e18f089a83b6564fee28048771eb63cdecf4b9b549de8a2cc"}, - {file = "ujson-5.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dfef2814c6b3291c3c5f10065f745a1307d86019dbd7ea50e83504950136ed5b"}, - {file = "ujson-5.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4734ee0745d5928d0ba3a213647f1c4a74a2a28edc6d27b2d6d5bd9fa4319e27"}, - {file = "ujson-5.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d47ebb01bd865fdea43da56254a3930a413f0c5590372a1241514abae8aa7c76"}, - {file = "ujson-5.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dee5e97c2496874acbf1d3e37b521dd1f307349ed955e62d1d2f05382bc36dd5"}, - {file = "ujson-5.10.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7490655a2272a2d0b072ef16b0b58ee462f4973a8f6bbe64917ce5e0a256f9c0"}, - {file = "ujson-5.10.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ba17799fcddaddf5c1f75a4ba3fd6441f6a4f1e9173f8a786b42450851bd74f1"}, - {file = "ujson-5.10.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2aff2985cef314f21d0fecc56027505804bc78802c0121343874741650a4d3d1"}, - {file = "ujson-5.10.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ad88ac75c432674d05b61184178635d44901eb749786c8eb08c102330e6e8996"}, - {file = "ujson-5.10.0-cp39-cp39-win32.whl", hash = "sha256:2544912a71da4ff8c4f7ab5606f947d7299971bdd25a45e008e467ca638d13c9"}, - {file = "ujson-5.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:3ff201d62b1b177a46f113bb43ad300b424b7847f9c5d38b1b4ad8f75d4a282a"}, - {file = 
"ujson-5.10.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5b6fee72fa77dc172a28f21693f64d93166534c263adb3f96c413ccc85ef6e64"}, - {file = "ujson-5.10.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:61d0af13a9af01d9f26d2331ce49bb5ac1fb9c814964018ac8df605b5422dcb3"}, - {file = "ujson-5.10.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ecb24f0bdd899d368b715c9e6664166cf694d1e57be73f17759573a6986dd95a"}, - {file = "ujson-5.10.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fbd8fd427f57a03cff3ad6574b5e299131585d9727c8c366da4624a9069ed746"}, - {file = "ujson-5.10.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:beeaf1c48e32f07d8820c705ff8e645f8afa690cca1544adba4ebfa067efdc88"}, - {file = "ujson-5.10.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:baed37ea46d756aca2955e99525cc02d9181de67f25515c468856c38d52b5f3b"}, - {file = "ujson-5.10.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7663960f08cd5a2bb152f5ee3992e1af7690a64c0e26d31ba7b3ff5b2ee66337"}, - {file = "ujson-5.10.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:d8640fb4072d36b08e95a3a380ba65779d356b2fee8696afeb7794cf0902d0a1"}, - {file = "ujson-5.10.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78778a3aa7aafb11e7ddca4e29f46bc5139131037ad628cc10936764282d6753"}, - {file = "ujson-5.10.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0111b27f2d5c820e7f2dbad7d48e3338c824e7ac4d2a12da3dc6061cc39c8e6"}, - {file = "ujson-5.10.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:c66962ca7565605b355a9ed478292da628b8f18c0f2793021ca4425abf8b01e5"}, - {file = "ujson-5.10.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ba43cc34cce49cf2d4bc76401a754a81202d8aa926d0e2b79f0ee258cb15d3a4"}, - {file = "ujson-5.10.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:ac56eb983edce27e7f51d05bc8dd820586c6e6be1c5216a6809b0c668bb312b8"}, - {file = "ujson-5.10.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f44bd4b23a0e723bf8b10628288c2c7c335161d6840013d4d5de20e48551773b"}, - {file = "ujson-5.10.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c10f4654e5326ec14a46bcdeb2b685d4ada6911050aa8baaf3501e57024b804"}, - {file = "ujson-5.10.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0de4971a89a762398006e844ae394bd46991f7c385d7a6a3b93ba229e6dac17e"}, - {file = "ujson-5.10.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e1402f0564a97d2a52310ae10a64d25bcef94f8dd643fcf5d310219d915484f7"}, - {file = "ujson-5.10.0.tar.gz", hash = "sha256:b3cd8f3c5d8c7738257f1018880444f7b7d9b66232c64649f562d7ba86ad4bc1"}, -] - -[[package]] -name = "uncurl" -version = "0.0.11" -description = "A library to convert curl requests to python-requests." -optional = false -python-versions = "*" -files = [ - {file = "uncurl-0.0.11-py3-none-any.whl", hash = "sha256:5961e93f07a5c9f2ef8ae4245bd92b0a6ce503c851de980f5b70080ae74cdc59"}, - {file = "uncurl-0.0.11.tar.gz", hash = "sha256:530c9bbd4d118f4cde6194165ff484cc25b0661cd256f19e9d5fcb53fc077790"}, -] - -[package.dependencies] -pyperclip = "*" -six = "*" - -[[package]] -name = "unstructured" -version = "0.14.10" -description = "A library that prepares raw documents for downstream ML tasks." 
-optional = false -python-versions = "<3.13,>=3.9.0" -files = [ - {file = "unstructured-0.14.10-py3-none-any.whl", hash = "sha256:127052de54f0c49cd7c4432dd5e6c1f3fe0b8521192e32b126a0758dcc06da65"}, - {file = "unstructured-0.14.10.tar.gz", hash = "sha256:7076567a27e204ddbe129eb99e66bcc90c1cae6c4d4138506f6d555a527be73f"}, -] - -[package.dependencies] -backoff = "*" -beautifulsoup4 = "*" -chardet = "*" -dataclasses-json = "*" -emoji = "*" -filetype = "*" -langdetect = "*" -lxml = "*" -markdown = {version = "*", optional = true, markers = "extra == \"md\""} -nltk = "*" -numpy = "<2" -psutil = "*" -python-docx = {version = ">=1.1.2", optional = true, markers = "extra == \"docx\""} -python-iso639 = "*" -python-magic = "*" -python-pptx = {version = "<=0.6.23", optional = true, markers = "extra == \"pptx\""} -rapidfuzz = "*" -requests = "*" -tabulate = "*" -tqdm = "*" -typing-extensions = "*" -unstructured-client = "*" -wrapt = "*" - -[package.extras] -airtable = ["pyairtable"] -all-docs = ["effdet", "google-cloud-vision", "markdown", "networkx", "onnx", "openpyxl", "pandas", "pdf2image", "pdfminer.six", "pikepdf", "pillow-heif", "pypandoc", "pypdf", "pytesseract", "python-docx (>=1.1.2)", "python-oxmsg", "python-pptx (<=0.6.23)", "unstructured-inference (==0.7.36)", "unstructured.pytesseract (>=0.3.12)", "xlrd"] -astra = ["astrapy"] -azure = ["adlfs", "fsspec"] -azure-cognitive-search = ["azure-search-documents"] -bedrock = ["boto3", "langchain-community"] -biomed = ["bs4"] -box = ["boxfs", "fsspec"] -chroma = ["chromadb", "importlib-metadata (>=7.1.0)", "typer (<=0.9.0)"] -clarifai = ["clarifai"] -confluence = ["atlassian-python-api"] -csv = ["pandas"] -databricks-volumes = ["databricks-sdk"] -delta-table = ["deltalake", "fsspec"] -discord = ["discord-py"] -doc = ["python-docx (>=1.1.2)"] -docx = ["python-docx (>=1.1.2)"] -dropbox = ["dropboxdrivefs", "fsspec"] -elasticsearch = ["elasticsearch[async]"] -embed-huggingface = ["huggingface", "langchain-community", "sentence-transformers"] -embed-octoai = ["openai", "tiktoken"] -embed-vertexai = ["langchain", "langchain-community", "langchain-google-vertexai"] -embed-voyageai = ["langchain", "langchain-voyageai"] -epub = ["pypandoc"] -gcs = ["bs4", "fsspec", "gcsfs"] -github = ["pygithub (>1.58.0)"] -gitlab = ["python-gitlab"] -google-drive = ["google-api-python-client"] -hubspot = ["hubspot-api-client", "urllib3"] -huggingface = ["langdetect", "sacremoses", "sentencepiece", "torch", "transformers"] -image = ["effdet", "google-cloud-vision", "onnx", "pdf2image", "pdfminer.six", "pikepdf", "pillow-heif", "pypdf", "pytesseract", "unstructured-inference (==0.7.36)", "unstructured.pytesseract (>=0.3.12)"] -jira = ["atlassian-python-api"] -kafka = ["confluent-kafka"] -local-inference = ["effdet", "google-cloud-vision", "markdown", "networkx", "onnx", "openpyxl", "pandas", "pdf2image", "pdfminer.six", "pikepdf", "pillow-heif", "pypandoc", "pypdf", "pytesseract", "python-docx (>=1.1.2)", "python-oxmsg", "python-pptx (<=0.6.23)", "unstructured-inference (==0.7.36)", "unstructured.pytesseract (>=0.3.12)", "xlrd"] -md = ["markdown"] -mongodb = ["pymongo"] -msg = ["python-oxmsg"] -notion = ["htmlBuilder", "notion-client"] -odt = ["pypandoc", "python-docx (>=1.1.2)"] -onedrive = ["Office365-REST-Python-Client", "bs4", "msal"] -openai = ["langchain-community", "openai", "tiktoken"] -opensearch = ["opensearch-py"] -org = ["pypandoc"] -outlook = ["Office365-REST-Python-Client", "msal"] -paddleocr = ["unstructured.paddleocr (==2.6.1.3)"] -pdf = ["effdet", 
"google-cloud-vision", "onnx", "pdf2image", "pdfminer.six", "pikepdf", "pillow-heif", "pypdf", "pytesseract", "unstructured-inference (==0.7.36)", "unstructured.pytesseract (>=0.3.12)"] -pinecone = ["pinecone-client (>=3.7.1)"] -postgres = ["psycopg2-binary"] -ppt = ["python-pptx (<=0.6.23)"] -pptx = ["python-pptx (<=0.6.23)"] -qdrant = ["qdrant-client"] -reddit = ["praw"] -rst = ["pypandoc"] -rtf = ["pypandoc"] -s3 = ["fsspec", "s3fs"] -salesforce = ["simple-salesforce"] -sftp = ["fsspec", "paramiko"] -sharepoint = ["Office365-REST-Python-Client", "msal"] -singlestore = ["singlestoredb"] -slack = ["slack-sdk"] -tsv = ["pandas"] -weaviate = ["weaviate-client"] -wikipedia = ["wikipedia"] -xlsx = ["networkx", "openpyxl", "pandas", "xlrd"] - -[[package]] -name = "unstructured-client" -version = "0.25.5" -description = "Python Client SDK for Unstructured API" -optional = false -python-versions = ">=3.8" -files = [ - {file = "unstructured-client-0.25.5.tar.gz", hash = "sha256:adb97ea56ce65f8b277d5b05f093e9d13a3320ac8dea7265ffa71f5e13ed5f84"}, - {file = "unstructured_client-0.25.5-py3-none-any.whl", hash = "sha256:23537fee984e43d06a75f986a73e420a9659cc92010afb8324fbf67c85962eaf"}, -] - -[package.dependencies] -certifi = ">=2023.7.22" -charset-normalizer = ">=3.2.0" -dataclasses-json = ">=0.6.4" -deepdiff = ">=6.0" -httpx = ">=0.27.0" -idna = ">=3.4" -jsonpath-python = ">=1.0.6" -marshmallow = ">=3.19.0" -mypy-extensions = ">=1.0.0" -nest-asyncio = ">=1.6.0" -packaging = ">=23.1" -pypdf = ">=4.0" -python-dateutil = ">=2.8.2" -requests = ">=2.31.0" -requests-toolbelt = ">=1.0.0" -six = ">=1.16.0" -typing-extensions = ">=4.7.1" -typing-inspect = ">=0.9.0" -urllib3 = ">=1.26.18" - -[package.extras] -dev = ["pylint (==3.1.0)"] - -[[package]] -name = "upstash-vector" -version = "0.4.0" -description = "Serverless Vector SDK from Upstash" -optional = false -python-versions = "<4.0,>=3.8" -files = [ - {file = "upstash_vector-0.4.0-py3-none-any.whl", hash = "sha256:1ba11d8fc7d036bf93fde741b862c9e04ad962397dc600d1dc7546b63a84da82"}, - {file = "upstash_vector-0.4.0.tar.gz", hash = "sha256:a8ae11b2d3989c2615f1f06c66af39da763af7f7239b625fede621bf2fbb997d"}, -] - -[package.dependencies] -httpx = ">=0.24.0,<0.28" - -[[package]] -name = "uritemplate" -version = "4.1.1" -description = "Implementation of RFC 6570 URI Templates" -optional = false -python-versions = ">=3.6" -files = [ - {file = "uritemplate-4.1.1-py2.py3-none-any.whl", hash = "sha256:830c08b8d99bdd312ea4ead05994a38e8936266f84b9a7878232db50b044e02e"}, - {file = "uritemplate-4.1.1.tar.gz", hash = "sha256:4346edfc5c3b79f694bccd6d6099a322bbeb628dbf2cd86eea55a456ce5124f0"}, -] - -[[package]] -name = "urllib3" -version = "2.2.2" -description = "HTTP library with thread-safe connection pooling, file post, and more." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, - {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, -] - -[package.dependencies] -pysocks = {version = ">=1.5.6,<1.5.7 || >1.5.7,<2.0", optional = true, markers = "extra == \"socks\""} - -[package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] -h2 = ["h2 (>=4,<5)"] -socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] -zstd = ["zstandard (>=0.18.0)"] - -[[package]] -name = "uuid6" -version = "2024.1.12" -description = "New time-based UUID formats which are suited for use as a database key" -optional = false -python-versions = ">=3.8" -files = [ - {file = "uuid6-2024.1.12-py3-none-any.whl", hash = "sha256:8150093c8d05a331bc0535bc5ef6cf57ac6eceb2404fd319bc10caee2e02c065"}, - {file = "uuid6-2024.1.12.tar.gz", hash = "sha256:ed0afb3a973057575f9883201baefe402787ca5e11e1d24e377190f0c43f1993"}, -] - -[[package]] -name = "uvicorn" -version = "0.30.6" -description = "The lightning-fast ASGI server." -optional = false -python-versions = ">=3.8" -files = [ - {file = "uvicorn-0.30.6-py3-none-any.whl", hash = "sha256:65fd46fe3fda5bdc1b03b94eb634923ff18cd35b2f084813ea79d1f103f711b5"}, - {file = "uvicorn-0.30.6.tar.gz", hash = "sha256:4b15decdda1e72be08209e860a1e10e92439ad5b97cf44cc945fcbee66fc5788"}, -] - -[package.dependencies] -click = ">=7.0" -colorama = {version = ">=0.4", optional = true, markers = "sys_platform == \"win32\" and extra == \"standard\""} -h11 = ">=0.8" -httptools = {version = ">=0.5.0", optional = true, markers = "extra == \"standard\""} -python-dotenv = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} -pyyaml = {version = ">=5.1", optional = true, markers = "extra == \"standard\""} -typing-extensions = {version = ">=4.0", markers = "python_version < \"3.11\""} -uvloop = {version = ">=0.14.0,<0.15.0 || >0.15.0,<0.15.1 || >0.15.1", optional = true, markers = "(sys_platform != \"win32\" and sys_platform != \"cygwin\") and platform_python_implementation != \"PyPy\" and extra == \"standard\""} -watchfiles = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} -websockets = {version = ">=10.4", optional = true, markers = "extra == \"standard\""} - -[package.extras] -standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"] - -[[package]] -name = "uvloop" -version = "0.19.0" -description = "Fast implementation of asyncio event loop on top of libuv" -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "uvloop-0.19.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:de4313d7f575474c8f5a12e163f6d89c0a878bc49219641d49e6f1444369a90e"}, - {file = "uvloop-0.19.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5588bd21cf1fcf06bded085f37e43ce0e00424197e7c10e77afd4bbefffef428"}, - {file = "uvloop-0.19.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b1fd71c3843327f3bbc3237bedcdb6504fd50368ab3e04d0410e52ec293f5b8"}, - {file = "uvloop-0.19.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a05128d315e2912791de6088c34136bfcdd0c7cbc1cf85fd6fd1bb321b7c849"}, - {file = "uvloop-0.19.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:cd81bdc2b8219cb4b2556eea39d2e36bfa375a2dd021404f90a62e44efaaf957"}, - 
{file = "uvloop-0.19.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5f17766fb6da94135526273080f3455a112f82570b2ee5daa64d682387fe0dcd"}, - {file = "uvloop-0.19.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4ce6b0af8f2729a02a5d1575feacb2a94fc7b2e983868b009d51c9a9d2149bef"}, - {file = "uvloop-0.19.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:31e672bb38b45abc4f26e273be83b72a0d28d074d5b370fc4dcf4c4eb15417d2"}, - {file = "uvloop-0.19.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:570fc0ed613883d8d30ee40397b79207eedd2624891692471808a95069a007c1"}, - {file = "uvloop-0.19.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5138821e40b0c3e6c9478643b4660bd44372ae1e16a322b8fc07478f92684e24"}, - {file = "uvloop-0.19.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:91ab01c6cd00e39cde50173ba4ec68a1e578fee9279ba64f5221810a9e786533"}, - {file = "uvloop-0.19.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:47bf3e9312f63684efe283f7342afb414eea4d3011542155c7e625cd799c3b12"}, - {file = "uvloop-0.19.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:da8435a3bd498419ee8c13c34b89b5005130a476bda1d6ca8cfdde3de35cd650"}, - {file = "uvloop-0.19.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:02506dc23a5d90e04d4f65c7791e65cf44bd91b37f24cfc3ef6cf2aff05dc7ec"}, - {file = "uvloop-0.19.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2693049be9d36fef81741fddb3f441673ba12a34a704e7b4361efb75cf30befc"}, - {file = "uvloop-0.19.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7010271303961c6f0fe37731004335401eb9075a12680738731e9c92ddd96ad6"}, - {file = "uvloop-0.19.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5daa304d2161d2918fa9a17d5635099a2f78ae5b5960e742b2fcfbb7aefaa593"}, - {file = "uvloop-0.19.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:7207272c9520203fea9b93843bb775d03e1cf88a80a936ce760f60bb5add92f3"}, - {file = "uvloop-0.19.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:78ab247f0b5671cc887c31d33f9b3abfb88d2614b84e4303f1a63b46c046c8bd"}, - {file = "uvloop-0.19.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:472d61143059c84947aa8bb74eabbace30d577a03a1805b77933d6bd13ddebbd"}, - {file = "uvloop-0.19.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45bf4c24c19fb8a50902ae37c5de50da81de4922af65baf760f7c0c42e1088be"}, - {file = "uvloop-0.19.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:271718e26b3e17906b28b67314c45d19106112067205119dddbd834c2b7ce797"}, - {file = "uvloop-0.19.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:34175c9fd2a4bc3adc1380e1261f60306344e3407c20a4d684fd5f3be010fa3d"}, - {file = "uvloop-0.19.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e27f100e1ff17f6feeb1f33968bc185bf8ce41ca557deee9d9bbbffeb72030b7"}, - {file = "uvloop-0.19.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:13dfdf492af0aa0a0edf66807d2b465607d11c4fa48f4a1fd41cbea5b18e8e8b"}, - {file = "uvloop-0.19.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6e3d4e85ac060e2342ff85e90d0c04157acb210b9ce508e784a944f852a40e67"}, - {file = "uvloop-0.19.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8ca4956c9ab567d87d59d49fa3704cf29e37109ad348f2d5223c9bf761a332e7"}, - {file = "uvloop-0.19.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f467a5fd23b4fc43ed86342641f3936a68ded707f4627622fa3f82a120e18256"}, - {file 
= "uvloop-0.19.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:492e2c32c2af3f971473bc22f086513cedfc66a130756145a931a90c3958cb17"}, - {file = "uvloop-0.19.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2df95fca285a9f5bfe730e51945ffe2fa71ccbfdde3b0da5772b4ee4f2e770d5"}, - {file = "uvloop-0.19.0.tar.gz", hash = "sha256:0246f4fd1bf2bf702e06b0d45ee91677ee5c31242f39aab4ea6fe0c51aedd0fd"}, -] - -[package.extras] -docs = ["Sphinx (>=4.1.2,<4.2.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"] -test = ["Cython (>=0.29.36,<0.30.0)", "aiohttp (==3.9.0b0)", "aiohttp (>=3.8.1)", "flake8 (>=5.0,<6.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=23.0.0,<23.1.0)", "pycodestyle (>=2.9.0,<2.10.0)"] - -[[package]] -name = "validators" -version = "0.33.0" -description = "Python Data Validation for Humans™" -optional = false -python-versions = ">=3.8" -files = [ - {file = "validators-0.33.0-py3-none-any.whl", hash = "sha256:134b586a98894f8139865953899fc2daeb3d0c35569552c5518f089ae43ed075"}, - {file = "validators-0.33.0.tar.gz", hash = "sha256:535867e9617f0100e676a1257ba1e206b9bfd847ddc171e4d44811f07ff0bfbf"}, -] - -[package.extras] -crypto-eth-addresses = ["eth-hash[pycryptodome] (>=0.7.0)"] - -[[package]] -name = "vine" -version = "5.1.0" -description = "Python promises." -optional = true -python-versions = ">=3.6" -files = [ - {file = "vine-5.1.0-py3-none-any.whl", hash = "sha256:40fdf3c48b2cfe1c38a49e9ae2da6fda88e4794c810050a728bd7413811fb1dc"}, - {file = "vine-5.1.0.tar.gz", hash = "sha256:8b62e981d35c41049211cf62a0a1242d8c1ee9bd15bb196ce38aefd6799e61e0"}, -] - -[[package]] -name = "virtualenv" -version = "20.26.3" -description = "Virtual Python Environment builder" -optional = false -python-versions = ">=3.7" -files = [ - {file = "virtualenv-20.26.3-py3-none-any.whl", hash = "sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589"}, - {file = "virtualenv-20.26.3.tar.gz", hash = "sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a"}, -] - -[package.dependencies] -distlib = ">=0.3.7,<1" -filelock = ">=3.12.2,<4" -platformdirs = ">=3.9.1,<5" - -[package.extras] -docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] -test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] - -[[package]] -name = "vulture" -version = "2.11" -description = "Find dead code" -optional = false -python-versions = ">=3.8" -files = [ - {file = "vulture-2.11-py2.py3-none-any.whl", hash = "sha256:12d745f7710ffbf6aeb8279ba9068a24d4e52e8ed333b8b044035c9d6b823aba"}, - {file = "vulture-2.11.tar.gz", hash = "sha256:f0fbb60bce6511aad87ee0736c502456737490a82d919a44e6d92262cb35f1c2"}, -] - -[package.dependencies] -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} - -[[package]] -name = "watchfiles" -version = "0.23.0" -description = "Simple, modern and high performance file watching and code reload in python." 
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "watchfiles-0.23.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:bee8ce357a05c20db04f46c22be2d1a2c6a8ed365b325d08af94358e0688eeb4"},
-    {file = "watchfiles-0.23.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4ccd3011cc7ee2f789af9ebe04745436371d36afe610028921cab9f24bb2987b"},
-    {file = "watchfiles-0.23.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb02d41c33be667e6135e6686f1bb76104c88a312a18faa0ef0262b5bf7f1a0f"},
-    {file = "watchfiles-0.23.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7cf12ac34c444362f3261fb3ff548f0037ddd4c5bb85f66c4be30d2936beb3c5"},
-    {file = "watchfiles-0.23.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0b2c25040a3c0ce0e66c7779cc045fdfbbb8d59e5aabfe033000b42fe44b53e"},
-    {file = "watchfiles-0.23.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ecf2be4b9eece4f3da8ba5f244b9e51932ebc441c0867bd6af46a3d97eb068d6"},
-    {file = "watchfiles-0.23.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:40cb8fa00028908211eb9f8d47744dca21a4be6766672e1ff3280bee320436f1"},
-    {file = "watchfiles-0.23.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f48c917ffd36ff9a5212614c2d0d585fa8b064ca7e66206fb5c095015bc8207"},
-    {file = "watchfiles-0.23.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9d183e3888ada88185ab17064079c0db8c17e32023f5c278d7bf8014713b1b5b"},
-    {file = "watchfiles-0.23.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9837edf328b2805346f91209b7e660f65fb0e9ca18b7459d075d58db082bf981"},
-    {file = "watchfiles-0.23.0-cp310-none-win32.whl", hash = "sha256:296e0b29ab0276ca59d82d2da22cbbdb39a23eed94cca69aed274595fb3dfe42"},
-    {file = "watchfiles-0.23.0-cp310-none-win_amd64.whl", hash = "sha256:4ea756e425ab2dfc8ef2a0cb87af8aa7ef7dfc6fc46c6f89bcf382121d4fff75"},
-    {file = "watchfiles-0.23.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:e397b64f7aaf26915bf2ad0f1190f75c855d11eb111cc00f12f97430153c2eab"},
-    {file = "watchfiles-0.23.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b4ac73b02ca1824ec0a7351588241fd3953748d3774694aa7ddb5e8e46aef3e3"},
-    {file = "watchfiles-0.23.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:130a896d53b48a1cecccfa903f37a1d87dbb74295305f865a3e816452f6e49e4"},
-    {file = "watchfiles-0.23.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c5e7803a65eb2d563c73230e9d693c6539e3c975ccfe62526cadde69f3fda0cf"},
-    {file = "watchfiles-0.23.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1aa4cc85202956d1a65c88d18c7b687b8319dbe6b1aec8969784ef7a10e7d1a"},
-    {file = "watchfiles-0.23.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87f889f6e58849ddb7c5d2cb19e2e074917ed1c6e3ceca50405775166492cca8"},
-    {file = "watchfiles-0.23.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:37fd826dac84c6441615aa3f04077adcc5cac7194a021c9f0d69af20fb9fa788"},
-    {file = "watchfiles-0.23.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee7db6e36e7a2c15923072e41ea24d9a0cf39658cb0637ecc9307b09d28827e1"},
-    {file = "watchfiles-0.23.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2368c5371c17fdcb5a2ea71c5c9d49f9b128821bfee69503cc38eae00feb3220"},
-    {file = "watchfiles-0.23.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:857af85d445b9ba9178db95658c219dbd77b71b8264e66836a6eba4fbf49c320"},
-    {file = "watchfiles-0.23.0-cp311-none-win32.whl", hash = "sha256:1d636c8aeb28cdd04a4aa89030c4b48f8b2954d8483e5f989774fa441c0ed57b"},
-    {file = "watchfiles-0.23.0-cp311-none-win_amd64.whl", hash = "sha256:46f1d8069a95885ca529645cdbb05aea5837d799965676e1b2b1f95a4206313e"},
-    {file = "watchfiles-0.23.0-cp311-none-win_arm64.whl", hash = "sha256:e495ed2a7943503766c5d1ff05ae9212dc2ce1c0e30a80d4f0d84889298fa304"},
-    {file = "watchfiles-0.23.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:1db691bad0243aed27c8354b12d60e8e266b75216ae99d33e927ff5238d270b5"},
-    {file = "watchfiles-0.23.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:62d2b18cb1edaba311fbbfe83fb5e53a858ba37cacb01e69bc20553bb70911b8"},
-    {file = "watchfiles-0.23.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e087e8fdf1270d000913c12e6eca44edd02aad3559b3e6b8ef00f0ce76e0636f"},
-    {file = "watchfiles-0.23.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dd41d5c72417b87c00b1b635738f3c283e737d75c5fa5c3e1c60cd03eac3af77"},
-    {file = "watchfiles-0.23.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e5f3ca0ff47940ce0a389457b35d6df601c317c1e1a9615981c474452f98de1"},
-    {file = "watchfiles-0.23.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6991e3a78f642368b8b1b669327eb6751439f9f7eaaa625fae67dd6070ecfa0b"},
-    {file = "watchfiles-0.23.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7f7252f52a09f8fa5435dc82b6af79483118ce6bd51eb74e6269f05ee22a7b9f"},
-    {file = "watchfiles-0.23.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e01bcb8d767c58865207a6c2f2792ad763a0fe1119fb0a430f444f5b02a5ea0"},
-    {file = "watchfiles-0.23.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:8e56fbcdd27fce061854ddec99e015dd779cae186eb36b14471fc9ae713b118c"},
-    {file = "watchfiles-0.23.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bd3e2d64500a6cad28bcd710ee6269fbeb2e5320525acd0cfab5f269ade68581"},
-    {file = "watchfiles-0.23.0-cp312-none-win32.whl", hash = "sha256:eb99c954291b2fad0eff98b490aa641e128fbc4a03b11c8a0086de8b7077fb75"},
-    {file = "watchfiles-0.23.0-cp312-none-win_amd64.whl", hash = "sha256:dccc858372a56080332ea89b78cfb18efb945da858fabeb67f5a44fa0bcb4ebb"},
-    {file = "watchfiles-0.23.0-cp312-none-win_arm64.whl", hash = "sha256:6c21a5467f35c61eafb4e394303720893066897fca937bade5b4f5877d350ff8"},
-    {file = "watchfiles-0.23.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:ba31c32f6b4dceeb2be04f717811565159617e28d61a60bb616b6442027fd4b9"},
-    {file = "watchfiles-0.23.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:85042ab91814fca99cec4678fc063fb46df4cbb57b4835a1cc2cb7a51e10250e"},
-    {file = "watchfiles-0.23.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:24655e8c1c9c114005c3868a3d432c8aa595a786b8493500071e6a52f3d09217"},
-    {file = "watchfiles-0.23.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6b1a950ab299a4a78fd6369a97b8763732bfb154fdb433356ec55a5bce9515c1"},
-    {file = "watchfiles-0.23.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b8d3c5cd327dd6ce0edfc94374fb5883d254fe78a5e9d9dfc237a1897dc73cd1"},
-    {file = "watchfiles-0.23.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ff785af8bacdf0be863ec0c428e3288b817e82f3d0c1d652cd9c6d509020dd0"},
-    {file = "watchfiles-0.23.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:02b7ba9d4557149410747353e7325010d48edcfe9d609a85cb450f17fd50dc3d"},
-    {file = "watchfiles-0.23.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48a1b05c0afb2cd2f48c1ed2ae5487b116e34b93b13074ed3c22ad5c743109f0"},
-    {file = "watchfiles-0.23.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:109a61763e7318d9f821b878589e71229f97366fa6a5c7720687d367f3ab9eef"},
-    {file = "watchfiles-0.23.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:9f8e6bb5ac007d4a4027b25f09827ed78cbbd5b9700fd6c54429278dacce05d1"},
-    {file = "watchfiles-0.23.0-cp313-none-win32.whl", hash = "sha256:f46c6f0aec8d02a52d97a583782d9af38c19a29900747eb048af358a9c1d8e5b"},
-    {file = "watchfiles-0.23.0-cp313-none-win_amd64.whl", hash = "sha256:f449afbb971df5c6faeb0a27bca0427d7b600dd8f4a068492faec18023f0dcff"},
-    {file = "watchfiles-0.23.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:2dddc2487d33e92f8b6222b5fb74ae2cfde5e8e6c44e0248d24ec23befdc5366"},
-    {file = "watchfiles-0.23.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e75695cc952e825fa3e0684a7f4a302f9128721f13eedd8dbd3af2ba450932b8"},
-    {file = "watchfiles-0.23.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2537ef60596511df79b91613a5bb499b63f46f01a11a81b0a2b0dedf645d0a9c"},
-    {file = "watchfiles-0.23.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:20b423b58f5fdde704a226b598a2d78165fe29eb5621358fe57ea63f16f165c4"},
-    {file = "watchfiles-0.23.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b98732ec893975455708d6fc9a6daab527fc8bbe65be354a3861f8c450a632a4"},
-    {file = "watchfiles-0.23.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee1f5fcbf5bc33acc0be9dd31130bcba35d6d2302e4eceafafd7d9018c7755ab"},
-    {file = "watchfiles-0.23.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8f195338a5a7b50a058522b39517c50238358d9ad8284fd92943643144c0c03"},
-    {file = "watchfiles-0.23.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:524fcb8d59b0dbee2c9b32207084b67b2420f6431ed02c18bd191e6c575f5c48"},
-    {file = "watchfiles-0.23.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0eff099a4df36afaa0eea7a913aa64dcf2cbd4e7a4f319a73012210af4d23810"},
-    {file = "watchfiles-0.23.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a8323daae27ea290ba3350c70c836c0d2b0fb47897fa3b0ca6a5375b952b90d3"},
-    {file = "watchfiles-0.23.0-cp38-none-win32.whl", hash = "sha256:aafea64a3ae698695975251f4254df2225e2624185a69534e7fe70581066bc1b"},
-    {file = "watchfiles-0.23.0-cp38-none-win_amd64.whl", hash = "sha256:c846884b2e690ba62a51048a097acb6b5cd263d8bd91062cd6137e2880578472"},
-    {file = "watchfiles-0.23.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a753993635eccf1ecb185dedcc69d220dab41804272f45e4aef0a67e790c3eb3"},
-    {file = "watchfiles-0.23.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6bb91fa4d0b392f0f7e27c40981e46dda9eb0fbc84162c7fb478fe115944f491"},
-    {file = "watchfiles-0.23.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1f67312efa3902a8e8496bfa9824d3bec096ff83c4669ea555c6bdd213aa516"},
-    {file = "watchfiles-0.23.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7ca6b71dcc50d320c88fb2d88ecd63924934a8abc1673683a242a7ca7d39e781"},
-    {file = "watchfiles-0.23.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2aec5c29915caf08771d2507da3ac08e8de24a50f746eb1ed295584ba1820330"},
-    {file = "watchfiles-0.23.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1733b9bc2c8098c6bdb0ff7a3d7cb211753fecb7bd99bdd6df995621ee1a574b"},
-    {file = "watchfiles-0.23.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:02ff5d7bd066c6a7673b17c8879cd8ee903078d184802a7ee851449c43521bdd"},
-    {file = "watchfiles-0.23.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18e2de19801b0eaa4c5292a223effb7cfb43904cb742c5317a0ac686ed604765"},
-    {file = "watchfiles-0.23.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8ada449e22198c31fb013ae7e9add887e8d2bd2335401abd3cbc55f8c5083647"},
-    {file = "watchfiles-0.23.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3af1b05361e1cc497bf1be654a664750ae61f5739e4bb094a2be86ec8c6db9b6"},
-    {file = "watchfiles-0.23.0-cp39-none-win32.whl", hash = "sha256:486bda18be5d25ab5d932699ceed918f68eb91f45d018b0343e3502e52866e5e"},
-    {file = "watchfiles-0.23.0-cp39-none-win_amd64.whl", hash = "sha256:d2d42254b189a346249424fb9bb39182a19289a2409051ee432fb2926bad966a"},
-    {file = "watchfiles-0.23.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6a9265cf87a5b70147bfb2fec14770ed5b11a5bb83353f0eee1c25a81af5abfe"},
-    {file = "watchfiles-0.23.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9f02a259fcbbb5fcfe7a0805b1097ead5ba7a043e318eef1db59f93067f0b49b"},
-    {file = "watchfiles-0.23.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ebaebb53b34690da0936c256c1cdb0914f24fb0e03da76d185806df9328abed"},
-    {file = "watchfiles-0.23.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd257f98cff9c6cb39eee1a83c7c3183970d8a8d23e8cf4f47d9a21329285cee"},
-    {file = "watchfiles-0.23.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:aba037c1310dd108411d27b3d5815998ef0e83573e47d4219f45753c710f969f"},
-    {file = "watchfiles-0.23.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:a96ac14e184aa86dc43b8a22bb53854760a58b2966c2b41580de938e9bf26ed0"},
-    {file = "watchfiles-0.23.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:11698bb2ea5e991d10f1f4f83a39a02f91e44e4bd05f01b5c1ec04c9342bf63c"},
-    {file = "watchfiles-0.23.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efadd40fca3a04063d40c4448c9303ce24dd6151dc162cfae4a2a060232ebdcb"},
-    {file = "watchfiles-0.23.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:556347b0abb4224c5ec688fc58214162e92a500323f50182f994f3ad33385dcb"},
-    {file = "watchfiles-0.23.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:1cf7f486169986c4b9d34087f08ce56a35126600b6fef3028f19ca16d5889071"},
-    {file = "watchfiles-0.23.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f18de0f82c62c4197bea5ecf4389288ac755896aac734bd2cc44004c56e4ac47"},
-    {file = "watchfiles-0.23.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:532e1f2c491274d1333a814e4c5c2e8b92345d41b12dc806cf07aaff786beb66"},
-    {file = "watchfiles-0.23.0.tar.gz", hash = "sha256:9338ade39ff24f8086bb005d16c29f8e9f19e55b18dcb04dfa26fcbc09da497b"},
-]
-
-[package.dependencies]
-anyio = ">=3.0.0"
-
-[[package]]
-name = "wcwidth"
-version = "0.2.13"
-description = "Measures the displayed width of unicode strings in a terminal"
-optional = false
-python-versions = "*"
-files = [
-    {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"},
-    {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"},
-]
-
-[[package]]
-name = "weaviate-client"
-version = "4.7.1"
-description = "A python native Weaviate client"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "weaviate_client-4.7.1-py3-none-any.whl", hash = "sha256:342f5c67b126cee4dc3a60467ad1ae74971cd5614e27af6fb13d687a345352c4"},
-    {file = "weaviate_client-4.7.1.tar.gz", hash = "sha256:af99ac4e53613d2ff5b797372e95d004d0c8a1dd10a7f592068bcb423a30af30"},
-]
-
-[package.dependencies]
-authlib = ">=1.2.1,<2.0.0"
-grpcio = ">=1.57.0,<2.0.0"
-grpcio-health-checking = ">=1.57.0,<2.0.0"
-grpcio-tools = ">=1.57.0,<2.0.0"
-httpx = ">=0.25.0,<=0.27.0"
-pydantic = ">=2.5.0,<3.0.0"
-requests = ">=2.30.0,<3.0.0"
-validators = "0.33.0"
-
-[[package]]
-name = "webencodings"
-version = "0.5.1"
-description = "Character encoding aliases for legacy web content"
-optional = false
-python-versions = "*"
-files = [
-    {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"},
-    {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"},
-]
-
-[[package]]
-name = "websocket-client"
-version = "1.8.0"
-description = "WebSocket client for Python with low level API options"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526"},
-    {file = "websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da"},
-]
-
-[package.extras]
-docs = ["Sphinx (>=6.0)", "myst-parser (>=2.0.0)", "sphinx-rtd-theme (>=1.1.0)"]
-optional = ["python-socks", "wsaccel"]
-test = ["websockets"]
-
-[[package]]
-name = "websockets"
-version = "12.0"
-description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "websockets-12.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d554236b2a2006e0ce16315c16eaa0d628dab009c33b63ea03f41c6107958374"},
-    {file = "websockets-12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2d225bb6886591b1746b17c0573e29804619c8f755b5598d875bb4235ea639be"},
-    {file = "websockets-12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eb809e816916a3b210bed3c82fb88eaf16e8afcf9c115ebb2bacede1797d2547"},
-    {file = "websockets-12.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c588f6abc13f78a67044c6b1273a99e1cf31038ad51815b3b016ce699f0d75c2"},
-    {file = "websockets-12.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5aa9348186d79a5f232115ed3fa9020eab66d6c3437d72f9d2c8ac0c6858c558"},
-    {file = "websockets-12.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6350b14a40c95ddd53e775dbdbbbc59b124a5c8ecd6fbb09c2e52029f7a9f480"},
-    {file = "websockets-12.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:70ec754cc2a769bcd218ed8d7209055667b30860ffecb8633a834dde27d6307c"},
-    {file = "websockets-12.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6e96f5ed1b83a8ddb07909b45bd94833b0710f738115751cdaa9da1fb0cb66e8"},
-    {file = "websockets-12.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4d87be612cbef86f994178d5186add3d94e9f31cc3cb499a0482b866ec477603"},
-    {file = "websockets-12.0-cp310-cp310-win32.whl", hash = "sha256:befe90632d66caaf72e8b2ed4d7f02b348913813c8b0a32fae1cc5fe3730902f"},
-    {file = "websockets-12.0-cp310-cp310-win_amd64.whl", hash = "sha256:363f57ca8bc8576195d0540c648aa58ac18cf85b76ad5202b9f976918f4219cf"},
-    {file = "websockets-12.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5d873c7de42dea355d73f170be0f23788cf3fa9f7bed718fd2830eefedce01b4"},
-    {file = "websockets-12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3f61726cae9f65b872502ff3c1496abc93ffbe31b278455c418492016e2afc8f"},
-    {file = "websockets-12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed2fcf7a07334c77fc8a230755c2209223a7cc44fc27597729b8ef5425aa61a3"},
-    {file = "websockets-12.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e332c210b14b57904869ca9f9bf4ca32f5427a03eeb625da9b616c85a3a506c"},
-    {file = "websockets-12.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5693ef74233122f8ebab026817b1b37fe25c411ecfca084b29bc7d6efc548f45"},
-    {file = "websockets-12.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e9e7db18b4539a29cc5ad8c8b252738a30e2b13f033c2d6e9d0549b45841c04"},
-    {file = "websockets-12.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6e2df67b8014767d0f785baa98393725739287684b9f8d8a1001eb2839031447"},
-    {file = "websockets-12.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bea88d71630c5900690fcb03161ab18f8f244805c59e2e0dc4ffadae0a7ee0ca"},
-    {file = "websockets-12.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dff6cdf35e31d1315790149fee351f9e52978130cef6c87c4b6c9b3baf78bc53"},
-    {file = "websockets-12.0-cp311-cp311-win32.whl", hash = "sha256:3e3aa8c468af01d70332a382350ee95f6986db479ce7af14d5e81ec52aa2b402"},
-    {file = "websockets-12.0-cp311-cp311-win_amd64.whl", hash = "sha256:25eb766c8ad27da0f79420b2af4b85d29914ba0edf69f547cc4f06ca6f1d403b"},
-    {file = "websockets-12.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0e6e2711d5a8e6e482cacb927a49a3d432345dfe7dea8ace7b5790df5932e4df"},
-    {file = "websockets-12.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:dbcf72a37f0b3316e993e13ecf32f10c0e1259c28ffd0a85cee26e8549595fbc"},
-    {file = "websockets-12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12743ab88ab2af1d17dd4acb4645677cb7063ef4db93abffbf164218a5d54c6b"},
-    {file = "websockets-12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b645f491f3c48d3f8a00d1fce07445fab7347fec54a3e65f0725d730d5b99cb"},
-    {file = "websockets-12.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9893d1aa45a7f8b3bc4510f6ccf8db8c3b62120917af15e3de247f0780294b92"},
-    {file = "websockets-12.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f38a7b376117ef7aff996e737583172bdf535932c9ca021746573bce40165ed"},
-    {file = "websockets-12.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f764ba54e33daf20e167915edc443b6f88956f37fb606449b4a5b10ba42235a5"},
-    {file = "websockets-12.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:1e4b3f8ea6a9cfa8be8484c9221ec0257508e3a1ec43c36acdefb2a9c3b00aa2"},
-    {file = "websockets-12.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9fdf06fd06c32205a07e47328ab49c40fc1407cdec801d698a7c41167ea45113"},
-    {file = "websockets-12.0-cp312-cp312-win32.whl", hash = "sha256:baa386875b70cbd81798fa9f71be689c1bf484f65fd6fb08d051a0ee4e79924d"},
-    {file = "websockets-12.0-cp312-cp312-win_amd64.whl", hash = "sha256:ae0a5da8f35a5be197f328d4727dbcfafa53d1824fac3d96cdd3a642fe09394f"},
-    {file = "websockets-12.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5f6ffe2c6598f7f7207eef9a1228b6f5c818f9f4d53ee920aacd35cec8110438"},
-    {file = "websockets-12.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9edf3fc590cc2ec20dc9d7a45108b5bbaf21c0d89f9fd3fd1685e223771dc0b2"},
-    {file = "websockets-12.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8572132c7be52632201a35f5e08348137f658e5ffd21f51f94572ca6c05ea81d"},
-    {file = "websockets-12.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:604428d1b87edbf02b233e2c207d7d528460fa978f9e391bd8aaf9c8311de137"},
-    {file = "websockets-12.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1a9d160fd080c6285e202327aba140fc9a0d910b09e423afff4ae5cbbf1c7205"},
-    {file = "websockets-12.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87b4aafed34653e465eb77b7c93ef058516cb5acf3eb21e42f33928616172def"},
-    {file = "websockets-12.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b2ee7288b85959797970114deae81ab41b731f19ebcd3bd499ae9ca0e3f1d2c8"},
-    {file = "websockets-12.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7fa3d25e81bfe6a89718e9791128398a50dec6d57faf23770787ff441d851967"},
-    {file = "websockets-12.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a571f035a47212288e3b3519944f6bf4ac7bc7553243e41eac50dd48552b6df7"},
-    {file = "websockets-12.0-cp38-cp38-win32.whl", hash = "sha256:3c6cc1360c10c17463aadd29dd3af332d4a1adaa8796f6b0e9f9df1fdb0bad62"},
-    {file = "websockets-12.0-cp38-cp38-win_amd64.whl", hash = "sha256:1bf386089178ea69d720f8db6199a0504a406209a0fc23e603b27b300fdd6892"},
-    {file = "websockets-12.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ab3d732ad50a4fbd04a4490ef08acd0517b6ae6b77eb967251f4c263011a990d"},
-    {file = "websockets-12.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1d9697f3337a89691e3bd8dc56dea45a6f6d975f92e7d5f773bc715c15dde28"},
-    {file = "websockets-12.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1df2fbd2c8a98d38a66f5238484405b8d1d16f929bb7a33ed73e4801222a6f53"},
-    {file = "websockets-12.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23509452b3bc38e3a057382c2e941d5ac2e01e251acce7adc74011d7d8de434c"},
-    {file = "websockets-12.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e5fc14ec6ea568200ea4ef46545073da81900a2b67b3e666f04adf53ad452ec"},
-    {file = "websockets-12.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46e71dbbd12850224243f5d2aeec90f0aaa0f2dde5aeeb8fc8df21e04d99eff9"},
-    {file = "websockets-12.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b81f90dcc6c85a9b7f29873beb56c94c85d6f0dac2ea8b60d995bd18bf3e2aae"},
-    {file = "websockets-12.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a02413bc474feda2849c59ed2dfb2cddb4cd3d2f03a2fedec51d6e959d9b608b"},
-    {file = "websockets-12.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bbe6013f9f791944ed31ca08b077e26249309639313fff132bfbf3ba105673b9"},
-    {file = "websockets-12.0-cp39-cp39-win32.whl", hash = "sha256:cbe83a6bbdf207ff0541de01e11904827540aa069293696dd528a6640bd6a5f6"},
-    {file = "websockets-12.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc4e7fa5414512b481a2483775a8e8be7803a35b30ca805afa4998a84f9fd9e8"},
-    {file = "websockets-12.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:248d8e2446e13c1d4326e0a6a4e9629cb13a11195051a73acf414812700badbd"},
-    {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f44069528d45a933997a6fef143030d8ca8042f0dfaad753e2906398290e2870"},
-    {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c4e37d36f0d19f0a4413d3e18c0d03d0c268ada2061868c1e6f5ab1a6d575077"},
-    {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d829f975fc2e527a3ef2f9c8f25e553eb7bc779c6665e8e1d52aa22800bb38b"},
-    {file = "websockets-12.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2c71bd45a777433dd9113847af751aae36e448bc6b8c361a566cb043eda6ec30"},
-    {file = "websockets-12.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0bee75f400895aef54157b36ed6d3b308fcab62e5260703add87f44cee9c82a6"},
-    {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:423fc1ed29f7512fceb727e2d2aecb952c46aa34895e9ed96071821309951123"},
-    {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27a5e9964ef509016759f2ef3f2c1e13f403725a5e6a1775555994966a66e931"},
-    {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3181df4583c4d3994d31fb235dc681d2aaad744fbdbf94c4802485ececdecf2"},
-    {file = "websockets-12.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:b067cb952ce8bf40115f6c19f478dc71c5e719b7fbaa511359795dfd9d1a6468"},
-    {file = "websockets-12.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:00700340c6c7ab788f176d118775202aadea7602c5cc6be6ae127761c16d6b0b"},
-    {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e469d01137942849cff40517c97a30a93ae79917752b34029f0ec72df6b46399"},
-    {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffefa1374cd508d633646d51a8e9277763a9b78ae71324183693959cf94635a7"},
-    {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba0cab91b3956dfa9f512147860783a1829a8d905ee218a9837c18f683239611"},
-    {file = "websockets-12.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2cb388a5bfb56df4d9a406783b7f9dbefb888c09b71629351cc6b036e9259370"},
-    {file = "websockets-12.0-py3-none-any.whl", hash = "sha256:dc284bbc8d7c78a6c69e0c7325ab46ee5e40bb4d50e494d8131a07ef47500e9e"},
-    {file = "websockets-12.0.tar.gz", hash = "sha256:81df9cbcbb6c260de1e007e58c011bfebe2dafc8435107b0537f393dd38c8b1b"},
-]
-
-[[package]]
-name = "wikipedia"
-version = "1.4.0"
-description = "Wikipedia API for Python"
-optional = false
-python-versions = "*"
-files = [
-    {file = "wikipedia-1.4.0.tar.gz", hash = "sha256:db0fad1829fdd441b1852306e9856398204dc0786d2996dd2e0c8bb8e26133b2"},
-]
-
-[package.dependencies]
-beautifulsoup4 = "*"
-requests = ">=2.0.0,<3.0.0"
-
-[[package]]
-name = "win32-setctime"
-version = "1.1.0"
-description = "A small Python utility to set file creation time on Windows"
-optional = false
-python-versions = ">=3.5"
-files = [
-    {file = "win32_setctime-1.1.0-py3-none-any.whl", hash = "sha256:231db239e959c2fe7eb1d7dc129f11172354f98361c4fa2d6d2d7e278baa8aad"},
-    {file = "win32_setctime-1.1.0.tar.gz", hash = "sha256:15cf5750465118d6929ae4de4eb46e8edae9a5634350c01ba582df868e932cb2"},
-]
-
-[package.extras]
-dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"]
-
-[[package]]
-name = "wolframalpha"
-version = "5.1.3"
-description = "Wolfram|Alpha 2.0 API client"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "wolframalpha-5.1.3-py3-none-any.whl", hash = "sha256:549b44e64595c5845be4c94f2b306a84832157bef422b20937cbca44b48ee117"},
-    {file = "wolframalpha-5.1.3.tar.gz", hash = "sha256:56226efeca0f55acec5e17dd2f6537a178d0bf4feec4df0615165e2968bb49b8"},
-]
-
-[package.dependencies]
-httpx = "*"
-"jaraco.context" = "*"
-more-itertools = "*"
-multidict = "*"
-xmltodict = "*"
-
-[package.extras]
-doc = ["furo", "jaraco.packaging (>=9.3)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
-test = ["keyring", "pmxbot", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)"]
-
-[[package]]
-name = "wrapt"
-version = "1.16.0"
-description = "Module for decorators, wrappers and monkey patching."
-optional = false
-python-versions = ">=3.6"
-files = [
-    {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"},
-    {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"},
-    {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"},
-    {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"},
-    {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"},
-    {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"},
-    {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"},
-    {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"},
-    {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"},
-    {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"},
-    {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"},
-    {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"},
-    {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"},
-    {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"},
-    {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"},
-    {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"},
-    {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"},
-    {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"},
-    {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"},
-    {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"},
-    {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"},
-    {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"},
-    {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"},
-    {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"},
-    {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"},
-    {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"},
-    {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"},
-    {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"},
-    {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"},
-    {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"},
-    {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"},
-    {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"},
-    {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"},
-    {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"},
-    {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"},
-    {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"},
-    {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"},
-    {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"},
-    {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"},
-    {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"},
-    {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"},
-    {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"},
-    {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"},
-    {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"},
-    {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"},
-    {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"},
-    {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"},
-    {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"},
-    {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"},
-    {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"},
-    {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"},
-    {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"},
-    {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"},
-    {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"},
-    {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"},
-    {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"},
-    {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"},
-    {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"},
-    {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"},
-    {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"},
-    {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"},
-    {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"},
-    {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"},
-    {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"},
-    {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"},
-    {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"},
-    {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"},
-    {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"},
-    {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"},
-    {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"},
-]
-
-[[package]]
-name = "wsproto"
-version = "1.2.0"
-description = "WebSockets state-machine based protocol implementation"
-optional = false
-python-versions = ">=3.7.0"
-files = [
-    {file = "wsproto-1.2.0-py3-none-any.whl", hash = "sha256:b9acddd652b585d75b20477888c56642fdade28bdfd3579aa24a4d2c037dd736"},
-    {file = "wsproto-1.2.0.tar.gz", hash = "sha256:ad565f26ecb92588a3e43bc3d96164de84cd9902482b130d0ddbaa9664a85065"},
-]
-
-[package.dependencies]
-h11 = ">=0.9.0,<1"
-
-[[package]]
-name = "xlsxwriter"
-version = "3.2.0"
-description = "A Python module for creating Excel XLSX files."
-optional = false
-python-versions = ">=3.6"
-files = [
-    {file = "XlsxWriter-3.2.0-py3-none-any.whl", hash = "sha256:ecfd5405b3e0e228219bcaf24c2ca0915e012ca9464a14048021d21a995d490e"},
-    {file = "XlsxWriter-3.2.0.tar.gz", hash = "sha256:9977d0c661a72866a61f9f7a809e25ebbb0fb7036baa3b9fe74afcfca6b3cb8c"},
-]
-
-[[package]]
-name = "xmltodict"
-version = "0.13.0"
-description = "Makes working with XML feel like you are working with JSON"
-optional = false
-python-versions = ">=3.4"
-files = [
-    {file = "xmltodict-0.13.0-py2.py3-none-any.whl", hash = "sha256:aa89e8fd76320154a40d19a0df04a4695fb9dc5ba977cbb68ab3e4eb225e7852"},
-    {file = "xmltodict-0.13.0.tar.gz", hash = "sha256:341595a488e3e01a85a9d8911d8912fd922ede5fecc4dce437eb4b6c8d037e56"},
-]
-
-[[package]]
-name = "xxhash"
-version = "3.4.1"
-description = "Python binding for xxHash"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "xxhash-3.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:91dbfa55346ad3e18e738742236554531a621042e419b70ad8f3c1d9c7a16e7f"},
-    {file = "xxhash-3.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:665a65c2a48a72068fcc4d21721510df5f51f1142541c890491afc80451636d2"},
-    {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb11628470a6004dc71a09fe90c2f459ff03d611376c1debeec2d648f44cb693"},
-    {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5bef2a7dc7b4f4beb45a1edbba9b9194c60a43a89598a87f1a0226d183764189"},
-    {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c0f7b2d547d72c7eda7aa817acf8791f0146b12b9eba1d4432c531fb0352228"},
-    {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00f2fdef6b41c9db3d2fc0e7f94cb3db86693e5c45d6de09625caad9a469635b"},
-    {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:23cfd9ca09acaf07a43e5a695143d9a21bf00f5b49b15c07d5388cadf1f9ce11"},
-    {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6a9ff50a3cf88355ca4731682c168049af1ca222d1d2925ef7119c1a78e95b3b"},
-    {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:f1d7c69a1e9ca5faa75546fdd267f214f63f52f12692f9b3a2f6467c9e67d5e7"},
-    {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:672b273040d5d5a6864a36287f3514efcd1d4b1b6a7480f294c4b1d1ee1b8de0"},
-    {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4178f78d70e88f1c4a89ff1ffe9f43147185930bb962ee3979dba15f2b1cc799"},
-    {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9804b9eb254d4b8cc83ab5a2002128f7d631dd427aa873c8727dba7f1f0d1c2b"},
-    {file = "xxhash-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c09c49473212d9c87261d22c74370457cfff5db2ddfc7fd1e35c80c31a8c14ce"},
-    {file = "xxhash-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:ebbb1616435b4a194ce3466d7247df23499475c7ed4eb2681a1fa42ff766aff6"},
-    {file = "xxhash-3.4.1-cp310-cp310-win_arm64.whl", hash = "sha256:25dc66be3db54f8a2d136f695b00cfe88018e59ccff0f3b8f545869f376a8a46"},
-    {file = "xxhash-3.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:58c49083801885273e262c0f5bbeac23e520564b8357fbb18fb94ff09d3d3ea5"},
-    {file = "xxhash-3.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b526015a973bfbe81e804a586b703f163861da36d186627e27524f5427b0d520"},
-    {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36ad4457644c91a966f6fe137d7467636bdc51a6ce10a1d04f365c70d6a16d7e"},
-    {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:248d3e83d119770f96003271fe41e049dd4ae52da2feb8f832b7a20e791d2920"},
-    {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2070b6d5bbef5ee031666cf21d4953c16e92c2f8a24a94b5c240f8995ba3b1d0"},
-    {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2746035f518f0410915e247877f7df43ef3372bf36cfa52cc4bc33e85242641"},
-    {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a8ba6181514681c2591840d5632fcf7356ab287d4aff1c8dea20f3c78097088"},
-    {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0aac5010869240e95f740de43cd6a05eae180c59edd182ad93bf12ee289484fa"},
-    {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4cb11d8debab1626181633d184b2372aaa09825bde709bf927704ed72765bed1"},
-    {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:b29728cff2c12f3d9f1d940528ee83918d803c0567866e062683f300d1d2eff3"},
-    {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:a15cbf3a9c40672523bdb6ea97ff74b443406ba0ab9bca10ceccd9546414bd84"},
-    {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6e66df260fed01ed8ea790c2913271641c58481e807790d9fca8bfd5a3c13844"},
-    {file = "xxhash-3.4.1-cp311-cp311-win32.whl", hash = "sha256:e867f68a8f381ea12858e6d67378c05359d3a53a888913b5f7d35fbf68939d5f"},
-    {file = "xxhash-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:200a5a3ad9c7c0c02ed1484a1d838b63edcf92ff538770ea07456a3732c577f4"},
-    {file = "xxhash-3.4.1-cp311-cp311-win_arm64.whl", hash = "sha256:1d03f1c0d16d24ea032e99f61c552cb2b77d502e545187338bea461fde253583"},
-    {file = "xxhash-3.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c4bbba9b182697a52bc0c9f8ec0ba1acb914b4937cd4a877ad78a3b3eeabefb3"},
-    {file = "xxhash-3.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9fd28a9da300e64e434cfc96567a8387d9a96e824a9be1452a1e7248b7763b78"},
-    {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6066d88c9329ab230e18998daec53d819daeee99d003955c8db6fc4971b45ca3"},
-    {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:93805bc3233ad89abf51772f2ed3355097a5dc74e6080de19706fc447da99cd3"},
-    {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64da57d5ed586ebb2ecdde1e997fa37c27fe32fe61a656b77fabbc58e6fbff6e"},
-    {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a97322e9a7440bf3c9805cbaac090358b43f650516486746f7fa482672593df"},
-    {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbe750d512982ee7d831838a5dee9e9848f3fb440e4734cca3f298228cc957a6"},
-    {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:fd79d4087727daf4d5b8afe594b37d611ab95dc8e29fe1a7517320794837eb7d"},
-    {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:743612da4071ff9aa4d055f3f111ae5247342931dedb955268954ef7201a71ff"},
-    {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:b41edaf05734092f24f48c0958b3c6cbaaa5b7e024880692078c6b1f8247e2fc"},
-    {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:a90356ead70d715fe64c30cd0969072de1860e56b78adf7c69d954b43e29d9fa"},
-    {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ac56eebb364e44c85e1d9e9cc5f6031d78a34f0092fea7fc80478139369a8b4a"},
-    {file = "xxhash-3.4.1-cp312-cp312-win32.whl", hash = "sha256:911035345932a153c427107397c1518f8ce456f93c618dd1c5b54ebb22e73747"},
-    {file = "xxhash-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:f31ce76489f8601cc7b8713201ce94b4bd7b7ce90ba3353dccce7e9e1fee71fa"},
-    {file = "xxhash-3.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:b5beb1c6a72fdc7584102f42c4d9df232ee018ddf806e8c90906547dfb43b2da"},
-    {file = "xxhash-3.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6d42b24d1496deb05dee5a24ed510b16de1d6c866c626c2beb11aebf3be278b9"},
-    {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b685fab18876b14a8f94813fa2ca80cfb5ab6a85d31d5539b7cd749ce9e3624"},
-    {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:419ffe34c17ae2df019a4685e8d3934d46b2e0bbe46221ab40b7e04ed9f11137"},
-    {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0e041ce5714f95251a88670c114b748bca3bf80cc72400e9f23e6d0d59cf2681"},
-    {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc860d887c5cb2f524899fb8338e1bb3d5789f75fac179101920d9afddef284b"},
-    {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:312eba88ffe0a05e332e3a6f9788b73883752be63f8588a6dc1261a3eaaaf2b2"},
-    {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:e01226b6b6a1ffe4e6bd6d08cfcb3ca708b16f02eb06dd44f3c6e53285f03e4f"},
-    {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9f3025a0d5d8cf406a9313cd0d5789c77433ba2004b1c75439b67678e5136537"},
-    {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:6d3472fd4afef2a567d5f14411d94060099901cd8ce9788b22b8c6f13c606a93"},
-    {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:43984c0a92f06cac434ad181f329a1445017c33807b7ae4f033878d860a4b0f2"},
-    {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a55e0506fdb09640a82ec4f44171273eeabf6f371a4ec605633adb2837b5d9d5"},
-    {file = "xxhash-3.4.1-cp37-cp37m-win32.whl", hash = "sha256:faec30437919555b039a8bdbaba49c013043e8f76c999670aef146d33e05b3a0"},
-    {file = "xxhash-3.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:c9e1b646af61f1fc7083bb7b40536be944f1ac67ef5e360bca2d73430186971a"},
-    {file = "xxhash-3.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:961d948b7b1c1b6c08484bbce3d489cdf153e4122c3dfb07c2039621243d8795"},
-    {file = "xxhash-3.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:719a378930504ab159f7b8e20fa2aa1896cde050011af838af7e7e3518dd82de"},
-    {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74fb5cb9406ccd7c4dd917f16630d2e5e8cbbb02fc2fca4e559b2a47a64f4940"},
-    {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5dab508ac39e0ab988039bc7f962c6ad021acd81fd29145962b068df4148c476"},
-    {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8c59f3e46e7daf4c589e8e853d700ef6607afa037bfad32c390175da28127e8c"},
-    {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cc07256eff0795e0f642df74ad096f8c5d23fe66bc138b83970b50fc7f7f6c5"},
-    {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e9f749999ed80f3955a4af0eb18bb43993f04939350b07b8dd2f44edc98ffee9"},
-    {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7688d7c02149a90a3d46d55b341ab7ad1b4a3f767be2357e211b4e893efbaaf6"},
-    {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a8b4977963926f60b0d4f830941c864bed16aa151206c01ad5c531636da5708e"},
-    {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:8106d88da330f6535a58a8195aa463ef5281a9aa23b04af1848ff715c4398fb4"},
-    {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4c76a77dbd169450b61c06fd2d5d436189fc8ab7c1571d39265d4822da16df22"},
-    {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:11f11357c86d83e53719c592021fd524efa9cf024dc7cb1dfb57bbbd0d8713f2"},
-    {file = "xxhash-3.4.1-cp38-cp38-win32.whl", hash = "sha256:0c786a6cd74e8765c6809892a0d45886e7c3dc54de4985b4a5eb8b630f3b8e3b"},
-    {file = "xxhash-3.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:aabf37fb8fa27430d50507deeab2ee7b1bcce89910dd10657c38e71fee835594"},
-    {file = "xxhash-3.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6127813abc1477f3a83529b6bbcfeddc23162cece76fa69aee8f6a8a97720562"},
-    {file = "xxhash-3.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef2e194262f5db16075caea7b3f7f49392242c688412f386d3c7b07c7733a70a"},
-    {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71be94265b6c6590f0018bbf73759d21a41c6bda20409782d8117e76cd0dfa8b"},
-    {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10e0a619cdd1c0980e25eb04e30fe96cf8f4324758fa497080af9c21a6de573f"},
-    {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fa122124d2e3bd36581dd78c0efa5f429f5220313479fb1072858188bc2d5ff1"},
-    {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17032f5a4fea0a074717fe33477cb5ee723a5f428de7563e75af64bfc1b1e10"},
-    {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca7783b20e3e4f3f52f093538895863f21d18598f9a48211ad757680c3bd006f"},
-    {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d77d09a1113899fad5f354a1eb4f0a9afcf58cefff51082c8ad643ff890e30cf"},
-    {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:21287bcdd299fdc3328cc0fbbdeaa46838a1c05391264e51ddb38a3f5b09611f"},
-    {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:dfd7a6cc483e20b4ad90224aeb589e64ec0f31e5610ab9957ff4314270b2bf31"},
-    {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:543c7fcbc02bbb4840ea9915134e14dc3dc15cbd5a30873a7a5bf66039db97ec"},
-    {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:fe0a98d990e433013f41827b62be9ab43e3cf18e08b1483fcc343bda0d691182"},
-    {file = "xxhash-3.4.1-cp39-cp39-win32.whl", hash = "sha256:b9097af00ebf429cc7c0e7d2fdf28384e4e2e91008130ccda8d5ae653db71e54"},
-    {file = "xxhash-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:d699b921af0dcde50ab18be76c0d832f803034d80470703700cb7df0fbec2832"},
-    {file = "xxhash-3.4.1-cp39-cp39-win_arm64.whl", hash = "sha256:2be491723405e15cc099ade1280133ccfbf6322d2ef568494fb7d07d280e7eee"},
-    {file = "xxhash-3.4.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:431625fad7ab5649368c4849d2b49a83dc711b1f20e1f7f04955aab86cd307bc"},
-    {file = "xxhash-3.4.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc6dbd5fc3c9886a9e041848508b7fb65fd82f94cc793253990f81617b61fe49"},
-    {file = "xxhash-3.4.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3ff8dbd0ec97aec842476cb8ccc3e17dd288cd6ce3c8ef38bff83d6eb927817"},
-    {file = "xxhash-3.4.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef73a53fe90558a4096e3256752268a8bdc0322f4692ed928b6cd7ce06ad4fe3"},
-    {file = "xxhash-3.4.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:450401f42bbd274b519d3d8dcf3c57166913381a3d2664d6609004685039f9d3"},
-    {file = "xxhash-3.4.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a162840cf4de8a7cd8720ff3b4417fbc10001eefdd2d21541a8226bb5556e3bb"},
-    {file = "xxhash-3.4.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b736a2a2728ba45017cb67785e03125a79d246462dfa892d023b827007412c52"},
-    {file = "xxhash-3.4.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d0ae4c2e7698adef58710d6e7a32ff518b66b98854b1c68e70eee504ad061d8"},
-    {file = "xxhash-3.4.1-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6322c4291c3ff174dcd104fae41500e75dad12be6f3085d119c2c8a80956c51"},
-    {file = "xxhash-3.4.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:dd59ed668801c3fae282f8f4edadf6dc7784db6d18139b584b6d9677ddde1b6b"},
-    {file = "xxhash-3.4.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:92693c487e39523a80474b0394645b393f0ae781d8db3474ccdcead0559ccf45"},
-    {file = "xxhash-3.4.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4603a0f642a1e8d7f3ba5c4c25509aca6a9c1cc16f85091004a7028607ead663"},
-    {file = "xxhash-3.4.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fa45e8cbfbadb40a920fe9ca40c34b393e0b067082d94006f7f64e70c7490a6"},
-    {file = "xxhash-3.4.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:595b252943b3552de491ff51e5bb79660f84f033977f88f6ca1605846637b7c6"},
-    {file = "xxhash-3.4.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:562d8b8f783c6af969806aaacf95b6c7b776929ae26c0cd941d54644ea7ef51e"},
-    {file = "xxhash-3.4.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:41ddeae47cf2828335d8d991f2d2b03b0bdc89289dc64349d712ff8ce59d0647"},
-    {file = "xxhash-3.4.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c44d584afdf3c4dbb3277e32321d1a7b01d6071c1992524b6543025fb8f4206f"},
-    {file = "xxhash-3.4.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd7bddb3a5b86213cc3f2c61500c16945a1b80ecd572f3078ddbbe68f9dabdfb"},
-    {file = "xxhash-3.4.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9ecb6c987b62437c2f99c01e97caf8d25660bf541fe79a481d05732e5236719c"},
-    {file = "xxhash-3.4.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:696b4e18b7023527d5c50ed0626ac0520edac45a50ec7cf3fc265cd08b1f4c03"},
-    {file = "xxhash-3.4.1.tar.gz", hash = "sha256:0379d6cf1ff987cd421609a264ce025e74f346e3e145dd106c0cc2e3ec3f99a9"},
-]
-
-[[package]]
-name = "yarl"
-version = "1.9.4"
-description = "Yet another URL library"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"},
-    {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2"},
-    {file = "yarl-1.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66"},
-    {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234"},
-    {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392"},
-    {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551"},
-    {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455"},
-    {file = "yarl-1.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c"},
-    {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53"},
-    {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385"},
-    {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863"},
-    {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b"},
-    {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541"},
-    {file = "yarl-1.9.4-cp310-cp310-win32.whl", hash = "sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d"},
-    {file = "yarl-1.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b"},
-    {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099"},
-    {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c"},
-    {file = "yarl-1.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0"},
-    {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525"},
-    {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8"},
-    {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9"},
-    {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42"},
-    {file = "yarl-1.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe"},
-    {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce"},
-    {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9"},
-    {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572"},
-    {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958"},
-    {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98"},
-    {file = "yarl-1.9.4-cp311-cp311-win32.whl", hash = "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31"},
-    {file = "yarl-1.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1"},
-    {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81"},
-    {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142"},
-    {file = "yarl-1.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074"},
-    {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129"},
-    {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2"},
-    {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78"},
-    {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4"},
-    {file = "yarl-1.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0"},
-    {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51"},
-    {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff"},
-    {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7"},
-    {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc"},
-    {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10"},
-    {file = "yarl-1.9.4-cp312-cp312-win32.whl", hash = "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7"},
-    {file = "yarl-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984"},
-    {file = "yarl-1.9.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63b20738b5aac74e239622d2fe30df4fca4942a86e31bf47a81a0e94c14df94f"},
-    {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d7f7de27b8944f1fee2c26a88b4dabc2409d2fea7a9ed3df79b67277644e17"},
-    {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c74018551e31269d56fab81a728f683667e7c28c04e807ba08f8c9e3bba32f14"},
-    {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca06675212f94e7a610e85ca36948bb8fc023e458dd6c63ef71abfd482481aa5"},
-    {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aef935237d60a51a62b86249839b51345f47564208c6ee615ed2a40878dccdd"},
-    {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b134fd795e2322b7684155b7855cc99409d10b2e408056db2b93b51a52accc7"},
-    {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d25039a474c4c72a5ad4b52495056f843a7ff07b632c1b92ea9043a3d9950f6e"},
-    {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f7d6b36dd2e029b6bcb8a13cf19664c7b8e19ab3a58e0fefbb5b8461447ed5ec"},
-    {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:957b4774373cf6f709359e5c8c4a0af9f6d7875db657adb0feaf8d6cb3c3964c"},
-    {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d7eeb6d22331e2fd42fce928a81c697c9ee2d51400bd1a28803965883e13cead"},
-    {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6a962e04b8f91f8c4e5917e518d17958e3bdee71fd1d8b88cdce74dd0ebbf434"},
-    {file = "yarl-1.9.4-cp37-cp37m-win32.whl", hash = "sha256:f3bc6af6e2b8f92eced34ef6a96ffb248e863af20ef4fde9448cc8c9b858b749"},
-    {file = "yarl-1.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4d7a90a92e528aadf4965d685c17dacff3df282db1121136c382dc0b6014d2"},
-    {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ec61d826d80fc293ed46c9dd26995921e3a82146feacd952ef0757236fc137be"},
-    {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8be9e837ea9113676e5754b43b940b50cce76d9ed7d2461df1af39a8ee674d9f"},
-    {file = "yarl-1.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bef596fdaa8f26e3d66af846bbe77057237cb6e8efff8cd7cc8dff9a62278bbf"},
-    {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d47552b6e52c3319fede1b60b3de120fe83bde9b7bddad11a69fb0af7db32f1"},
-    {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84fc30f71689d7fc9168b92788abc977dc8cefa806909565fc2951d02f6b7d57"},
-    {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4aa9741085f635934f3a2583e16fcf62ba835719a8b2b28fb2917bb0537c1dfa"},
-    {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:206a55215e6d05dbc6c98ce598a59e6fbd0c493e2de4ea6cc2f4934d5a18d130"},
-    {file = "yarl-1.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07574b007ee20e5c375a8fe4a0789fad26db905f9813be0f9fef5a68080de559"},
-    {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5a2e2433eb9344a163aced6a5f6c9222c0786e5a9e9cac2c89f0b28433f56e23"},
-    {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6ad6d10ed9b67a382b45f29ea028f92d25bc0bc1daf6c5b801b90b5aa70fb9ec"},
-    {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6fe79f998a4052d79e1c30eeb7d6c1c1056ad33300f682465e1b4e9b5a188b78"},
-    {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a825ec844298c791fd28ed14ed1bffc56a98d15b8c58a20e0e08c1f5f2bea1be"},
-    {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8619d6915b3b0b34420cf9b2bb6d81ef59d984cb0fde7544e9ece32b4b3043c3"},
-    {file = "yarl-1.9.4-cp38-cp38-win32.whl", hash = "sha256:686a0c2f85f83463272ddffd4deb5e591c98aac1897d65e92319f729c320eece"},
-    {file = "yarl-1.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:a00862fb23195b6b8322f7d781b0dc1d82cb3bcac346d1e38689370cc1cc398b"},
-    {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:604f31d97fa493083ea21bd9b92c419012531c4e17ea6da0f65cacdcf5d0bd27"},
-    {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8a854227cf581330ffa2c4824d96e52ee621dd571078a252c25e3a3b3d94a1b1"},
-    {file = "yarl-1.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ba6f52cbc7809cd8d74604cce9c14868306ae4aa0282016b641c661f981a6e91"},
-    {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6327976c7c2f4ee6816eff196e25385ccc02cb81427952414a64811037bbc8b"},
-    {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8397a3817d7dcdd14bb266283cd1d6fc7264a48c186b986f32e86d86d35fbac5"},
-    {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0381b4ce23ff92f8170080c97678040fc5b08da85e9e292292aba67fdac6c34"},
-    {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23d32a2594cb5d565d358a92e151315d1b2268bc10f4610d098f96b147370136"},
-    {file = "yarl-1.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ddb2a5c08a4eaaba605340fdee8fc08e406c56617566d9643ad8bf6852778fc7"},
-    {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:26a1dc6285e03f3cc9e839a2da83bcbf31dcb0d004c72d0730e755b33466c30e"},
-    {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:18580f672e44ce1238b82f7fb87d727c4a131f3a9d33a5e0e82b793362bf18b4"},
-    {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:29e0f83f37610f173eb7e7b5562dd71467993495e568e708d99e9d1944f561ec"},
-    {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:1f23e4fe1e8794f74b6027d7cf19dc25f8b63af1483d91d595d4a07eca1fb26c"},
-    {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db8e58b9d79200c76956cefd14d5c90af54416ff5353c5bfd7cbe58818e26ef0"},
-    {file = "yarl-1.9.4-cp39-cp39-win32.whl", hash = "sha256:c7224cab95645c7ab53791022ae77a4509472613e839dab722a72abe5a684575"},
-    {file = "yarl-1.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:824d6c50492add5da9374875ce72db7a0733b29c2394890aef23d533106e2b15"},
-    {file = "yarl-1.9.4-py3-none-any.whl", hash = "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad"},
-    {file = "yarl-1.9.4.tar.gz", hash = "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf"},
-]
-
-[package.dependencies]
-idna = ">=2.0"
-multidict = ">=4.0"
-
-[[package]]
-name = "yfinance"
-version = "0.2.41"
-description = "Download market data from Yahoo!
Finance API" -optional = false -python-versions = "*" -files = [ - {file = "yfinance-0.2.41-py2.py3-none-any.whl", hash = "sha256:2ed7b453cb8568773eb2dbb4d87cc37ff02e5d133f7723ec3e219ab0b86b56d8"}, - {file = "yfinance-0.2.41.tar.gz", hash = "sha256:f94409a1ed4d596b9da8d2dbb498faaabfcf593d5870e1412e17669a212bb345"}, -] - -[package.dependencies] -beautifulsoup4 = ">=4.11.1" -frozendict = ">=2.3.4" -html5lib = ">=1.1" -lxml = ">=4.9.1" -multitasking = ">=0.0.7" -numpy = ">=1.16.5" -pandas = ">=1.3.0" -peewee = ">=3.16.2" -platformdirs = ">=2.0.0" -pytz = ">=2022.5" -requests = ">=2.31" - -[package.extras] -nospam = ["requests-cache (>=1.0)", "requests-ratelimiter (>=0.3.1)"] -repair = ["scipy (>=1.6.3)"] - -[[package]] -name = "youtube-transcript-api" -version = "0.6.2" -description = "This is an python API which allows you to get the transcripts/subtitles for a given YouTube video. It also works for automatically generated subtitles, supports translating subtitles and it does not require a headless browser, like other selenium based solutions do!" -optional = false -python-versions = "*" -files = [ - {file = "youtube_transcript_api-0.6.2-py3-none-any.whl", hash = "sha256:019dbf265c6a68a0591c513fff25ed5a116ce6525832aefdfb34d4df5567121c"}, - {file = "youtube_transcript_api-0.6.2.tar.gz", hash = "sha256:cad223d7620633cec44f657646bffc8bbc5598bd8e70b1ad2fa8277dec305eb7"}, -] - -[package.dependencies] -requests = "*" - -[[package]] -name = "zep-python" -version = "2.0.0rc6" -description = "Long-Term Memory for AI Assistants. This is the Python client for the Zep service." -optional = false -python-versions = "<4,>=3.9.0" -files = [ - {file = "zep_python-2.0.0rc6-py3-none-any.whl", hash = "sha256:2719e06897957facd4e5edbf89706c01d3c68b0c0543bbe24da56a57bff99d68"}, - {file = "zep_python-2.0.0rc6.tar.gz", hash = "sha256:27a6632068c5ae6bac30d0626d49f8f5e5bc2a3922557ce11bf8df006ca071f4"}, -] - -[package.dependencies] -httpx = ">=0.24.0,<0.29.0" -pydantic = ">=2.0.0" - -[[package]] -name = "zipp" -version = "3.20.0" -description = "Backport of pathlib-compatible object wrapper for zip files" -optional = false -python-versions = ">=3.8" -files = [ - {file = "zipp-3.20.0-py3-none-any.whl", hash = "sha256:58da6168be89f0be59beb194da1250516fdaa062ccebd30127ac65d30045e10d"}, - {file = "zipp-3.20.0.tar.gz", hash = "sha256:0145e43d89664cfe1a2e533adc75adafed82fe2da404b4bbb6b026c0157bdb31"}, -] - -[package.extras] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] - -[extras] -cassio = ["cassio"] -couchbase = ["couchbase"] -deploy = ["celery", "flower", "redis"] -local = ["ctransformers", "llama-cpp-python", "sentence-transformers"] - -[metadata] -lock-version = "2.0" -python-versions = ">=3.10,<3.13" -content-hash = "3bdfc3e3b86f7e417c34972e5e2251d079602df87650bdc6d6b56d846dbc8a48" diff --git a/pyproject.toml b/pyproject.toml index 00ead0ee0389..a5b5b548ad6f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,154 +1,188 @@ -[tool.poetry] + + +[tool.uv.sources] +langflow-base = { workspace = true } +langflow = { workspace = true } + +[tool.uv.workspace] +members = ["src/backend/base", "."] + +[tool.hatch.build.targets.wheel] +packages = ["src/backend/langflow"] + +[project] 
name = "langflow" -version = "1.0.17" +version = "1.1.0" description = "A Python package with a built-in web application" -authors = ["Langflow "] -maintainers = [ - "Carlos Coelho ", - "Cristhian Zanforlin ", - "Gabriel Almeida ", - "Igor Carvalho ", - "Lucas Eduoli ", - "Otávio Anovazzi ", - "Rodrigo Nader ", - "Italo dos Anjos ", -] -repository = "https://github.com/langflow-ai/langflow" +requires-python = ">=3.10,<3.13" license = "MIT" -readme = "README.md" keywords = ["nlp", "langchain", "openai", "gpt", "gui"] -packages = [{ include = "langflow", from = "src/backend" }] -include = ["src/backend/langflow/*", "src/backend/langflow/**/*"] -documentation = "https://docs.langflow.org" +readme = "README.md" +maintainers = [ + { name = "Carlos Coelho", email = "carlos@langflow.org" }, + { name = "Cristhian Zanforlin", email = "cristhian.lousa@gmail.com" }, + { name = "Gabriel Almeida", email = "gabriel@langflow.org" }, + { name = "Igor Carvalho", email = "igorr.ackerman@gmail.com" }, + { name = "Lucas Eduoli", email = "lucaseduoli@gmail.com" }, + { name = "Otávio Anovazzi", email = "otavio2204@gmail.com" }, + { name = "Rodrigo Nader", email = "rodrigo@langflow.org" }, + { name = "Italo dos Anjos", email = "italojohnnydosanjos@gmail.com" }, +] -[tool.poetry.scripts] +# Define your main dependencies here +dependencies = [ + "langflow-base==0.1.0", + "beautifulsoup4>=4.12.2", + "google-search-results>=2.4.1", + "google-api-python-client>=2.130.0", + "huggingface-hub[inference]>=0.23.2", + "networkx>=3.1", + "fake-useragent>=1.5.0", + "pyarrow>=14.0.0", + "wikipedia>=1.4.0", + "qdrant-client~=1.9.2", + "weaviate-client>=4.8", + "cohere>=5.5.3", + "faiss-cpu>=1.8.0", + "types-cachetools>=5.3.0.5", + "pymongo>=4.6.0", + "supabase~=2.6.0", + "certifi>=2023.11.17,<2025.0.0", + "fastavro>=1.8.0", + "redis>=5.0.1", + "metaphor-python>=0.1.11", + 'pywin32>=306; sys_platform == "win32"', + "langfuse~=2.53.1", + "metal-sdk>=2.5.0", + "markupsafe>=2.1.3", + "boto3~=1.34.162", + "numexpr>=2.8.6", + "qianfan==0.3.5", + "pgvector>=0.2.3", + "pyautogen>=0.2.0", + "langchain~=0.3.3", + "elasticsearch>=8.12.0", + "pytube>=15.0.0", + "dspy-ai>=2.4.0", + "assemblyai>=0.33.0", + "litellm>=1.44.0", + "chromadb>=0.4", + "zep-python>=2.0.0", + "langchain-groq>=0.1.9", + "langchain-pinecone>=0.1.3", + "youtube-transcript-api>=0.6.2", + "markdown>=3.7", + "upstash-vector>=0.5.0", + "gitpython>=3.1.43", + "kubernetes>=30.1.0", + "firecrawl-py==1.5.0", + "json-repair>=0.25.2", + "langchain-google-calendar-tools>=0.0.1", + "langchain-milvus>=0.1.1", + "langwatch==0.1.16", + "langsmith~=0.1.136", + "yfinance>=0.2.40", + "wolframalpha>=5.1.3", + "astra-assistants[tools]~=2.2.6", + "composio-langchain==0.5.9", + "spider-client>=0.0.27", + "nltk>=3.9.1", + "lark>=1.2.2", + "jq>=1.8.0", + "pydantic-settings==2.4.0", + "ragstack-ai-knowledge-store>=0.2.1", + "duckduckgo-search>=6.3.4", + "opensearch-py>=2.7.1", + "langchain-google-genai>=1.0.8", + "langchain-cohere>=0.1.5", + "langchain-anthropic>=0.1.23", + "langchain-astradb~=0.5.2", + "langchain-openai~=0.2.2", + "langchain-google-vertexai~=2.0.5", + "langchain-groq>=0.1.9", + "langchain-pinecone~=0.2.0", + "langchain-mistralai~=0.2.0", + "langchain-chroma>=0.1.1", + "langchain-aws>=0.1.16", + "langchain-unstructured~=0.1.5", + "langchain-mongodb>=0.1.6", + "langchain-nvidia-ai-endpoints~=0.3.0", + "langchain-google-calendar-tools>=0.0.1", + "langchain-milvus>=0.1.1", + "langchain-google-community~=2.0.1", + "langchain-elasticsearch>=0.2.0", + "opensearch-py>=2.7.1", + 
"langchain-ollama>=0.2.0", + "sqlalchemy[aiosqlite,postgresql_psycopg2binary,postgresql_psycopgbinary]>=2.0.36", + "atlassian-python-api>=3.41.16", + "mem0ai>=0.1.26", +] + +[project.urls] +Repository = "https://github.com/langflow-ai/langflow" +Documentation = "https://docs.langflow.org" + +[project.optional-dependencies] +deploy = [ + "celery[redis]>=5.3.6", + "flower>=2.0.0" +] +couchbase = [ + "couchbase>=4.2.1" +] +cassio = [ + "cassio>=0.1.7" +] +local = [ + "llama-cpp-python~=0.2.0", + "sentence-transformers>=2.3.1", + "ctransformers>=0.2.10" +] +clickhouse-connect = [ + "clickhouse-connect==0.7.19" +] + +[project.scripts] langflow = "langflow.__main__:main" +[tool.uv] +dev-dependencies = [ + "pytest-instafail>=0.5.0", + "types-redis>=4.6.0.5", + "ipykernel>=6.29.0", + "mypy>=1.11.0", + "ruff>=0.6.2,<0.7.0", + "httpx>=0.27.0", + "pytest>=8.2.0", + "types-requests>=2.32.0", + "requests>=2.32.0", + "pytest-cov>=5.0.0", + "pandas-stubs>=2.1.4.231227", + "types-pillow>=10.2.0.20240213", + "types-pyyaml>=6.0.12.8", + "types-python-jose>=3.3.4.8", + "types-passlib>=1.7.7.13", + "pytest-mock>=3.14.0", + "pytest-xdist>=3.6.0", + "types-pywin32>=306.0.0.4", + "types-google-cloud-ndb>=2.2.0.0", + "pytest-sugar>=1.0.0", + "respx>=0.21.1", + "pytest-asyncio>=0.23.0", + "pytest-profiling>=1.7.0", + "pre-commit>=3.7.0", + "vulture>=2.11", + "dictdiffer>=0.9.0", + "pytest-split>=0.9.0", + "pytest-flakefinder>=1.1.0", + "types-markdown>=3.7.0.20240822", + "packaging>=24.1,<25.0", + "asgi-lifespan>=2.1.0", + "pytest-github-actions-annotate-failures>=0.2.0", + "pytest-codspeed>=3.0.0", + "forbiddenfruit>=0.1.4", +] -[tool.poetry.dependencies] -python = ">=3.10,<3.13" -langflow-base = { path = "./src/backend/base", develop = true } -beautifulsoup4 = "^4.12.2" -google-search-results = "^2.4.1" -google-api-python-client = "^2.130.0" -huggingface-hub = { version = "^0.22.0", extras = ["inference"] } -llama-cpp-python = { version = "~0.2.0", optional = true } -networkx = "^3.1" -fake-useragent = "^1.5.0" -psycopg2-binary = "^2.9.6" -pyarrow = "^14.0.0" -wikipedia = "^1.4.0" -qdrant-client = "^1.9.0" -weaviate-client = "*" -sentence-transformers = { version = "^2.3.1", optional = true } -ctransformers = { version = "^0.2.10", optional = true } -cohere = "^5.5.3" -faiss-cpu = "^1.8.0" -types-cachetools = "^5.3.0.5" -pinecone-client = "^3.0.3" -pymongo = "^4.6.0" -supabase = "^2.3.0" -certifi = ">=2023.11.17,<2025.0.0" -psycopg = "3.1.9" # https://github.com/psycopg/psycopg/issues/858 -fastavro = "^1.8.0" -celery = { extras = ["redis"], version = "^5.3.6", optional = true } -redis = "^5.0.1" -flower = { version = "^2.0.0", optional = true } -metaphor-python = "^0.1.11" -pywin32 = { version = "^306", markers = "sys_platform == 'win32'" } -langfuse = "^2.33.0" -metal-sdk = "^2.5.0" -markupsafe = "^2.1.3" -boto3 = "~1.34.162" -numexpr = "^2.8.6" -qianfan = "0.3.5" -pgvector = "^0.2.3" -pyautogen = "^0.2.0" -langchain-google-genai = "1.0.8" -langchain-cohere = "^0.1.5" -elasticsearch = "^8.12.0" -pytube = "^15.0.0" -dspy-ai = "^2.4.0" -assemblyai = "^0.26.0" -litellm = "^1.44.0" -chromadb = "^0.4" -langchain-anthropic = "^0.1.23" -langchain-astradb = "^0.3.3" -langchain-openai = "0.1.22" -zep-python = { version = "^2.0.0rc5", allow-prereleases = true } -langchain-google-vertexai = "1.0.7" -langchain-groq = "0.1.6" -langchain-pinecone = "^0.1.0" -langchain-mistralai = "0.1.10" -couchbase = { extras = ["couchbase"], version = "^4.2.1", optional = true } -youtube-transcript-api = "^0.6.2" -markdown = "^3.6" 
-langchain-chroma = "^0.1.1" -upstash-vector = "^0.4.0" -gitpython = "^3.1.43" -cassio = { extras = ["cassio"], version = "^0.1.7", optional = true } -unstructured = {extras = ["docx", "md", "pptx"], version = "^0.14.4"} -langchain-aws = "^0.1.16" -langchain-mongodb = "^0.1.6" -kubernetes = "^30.1.0" -firecrawl-py = "^0.0.16" -json-repair = "^0.25.2" -langchain-nvidia-ai-endpoints = "0.1.6" -langchain-google-calendar-tools = "^0.0.1" -langchain-milvus = "^0.1.1" -crewai = {extras = ["tools"], version = "^0.36.0"} -langwatch = "^0.1.16" -langsmith = "^0.1.86" -yfinance = "^0.2.40" -langchain-google-community = "1.0.7" -wolframalpha = "^5.1.3" -astra-assistants = "^2.0.15" -composio-langchain = "^0.5.3" -spider-client = "^0.0.27" - - -[tool.poetry.group.dev.dependencies] -types-redis = "^4.6.0.5" -ipykernel = "^6.29.0" -mypy = "^1.11.0" -ruff = "^0.4.5" -httpx = "*" -pytest = "^8.2.0" -types-requests = "^2.32.0" -requests = "^2.32.0" -pytest-cov = "^5.0.0" -pandas-stubs = "^2.1.4.231227" -types-pillow = "^10.2.0.20240213" -types-pyyaml = "^6.0.12.8" -types-python-jose = "^3.3.4.8" -types-passlib = "^1.7.7.13" -pytest-mock = "^3.14.0" -pytest-xdist = "^3.6.0" -types-pywin32 = "^306.0.0.4" -types-google-cloud-ndb = "^2.2.0.0" -pytest-sugar = "^1.0.0" -respx = "^0.21.1" -pytest-instafail = "^0.5.0" -pytest-asyncio = "^0.23.0" -pytest-profiling = "^1.7.0" -pre-commit = "^3.7.0" -vulture = "^2.11" -dictdiffer = "^0.9.0" -pytest-split = "^0.9.0" -pytest-flakefinder = "^1.1.0" - -[tool.poetry.extras] -deploy = ["celery", "redis", "flower"] -couchbase = ["couchbase"] -cassio = ["cassio"] -local = ["llama-cpp-python", "sentence-transformers", "ctransformers"] - - -[tool.poetry.group.spelling] -optional = true - -[tool.poetry.group.spelling.dependencies] -codespell = "^2.3.0" [tool.codespell] skip = '.git,*.pdf,*.svg,*.pdf,*.yaml,*.ipynb,poetry.lock,*.min.js,*.css,package-lock.json,*.trig.,**/node_modules/**,./stuff/*,*.csv' @@ -165,7 +199,8 @@ log_cli = true log_cli_format = "%(asctime)s [%(levelname)8s] %(message)s (%(filename)s:%(lineno)s)" log_cli_date_format = "%Y-%m-%d %H:%M:%S" markers = ["async_test", "api_key_required"] - +asyncio_mode = "auto" +asyncio_default_fixture_loop_scope = "function" [tool.coverage.run] command_line = """ @@ -192,6 +227,43 @@ directory = "coverage" exclude = ["src/backend/langflow/alembic/*"] line-length = 120 +[tool.ruff.lint] +pydocstyle.convention = "google" +select = ["ALL"] +ignore = [ + "C90", # McCabe complexity + "CPY", # Missing copyright + "COM812", # Messes with the formatter + "ERA", # Eradicate commented-out code + "FIX002", # Line contains TODO + "ISC001", # Messes with the formatter + "PERF203", # Rarely useful + "PLR09", # Too many something (arg, statements, etc) + "RUF012", # Pydantic models are currently not well detected. 
See https://github.com/astral-sh/ruff/issues/13630 + "TD002", # Missing author in TODO + "TD003", # Missing issue link in TODO + "TRY301", # A bit too harsh (Abstract `raise` to an inner function) + + # Rules that are TODOs + "ANN", +] + +# Preview rules that are not yet activated +external = ["RUF027"] + +[tool.ruff.lint.per-file-ignores] +"scripts/*" = [ + "D1", + "INP", + "T201", +] +"src/backend/tests/*" = [ + "D1", + "PLR2004", + "S101", + "SLF001", +] + [tool.mypy] plugins = ["pydantic.mypy"] follow_imports = "skip" @@ -201,5 +273,5 @@ mypy_path = "langflow" ignore_missing_imports = true [build-system] -requires = ["poetry-core"] -build-backend = "poetry.core.masonry.api" +requires = ["hatchling"] +build-backend = "hatchling.build" diff --git a/src/backend/base/langflow/components/embeddings/util/__init__.py b/scripts/__init__.py similarity index 100% rename from src/backend/base/langflow/components/embeddings/util/__init__.py rename to scripts/__init__.py diff --git a/src/backend/base/langflow/services/monitor/__init__.py b/scripts/ci/__init__.py similarity index 100% rename from src/backend/base/langflow/services/monitor/__init__.py rename to scripts/ci/__init__.py diff --git a/scripts/ci/pypi_nightly_tag.py b/scripts/ci/pypi_nightly_tag.py new file mode 100755 index 000000000000..d15dffc6c160 --- /dev/null +++ b/scripts/ci/pypi_nightly_tag.py @@ -0,0 +1,85 @@ +#!/usr/bin/env python +"""Idea from https://github.com/streamlit/streamlit/blob/4841cf91f1c820a392441092390c4c04907f9944/scripts/pypi_nightly_create_tag.py.""" + +import sys + +import packaging.version +from packaging.version import Version + +PYPI_LANGFLOW_URL = "https://pypi.org/pypi/langflow/json" +PYPI_LANGFLOW_NIGHTLY_URL = "https://pypi.org/pypi/langflow-nightly/json" + +PYPI_LANGFLOW_BASE_URL = "https://pypi.org/pypi/langflow-base/json" +PYPI_LANGFLOW_BASE_NIGHTLY_URL = "https://pypi.org/pypi/langflow-base-nightly/json" + +ARGUMENT_NUMBER = 2 + + +def get_latest_published_version(build_type: str, *, is_nightly: bool) -> Version: + import requests + + url = "" + if build_type == "base": + url = PYPI_LANGFLOW_BASE_NIGHTLY_URL if is_nightly else PYPI_LANGFLOW_BASE_URL + elif build_type == "main": + url = PYPI_LANGFLOW_NIGHTLY_URL if is_nightly else PYPI_LANGFLOW_URL + else: + msg = f"Invalid build type: {build_type}" + raise ValueError(msg) + + res = requests.get(url, timeout=10) + try: + version_str = res.json()["info"]["version"] + except Exception as e: + msg = "Got unexpected response from PyPI" + raise RuntimeError(msg) from e + return Version(version_str) + + +def create_tag(build_type: str): + current_version = get_latest_published_version(build_type, is_nightly=False) + current_nightly_version = get_latest_published_version(build_type, is_nightly=True) + + build_number = "0" + latest_base_version = current_version.base_version + nightly_base_version = current_nightly_version.base_version + + if latest_base_version == nightly_base_version: + # If the latest version is the same as the nightly version, increment the build number + build_number = str(current_nightly_version.dev + 1) + + new_nightly_version = latest_base_version + ".dev" + build_number + + # Prepend "v" to the version, if DNE. + # This is an update to the nightly version format. + if not new_nightly_version.startswith("v"): + new_nightly_version = "v" + new_nightly_version + + # X.Y.Z.dev.YYYYMMDD + # This takes the base version of the current version and appends the + # current date. 
If the last release was on the same day, we exit, as
+    # pypi does not allow for overwriting the same version.
+    #
+    # We could use a different versioning scheme, such as just incrementing
+    # an integer.
+    # version_with_date = (
+    #     ".".join([str(x) for x in current_version.release])
+    #     + ".dev"
+    #     + "0"
+    #     + datetime.now(pytz.timezone("UTC")).strftime("%Y%m%d")
+    # )
+
+    # Verify if version is PEP440 compliant.
+    packaging.version.Version(new_nightly_version)
+
+    return new_nightly_version
+
+
+if __name__ == "__main__":
+    if len(sys.argv) != ARGUMENT_NUMBER:
+        msg = "Specify base or main"
+        raise ValueError(msg)
+
+    build_type = sys.argv[1]
+    tag = create_tag(build_type)
+    print(tag)
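For reference, the tagging arithmetic above can be reproduced with packaging alone; a minimal sketch with hypothetical versions (assumes the latest stable release is 1.1.0 and the latest nightly is v1.1.0.dev3):

    # packaging exposes exactly the pieces create_tag() relies on.
    from packaging.version import Version

    current = Version("1.1.0")
    nightly = Version("v1.1.0.dev3")  # PEP 440 normalization tolerates a leading "v"

    build_number = "0"
    if current.base_version == nightly.base_version:
        build_number = str(nightly.dev + 1)  # .dev is the numeric dev-release segment

    tag = "v" + current.base_version + ".dev" + build_number
    Version(tag)  # raises InvalidVersion if the tag is not PEP 440 compliant
    print(tag)    # -> v1.1.0.dev4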
diff --git a/scripts/ci/update_lf_base_dependency.py b/scripts/ci/update_lf_base_dependency.py
new file mode 100755
index 000000000000..e3e5d1aabd20
--- /dev/null
+++ b/scripts/ci/update_lf_base_dependency.py
@@ -0,0 +1,51 @@
+#!/usr/bin/env python
+
+import re
+import sys
+from pathlib import Path
+
+import packaging.version
+
+BASE_DIR = Path(__file__).parent.parent.parent
+ARGUMENT_NUMBER = 2
+
+
+def update_base_dep(pyproject_path: str, new_version: str) -> None:
+    """Update the langflow-base dependency in pyproject.toml."""
+    filepath = BASE_DIR / pyproject_path
+    content = filepath.read_text(encoding="utf-8")
+
+    replacement = f'langflow-base-nightly = "{new_version}"'
+
+    # Updates the pattern for poetry
+    pattern = re.compile(r'langflow-base = \{ path = "\./src/backend/base", develop = true \}')
+    if not pattern.search(content):
+        msg = f'langflow-base poetry dependency not found in "{filepath}"'
+        raise ValueError(msg)
+    content = pattern.sub(replacement, content)
+    filepath.write_text(content, encoding="utf-8")
+
+
+def verify_pep440(version):
+    """Verify if version is PEP440 compliant.
+
+    https://github.com/pypa/packaging/blob/16.7/packaging/version.py#L191
+    """
+    return packaging.version.Version(version)
+
+
+def main() -> None:
+    if len(sys.argv) != ARGUMENT_NUMBER:
+        msg = "New version not specified"
+        raise ValueError(msg)
+    base_version = sys.argv[1]
+
+    # Strip "v" prefix from version if present
+    base_version = base_version.removeprefix("v")
+
+    verify_pep440(base_version)
+    update_base_dep("pyproject.toml", base_version)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/scripts/ci/update_pyproject_combined.py b/scripts/ci/update_pyproject_combined.py
new file mode 100755
index 000000000000..20c6e3dedfc1
--- /dev/null
+++ b/scripts/ci/update_pyproject_combined.py
@@ -0,0 +1,52 @@
+#!/usr/bin/env python
+# scripts/ci/update_pyproject_combined.py
+import sys
+from pathlib import Path
+
+from update_pyproject_name import update_pyproject_name
+from update_pyproject_name import update_uv_dep as update_name_uv_dep
+from update_pyproject_version import update_pyproject_version
+from update_uv_dependency import update_uv_dep as update_version_uv_dep
+
+# Add the current directory to the path so we can import the other scripts
+current_dir = Path(__file__).resolve().parent
+sys.path.append(str(current_dir))
+
+
+def main():
+    """Universal update script that handles both base and main updates in a single run.
+
+    Usage:
+        update_pyproject_combined.py main <main_tag> <base_tag>
+    """
+    arg_count = 4
+    if len(sys.argv) != arg_count:
+        print("Usage:")
+        print("  update_pyproject_combined.py main <main_tag> <base_tag>")
+        sys.exit(1)
+
+    mode = sys.argv[1]
+    if mode != "main":
+        print("Only 'main' mode is supported")
+        print("Usage: update_pyproject_combined.py main <main_tag> <base_tag>")
+        sys.exit(1)
+
+    main_tag = sys.argv[2]
+    base_tag = sys.argv[3]
+
+    # First handle base package updates
+    update_pyproject_name("src/backend/base/pyproject.toml", "langflow-base-nightly")
+    update_name_uv_dep("pyproject.toml", "langflow-base-nightly")
+    update_pyproject_version("src/backend/base/pyproject.toml", base_tag)
+
+    # Then handle main package updates
+    update_pyproject_name("pyproject.toml", "langflow-nightly")
+    update_name_uv_dep("pyproject.toml", "langflow-nightly")
+    update_pyproject_version("pyproject.toml", main_tag)
+    # Update dependency version (strip 'v' prefix if present)
+    base_version = base_tag.lstrip("v")
+    update_version_uv_dep(base_version)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/scripts/ci/update_pyproject_name.py b/scripts/ci/update_pyproject_name.py
new file mode 100755
index 000000000000..38511bf4c14f
--- /dev/null
+++ b/scripts/ci/update_pyproject_name.py
@@ -0,0 +1,69 @@
+#!/usr/bin/env python
+
+import re
+import sys
+from pathlib import Path
+
+BASE_DIR = Path(__file__).parent.parent.parent
+ARGUMENT_NUMBER = 3
+
+
+def update_pyproject_name(pyproject_path: str, new_project_name: str) -> None:
+    """Update the project name in pyproject.toml."""
+    filepath = BASE_DIR / pyproject_path
+    content = filepath.read_text(encoding="utf-8")
+
+    # Regex to match the name line under [project]
+    pattern = re.compile(r'(?<=^name = ")[^"]+(?=")', re.MULTILINE)
+
+    if not pattern.search(content):
+        msg = f'Project name not found in "{filepath}"'
+        raise ValueError(msg)
+    content = pattern.sub(new_project_name, content)
+
+    filepath.write_text(content, encoding="utf-8")
+
+
+def update_uv_dep(pyproject_path: str, new_project_name: str) -> None:
+    """Update the langflow-base dependency in pyproject.toml."""
+    filepath = BASE_DIR / pyproject_path
+    content = filepath.read_text(encoding="utf-8")
+
+    if new_project_name == "langflow-nightly":
+        pattern = re.compile(r"langflow = \{ workspace = true \}")
+        replacement = "langflow-nightly = { workspace = true }"
+    elif new_project_name == "langflow-base-nightly":
+        pattern = re.compile(r"langflow-base = \{ workspace = true \}")
+        replacement = "langflow-base-nightly = { workspace = true }"
+    else:
+        msg = f"Invalid project name: {new_project_name}"
+        raise ValueError(msg)
+
+    # Updates the dependency name for uv
+    if not pattern.search(content):
+        msg = f"{replacement} uv dependency not found in {filepath}"
+        raise ValueError(msg)
+    content = pattern.sub(replacement, content)
+    filepath.write_text(content, encoding="utf-8")
+
+
+def main() -> None:
+    if len(sys.argv) != ARGUMENT_NUMBER:
+        msg = "Must specify project name and build type, e.g. langflow-nightly base"
+        raise ValueError(msg)
+    new_project_name = sys.argv[1]
+    build_type = sys.argv[2]
+
+    if build_type == "base":
+        update_pyproject_name("src/backend/base/pyproject.toml", new_project_name)
+        update_uv_dep("pyproject.toml", new_project_name)
+    elif build_type == "main":
+        update_pyproject_name("pyproject.toml", new_project_name)
+        update_uv_dep("pyproject.toml", new_project_name)
+    else:
+        msg = f"Invalid build type: {build_type}"
+        raise ValueError(msg)
+
+
+if __name__ == "__main__":
+    main()
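Both rename scripts lean on the same fixed-width lookbehind/lookahead pair, so only the quoted value is rewritten and the surrounding key and quotes are untouched; a minimal standalone sketch over hypothetical TOML content:

    import re

    # re.MULTILINE makes ^ anchor at each line start, so the pattern only
    # matches the value between the quotes of a `name = "..."` line.
    pyproject = 'name = "langflow"\nversion = "1.1.0"\n'
    pattern = re.compile(r'(?<=^name = ")[^"]+(?=")', re.MULTILINE)
    updated = pattern.sub("langflow-nightly", pyproject)
    assert updated == 'name = "langflow-nightly"\nversion = "1.1.0"\n'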
diff --git a/scripts/ci/update_pyproject_version.py b/scripts/ci/update_pyproject_version.py
new file mode 100755
index 000000000000..79cbbdc6c35e
--- /dev/null
+++ b/scripts/ci/update_pyproject_version.py
@@ -0,0 +1,61 @@
+#!/usr/bin/env python
+
+import re
+import sys
+from pathlib import Path
+
+import packaging.version
+
+BASE_DIR = Path(__file__).parent.parent.parent
+ARGUMENT_NUMBER = 3
+
+
+def update_pyproject_version(pyproject_path: str, new_version: str) -> None:
+    """Update the version in pyproject.toml."""
+    filepath = BASE_DIR / pyproject_path
+    content = filepath.read_text(encoding="utf-8")
+
+    # Regex to match the version line under [project]
+    pattern = re.compile(r'(?<=^version = ")[^"]+(?=")', re.MULTILINE)
+
+    if not pattern.search(content):
+        msg = f'Project version not found in "{filepath}"'
+        raise ValueError(msg)
+
+    content = pattern.sub(new_version, content)
+
+    filepath.write_text(content, encoding="utf-8")
+
+
+def verify_pep440(version):
+    """Verify if version is PEP440 compliant.
+
+    https://github.com/pypa/packaging/blob/16.7/packaging/version.py#L191
+    """
+    return packaging.version.Version(version)
+
+
+def main() -> None:
+    if len(sys.argv) != ARGUMENT_NUMBER:
+        msg = "New version not specified"
+        raise ValueError(msg)
+    new_version = sys.argv[1]
+
+    # Strip "v" prefix from version if present
+    new_version = new_version.removeprefix("v")
+
+    build_type = sys.argv[2]
+
+    verify_pep440(new_version)
+
+    if build_type == "base":
+        update_pyproject_version("src/backend/base/pyproject.toml", new_version)
+    elif build_type == "main":
+        update_pyproject_version("pyproject.toml", new_version)
+    else:
+        msg = f"Invalid build type: {build_type}"
+        raise ValueError(msg)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/scripts/ci/update_uv_dependency.py b/scripts/ci/update_uv_dependency.py
new file mode 100755
index 000000000000..c4ac2e809109
--- /dev/null
+++ b/scripts/ci/update_uv_dependency.py
@@ -0,0 +1,44 @@
+#!/usr/bin/env python
+
+import re
+import sys
+from pathlib import Path
+
+BASE_DIR = Path(__file__).parent.parent.parent
+ARGUMENT_NUMBER = 2
+
+
+def update_uv_dep(base_version: str) -> None:
+    """Update the langflow-base dependency in pyproject.toml."""
+    pyproject_path = BASE_DIR / "pyproject.toml"
+
+    # Read the pyproject.toml file content
+    content = pyproject_path.read_text(encoding="utf-8")
+
+    # For the main project, update the langflow-base dependency in the UV section
+    pattern = re.compile(r'(dependencies\s*=\s*\[\s*\n\s*)("langflow-base==[\d.]+")')
+    replacement = rf'\1"langflow-base-nightly=={base_version}"'
+
+    # Check if the pattern is found
+    if not pattern.search(content):
+        msg = f"{pattern} UV dependency not found in {pyproject_path}"
+        raise ValueError(msg)
+
+    # Replace the matched pattern with the new one
+    content = pattern.sub(replacement, content)
+
+    # Write the updated content back to the file
+    pyproject_path.write_text(content, encoding="utf-8")
+
+
+def main() -> None:
+    if len(sys.argv) != ARGUMENT_NUMBER:
+        msg = "specify base version"
+        raise ValueError(msg)
+    base_version = sys.argv[1]
+    base_version = base_version.lstrip("v")
+    update_uv_dep(base_version)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/scripts/factory_restart_space.py b/scripts/factory_restart_space.py
index 07a25d0de386..9006d5fa7815 100644
--- a/scripts/factory_restart_space.py
+++ b/scripts/factory_restart_space.py
@@ -1,4 +1,12 @@
+# /// script
+# requires-python = ">=3.10"
+# dependencies = [
+#   "huggingface-hub",
+#   "rich",
+# ]
+# ///
 import argparse
+import sys

 from huggingface_hub import HfApi, list_models
 from rich import print

@@ -16,11 +24,11 @@

 if not space:
     print("Please provide a space to restart.")
-    exit()
+    sys.exit()

 if not parsed_args.token:
     print("Please provide an API token.")
-    exit()
+    sys.exit()

 # Or configure a HfApi client
 hf_api = HfApi(
diff --git a/scripts/setup/check_env.sh b/scripts/setup/check_env.sh
old mode 100644
new mode 100755
index 8473255c7028..af0191a9dfb9
--- a/scripts/setup/check_env.sh
+++ b/scripts/setup/check_env.sh
@@ -1,21 +1,38 @@
 #!/bin/bash

-# Detect if in a virtual environment (venv or virtualenv)
+# Detect and use appropriate Python interpreter from virtual environments
 if [ -n "$VIRTUAL_ENV" ]; then
-    exec "$@"
-# Detect if in a conda environment
+    PYTHON_EXEC=python
 elif [ -n "$CONDA_DEFAULT_ENV" ]; then
-    exec conda run -n "$CONDA_DEFAULT_ENV" "$@"
-# Detect if in a pipenv environment
+    PYTHON_EXEC="conda run -n $CONDA_DEFAULT_ENV python"
 elif [ -f "Pipfile" ]; then
-    exec pipenv run "$@"
-# Detect if in a pyenv environment
+    PYTHON_EXEC="pipenv run python"
 elif [ -d ".pyenv" ]; then
-    exec pyenv exec "$@"
-# Detect if in a venv environment
-elif [ -f "pyvenv.cfg" ]; then
-    source bin/activate
-    exec "$@"
+    PYTHON_EXEC="pyenv exec python"
 else
-    exec "$@"
+    PYTHON_EXEC=python
 fi
+
+# Check if Python version is compatible
+REQUIRED_VERSION=$1
+PYTHON_INSTALLED=$($PYTHON_EXEC -c "import sys; print(sys.version.split()[0])")
+
+echo "Detected Python version: $PYTHON_INSTALLED"
+
+$PYTHON_EXEC -c "
+import sys
+
+required_version = '$REQUIRED_VERSION'
+
+min_version, max_version = required_version.replace('>=', '').replace('<', '').split(',')
+
+def as_tuple(version_string):
+    return tuple(int(part) for part in version_string.split('.'))
+
+# sys.version_info is numeric already; distutils' LooseVersion was removed in Python 3.12
+installed = sys.version_info[:3]
+
+if not (as_tuple(min_version) <= installed < as_tuple(max_version)):
+    sys.exit(f'Error: Python version {sys.version.split()[0]} is not compatible with required version {required_version}.')
+" || exit 1
+
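For ranges richer than a plain numeric comparison (pre-releases, exclusions), the same specifier string can be evaluated with packaging, assuming it is importable in the environment under test; a sketch:

    from packaging.specifiers import SpecifierSet
    from packaging.version import Version

    # The range mirrors the requires-python value in pyproject.toml.
    spec = SpecifierSet(">=3.10,<3.13")
    print(Version("3.12.1") in spec)  # True
    print(Version("3.13.0") in spec)  # False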
diff --git a/scripts/update_dependencies.py b/scripts/update_dependencies.py
deleted file mode 100644
index 12a2b22a1a09..000000000000
--- a/scripts/update_dependencies.py
+++ /dev/null
@@ -1,62 +0,0 @@
-import re
-from pathlib import Path
-
-
-def read_version_from_pyproject(file_path):
-    with open(file_path, "r") as file:
-        for line in file:
-            match = re.search(r'version = "(.*)"', line)
-            if match:
-                return match.group(1)
-    return None
-
-
-# def get_version_from_pypi(package_name):
-#     import requests
-
-#     response = requests.get(f"https://pypi.org/pypi/{package_name}/json")
-#     if response.ok:
-#         return response.json()["info"]["version"]
-#     return None
-
-
-def get_version_from_pypi(package_name):
-    # Use default python lib to make the GET for this because it runs in github actions
-    import json
-    import urllib.request
-
-    response = urllib.request.urlopen(f"https://pypi.org/pypi/{package_name}/json")
-    if response.getcode() == 200:
-        return json.loads(response.read())["info"]["version"]
-    return None
-
-
-def update_pyproject_dependency(pyproject_path, version):
-    pattern = re.compile(r'langflow-base = \{ path = "\./src/backend/base", develop = true \}')
-    replacement = f'langflow-base = "^{version}"'
-    with open(pyproject_path, "r") as file:
-        content = file.read()
-    content = pattern.sub(replacement, content)
-    with open(pyproject_path, "w") as file:
-        file.write(content)
-
-
-if __name__ == "__main__":
-    # Backing up files
-    pyproject_path = Path(__file__).resolve().parent / "../pyproject.toml"
-    pyproject_path = pyproject_path.resolve()
-    with open(pyproject_path, "r") as original, open(pyproject_path.with_name("pyproject.toml.bak"), "w") as backup:
-        backup.write(original.read())
-    # Now backup poetry.lock
-    with open(pyproject_path.with_name("poetry.lock"), "r") as original, open(
-        pyproject_path.with_name("poetry.lock.bak"), "w"
-    ) as backup:
-        backup.write(original.read())
-
-    # Reading version and updating pyproject.toml
-    langflow_base_path = Path(__file__).resolve().parent / "../src/backend/base/pyproject.toml"
-    version = read_version_from_pyproject(langflow_base_path)
-    if version:
-        update_pyproject_dependency(pyproject_path, version)
-    else:
-        print("Error: Version not found.")
diff --git a/src/backend/tests/integration/astra/__init__.py b/src/backend/base/langflow/__init__.py
similarity index 100%
rename from src/backend/tests/integration/astra/__init__.py
rename to src/backend/base/langflow/__init__.py
diff --git a/src/backend/base/langflow/__main__.py b/src/backend/base/langflow/__main__.py
index 5e54c7d747c7..95368d5b36ac 100644
--- a/src/backend/base/langflow/__main__.py
+++ b/src/backend/base/langflow/__main__.py
@@ -1,17 +1,20 @@
+import asyncio
+import inspect
 import platform
+import signal
 import socket
 import sys
 import time
 import warnings
 from pathlib import Path
-from typing import Optional

 import click
 import httpx
 import typer
 from dotenv import load_dotenv
-from multiprocess import cpu_count  # type: ignore
-from multiprocess.context import Process  # type: ignore
+from httpx import HTTPError
+from multiprocess import cpu_count
+from multiprocess.context import Process
 from packaging import version as pkg_version
 from rich import box
 from rich import print as rprint
@@ -24,10 +27,11 @@
 from langflow.main import setup_app
 from langflow.services.database.models.folder.utils import create_default_folder_if_it_doesnt_exist
 from langflow.services.database.utils import session_getter
-from langflow.services.deps import get_db_service, get_settings_service, session_scope
+from langflow.services.deps import async_session_scope, get_db_service, get_settings_service
 from langflow.services.settings.constants import DEFAULT_SUPERUSER
 from langflow.services.utils import initialize_services
-from langflow.utils.util import update_settings
+from langflow.utils.version import fetch_latest_version, get_version_info
+from langflow.utils.version import is_pre_release as langflow_is_pre_release

 console = Console()

@@ -41,10 +45,8 @@ def get_number_of_workers(workers=None):
     return workers

-def display_results(results):
-    """
-    Display the results of the migration.
- """ +def display_results(results) -> None: + """Display the results of the migration.""" for table_results in results: table = Table(title=f"Migration {table_results.table_name}") table.add_column("Name") @@ -60,117 +62,145 @@ def display_results(results): console.print() # Print a new line -def set_var_for_macos_issue(): +def set_var_for_macos_issue() -> None: # OBJC_DISABLE_INITIALIZE_FORK_SAFETY=YES # we need to set this var is we are running on MacOS # otherwise we get an error when running gunicorn - if platform.system() in ["Darwin"]: + if platform.system() == "Darwin": import os os.environ["OBJC_DISABLE_INITIALIZE_FORK_SAFETY"] = "YES" - # https://stackoverflow.com/questions/75747888/uwsgi-segmentation-fault-with-flask-python-app-behind-nginx-after-running-for-2 # noqa + # https://stackoverflow.com/questions/75747888/uwsgi-segmentation-fault-with-flask-python-app-behind-nginx-after-running-for-2 # noqa: E501 os.environ["no_proxy"] = "*" # to avoid error with gunicorn logger.debug("Set OBJC_DISABLE_INITIALIZE_FORK_SAFETY to YES to avoid error") +def handle_sigterm(signum, frame): # noqa: ARG001 + """Handle SIGTERM signal gracefully.""" + logger.info("Received SIGTERM signal. Performing graceful shutdown...") + # Raise SystemExit to trigger graceful shutdown + sys.exit(0) + + @app.command() def run( - host: str = typer.Option("127.0.0.1", help="Host to bind the server to.", envvar="LANGFLOW_HOST"), - workers: int = typer.Option(1, help="Number of worker processes.", envvar="LANGFLOW_WORKERS"), - timeout: int = typer.Option(300, help="Worker timeout in seconds.", envvar="LANGFLOW_WORKER_TIMEOUT"), - port: int = typer.Option(7860, help="Port to listen on.", envvar="LANGFLOW_PORT"), - components_path: Optional[Path] = typer.Option( + *, + host: str | None = typer.Option(None, help="Host to bind the server to.", show_default=False), + workers: int | None = typer.Option(None, help="Number of worker processes.", show_default=False), + worker_timeout: int | None = typer.Option(None, help="Worker timeout in seconds.", show_default=False), + port: int | None = typer.Option(None, help="Port to listen on.", show_default=False), + components_path: Path | None = typer.Option( Path(__file__).parent / "components", help="Path to the directory containing custom components.", - envvar="LANGFLOW_COMPONENTS_PATH", + show_default=False, ), # .env file param - env_file: Path = typer.Option(None, help="Path to the .env file containing environment variables."), - log_level: str = typer.Option("critical", help="Logging level.", envvar="LANGFLOW_LOG_LEVEL"), - log_file: Path = typer.Option("logs/langflow.log", help="Path to the log file.", envvar="LANGFLOW_LOG_FILE"), - cache: Optional[str] = typer.Option( - envvar="LANGFLOW_LANGCHAIN_CACHE", + env_file: Path | None = typer.Option( + None, + help="Path to the .env file containing environment variables.", + show_default=False, + ), + log_level: str | None = typer.Option(None, help="Logging level.", show_default=False), + log_file: Path | None = typer.Option(None, help="Path to the log file.", show_default=False), + cache: str | None = typer.Option( # noqa: ARG001 + None, help="Type of cache to use. 
(InMemoryCache, SQLiteCache)", - default=None, + show_default=False, ), - dev: bool = typer.Option(False, help="Run in development mode (may contain bugs)"), - path: str = typer.Option( + dev: bool | None = typer.Option(None, help="Run in development mode (may contain bugs)", show_default=False), # noqa: ARG001 + frontend_path: str | None = typer.Option( None, help="Path to the frontend directory containing build files. This is for development purposes only.", - envvar="LANGFLOW_FRONTEND_PATH", + show_default=False, ), - open_browser: bool = typer.Option( - True, + open_browser: bool | None = typer.Option( + None, help="Open the browser after starting the server.", - envvar="LANGFLOW_OPEN_BROWSER", + show_default=False, ), - remove_api_keys: bool = typer.Option( - False, + remove_api_keys: bool | None = typer.Option( # noqa: ARG001 + None, help="Remove API keys from the projects saved in the database.", - envvar="LANGFLOW_REMOVE_API_KEYS", + show_default=False, ), - backend_only: bool = typer.Option( - False, + backend_only: bool | None = typer.Option( + None, help="Run only the backend server without the frontend.", - envvar="LANGFLOW_BACKEND_ONLY", + show_default=False, ), - store: bool = typer.Option( - True, + store: bool | None = typer.Option( # noqa: ARG001 + None, help="Enables the store features.", - envvar="LANGFLOW_STORE", + show_default=False, ), - auto_saving: bool = typer.Option( - True, + auto_saving: bool | None = typer.Option( # noqa: ARG001 + None, help="Defines if the auto save is enabled.", - envvar="LANGFLOW_AUTO_SAVING", + show_default=False, ), - auto_saving_interval: bool = typer.Option( - True, + auto_saving_interval: int | None = typer.Option( # noqa: ARG001 + None, help="Defines the debounce time for the auto save.", - envvar="LANGFLOW_AUTO_SAVING_INTERVAL", + show_default=False, ), - health_check_max_retries: bool = typer.Option( - True, + health_check_max_retries: bool | None = typer.Option( # noqa: ARG001 + None, help="Defines the number of retries for the health check.", - envvar="LANGFLOW_HEALTH_CHECK_MAX_RETRIES", + show_default=False, ), -): - """ - Run Langflow. 
- """ - - configure(log_level=log_level, log_file=log_file) - set_var_for_macos_issue() + max_file_size_upload: int | None = typer.Option( # noqa: ARG001 + None, + help="Defines the maximum file size for the upload in MB.", + show_default=False, + ), +) -> None: + """Run Langflow.""" + # Register SIGTERM handler + signal.signal(signal.SIGTERM, handle_sigterm) if env_file: load_dotenv(env_file, override=True) - update_settings( - dev=dev, - remove_api_keys=remove_api_keys, - cache=cache, - components_path=components_path, - store=store, - auto_saving=auto_saving, - auto_saving_interval=auto_saving_interval, - health_check_max_retries=health_check_max_retries, - ) - # create path object if path is provided - static_files_dir: Optional[Path] = Path(path) if path else None + configure(log_level=log_level, log_file=log_file) + logger.debug(f"Loading config from file: '{env_file}'" if env_file else "No env_file provided.") + set_var_for_macos_issue() settings_service = get_settings_service() - settings_service.set("backend_only", backend_only) + + frame = inspect.currentframe() + valid_args: list = [] + values: dict = {} + if frame is not None: + arguments, _, _, values = inspect.getargvalues(frame) + valid_args = [arg for arg in arguments if values[arg] is not None] + + for arg in valid_args: + if arg == "components_path": + settings_service.settings.update_settings(components_path=components_path) + elif hasattr(settings_service.settings, arg): + settings_service.set(arg, values[arg]) + logger.debug(f"Loading config from cli parameter '{arg}': '{values[arg]}'") + + host = settings_service.settings.host + port = settings_service.settings.port + workers = settings_service.settings.workers + worker_timeout = settings_service.settings.worker_timeout + log_level = settings_service.settings.log_level + frontend_path = settings_service.settings.frontend_path + backend_only = settings_service.settings.backend_only + + # create path object if frontend_path is provided + static_files_dir: Path | None = Path(frontend_path) if frontend_path else None + app = setup_app(static_files_dir=static_files_dir, backend_only=backend_only) # check if port is being used if is_port_in_use(port, host): port = get_free_port(port) - settings_service.set("worker_timeout", timeout) - options = { "bind": f"{host}:{port}", "workers": get_number_of_workers(workers), - "timeout": timeout, + "timeout": worker_timeout, } # Define an env variable to know if we are just testing the server @@ -178,11 +208,11 @@ def run( return process: Process | None = None try: - if platform.system() in ["Windows"]: + if platform.system() == "Windows": # Run using uvicorn on MacOS and Windows # Windows doesn't support gunicorn # MacOS requires an env variable to be set to use gunicorn - process = run_on_windows(host, port, log_level, options, app) + run_on_windows(host, port, log_level, options, app) else: # Run using gunicorn on Linux process = run_on_mac_or_linux(host, port, log_level, options, app) @@ -190,24 +220,32 @@ def run( click.launch(f"http://{host}:{port}") if process: process.join() - except KeyboardInterrupt: + except (KeyboardInterrupt, SystemExit) as e: + logger.info("Shutting down server...") if process is not None: process.terminate() - sys.exit(0) + process.join(timeout=15) # Wait up to 15 seconds for process to terminate + if process.is_alive(): + logger.warning("Process did not terminate gracefully, forcing...") + process.kill() + raise typer.Exit(0) from e except Exception as e: logger.exception(e) - sys.exit(1) + if process 
is not None: + process.terminate() + raise typer.Exit(1) from e -def wait_for_server_ready(host, port): - """ - Wait for the server to become ready by polling the health endpoint. - """ +def wait_for_server_ready(host, port) -> None: + """Wait for the server to become ready by polling the health endpoint.""" status_code = 0 - while status_code != 200: + while status_code != httpx.codes.OK: try: status_code = httpx.get(f"http://{host}:{port}/health").status_code - except Exception: + except HTTPError: + time.sleep(1) + except Exception: # noqa: BLE001 + logger.opt(exception=True).debug("Error while waiting for the server to become ready.") time.sleep(1) @@ -220,18 +258,14 @@ def run_on_mac_or_linux(host, port, log_level, options, app): return webapp_process -def run_on_windows(host, port, log_level, options, app): - """ - Run the Langflow server on Windows. - """ +def run_on_windows(host, port, log_level, options, app) -> None: + """Run the Langflow server on Windows.""" print_banner(host, port) run_langflow(host, port, log_level, options, app) - return None def is_port_in_use(port, host="localhost"): - """ - Check if a port is in use. + """Check if a port is in use. Args: port (int): The port number to check. @@ -245,8 +279,7 @@ def is_port_in_use(port, host="localhost"): def get_free_port(port): - """ - Given a used port, find a free port. + """Given a used port, find a free port. Args: port (int): The port number to check. @@ -259,17 +292,8 @@ def get_free_port(port): return port -def version_is_prerelease(version: str): - """ - Check if a version is a pre-release version. - """ - return "a" in version or "b" in version or "rc" in version - - -def get_letter_from_version(version: str): - """ - Get the letter from a pre-release version. - """ +def get_letter_from_version(version: str) -> str | None: + """Get the letter from a pre-release version.""" if "a" in version: return "a" if "b" in version: @@ -279,87 +303,46 @@ def get_letter_from_version(version: str): return None -def is_prerelease(version: str) -> bool: - return "a" in version or "b" in version or "rc" in version - - -def fetch_latest_version(package_name: str, include_prerelease: bool) -> Optional[str]: - valid_versions = [] - try: - response = httpx.get(f"https://pypi.org/pypi/{package_name}/json") - versions = response.json()["releases"].keys() - valid_versions = [v for v in versions if include_prerelease or not is_prerelease(v)] - - except Exception as e: - logger.exception(e) - - finally: - if not valid_versions: - return None # Handle case where no valid versions are found - return max(valid_versions, key=lambda v: pkg_version.parse(v)) - - def build_version_notice(current_version: str, package_name: str) -> str: - latest_version = fetch_latest_version(package_name, is_prerelease(current_version)) + latest_version = fetch_latest_version(package_name, include_prerelease=langflow_is_pre_release(current_version)) if latest_version and pkg_version.parse(current_version) < pkg_version.parse(latest_version): - release_type = "pre-release" if is_prerelease(latest_version) else "version" + release_type = "pre-release" if langflow_is_pre_release(latest_version) else "version" return f"A new {release_type} of {package_name} is available: {latest_version}" return "" -def generate_pip_command(package_names, is_pre_release): - """ - Generate the pip install command based on the packages and whether it's a pre-release. 
- """ +def generate_pip_command(package_names, is_pre_release) -> str: + """Generate the pip install command based on the packages and whether it's a pre-release.""" base_command = "pip install" if is_pre_release: return f"{base_command} {' '.join(package_names)} -U --pre" - else: - return f"{base_command} {' '.join(package_names)} -U" + return f"{base_command} {' '.join(package_names)} -U" -def stylize_text(text: str, to_style: str, is_prerelease: bool) -> str: +def stylize_text(text: str, to_style: str, *, is_prerelease: bool) -> str: color = "#42a7f5" if is_prerelease else "#6e42f5" # return "".join(f"[{color}]{char}[/]" for char in text) styled_text = f"[{color}]{to_style}[/]" return text.replace(to_style, styled_text) -def print_banner(host: str, port: int): +def print_banner(host: str, port: int) -> None: notices = [] package_names = [] # Track package names for pip install instructions is_pre_release = False # Track if any package is a pre-release package_name = "" - try: - from langflow.version import __version__ as langflow_version # type: ignore - - is_pre_release |= is_prerelease(langflow_version) # Update pre-release status - notice = build_version_notice(langflow_version, "langflow") - notice = stylize_text(notice, "langflow", is_pre_release) - if notice: - notices.append(notice) - package_names.append("langflow") - package_name = "Langflow" - except ImportError: - langflow_version = None - - # Attempt to handle langflow-base similarly - if langflow_version is None: # This means langflow.version was not imported - try: - from importlib import metadata - - langflow_base_version = metadata.version("langflow-base") - is_pre_release |= is_prerelease(langflow_base_version) # Update pre-release status - notice = build_version_notice(langflow_base_version, "langflow-base") - notice = stylize_text(notice, "langflow-base", is_pre_release) - if notice: - notices.append(notice) - package_names.append("langflow-base") - package_name = "Langflow Base" - except ImportError as e: - logger.exception(e) - raise e + # Use langflow.utils.version to get the version info + version_info = get_version_info() + langflow_version = version_info["version"] + package_name = version_info["package"] + is_pre_release |= langflow_is_pre_release(langflow_version) # Update pre-release status + + notice = build_version_notice(langflow_version, package_name) + notice = stylize_text(notice, package_name, is_prerelease=is_pre_release) + if notice: + notices.append(notice) + package_names.append(package_name) # Generate pip command based on the collected data pip_command = generate_pip_command(package_names, is_pre_release) @@ -369,11 +352,19 @@ def print_banner(host: str, port: int): notices.append(f"Run '{pip_command}' to update.") styled_notices = [f"[bold]{notice}[/bold]" for notice in notices if notice] - styled_package_name = stylize_text(package_name, package_name, any("pre-release" in notice for notice in notices)) + styled_package_name = stylize_text( + package_name, package_name, is_prerelease=any("pre-release" in notice for notice in notices) + ) title = f"[bold]Welcome to :chains: {styled_package_name}[/bold]\n" - info_text = "Collaborate, and contribute at our [bold][link=https://github.com/langflow-ai/langflow]GitHub Repo[/link][/bold] :star2:" - telemetry_text = "We collect anonymous usage data to improve Langflow.\nYou can opt-out by setting [bold]DO_NOT_TRACK=true[/bold] in your environment." 
+ info_text = ( + "Collaborate, and contribute at our " + "[bold][link=https://github.com/langflow-ai/langflow]GitHub Repo[/link][/bold] :star2:" + ) + telemetry_text = ( + "We collect anonymous usage data to improve Langflow.\n" + "You can opt-out by setting [bold]DO_NOT_TRACK=true[/bold] in your environment." + ) access_link = f"Access [link=http://{host}:{port}]http://{host}:{port}[/link]" panel_content = "\n\n".join([title, *styled_notices, info_text, telemetry_text, access_link]) @@ -381,15 +372,9 @@ def print_banner(host: str, port: int): rprint(panel) -def run_langflow(host, port, log_level, options, app): - """ - Run Langflow server on localhost - """ - - if platform.system() in ["Windows"]: - # Run using uvicorn on MacOS and Windows - # Windows doesn't support gunicorn - # MacOS requires an env variable to be set to use gunicorn +def run_langflow(host, port, log_level, options, app) -> None: + """Run Langflow server on localhost.""" + if platform.system() == "Windows": import uvicorn uvicorn.run( @@ -402,7 +387,28 @@ def run_langflow(host, port, log_level, options, app): else: from langflow.server import LangflowApplication - LangflowApplication(app, options).run() + server = LangflowApplication(app, options) + + def graceful_shutdown(signum, frame): # noqa: ARG001 + """Gracefully shutdown the server when receiving SIGTERM.""" + # Suppress click exceptions during shutdown + import click + + click.echo = lambda *args, **kwargs: None # noqa: ARG005 + + logger.info("Gracefully shutting down server...") + # For Gunicorn workers, we raise SystemExit to trigger graceful shutdown + raise SystemExit(0) + + # Register signal handlers + signal.signal(signal.SIGTERM, graceful_shutdown) + signal.signal(signal.SIGINT, graceful_shutdown) + + try: + server.run() + except (KeyboardInterrupt, SystemExit): + # Suppress the exception output + sys.exit(0) @app.command() @@ -410,10 +416,8 @@ def superuser( username: str = typer.Option(..., prompt=True, help="Username for the superuser."), password: str = typer.Option(..., prompt=True, hide_input=True, help="Password for the superuser."), log_level: str = typer.Option("error", help="Logging level.", envvar="LANGFLOW_LOG_LEVEL"), -): - """ - Create a superuser. - """ +) -> None: + """Create a superuser.""" configure(log_level=log_level) initialize_services() db_service = get_db_service() @@ -433,7 +437,8 @@ def superuser( if result: typer.echo("Default folder created successfully.") else: - raise RuntimeError("Could not create default folder.") + msg = "Could not create default folder." + raise RuntimeError(msg) typer.echo("Superuser created successfully.") else: @@ -443,11 +448,11 @@ def superuser( # command to copy the langflow database from the cache to the current directory # because now the database is stored per installation @app.command() -def copy_db(): - """ - Copy the database files to the current directory. +def copy_db() -> None: + """Copy the database files to the current directory. - This function copies the 'langflow.db' and 'langflow-pre.db' files from the cache directory to the current directory. + This function copies the 'langflow.db' and 'langflow-pre.db' files from the cache directory to the current + directory. If the files exist in the cache directory, they will be copied to the same directory as this script (__main__.py). 
Returns: @@ -477,20 +482,17 @@ def copy_db(): @app.command() def migration( - test: bool = typer.Option(True, help="Run migrations in test mode."), - fix: bool = typer.Option( - False, + test: bool = typer.Option(default=True, help="Run migrations in test mode."), # noqa: FBT001 + fix: bool = typer.Option( # noqa: FBT001 + default=False, help="Fix migrations. This is a destructive operation, and should only be used if you know what you are doing.", ), -): - """ - Run or test migrations. - """ - if fix: - if not typer.confirm( - "This will delete all data necessary to fix migrations. Are you sure you want to continue?" - ): - raise typer.Abort() +) -> None: + """Run or test migrations.""" + if fix and not typer.confirm( + "This will delete all data necessary to fix migrations. Are you sure you want to continue?" + ): + raise typer.Abort initialize_services(fix_migration=fix) db_service = get_db_service() @@ -502,10 +504,9 @@ def migration( @app.command() def api_key( - log_level: str = typer.Option("error", help="Logging level.", envvar="LANGFLOW_LOG_LEVEL"), -): - """ - Creates an API key for the default superuser if AUTO_LOGIN is enabled. + log_level: str = typer.Option("error", help="Logging level."), +) -> None: + """Creates an API key for the default superuser if AUTO_LOGIN is enabled. Args: log_level (str, optional): Logging level. Defaults to "error". @@ -520,30 +521,61 @@ def api_key( if not auth_settings.AUTO_LOGIN: typer.echo("Auto login is disabled. API keys cannot be created through the CLI.") return - with session_scope() as session: - from langflow.services.database.models.user.model import User - superuser = session.exec(select(User).where(User.username == DEFAULT_SUPERUSER)).first() - if not superuser: - typer.echo("Default superuser not found. This command requires a superuser and AUTO_LOGIN to be enabled.") - return - from langflow.services.database.models.api_key import ApiKey, ApiKeyCreate - from langflow.services.database.models.api_key.crud import create_api_key, delete_api_key - - api_key = session.exec(select(ApiKey).where(ApiKey.user_id == superuser.id)).first() - if api_key: - delete_api_key(session, api_key.id) + async def aapi_key(): + async with async_session_scope() as session: + from langflow.services.database.models.user.model import User - api_key_create = ApiKeyCreate(name="CLI") - unmasked_api_key = create_api_key(session, api_key_create, user_id=superuser.id) - session.commit() - # Create a banner to display the API key and tell the user it won't be shown again - api_key_banner(unmasked_api_key) + superuser = (await session.exec(select(User).where(User.username == DEFAULT_SUPERUSER))).first() + if not superuser: + typer.echo( + "Default superuser not found. This command requires a superuser and AUTO_LOGIN to be enabled." 
+ ) + return None + from langflow.services.database.models.api_key import ApiKey, ApiKeyCreate + from langflow.services.database.models.api_key.crud import create_api_key, delete_api_key + + api_key = (await session.exec(select(ApiKey).where(ApiKey.user_id == superuser.id))).first() + if api_key: + await delete_api_key(session, api_key.id) + + api_key_create = ApiKeyCreate(name="CLI") + unmasked_api_key = await create_api_key(session, api_key_create, user_id=superuser.id) + await session.commit() + return unmasked_api_key + + unmasked_api_key = asyncio.run(aapi_key()) + # Create a banner to display the API key and tell the user it won't be shown again + api_key_banner(unmasked_api_key) + + +def show_version(*, value: bool): + if value: + default = "DEV" + raw_info = get_version_info() + version = raw_info.get("version", default) if raw_info else default + typer.echo(f"langflow {version}") + raise typer.Exit + + +@app.callback() +def version_option( + *, + version: bool = typer.Option( + None, + "--version", + "-v", + callback=show_version, + is_eager=True, + help="Show the version and exit.", + ), +): + pass -def api_key_banner(unmasked_api_key): +def api_key_banner(unmasked_api_key) -> None: is_mac = platform.system() == "Darwin" - import pyperclip # type: ignore + import pyperclip pyperclip.copy(unmasked_api_key.api_key) panel = Panel( @@ -551,7 +583,7 @@ def api_key_banner(unmasked_api_key): f"[bold blue]{unmasked_api_key.api_key}[/bold blue]\n\n" "This is the only time the API key will be displayed. \n" "Make sure to store it in a secure location. \n\n" - f"The API key has been copied to your clipboard. [bold]{['Ctrl','Cmd'][is_mac]} + V[/bold] to paste it.", + f"The API key has been copied to your clipboard. [bold]{['Ctrl', 'Cmd'][is_mac]} + V[/bold] to paste it.", box=box.ROUNDED, border_style="blue", expand=False, @@ -560,11 +592,15 @@ def api_key_banner(unmasked_api_key): console.print(panel) -def main(): +def main() -> None: with warnings.catch_warnings(): warnings.simplefilter("ignore") app() if __name__ == "__main__": - main() + try: + main() + except Exception as e: + logger.exception(e) + raise typer.Exit(1) from e diff --git a/src/backend/base/langflow/alembic/env.py b/src/backend/base/langflow/alembic/env.py index 7400906c85bc..d4e1306492da 100644 --- a/src/backend/base/langflow/alembic/env.py +++ b/src/backend/base/langflow/alembic/env.py @@ -1,12 +1,9 @@ -import os -import warnings from logging.config import fileConfig from alembic import context -from loguru import logger from sqlalchemy import engine_from_config, pool -from langflow.services.database.models import * # noqa +from langflow.services.database.models import * from langflow.services.database.service import SQLModel # this is the Alembic Config object, which provides @@ -42,8 +39,7 @@ def run_migrations_offline() -> None: script output. """ - url = os.getenv("LANGFLOW_DATABASE_URL") - url = url or config.get_main_option("sqlalchemy.url") + url = config.get_main_option("sqlalchemy.url") context.configure( url=url, target_metadata=target_metadata, @@ -63,32 +59,17 @@ def run_migrations_online() -> None: and associate a connection with the context. 
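Since the LANGFLOW_DATABASE_URL fallback is dropped from env.py in this diff, the URL must now reach Alembic through its config; a minimal sketch of programmatic injection, assuming the caller already knows its database URL (the helper name and script location here are illustrative, not part of the change):

from alembic import command
from alembic.config import Config

def run_upgrade(database_url: str, script_location: str = "langflow/alembic") -> None:
    # env.py reads sqlalchemy.url via config.get_main_option, so set it here.
    cfg = Config()
    cfg.set_main_option("script_location", script_location)
    cfg.set_main_option("sqlalchemy.url", database_url)
    command.upgrade(cfg, "head")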
""" + connectable = engine_from_config( + config.get_section(config.config_ini_section, {}), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + + with connectable.connect() as connection: + context.configure(connection=connection, target_metadata=target_metadata, render_as_batch=True) - try: - from langflow.services.database.factory import DatabaseServiceFactory - from langflow.services.deps import get_db_service - from langflow.services.manager import initialize_settings_service, service_manager - - initialize_settings_service() - service_manager.register_factory(DatabaseServiceFactory()) - connectable = get_db_service().engine - except Exception as e: - logger.error(f"Error getting database engine: {e}") - url = os.getenv("LANGFLOW_DATABASE_URL") - url = url or config.get_main_option("sqlalchemy.url") - if url: - config.set_main_option("sqlalchemy.url", url) - connectable = engine_from_config( - config.get_section(config.config_ini_section, {}), - prefix="sqlalchemy.", - poolclass=pool.NullPool, - ) - with warnings.catch_warnings(): - warnings.simplefilter("ignore") - with connectable.connect() as connection: - context.configure(connection=connection, target_metadata=target_metadata, render_as_batch=True) - with context.begin_transaction(): - context.run_migrations() + with context.begin_transaction(): + context.run_migrations() if context.is_offline_mode(): diff --git a/src/backend/base/langflow/alembic/versions/0ae3a2674f32_update_the_columns_that_need_to_change_.py b/src/backend/base/langflow/alembic/versions/0ae3a2674f32_update_the_columns_that_need_to_change_.py new file mode 100644 index 000000000000..cc5160a49213 --- /dev/null +++ b/src/backend/base/langflow/alembic/versions/0ae3a2674f32_update_the_columns_that_need_to_change_.py @@ -0,0 +1,70 @@ +"""Update the columns that need to change their type to text + +Revision ID: 0ae3a2674f32 +Revises: d2d475a1f7c0 +Create Date: 2024-10-04 17:30:12.924809 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +import sqlmodel +from sqlalchemy.engine.reflection import Inspector +from langflow.utils import migration +from sqlalchemy.dialects import sqlite + +# revision identifiers, used by Alembic. +revision: str = '0ae3a2674f32' +down_revision: Union[str, None] = 'd2d475a1f7c0' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + +def upgrade() -> None: + conn = op.get_bind() + # ### commands auto generated by Alembic - please adjust! 
### + inspector = Inspector.from_engine(conn) # type: ignore + + with op.batch_alter_table("vertex_build", schema=None) as batch_op: + if migration.column_exists(table_name="vertex_build", column_name="params", conn=conn): + columns = inspector.get_columns("vertex_build") + params_column = next((column for column in columns if column["name"] == "params"), None) + if params_column is not None and isinstance(params_column["type"], sa.VARCHAR): + batch_op.alter_column( + "params", existing_type=sa.VARCHAR(), type_=sa.Text(), existing_nullable=True + ) + + with op.batch_alter_table("message", schema=None) as batch_op: + if migration.column_exists(table_name="message", column_name="text", conn=conn): + columns = inspector.get_columns("message") + text_column = next((column for column in columns if column["name"] == "text"), None) + if text_column is not None and isinstance(text_column["type"], sa.VARCHAR): + batch_op.alter_column( + "text", existing_type=sa.VARCHAR(), type_=sa.Text(), existing_nullable=True + ) + + # ### end Alembic commands ### + + +def downgrade() -> None: + conn = op.get_bind() + # ### commands auto generated by Alembic - please adjust! ### + inspector = Inspector.from_engine(conn) # type: ignore + with op.batch_alter_table("message", schema=None) as batch_op: + if migration.column_exists(table_name="message", column_name="text", conn=conn): + columns = inspector.get_columns("message") + text_column = next((column for column in columns if column["name"] == "text"), None) + if text_column is not None and isinstance(text_column["type"], sa.VARCHAR): + batch_op.alter_column( + "text", existing_type=sa.VARCHAR(), type_=sa.Text(), existing_nullable=True + ) + + with op.batch_alter_table("vertex_build", schema=None) as batch_op: + if migration.column_exists(table_name="vertex_build", column_name="params", conn=conn): + columns = inspector.get_columns("vertex_build") + params_column = next((column for column in columns if column["name"] == "params"), None) + if params_column is not None and isinstance(params_column["type"], sa.VARCHAR): + batch_op.alter_column( + "params", existing_type=sa.VARCHAR(), type_=sa.Text(), existing_nullable=True + ) + # ### end Alembic commands ### \ No newline at end of file diff --git a/src/backend/base/langflow/alembic/versions/1d90f8a0efe1_update_description_columns_type.py b/src/backend/base/langflow/alembic/versions/1d90f8a0efe1_update_description_columns_type.py new file mode 100644 index 000000000000..1f9426a5bbb4 --- /dev/null +++ b/src/backend/base/langflow/alembic/versions/1d90f8a0efe1_update_description_columns_type.py @@ -0,0 +1,70 @@ +"""Update description columns type + +Revision ID: 4522eb831f5c +Revises: 0d60fcbd4e8e +Create Date: 2024-08-20 11:46:56.266061 + +""" + +from typing import Sequence, Union + +import sqlalchemy as sa +from alembic import op +from langflow.utils import migration +from sqlalchemy.engine.reflection import Inspector + +# revision identifiers, used by Alembic. +revision: str = "4522eb831f5c" +down_revision: Union[str, None] = "0d60fcbd4e8e" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + conn = op.get_bind() + # ### commands auto generated by Alembic - please adjust! 
### + inspector = Inspector.from_engine(conn) # type: ignore + + with op.batch_alter_table("flow", schema=None) as batch_op: + if migration.column_exists(table_name="flow", column_name="description", conn=conn): + columns = inspector.get_columns("flow") + description_column = next((column for column in columns if column["name"] == "description"), None) + if description_column is not None and isinstance(description_column["type"], sa.VARCHAR): + batch_op.alter_column( + "description", existing_type=sa.VARCHAR(), type_=sa.Text(), existing_nullable=True + ) + + with op.batch_alter_table("folder", schema=None) as batch_op: + if migration.column_exists(table_name="folder", column_name="description", conn=conn): + columns = inspector.get_columns("folder") + description_column = next((column for column in columns if column["name"] == "description"), None) + if description_column is not None and isinstance(description_column["type"], sa.VARCHAR): + batch_op.alter_column( + "description", existing_type=sa.VARCHAR(), type_=sa.Text(), existing_nullable=True + ) + + # ### end Alembic commands ### + + +def downgrade() -> None: + conn = op.get_bind() + # ### commands auto generated by Alembic - please adjust! ### + inspector = Inspector.from_engine(conn) # type: ignore + with op.batch_alter_table("folder", schema=None) as batch_op: + if migration.column_exists(table_name="folder", column_name="description", conn=conn): + columns = inspector.get_columns("folder") + description_column = next((column for column in columns if column["name"] == "description"), None) + if description_column is not None and isinstance(description_column["type"], sa.VARCHAR): + batch_op.alter_column( + "description", existing_type=sa.VARCHAR(), type_=sa.Text(), existing_nullable=True + ) + + with op.batch_alter_table("flow", schema=None) as batch_op: + if migration.column_exists(table_name="flow", column_name="description", conn=conn): + columns = inspector.get_columns("flow") + description_column = next((column for column in columns if column["name"] == "description"), None) + if description_column is not None and isinstance(description_column["type"], sa.VARCHAR): + batch_op.alter_column( + "description", existing_type=sa.VARCHAR(), type_=sa.Text(), existing_nullable=True + ) + # ### end Alembic commands ### diff --git a/src/backend/base/langflow/alembic/versions/1eab2c3eb45e_event_error.py b/src/backend/base/langflow/alembic/versions/1eab2c3eb45e_event_error.py new file mode 100644 index 000000000000..7647a71731d2 --- /dev/null +++ b/src/backend/base/langflow/alembic/versions/1eab2c3eb45e_event_error.py @@ -0,0 +1,53 @@ +"""event_error + +Revision ID: 1eab2c3eb45e +Revises: eb5e72293a8e +Create Date: 2024-10-24 12:03:24.118937 + +""" +from typing import Sequence, Union + +import sqlalchemy as sa +from alembic import op +from sqlalchemy.dialects import sqlite +from sqlalchemy.engine.reflection import Inspector + +# revision identifiers, used by Alembic. +revision: str = '1eab2c3eb45e' +down_revision: Union[str, None] = 'eb5e72293a8e' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + conn = op.get_bind() + inspector = Inspector.from_engine(conn) # type: ignore + table_names = inspector.get_table_names() # noqa + column_names = [column["name"] for column in inspector.get_columns("message")] + # ### commands auto generated by Alembic - please adjust! 
### + with op.batch_alter_table('message', schema=None) as batch_op: + if "properties" not in column_names: + batch_op.add_column(sa.Column('properties', sa.JSON(), nullable=True)) + if "category" not in column_names: + batch_op.add_column(sa.Column('category', sa.Text(), nullable=True)) + if "content_blocks" not in column_names: + batch_op.add_column(sa.Column('content_blocks', sa.JSON(), nullable=True)) + + # ### end Alembic commands ### + + +def downgrade() -> None: + conn = op.get_bind() + inspector = Inspector.from_engine(conn) # type: ignore + table_names = inspector.get_table_names() # noqa + column_names = [column["name"] for column in inspector.get_columns("message")] + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('message', schema=None) as batch_op: + if "content_blocks" in column_names: + batch_op.drop_column('content_blocks') + if "category" in column_names: + batch_op.drop_column('category') + if "properties" in column_names: + batch_op.drop_column('properties') + + # ### end Alembic commands ### diff --git a/src/backend/base/langflow/alembic/versions/5ace73a7f223_new_remove_table_upgrade_op.py b/src/backend/base/langflow/alembic/versions/5ace73a7f223_new_remove_table_upgrade_op.py new file mode 100644 index 000000000000..f2b8f9bff1cc --- /dev/null +++ b/src/backend/base/langflow/alembic/versions/5ace73a7f223_new_remove_table_upgrade_op.py @@ -0,0 +1,39 @@ +"""new remove table upgrade op + +Revision ID: 5ace73a7f223 +Revises: 0ae3a2674f32 +Create Date: 2024-10-08 10:59:12.980671 + +""" + +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +import sqlmodel +from sqlalchemy.engine.reflection import Inspector +from langflow.utils import migration +from sqlalchemy.dialects import sqlite +from langflow.utils import migration + +# revision identifiers, used by Alembic. +revision: str = "5ace73a7f223" +down_revision: Union[str, None] = "0ae3a2674f32" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + + with op.batch_alter_table("message", schema=None) as batch_op: + batch_op.alter_column("text", existing_type=sa.TEXT(), nullable=True) + + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table("message", schema=None) as batch_op: + batch_op.alter_column("text", existing_type=sa.TEXT(), nullable=False) + + # ### end Alembic commands ### diff --git a/src/backend/base/langflow/alembic/versions/d2d475a1f7c0_add_tags_column_to_flow.py b/src/backend/base/langflow/alembic/versions/d2d475a1f7c0_add_tags_column_to_flow.py new file mode 100644 index 000000000000..d314930ff96a --- /dev/null +++ b/src/backend/base/langflow/alembic/versions/d2d475a1f7c0_add_tags_column_to_flow.py @@ -0,0 +1,41 @@ +"""add tags column to flow + +Revision ID: d2d475a1f7c0 +Revises: d3dbf656a499 +Create Date: 2024-10-03 13:33:59.517261 + +""" +from typing import Sequence, Union + +import sqlalchemy as sa +import sqlmodel +from alembic import op +from sqlalchemy.engine.reflection import Inspector + +from langflow.utils import migration + +# revision identifiers, used by Alembic. 
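These new revisions guard their DDL behind migration.column_exists so that a re-run of upgrade() or downgrade() cannot fail on an already-applied change. The guard's contract amounts to something like this (a sketch of the expected behavior, not the langflow.utils.migration source):

import sqlalchemy as sa
from sqlalchemy.engine import Connection

def column_exists(table_name: str, column_name: str, conn: Connection) -> bool:
    # Reflect the live table and test membership by column name.
    inspector = sa.inspect(conn)
    return column_name in {column["name"] for column in inspector.get_columns(table_name)}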
+revision: str = 'd2d475a1f7c0' +down_revision: Union[str, None] = 'd3dbf656a499' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + conn = op.get_bind() + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('flow', schema=None) as batch_op: + if not migration.column_exists(table_name='flow', column_name='tags', conn=conn): + batch_op.add_column(sa.Column('tags', sa.JSON(), nullable=True)) + + # ### end Alembic commands ### + + +def downgrade() -> None: + conn = op.get_bind() + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('flow', schema=None) as batch_op: + if migration.column_exists(table_name='flow', column_name='tags', conn=conn): + batch_op.drop_column('tags') + + # ### end Alembic commands ### diff --git a/src/backend/base/langflow/alembic/versions/d3dbf656a499_add_gradient_column_in_flow.py b/src/backend/base/langflow/alembic/versions/d3dbf656a499_add_gradient_column_in_flow.py new file mode 100644 index 000000000000..b40c63d464fb --- /dev/null +++ b/src/backend/base/langflow/alembic/versions/d3dbf656a499_add_gradient_column_in_flow.py @@ -0,0 +1,41 @@ +"""add gradient column in Flow + +Revision ID: d3dbf656a499 +Revises: e5a65ecff2cd +Create Date: 2024-09-27 09:35:19.424089 + +""" +from typing import Sequence, Union + +import sqlalchemy as sa +import sqlmodel +from alembic import op +from sqlalchemy.engine.reflection import Inspector + +from langflow.utils import migration + +# revision identifiers, used by Alembic. +revision: str = 'd3dbf656a499' +down_revision: Union[str, None] = 'e5a65ecff2cd' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + conn = op.get_bind() + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('flow', schema=None) as batch_op: + if not migration.column_exists(table_name='flow', column_name='gradient', conn=conn): + batch_op.add_column(sa.Column('gradient', sqlmodel.sql.sqltypes.AutoString(), nullable=True)) + + # ### end Alembic commands ### + + +def downgrade() -> None: + conn = op.get_bind() + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('flow', schema=None) as batch_op: + if migration.column_exists(table_name='flow', column_name='gradient', conn=conn): + batch_op.drop_column('gradient') + + # ### end Alembic commands ### diff --git a/src/backend/base/langflow/alembic/versions/e5a65ecff2cd_nullable_in_vertex_build.py b/src/backend/base/langflow/alembic/versions/e5a65ecff2cd_nullable_in_vertex_build.py new file mode 100644 index 000000000000..099361256714 --- /dev/null +++ b/src/backend/base/langflow/alembic/versions/e5a65ecff2cd_nullable_in_vertex_build.py @@ -0,0 +1,49 @@ +"""nullable in vertex build + +Revision ID: e5a65ecff2cd +Revises: 4522eb831f5c +Create Date: 2024-09-02 14:55:19.707355 + +""" + +from typing import Sequence, Union + +import sqlalchemy as sa +from alembic import op +from sqlalchemy.engine.reflection import Inspector + +from langflow.utils import migration + +# revision identifiers, used by Alembic. +revision: str = "e5a65ecff2cd" +down_revision: Union[str, None] = "4522eb831f5c" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + conn = op.get_bind() + # ### commands auto generated by Alembic - please adjust! 
### + inspector = Inspector.from_engine(conn) # type: ignore + with op.batch_alter_table("vertex_build", schema=None) as batch_op: + if migration.column_exists(table_name="vertex_build", column_name="id", conn=conn): + columns = inspector.get_columns("vertex_build") + id_column = next((column for column in columns if column["name"] == "id"), None) + if id_column is not None and id_column["nullable"]: + batch_op.alter_column("id", existing_type=sa.VARCHAR(), nullable=False) + + # ### end Alembic commands ### + + +def downgrade() -> None: + conn = op.get_bind() + # ### commands auto generated by Alembic - please adjust! ### + inspector = Inspector.from_engine(conn) # type: ignore + with op.batch_alter_table("vertex_build", schema=None) as batch_op: + if migration.column_exists(table_name="vertex_build", column_name="id", conn=conn): + columns = inspector.get_columns("vertex_build") + id_column = next((column for column in columns if column["name"] == "id"), None) + if id_column is not None and not id_column["nullable"]: + batch_op.alter_column("id", existing_type=sa.VARCHAR(), nullable=True) + + # ### end Alembic commands ### diff --git a/src/backend/base/langflow/alembic/versions/eb5e72293a8e_add_error_and_edit_flags_to_message.py b/src/backend/base/langflow/alembic/versions/eb5e72293a8e_add_error_and_edit_flags_to_message.py new file mode 100644 index 000000000000..1939cd0496d7 --- /dev/null +++ b/src/backend/base/langflow/alembic/versions/eb5e72293a8e_add_error_and_edit_flags_to_message.py @@ -0,0 +1,49 @@ +"""Add error and edit flags to message + +Revision ID: eb5e72293a8e +Revises: 5ace73a7f223 +Create Date: 2024-09-19 16:18:50.828648 + +""" + +from typing import Sequence, Union + +import sqlalchemy as sa +from alembic import op +from sqlalchemy.engine.reflection import Inspector + +# revision identifiers, used by Alembic. +revision: str = "eb5e72293a8e" +down_revision: Union[str, None] = "5ace73a7f223" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + conn = op.get_bind() + inspector = Inspector.from_engine(conn) # type: ignore + table_names = inspector.get_table_names() # noqa + column_names = [column["name"] for column in inspector.get_columns("message")] + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table("message", schema=None) as batch_op: + if "error" not in column_names: + batch_op.add_column(sa.Column("error", sa.Boolean(), nullable=False, server_default=sa.false())) + if "edit" not in column_names: + batch_op.add_column(sa.Column("edit", sa.Boolean(), nullable=False, server_default=sa.false())) + + # ### end Alembic commands ### + + +def downgrade() -> None: + conn = op.get_bind() + inspector = Inspector.from_engine(conn) # type: ignore + table_names = inspector.get_table_names() # noqa + column_names = [column["name"] for column in inspector.get_columns("message")] + # ### commands auto generated by Alembic - please adjust! 
### + with op.batch_alter_table("message", schema=None) as batch_op: + if "edit" in column_names: + batch_op.drop_column("edit") + if "error" in column_names: + batch_op.drop_column("error") + + # ### end Alembic commands ### diff --git a/src/backend/base/langflow/api/__init__.py b/src/backend/base/langflow/api/__init__.py index 64d72c61d36c..8150f0d9cac5 100644 --- a/src/backend/base/langflow/api/__init__.py +++ b/src/backend/base/langflow/api/__init__.py @@ -1,5 +1,5 @@ -from langflow.api.router import router from langflow.api.health_check_router import health_check_router from langflow.api.log_router import log_router +from langflow.api.router import router -__all__ = ["router", "health_check_router", "log_router"] +__all__ = ["health_check_router", "log_router", "router"] diff --git a/src/backend/base/langflow/api/health_check_router.py b/src/backend/base/langflow/api/health_check_router.py index 9e3cdc8421e8..8479fc736010 100644 --- a/src/backend/base/langflow/api/health_check_router.py +++ b/src/backend/base/langflow/api/health_check_router.py @@ -1,12 +1,13 @@ import uuid -from fastapi import APIRouter, Depends, HTTPException, status +from fastapi import APIRouter, HTTPException, status from loguru import logger from pydantic import BaseModel -from sqlmodel import Session, select +from sqlmodel import select +from langflow.api.utils import DbSession from langflow.services.database.models.flow import Flow -from langflow.services.deps import get_chat_service, get_session +from langflow.services.deps import get_chat_service health_check_router = APIRouter(tags=["Health Check"]) @@ -16,7 +17,8 @@ class HealthResponse(BaseModel): chat: str = "error check the server logs" db: str = "error check the server logs" """ - Do not send exceptions and detailed error messages to the client because it might contain credentials and other sensitive server information. + Do not send exceptions and detailed error messages to the client because it might contain credentials and other + sensitive server information. 
""" def has_error(self) -> bool: @@ -34,10 +36,10 @@ async def health(): # /health_check evaluates key services # It's a reliable health check for a langflow instance -@health_check_router.get("/health_check", response_model=HealthResponse) +@health_check_router.get("/health_check") async def health_check( - session: Session = Depends(get_session), -): + session: DbSession, +) -> HealthResponse: response = HealthResponse() # use a fixed valid UUId that UUID collision is very unlikely user_id = "da93c2bd-c857-4b10-8c8c-60988103320f" @@ -46,19 +48,18 @@ async def health_check( stmt = select(Flow).where(Flow.id == uuid.uuid4()) session.exec(stmt).first() response.db = "ok" - except Exception as e: - logger.exception(e) + except Exception: # noqa: BLE001 + logger.exception("Error checking database") try: chat = get_chat_service() await chat.set_cache("health_check", str(user_id)) await chat.get_cache("health_check") response.chat = "ok" - except Exception as e: - logger.exception(e) + except Exception: # noqa: BLE001 + logger.exception("Error checking chat service") if response.has_error(): raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=response.model_dump()) - else: - response.status = "ok" - return response + response.status = "ok" + return response diff --git a/src/backend/base/langflow/api/log_router.py b/src/backend/base/langflow/api/log_router.py index 4e4aa9a7dbad..3b3af73cd9b2 100644 --- a/src/backend/base/langflow/api/log_router.py +++ b/src/backend/base/langflow/api/log_router.py @@ -1,21 +1,25 @@ import asyncio import json -from typing import List, Any +from http import HTTPStatus +from typing import Annotated, Any -from fastapi import APIRouter, Query, HTTPException, Request +from fastapi import APIRouter, HTTPException, Query, Request from fastapi.responses import JSONResponse, StreamingResponse -from http import HTTPStatus + from langflow.logging.logger import log_buffer log_router = APIRouter(tags=["Log"]) +NUMBER_OF_NOT_SENT_BEFORE_KEEPALIVE = 5 + + async def event_generator(request: Request): - global log_buffer + global log_buffer # noqa: PLW0602 last_read_item = None current_not_sent = 0 while not await request.is_disconnected(): - to_write: List[Any] = [] + to_write: list[Any] = [] with log_buffer.get_write_lock(): if last_read_item is None: last_read_item = log_buffer.buffer[len(log_buffer.buffer) - 1] @@ -37,10 +41,10 @@ async def event_generator(request: Request): last_read_item = item if to_write: for ts, msg in to_write: - yield f"{json.dumps({ts:msg})}\n\n" + yield f"{json.dumps({ts: msg})}\n\n" else: current_not_sent += 1 - if current_not_sent == 5: + if current_not_sent == NUMBER_OF_NOT_SENT_BEFORE_KEEPALIVE: current_not_sent = 0 yield "keepalive\n\n" @@ -51,12 +55,12 @@ async def event_generator(request: Request): async def stream_logs( request: Request, ): + """HTTP/2 Server-Sent-Event (SSE) endpoint for streaming logs. + + It establishes a long-lived connection to the server and receives log messages in real-time. + The client should use the header "Accept: text/event-stream". 
""" - HTTP/2 Server-Sent-Event (SSE) endpoint for streaming logs - it establishes a long-lived connection to the server and receives log messages in real-time - the client should use the head "Accept: text/event-stream" - """ - global log_buffer + global log_buffer # noqa: PLW0602 if log_buffer.enabled() is False: raise HTTPException( status_code=HTTPStatus.NOT_IMPLEMENTED, @@ -68,11 +72,11 @@ async def stream_logs( @log_router.get("/logs") async def logs( - lines_before: int = Query(0, description="The number of logs before the timestamp or the last log"), - lines_after: int = Query(0, description="The number of logs after the timestamp"), - timestamp: int = Query(0, description="The timestamp to start getting logs from"), + lines_before: Annotated[int, Query(description="The number of logs before the timestamp or the last log")] = 0, + lines_after: Annotated[int, Query(description="The number of logs after the timestamp")] = 0, + timestamp: Annotated[int, Query(description="The timestamp to start getting logs from")] = 0, ): - global log_buffer + global log_buffer # noqa: PLW0602 if log_buffer.enabled() is False: raise HTTPException( status_code=HTTPStatus.NOT_IMPLEMENTED, @@ -89,15 +93,11 @@ async def logs( status_code=HTTPStatus.BAD_REQUEST, detail="Timestamp is required when requesting logs after the timestamp", ) - if lines_before <= 0: - content = log_buffer.get_last_n(10) - else: - content = log_buffer.get_last_n(lines_before) + content = log_buffer.get_last_n(10) if lines_before <= 0 else log_buffer.get_last_n(lines_before) + elif lines_before > 0: + content = log_buffer.get_before_timestamp(timestamp=timestamp, lines=lines_before) + elif lines_after > 0: + content = log_buffer.get_after_timestamp(timestamp=timestamp, lines=lines_after) else: - if lines_before > 0: - content = log_buffer.get_before_timestamp(timestamp=timestamp, lines=lines_before) - elif lines_after > 0: - content = log_buffer.get_after_timestamp(timestamp=timestamp, lines=lines_after) - else: - content = log_buffer.get_before_timestamp(timestamp=timestamp, lines=10) + content = log_buffer.get_before_timestamp(timestamp=timestamp, lines=10) return JSONResponse(content=content) diff --git a/src/backend/base/langflow/api/router.py b/src/backend/base/langflow/api/router.py index 8e5ce927f385..d2ce1905ada0 100644 --- a/src/backend/base/langflow/api/router.py +++ b/src/backend/base/langflow/api/router.py @@ -7,14 +7,14 @@ endpoints_router, files_router, flows_router, + folders_router, login_router, monitor_router, + starter_projects_router, store_router, users_router, validate_router, variables_router, - folders_router, - starter_projects_router, ) router = APIRouter( diff --git a/src/backend/base/langflow/api/utils.py b/src/backend/base/langflow/api/utils.py index 6c143b3f0a23..f7c34661b937 100644 --- a/src/backend/base/langflow/api/utils.py +++ b/src/backend/base/langflow/api/utils.py @@ -1,22 +1,39 @@ +from __future__ import annotations + import uuid -import warnings -from typing import TYPE_CHECKING, Any, Optional, Dict +from datetime import timedelta +from typing import TYPE_CHECKING, Annotated, Any -from fastapi import HTTPException +from fastapi import Depends, HTTPException, Query +from fastapi_pagination import Params +from loguru import logger +from sqlalchemy import delete from sqlmodel import Session +from sqlmodel.ext.asyncio.session import AsyncSession from langflow.graph.graph.base import Graph -from langflow.services.chat.service import ChatService +from langflow.services.auth.utils import 
get_current_active_user +from langflow.services.database.models import User from langflow.services.database.models.flow import Flow -from langflow.services.store.schema import StoreComponentCreate +from langflow.services.database.models.transactions.model import TransactionTable +from langflow.services.database.models.vertex_builds.model import VertexBuildTable +from langflow.services.deps import get_async_session, get_session from langflow.services.store.utils import get_lf_version_from_pypi if TYPE_CHECKING: - from langflow.services.database.models.flow.model import Flow + from langflow.services.chat.service import ChatService + from langflow.services.store.schema import StoreComponentCreate API_WORDS = ["api", "key", "token"] +MAX_PAGE_SIZE = 50 +MIN_PAGE_SIZE = 1 + +CurrentActiveUser = Annotated[User, Depends(get_current_active_user)] +DbSession = Annotated[Session, Depends(get_session)] +AsyncDbSession = Annotated[AsyncSession, Depends(get_async_session)] + def has_api_terms(word: str): return "api" in word and ("key" in word or ("token" in word and "tokens" not in word)) @@ -37,9 +54,8 @@ def remove_api_keys(flow: dict): def build_input_keys_response(langchain_object, artifacts): """Build the input keys response.""" - input_keys_response = { - "input_keys": {key: "" for key in langchain_object.input_keys}, + "input_keys": dict.fromkeys(langchain_object.input_keys, ""), "memory_keys": [], "handle_keys": artifacts.get("handle_keys", []), } @@ -66,7 +82,7 @@ def build_input_keys_response(langchain_object, artifacts): return input_keys_response -def validate_is_component(flows: list["Flow"]): +def validate_is_component(flows: list[Flow]): for flow in flows: if not flow.data or flow.is_component is not None: continue @@ -84,17 +100,19 @@ def get_is_component_from_data(data: dict): return data.get("is_component") -async def check_langflow_version(component: StoreComponentCreate): - from langflow.version.version import __version__ as current_version # type: ignore +def check_langflow_version(component: StoreComponentCreate) -> None: + from langflow.utils.version import get_version_info + + __version__ = get_version_info()["version"] if not component.last_tested_version: - component.last_tested_version = current_version + component.last_tested_version = __version__ langflow_version = get_lf_version_from_pypi() if langflow_version is None: raise HTTPException(status_code=500, detail="Unable to verify the latest version of Langflow") - elif langflow_version != component.last_tested_version: - warnings.warn( + if langflow_version != component.last_tested_version: + logger.warning( f"Your version of Langflow ({component.last_tested_version}) is outdated. " f"Please update to the latest version ({langflow_version}) and try again." 
) @@ -107,29 +125,32 @@ def format_elapsed_time(elapsed_time: float) -> str: - Less than 1 minute: returns seconds rounded to 2 decimals - 1 minute or more: returns minutes and seconds """ - if elapsed_time < 1: - milliseconds = int(round(elapsed_time * 1000)) + delta = timedelta(seconds=elapsed_time) + if delta < timedelta(seconds=1): + milliseconds = round(delta / timedelta(milliseconds=1)) return f"{milliseconds} ms" - elif elapsed_time < 60: + + if delta < timedelta(minutes=1): seconds = round(elapsed_time, 2) unit = "second" if seconds == 1 else "seconds" return f"{seconds} {unit}" - else: - minutes = int(elapsed_time // 60) - seconds = round(elapsed_time % 60, 2) - minutes_unit = "minute" if minutes == 1 else "minutes" - seconds_unit = "second" if seconds == 1 else "seconds" - return f"{minutes} {minutes_unit}, {seconds} {seconds_unit}" + + minutes = delta // timedelta(minutes=1) + seconds = round((delta - timedelta(minutes=minutes)).total_seconds(), 2) + minutes_unit = "minute" if minutes == 1 else "minutes" + seconds_unit = "second" if seconds == 1 else "seconds" + return f"{minutes} {minutes_unit}, {seconds} {seconds_unit}" -async def build_graph_from_data(flow_id: str, payload: Dict, **kwargs): +async def build_graph_from_data(flow_id: str, payload: dict, **kwargs): """Build and cache the graph.""" graph = Graph.from_payload(payload, flow_id, **kwargs) - for vertex_id in graph._has_session_id_vertices: + for vertex_id in graph.has_session_id_vertices: vertex = graph.get_vertex(vertex_id) if vertex is None: - raise ValueError(f"Vertex {vertex_id} not found") - if not vertex._raw_params.get("session_id"): + msg = f"Vertex {vertex_id} not found" + raise ValueError(msg) + if not vertex.raw_params.get("session_id"): vertex.update_raw_params({"session_id": flow_id}, overwrite=True) run_id = uuid.uuid4() @@ -141,13 +162,14 @@ async def build_graph_from_data(flow_id: str, payload: Dict, **kwargs): async def build_graph_from_db_no_cache(flow_id: str, session: Session): """Build and cache the graph.""" - flow: Optional[Flow] = session.get(Flow, flow_id) + flow: Flow | None = session.get(Flow, flow_id) if not flow or not flow.data: - raise ValueError("Invalid flow ID") + msg = "Invalid flow ID" + raise ValueError(msg) return await build_graph_from_data(flow_id, flow.data, flow_name=flow.name, user_id=str(flow.user_id)) -async def build_graph_from_db(flow_id: str, session: Session, chat_service: "ChatService"): +async def build_graph_from_db(flow_id: str, session: Session, chat_service: ChatService): graph = await build_graph_from_db_no_cache(flow_id, session) await chat_service.set_cache(flow_id, graph) return graph @@ -155,7 +177,7 @@ async def build_graph_from_db(flow_id: str, session: Session, chat_service: "Cha async def build_and_cache_graph_from_data( flow_id: str, - chat_service: "ChatService", + chat_service: ChatService, graph_data: dict, ): # -> Graph | Any: """Build and cache the graph.""" @@ -189,8 +211,7 @@ def format_exception_message(exc: Exception) -> str: def get_top_level_vertices(graph, vertices_ids): - """ - Retrieves the top-level vertices from the given graph based on the provided vertex IDs. + """Retrieves the top-level vertices from the given graph based on the provided vertex IDs. Args: graph (Graph): The graph object containing the vertices. @@ -222,20 +243,43 @@ def get_suggestion_message(outdated_components: list[str]) -> str: count = len(outdated_components) if count == 0: return "The flow contains no outdated components." 
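The timedelta rewrite of format_elapsed_time above keeps the same three output shapes; restating it with a few checks makes the boundaries concrete:

from datetime import timedelta

def format_elapsed_time(elapsed_time: float) -> str:
    # Same logic as the diff above, reproduced for a quick self-check.
    delta = timedelta(seconds=elapsed_time)
    if delta < timedelta(seconds=1):
        milliseconds = round(delta / timedelta(milliseconds=1))
        return f"{milliseconds} ms"
    if delta < timedelta(minutes=1):
        seconds = round(elapsed_time, 2)
        unit = "second" if seconds == 1 else "seconds"
        return f"{seconds} {unit}"
    minutes = delta // timedelta(minutes=1)
    seconds = round((delta - timedelta(minutes=minutes)).total_seconds(), 2)
    minutes_unit = "minute" if minutes == 1 else "minutes"
    seconds_unit = "second" if seconds == 1 else "seconds"
    return f"{minutes} {minutes_unit}, {seconds} {seconds_unit}"

assert format_elapsed_time(0.5) == "500 ms"
assert format_elapsed_time(1.0) == "1.0 second"
assert format_elapsed_time(75.25) == "1 minute, 15.25 seconds"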
- elif count == 1: - return f"The flow contains 1 outdated component. We recommend updating the following component: {outdated_components[0]}." - else: - components = ", ".join(outdated_components) - return f"The flow contains {count} outdated components. We recommend updating the following components: {components}." + if count == 1: + return ( + "The flow contains 1 outdated component. " + f"We recommend updating the following component: {outdated_components[0]}." + ) + components = ", ".join(outdated_components) + return ( + f"The flow contains {count} outdated components. " + f"We recommend updating the following components: {components}." + ) def parse_value(value: Any, input_type: str) -> Any: """Helper function to parse the value based on input type.""" if value == "": return value - elif input_type == "IntInput": + if input_type == "IntInput": return int(value) if value is not None else None - elif input_type == "FloatInput": + if input_type == "FloatInput": return float(value) if value is not None else None - else: - return value + return value + + +async def cascade_delete_flow(session: AsyncSession, flow_id: uuid.UUID) -> None: + try: + await session.exec(delete(TransactionTable).where(TransactionTable.flow_id == flow_id)) + await session.exec(delete(VertexBuildTable).where(VertexBuildTable.flow_id == flow_id)) + await session.exec(delete(Flow).where(Flow.id == flow_id)) + except Exception as e: + msg = f"Unable to cascade delete flow: {flow_id}" + raise RuntimeError(msg, e) from e + + +def custom_params( + page: int | None = Query(None), + size: int | None = Query(None), +): + if page is None and size is None: + return None + return Params(page=page or MIN_PAGE_SIZE, size=size or MAX_PAGE_SIZE) diff --git a/src/backend/base/langflow/api/v1/__init__.py b/src/backend/base/langflow/api/v1/__init__.py index 25b042a19b10..48383770ab77 100644 --- a/src/backend/base/langflow/api/v1/__init__.py +++ b/src/backend/base/langflow/api/v1/__init__.py @@ -3,27 +3,27 @@ from langflow.api.v1.endpoints import router as endpoints_router from langflow.api.v1.files import router as files_router from langflow.api.v1.flows import router as flows_router +from langflow.api.v1.folders import router as folders_router from langflow.api.v1.login import router as login_router from langflow.api.v1.monitor import router as monitor_router +from langflow.api.v1.starter_projects import router as starter_projects_router from langflow.api.v1.store import router as store_router from langflow.api.v1.users import router as users_router from langflow.api.v1.validate import router as validate_router from langflow.api.v1.variable import router as variables_router -from langflow.api.v1.folders import router as folders_router -from langflow.api.v1.starter_projects import router as starter_projects_router __all__ = [ + "api_key_router", "chat_router", "endpoints_router", - "store_router", - "validate_router", + "files_router", "flows_router", - "users_router", - "api_key_router", + "folders_router", "login_router", - "variables_router", "monitor_router", - "files_router", - "folders_router", "starter_projects_router", + "store_router", + "users_router", + "validate_router", + "variables_router", ] diff --git a/src/backend/base/langflow/api/v1/api_key.py b/src/backend/base/langflow/api/v1/api_key.py index fcf7f27861b8..09b542fd09d7 100644 --- a/src/backend/base/langflow/api/v1/api_key.py +++ b/src/backend/base/langflow/api/v1/api_key.py @@ -2,16 +2,15 @@ from uuid import UUID from fastapi import APIRouter, Depends, 
HTTPException, Response -from sqlmodel import Session +from langflow.api.utils import AsyncDbSession, CurrentActiveUser, DbSession from langflow.api.v1.schemas import ApiKeyCreateRequest, ApiKeysResponse from langflow.services.auth import utils as auth_utils # Assuming you have these methods in your service layer from langflow.services.database.models.api_key.crud import create_api_key, delete_api_key, get_api_keys from langflow.services.database.models.api_key.model import ApiKeyCreate, UnmaskedApiKeyRead -from langflow.services.database.models.user.model import User -from langflow.services.deps import get_session, get_settings_service +from langflow.services.deps import get_settings_service if TYPE_CHECKING: pass @@ -19,54 +18,53 @@ router = APIRouter(tags=["APIKey"], prefix="/api_key") -@router.get("/", response_model=ApiKeysResponse) -def get_api_keys_route( - db: Session = Depends(get_session), - current_user: User = Depends(auth_utils.get_current_active_user), -): +@router.get("/") +async def get_api_keys_route( + db: AsyncDbSession, + current_user: CurrentActiveUser, +) -> ApiKeysResponse: try: user_id = current_user.id - keys = get_api_keys(db, user_id) + keys = await get_api_keys(db, user_id) return ApiKeysResponse(total_count=len(keys), user_id=user_id, api_keys=keys) except Exception as exc: raise HTTPException(status_code=400, detail=str(exc)) from exc -@router.post("/", response_model=UnmaskedApiKeyRead) -def create_api_key_route( +@router.post("/") +async def create_api_key_route( req: ApiKeyCreate, - current_user: User = Depends(auth_utils.get_current_active_user), - db: Session = Depends(get_session), -): + current_user: CurrentActiveUser, + db: AsyncDbSession, +) -> UnmaskedApiKeyRead: try: user_id = current_user.id - return create_api_key(db, req, user_id=user_id) + return await create_api_key(db, req, user_id=user_id) except Exception as e: raise HTTPException(status_code=400, detail=str(e)) from e -@router.delete("/{api_key_id}") -def delete_api_key_route( +@router.delete("/{api_key_id}", dependencies=[Depends(auth_utils.get_current_active_user)]) +async def delete_api_key_route( api_key_id: UUID, - current_user=Depends(auth_utils.get_current_active_user), - db: Session = Depends(get_session), + db: AsyncDbSession, ): try: - delete_api_key(db, api_key_id) - return {"detail": "API Key deleted"} + await delete_api_key(db, api_key_id) except Exception as e: raise HTTPException(status_code=400, detail=str(e)) from e + return {"detail": "API Key deleted"} @router.post("/store") -def save_store_api_key( +async def save_store_api_key( api_key_request: ApiKeyCreateRequest, response: Response, - current_user: User = Depends(auth_utils.get_current_active_user), - db: Session = Depends(get_session), - settings_service=Depends(get_settings_service), + current_user: CurrentActiveUser, + db: DbSession, ): + settings_service = get_settings_service() auth_settings = settings_service.auth_settings try: @@ -88,19 +86,7 @@ def save_store_api_key( domain=auth_settings.COOKIE_DOMAIN, ) - return {"detail": "API Key saved"} except Exception as e: raise HTTPException(status_code=400, detail=str(e)) from e - -@router.delete("/store") -def delete_store_api_key( - current_user: User = Depends(auth_utils.get_current_active_user), - db: Session = Depends(get_session), -): - try: - current_user.store_api_key = None - db.commit() - return {"detail": "API Key deleted"} - except Exception as e: - raise HTTPException(status_code=400, detail=str(e)) from e + return {"detail": "API Key saved"} diff --git 
a/src/backend/base/langflow/api/v1/base.py b/src/backend/base/langflow/api/v1/base.py index 879d684cde5a..879637b88935 100644 --- a/src/backend/base/langflow/api/v1/base.py +++ b/src/backend/base/langflow/api/v1/base.py @@ -1,5 +1,3 @@ -from typing import Optional - from pydantic import BaseModel, field_validator, model_serializer from langflow.template.frontend_node.base import FrontendNode @@ -14,7 +12,7 @@ class Code(BaseModel): class FrontendNodeRequest(FrontendNode): - template: dict # type: ignore + template: dict # type: ignore[assignment] @model_serializer(mode="wrap") def serialize_model(self, handler): @@ -26,8 +24,8 @@ def serialize_model(self, handler): class ValidatePromptRequest(BaseModel): name: str template: str - custom_fields: Optional[dict] = None - frontend_node: Optional[FrontendNodeRequest] = None + custom_fields: dict | None = None + frontend_node: FrontendNodeRequest | None = None # Build ValidationResponse class for {"imports": {"errors": []}, "function": {"errors": []}} @@ -49,4 +47,4 @@ def validate_function(cls, v): class PromptValidationResponse(BaseModel): input_variables: list # object return for tweak call - frontend_node: Optional[FrontendNodeRequest] = None + frontend_node: FrontendNodeRequest | None = None diff --git a/src/backend/base/langflow/api/v1/callback.py b/src/backend/base/langflow/api/v1/callback.py index 6a60ea037dc9..527241a64eb3 100644 --- a/src/backend/base/langflow/api/v1/callback.py +++ b/src/backend/base/langflow/api/v1/callback.py @@ -1,12 +1,16 @@ -from typing import TYPE_CHECKING, Any, Dict, List, Optional +from __future__ import annotations + +from typing import TYPE_CHECKING, Any from uuid import UUID + +from langchain_core.agents import AgentAction, AgentFinish from langchain_core.callbacks.base import AsyncCallbackHandler from loguru import logger +from typing_extensions import override from langflow.api.v1.schemas import ChatResponse, PromptResponse from langflow.services.deps import get_chat_service, get_socket_service from langflow.utils.util import remove_ansi_escape_codes -from langchain_core.agents import AgentAction, AgentFinish if TYPE_CHECKING: from langflow.services.socket.service import SocketIOService @@ -24,15 +28,17 @@ def ignore_chain(self) -> bool: def __init__(self, session_id: str): self.chat_service = get_chat_service() self.client_id = session_id - self.socketio_service: "SocketIOService" = get_socket_service() + self.socketio_service: SocketIOService = get_socket_service() self.sid = session_id # self.socketio_service = self.chat_service.active_connections[self.client_id] - async def on_llm_new_token(self, token: str, **kwargs: Any) -> None: + @override + async def on_llm_new_token(self, token: str, **kwargs: Any) -> None: # type: ignore[misc] resp = ChatResponse(message=token, type="stream", intermediate_steps="") await self.socketio_service.emit_token(to=self.sid, data=resp.model_dump()) - async def on_tool_start(self, serialized: Dict[str, Any], input_str: str, **kwargs: Any) -> Any: + @override + async def on_tool_start(self, serialized: dict[str, Any], input_str: str, **kwargs: Any) -> Any: # type: ignore[misc] """Run when tool starts running.""" resp = ChatResponse( message="", @@ -64,28 +70,31 @@ async def on_tool_end(self, output: str, **kwargs: Any) -> Any: ) for word in rest_of_output ] - resps = [resp] + rest_of_resps + resps = [resp, *rest_of_resps] # Try to send the response, handle potential errors. 
try: # This is to emulate the stream of tokens for resp in resps: await self.socketio_service.emit_token(to=self.sid, data=resp.model_dump()) - except Exception as exc: - logger.error(f"Error sending response: {exc}") + except Exception: # noqa: BLE001 + logger.exception("Error sending response") async def on_tool_error( self, error: BaseException, *, run_id: UUID, - parent_run_id: Optional[UUID] = None, - tags: Optional[List[str]] = None, + parent_run_id: UUID | None = None, + tags: list[str] | None = None, **kwargs: Any, ) -> None: """Run when tool errors.""" - async def on_text(self, text: str, **kwargs: Any) -> Any: + @override + async def on_text( # type: ignore[misc] + self, text: str, **kwargs: Any + ) -> Any: """Run on arbitrary text.""" # This runs when first sending the prompt # to the LLM, adding it will send the final prompt @@ -98,7 +107,10 @@ async def on_text(self, text: str, **kwargs: Any) -> Any: ) await self.socketio_service.emit_message(to=self.sid, data=resp.model_dump()) - async def on_agent_action(self, action: AgentAction, **kwargs: Any): + @override + async def on_agent_action( # type: ignore[misc] + self, action: AgentAction, **kwargs: Any + ) -> None: log = f"Thought: {action.log}" # if there are line breaks, split them and send them # as separate messages @@ -111,7 +123,10 @@ async def on_agent_action(self, action: AgentAction, **kwargs: Any): resp = ChatResponse(message="", type="stream", intermediate_steps=log) await self.socketio_service.emit_token(to=self.sid, data=resp.model_dump()) - async def on_agent_finish(self, finish: AgentFinish, **kwargs: Any) -> Any: + @override + async def on_agent_finish( # type: ignore[misc] + self, finish: AgentFinish, **kwargs: Any + ) -> Any: """Run on agent end.""" resp = ChatResponse( message="", diff --git a/src/backend/base/langflow/api/v1/chat.py b/src/backend/base/langflow/api/v1/chat.py index a9743aec50fe..ce7dfc57974d 100644 --- a/src/backend/base/langflow/api/v1/chat.py +++ b/src/backend/base/langflow/api/v1/chat.py @@ -1,12 +1,14 @@ +from __future__ import annotations + import asyncio import json import time import traceback import typing import uuid -from typing import TYPE_CHECKING, Annotated, Optional +from typing import TYPE_CHECKING, Annotated -from fastapi import APIRouter, BackgroundTasks, Body, Depends, HTTPException +from fastapi import APIRouter, BackgroundTasks, Body, HTTPException from fastapi.responses import StreamingResponse from loguru import logger from starlette.background import BackgroundTask @@ -14,6 +16,8 @@ from starlette.types import Receive from langflow.api.utils import ( + CurrentActiveUser, + DbSession, build_and_cache_graph_from_data, build_graph_from_data, build_graph_from_db, @@ -31,19 +35,18 @@ VertexBuildResponse, VerticesOrderResponse, ) -from langflow.exceptions.component import ComponentBuildException +from langflow.events.event_manager import EventManager, create_default_event_manager +from langflow.exceptions.component import ComponentBuildError from langflow.graph.graph.base import Graph from langflow.graph.utils import log_vertex_build from langflow.schema.schema import OutputValue -from langflow.services.auth.utils import get_current_active_user +from langflow.services.cache.utils import CacheMiss from langflow.services.chat.service import ChatService -from langflow.services.deps import get_chat_service, get_session, get_session_service, get_telemetry_service +from langflow.services.deps import get_chat_service, get_session, get_telemetry_service from 
langflow.services.telemetry.schema import ComponentPayload, PlaygroundPayload -from langflow.services.telemetry.service import TelemetryService if TYPE_CHECKING: from langflow.graph.vertex.types import InterfaceVertex - from langflow.services.session.service import SessionService router = APIRouter(tags=["Chat"]) @@ -57,33 +60,31 @@ async def try_running_celery_task(vertex, user_id): task = build_vertex.delay(vertex) vertex.task_id = task.id - except Exception as exc: - logger.debug(f"Error running task in celery: {exc}") + except Exception: # noqa: BLE001 + logger.opt(exception=True).debug("Error running task in celery") vertex.task_id = None await vertex.build(user_id=user_id) return vertex -@router.post("/build/{flow_id}/vertices", response_model=VerticesOrderResponse) +@router.post("/build/{flow_id}/vertices") async def retrieve_vertices_order( + *, flow_id: uuid.UUID, background_tasks: BackgroundTasks, - data: Optional[Annotated[Optional[FlowDataRequest], Body(embed=True)]] = None, - stop_component_id: Optional[str] = None, - start_component_id: Optional[str] = None, - chat_service: "ChatService" = Depends(get_chat_service), - session=Depends(get_session), - telemetry_service: "TelemetryService" = Depends(get_telemetry_service), -): - """ - Retrieve the vertices order for a given flow. + data: Annotated[FlowDataRequest | None, Body(embed=True)] | None = None, + stop_component_id: str | None = None, + start_component_id: str | None = None, + session: DbSession, +) -> VerticesOrderResponse: + """Retrieve the vertices order for a given flow. Args: flow_id (str): The ID of the flow. + background_tasks (BackgroundTasks): The background tasks. data (Optional[FlowDataRequest], optional): The flow data. Defaults to None. stop_component_id (str, optional): The ID of the stop component. Defaults to None. start_component_id (str, optional): The ID of the start component. Defaults to None. - chat_service (ChatService, optional): The chat service dependency. Defaults to Depends(get_chat_service). session (Session, optional): The session dependency. Defaults to Depends(get_session). Returns: @@ -92,6 +93,8 @@ async def retrieve_vertices_order( Raises: HTTPException: If there is an error checking the build status. 
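For orientation, a hypothetical request against this route (the /api/v1 prefix, port, and flow id are placeholders, and authentication is omitted):

import httpx

flow_id = "00000000-0000-0000-0000-000000000000"  # placeholder flow id
resp = httpx.post(f"http://localhost:7860/api/v1/build/{flow_id}/vertices", json={})
resp.raise_for_status()
order = resp.json()
# Expected keys per VerticesOrderResponse: ids, run_id, vertices_to_run
print(order["ids"], order["run_id"], order["vertices_to_run"])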
""" + chat_service = get_chat_service() + telemetry_service = get_telemetry_service() start_time = time.perf_counter() components_count = None try: @@ -114,9 +117,9 @@ async def retrieve_vertices_order( background_tasks.add_task( telemetry_service.log_package_playground, PlaygroundPayload( - playgroundSeconds=int(time.perf_counter() - start_time), - playgroundComponentCount=components_count, - playgroundSuccess=True, + playground_seconds=int(time.perf_counter() - start_time), + playground_component_count=components_count, + playground_success=True, ), ) return VerticesOrderResponse(ids=graph.first_layer, run_id=graph.run_id, vertices_to_run=vertices_to_run) @@ -124,35 +127,38 @@ async def retrieve_vertices_order( background_tasks.add_task( telemetry_service.log_package_playground, PlaygroundPayload( - playgroundSeconds=int(time.perf_counter() - start_time), - playgroundComponentCount=components_count, - playgroundSuccess=False, - playgroundErrorMessage=str(exc), + playground_seconds=int(time.perf_counter() - start_time), + playground_component_count=components_count, + playground_success=False, + playground_error_message=str(exc), ), ) if "stream or streaming set to True" in str(exc): - raise HTTPException(status_code=400, detail=str(exc)) - logger.error(f"Error checking build status: {exc}") - logger.exception(exc) + raise HTTPException(status_code=400, detail=str(exc)) from exc + logger.exception("Error checking build status") raise HTTPException(status_code=500, detail=str(exc)) from exc @router.post("/build/{flow_id}/flow") async def build_flow( + *, background_tasks: BackgroundTasks, flow_id: uuid.UUID, - inputs: Annotated[Optional[InputValueRequest], Body(embed=True)] = None, - data: Annotated[Optional[FlowDataRequest], Body(embed=True)] = None, - files: Optional[list[str]] = None, - stop_component_id: Optional[str] = None, - start_component_id: Optional[str] = None, - log_builds: Optional[bool] = True, - chat_service: "ChatService" = Depends(get_chat_service), - current_user=Depends(get_current_active_user), - telemetry_service: "TelemetryService" = Depends(get_telemetry_service), - session=Depends(get_session), + inputs: Annotated[InputValueRequest | None, Body(embed=True)] = None, + data: Annotated[FlowDataRequest | None, Body(embed=True)] = None, + files: list[str] | None = None, + stop_component_id: str | None = None, + start_component_id: str | None = None, + log_builds: bool | None = True, + current_user: CurrentActiveUser, + session: DbSession, ): - async def build_graph_and_get_order() -> tuple[list[str], list[str], "Graph"]: + chat_service = get_chat_service() + telemetry_service = get_telemetry_service() + if not inputs: + inputs = InputValueRequest(session=str(flow_id)) + + async def build_graph_and_get_order() -> tuple[list[str], list[str], Graph]: start_time = time.perf_counter() components_count = None try: @@ -160,17 +166,20 @@ async def build_graph_and_get_order() -> tuple[list[str], list[str], "Graph"]: if not data: graph = await build_graph_from_db_no_cache(flow_id=flow_id_str, session=session) else: - graph = await build_graph_from_data(flow_id_str, data.model_dump()) + graph = await build_graph_from_data(flow_id_str, data.model_dump(), user_id=str(current_user.id)) graph.validate_stream() if stop_component_id or start_component_id: try: first_layer = graph.sort_vertices(stop_component_id, start_component_id) - except Exception as exc: - logger.error(exc) + except Exception: # noqa: BLE001 + logger.exception("Error sorting vertices") first_layer = 
graph.sort_vertices() else: first_layer = graph.sort_vertices() + if inputs is not None and hasattr(inputs, "session") and inputs.session is not None: + graph.session_id = inputs.session + for vertex_id in first_layer: graph.run_manager.add_to_vertices_being_run(vertex_id) @@ -182,29 +191,29 @@ async def build_graph_and_get_order() -> tuple[list[str], list[str], "Graph"]: background_tasks.add_task( telemetry_service.log_package_playground, PlaygroundPayload( - playgroundSeconds=int(time.perf_counter() - start_time), - playgroundComponentCount=components_count, - playgroundSuccess=True, + playground_seconds=int(time.perf_counter() - start_time), + playground_component_count=components_count, + playground_success=True, ), ) - return first_layer, vertices_to_run, graph except Exception as exc: background_tasks.add_task( telemetry_service.log_package_playground, PlaygroundPayload( - playgroundSeconds=int(time.perf_counter() - start_time), - playgroundComponentCount=components_count, - playgroundSuccess=False, - playgroundErrorMessage=str(exc), + playground_seconds=int(time.perf_counter() - start_time), + playground_component_count=components_count, + playground_success=False, + playground_error_message=str(exc), ), ) if "stream or streaming set to True" in str(exc): - raise HTTPException(status_code=400, detail=str(exc)) - logger.error(f"Error checking build status: {exc}") - logger.exception(exc) + raise HTTPException(status_code=400, detail=str(exc)) from exc + logger.exception("Error checking build status") raise HTTPException(status_code=500, detail=str(exc)) from exc - async def _build_vertex(vertex_id: str, graph: "Graph") -> VertexBuildResponse: + return first_layer, vertices_to_run, graph + + async def _build_vertex(vertex_id: str, graph: Graph, event_manager: EventManager) -> VertexBuildResponse: flow_id_str = str(flow_id) next_runnable_vertices = [] @@ -214,14 +223,15 @@ async def _build_vertex(vertex_id: str, graph: "Graph") -> VertexBuildResponse: try: vertex = graph.get_vertex(vertex_id) try: - lock = chat_service._async_cache_locks[flow_id_str] + lock = chat_service.async_cache_locks[flow_id_str] vertex_build_result = await graph.build_vertex( vertex_id=vertex_id, - user_id=current_user.id, + user_id=str(current_user.id), inputs_dict=inputs.model_dump() if inputs else {}, files=files, get_cache=chat_service.get_cache, set_cache=chat_service.set_cache, + event_manager=event_manager, ) result_dict = vertex_build_result.result_dict params = vertex_build_result.params @@ -231,13 +241,13 @@ async def _build_vertex(vertex_id: str, graph: "Graph") -> VertexBuildResponse: top_level_vertices = graph.get_top_level_vertices(next_runnable_vertices) result_data_response = ResultDataResponse.model_validate(result_dict, from_attributes=True) - except Exception as exc: - if isinstance(exc, ComponentBuildException): + except Exception as exc: # noqa: BLE001 + if isinstance(exc, ComponentBuildError): params = exc.message tb = exc.formatted_traceback else: tb = traceback.format_exc() - logger.exception(f"Error building Component: {exc}") + logger.exception("Error building Component") params = format_exception_message(exc) message = {"errorMessage": params, "stackTrace": tb} valid = False @@ -294,42 +304,39 @@ async def _build_vertex(vertex_id: str, graph: "Graph") -> VertexBuildResponse: background_tasks.add_task( telemetry_service.log_package_component, ComponentPayload( - componentName=vertex_id.split("-")[0], - componentSeconds=int(time.perf_counter() - start_time), - componentSuccess=valid, - 
componentErrorMessage=error_message, + component_name=vertex_id.split("-")[0], + component_seconds=int(time.perf_counter() - start_time), + component_success=valid, + component_error_message=error_message, ), ) - return build_response except Exception as exc: background_tasks.add_task( telemetry_service.log_package_component, ComponentPayload( - componentName=vertex_id.split("-")[0], - componentSeconds=int(time.perf_counter() - start_time), - componentSuccess=False, - componentErrorMessage=str(exc), + component_name=vertex_id.split("-")[0], + component_seconds=int(time.perf_counter() - start_time), + component_success=False, + component_error_message=str(exc), ), ) - logger.error(f"Error building Component: \n\n{exc}") - logger.exception(exc) + logger.exception("Error building Component") message = parse_exception(exc) raise HTTPException(status_code=500, detail=message) from exc - def send_event(event_type: str, value: dict, queue: asyncio.Queue) -> None: - json_data = {"event": event_type, "data": value} - event_id = uuid.uuid4() - logger.debug(f"sending event {event_id}: {event_type}") - str_data = json.dumps(json_data) + "\n\n" - queue.put_nowait((event_id, str_data.encode("utf-8"), time.time())) + return build_response async def build_vertices( - vertex_id: str, graph: "Graph", queue: asyncio.Queue, client_consumed_queue: asyncio.Queue + vertex_id: str, + graph: Graph, + client_consumed_queue: asyncio.Queue, + event_manager: EventManager, ) -> None: - build_task = asyncio.create_task(await asyncio.to_thread(_build_vertex, vertex_id, graph)) + build_task = asyncio.create_task(_build_vertex(vertex_id, graph, event_manager)) try: await build_task - except asyncio.CancelledError: + except asyncio.CancelledError as exc: + logger.exception(exc) build_task.cancel() return @@ -339,26 +346,26 @@ async def build_vertices( vertex_build_response_json = vertex_build_response.model_dump_json() build_data = json.loads(vertex_build_response_json) except Exception as exc: - raise ValueError(f"Error serializing vertex build response: {exc}") from exc - send_event("end_vertex", {"build_data": build_data}, queue) + msg = f"Error serializing vertex build response: {exc}" + raise ValueError(msg) from exc + event_manager.on_end_vertex(data={"build_data": build_data}) await client_consumed_queue.get() - if vertex_build_response.valid: - if vertex_build_response.next_vertices_ids: - tasks = [] - for next_vertex_id in vertex_build_response.next_vertices_ids: - task = asyncio.create_task(build_vertices(next_vertex_id, graph, queue, client_consumed_queue)) - tasks.append(task) - try: - await asyncio.gather(*tasks) - except asyncio.CancelledError: - for task in tasks: - task.cancel() - return + if vertex_build_response.valid and vertex_build_response.next_vertices_ids: + tasks = [] + for next_vertex_id in vertex_build_response.next_vertices_ids: + task = asyncio.create_task(build_vertices(next_vertex_id, graph, client_consumed_queue, event_manager)) + tasks.append(task) + try: + await asyncio.gather(*tasks) + except asyncio.CancelledError: + for task in tasks: + task.cancel() + return - async def event_generator(queue: asyncio.Queue, client_consumed_queue: asyncio.Queue) -> None: + async def event_generator(event_manager: EventManager, client_consumed_queue: asyncio.Queue) -> None: if not data: - # using another thread since the DB query is I/O bound - vertices_task = asyncio.create_task(await asyncio.to_thread(build_graph_and_get_order)) + # using another task since the build_graph_and_get_order is now an async 
function + vertices_task = asyncio.create_task(build_graph_and_get_order()) try: await vertices_task except asyncio.CancelledError: @@ -366,10 +373,10 @@ async def event_generator(queue: asyncio.Queue, client_consumed_queue: asyncio.Q return except Exception as e: if isinstance(e, HTTPException): - send_event("error", {"error": str(e.detail), "statusCode": e.status_code}, queue) - raise e - send_event("error", {"error": str(e)}, queue) - raise e + event_manager.on_error(data={"error": str(e.detail), "statusCode": e.status_code}) + raise + event_manager.on_error(data={"error": str(e)}) + raise ids, vertices_to_run, graph = vertices_task.result() else: @@ -377,16 +384,16 @@ async def event_generator(queue: asyncio.Queue, client_consumed_queue: asyncio.Q ids, vertices_to_run, graph = await build_graph_and_get_order() except Exception as e: if isinstance(e, HTTPException): - send_event("error", {"error": str(e.detail), "statusCode": e.status_code}, queue) - raise e - send_event("error", {"error": str(e)}, queue) - raise e - send_event("vertices_sorted", {"ids": ids, "to_run": vertices_to_run}, queue) + event_manager.on_error(data={"error": str(e.detail), "statusCode": e.status_code}) + raise + event_manager.on_error(data={"error": str(e)}) + raise + event_manager.on_vertices_sorted(data={"ids": ids, "to_run": vertices_to_run}) await client_consumed_queue.get() tasks = [] for vertex_id in ids: - task = asyncio.create_task(build_vertices(vertex_id, graph, queue, client_consumed_queue)) + task = asyncio.create_task(build_vertices(vertex_id, graph, client_consumed_queue, event_manager)) tasks.append(task) try: await asyncio.gather(*tasks) @@ -395,8 +402,8 @@ async def event_generator(queue: asyncio.Queue, client_consumed_queue: asyncio.Q for task in tasks: task.cancel() return - send_event("end", {}, queue) - await queue.put((None, None, time.time)) + event_manager.on_end(data={}) + await event_manager.queue.put((None, None, time.time)) async def consume_and_yield(queue: asyncio.Queue, client_consumed_queue: asyncio.Queue) -> typing.AsyncGenerator: while True: @@ -408,14 +415,17 @@ async def consume_and_yield(queue: asyncio.Queue, client_consumed_queue: asyncio get_time_yield = time.time() client_consumed_queue.put_nowait(event_id) logger.debug( - f"consumed event {str(event_id)} (time in queue, {get_time - put_time:.4f}, client {get_time_yield - get_time:.4f})" + f"consumed event {event_id} " + f"(time in queue, {get_time - put_time:.4f}, " + f"client {get_time_yield - get_time:.4f})" ) asyncio_queue: asyncio.Queue = asyncio.Queue() asyncio_queue_client_consumed: asyncio.Queue = asyncio.Queue() - main_task = asyncio.create_task(event_generator(asyncio_queue, asyncio_queue_client_consumed)) + event_manager = create_default_event_manager(queue=asyncio_queue) + main_task = asyncio.create_task(event_generator(event_manager, asyncio_queue_client_consumed)) - def on_disconnect(): + def on_disconnect() -> None: logger.debug("Client disconnected, closing tasks") main_task.cancel() @@ -434,7 +444,7 @@ def __init__( headers: typing.Mapping[str, str] | None = None, media_type: str | None = None, background: BackgroundTask | None = None, - on_disconnect: Optional[typing.Callable] = None, + on_disconnect: typing.Callable | None = None, ): super().__init__(content, status_code, headers, media_type, background) self.on_disconnect = on_disconnect @@ -444,21 +454,22 @@ async def listen_for_disconnect(self, receive: Receive) -> None: message = await receive() if message["type"] == "http.disconnect": if 
self.on_disconnect: - await self.on_disconnect() + coro = self.on_disconnect() + if asyncio.iscoroutine(coro): + await coro break @router.post("/build/{flow_id}/vertices/{vertex_id}") async def build_vertex( + *, flow_id: uuid.UUID, vertex_id: str, background_tasks: BackgroundTasks, - inputs: Annotated[Optional[InputValueRequest], Body(embed=True)] = None, - files: Optional[list[str]] = None, - chat_service: "ChatService" = Depends(get_chat_service), - current_user=Depends(get_current_active_user), - telemetry_service: "TelemetryService" = Depends(get_telemetry_service), -): + inputs: Annotated[InputValueRequest | None, Body(embed=True)] = None, + files: list[str] | None = None, + current_user: CurrentActiveUser, +) -> VertexBuildResponse: """Build a vertex instead of the entire graph. Args: @@ -466,7 +477,7 @@ async def build_vertex( vertex_id (str): The ID of the vertex to build. background_tasks (BackgroundTasks): The background tasks dependency. inputs (Optional[InputValueRequest], optional): The input values for the vertex. Defaults to None. - chat_service (ChatService, optional): The chat service dependency. Defaults to Depends(get_chat_service). + files (List[str], optional): The files to use. Defaults to None. current_user (Any, optional): The current user dependency. Defaults to Depends(get_current_active_user). Returns: @@ -476,6 +487,8 @@ async def build_vertex( HTTPException: If there is an error building the vertex. """ + chat_service = get_chat_service() + telemetry_service = get_telemetry_service() flow_id_str = str(flow_id) next_runnable_vertices = [] @@ -484,10 +497,10 @@ async def build_vertex( error_message = None try: cache = await chat_service.get_cache(flow_id_str) - if not cache: + if isinstance(cache, CacheMiss): # If there's no cache logger.warning(f"No cache found for {flow_id_str}. 
Building graph starting at {vertex_id}") - graph: "Graph" = await build_graph_from_db( + graph: Graph = await build_graph_from_db( flow_id=flow_id_str, session=next(get_session()), chat_service=chat_service ) else: @@ -496,10 +509,10 @@ async def build_vertex( vertex = graph.get_vertex(vertex_id) try: - lock = chat_service._async_cache_locks[flow_id_str] + lock = chat_service.async_cache_locks[flow_id_str] vertex_build_result = await graph.build_vertex( vertex_id=vertex_id, - user_id=current_user.id, + user_id=str(current_user.id), inputs_dict=inputs.model_dump() if inputs else {}, files=files, get_cache=chat_service.get_cache, @@ -512,13 +525,13 @@ async def build_vertex( next_runnable_vertices = await graph.get_next_runnable_vertices(lock, vertex=vertex, cache=False) top_level_vertices = graph.get_top_level_vertices(next_runnable_vertices) result_data_response = ResultDataResponse.model_validate(result_dict, from_attributes=True) - except Exception as exc: - if isinstance(exc, ComponentBuildException): + except Exception as exc: # noqa: BLE001 + if isinstance(exc, ComponentBuildError): params = exc.message tb = exc.formatted_traceback else: tb = traceback.format_exc() - logger.exception(f"Error building Component: {exc}") + logger.exception("Error building Component") params = format_exception_message(exc) message = {"errorMessage": params, "stackTrace": tb} valid = False @@ -579,36 +592,115 @@ async def build_vertex( background_tasks.add_task( telemetry_service.log_package_component, ComponentPayload( - componentName=vertex_id.split("-")[0], - componentSeconds=int(time.perf_counter() - start_time), - componentSuccess=valid, - componentErrorMessage=error_message, + component_name=vertex_id.split("-")[0], + component_seconds=int(time.perf_counter() - start_time), + component_success=valid, + component_error_message=error_message, ), ) - return build_response except Exception as exc: background_tasks.add_task( telemetry_service.log_package_component, ComponentPayload( - componentName=vertex_id.split("-")[0], - componentSeconds=int(time.perf_counter() - start_time), - componentSuccess=False, - componentErrorMessage=str(exc), + component_name=vertex_id.split("-")[0], + component_seconds=int(time.perf_counter() - start_time), + component_success=False, + component_error_message=str(exc), ), ) - logger.error(f"Error building Component: \n\n{exc}") - logger.exception(exc) + logger.exception("Error building Component") message = parse_exception(exc) raise HTTPException(status_code=500, detail=message) from exc + return build_response + + +async def _stream_vertex(flow_id: str, vertex_id: str, chat_service: ChatService): + graph = None + try: + try: + cache = await chat_service.get_cache(flow_id) + except Exception as exc: # noqa: BLE001 + logger.exception("Error building Component") + yield str(StreamData(event="error", data={"error": str(exc)})) + return + + if isinstance(cache, CacheMiss): + # If there's no cache + msg = f"No cache found for {flow_id}." 
+ logger.error(msg) + yield str(StreamData(event="error", data={"error": msg})) + return + else: + graph = cache.get("result") + + try: + vertex: InterfaceVertex = graph.get_vertex(vertex_id) + except Exception as exc: # noqa: BLE001 + logger.exception("Error building Component") + yield str(StreamData(event="error", data={"error": str(exc)})) + return + + if not hasattr(vertex, "stream"): + msg = f"Vertex {vertex_id} does not support streaming" + logger.error(msg) + yield str(StreamData(event="error", data={"error": msg})) + return + + if isinstance(vertex.built_result, str) and vertex.built_result: + stream_data = StreamData( + event="message", + data={"message": f"Streaming vertex {vertex_id}"}, + ) + yield str(stream_data) + stream_data = StreamData( + event="message", + data={"chunk": vertex.built_result}, + ) + yield str(stream_data) + + elif not vertex.frozen or not vertex.built: + logger.debug(f"Streaming vertex {vertex_id}") + stream_data = StreamData( + event="message", + data={"message": f"Streaming vertex {vertex_id}"}, + ) + yield str(stream_data) + try: + async for chunk in vertex.stream(): + stream_data = StreamData( + event="message", + data={"chunk": chunk}, + ) + yield str(stream_data) + except Exception as exc: # noqa: BLE001 + logger.exception("Error building Component") + exc_message = parse_exception(exc) + if exc_message == "The message must be an iterator or an async iterator.": + exc_message = "This stream has already been closed." + yield str(StreamData(event="error", data={"error": exc_message})) + elif vertex.result is not None: + stream_data = StreamData( + event="message", + data={"chunk": vertex.built_result}, + ) + yield str(stream_data) + else: + msg = f"No result found for vertex {vertex_id}" + logger.error(msg) + yield str(StreamData(event="error", data={"error": msg})) + return + finally: + logger.debug("Closing stream") + if graph: + await chat_service.set_cache(flow_id, graph) + yield str(StreamData(event="close", data={"message": "Stream closed"})) + @router.get("/build/{flow_id}/{vertex_id}/stream", response_class=StreamingResponse) async def build_vertex_stream( flow_id: uuid.UUID, vertex_id: str, - session_id: Optional[str] = None, - chat_service: "ChatService" = Depends(get_chat_service), - session_service: "SessionService" = Depends(get_session_service), ): """Build a vertex instead of the entire graph. @@ -636,65 +728,8 @@ async def build_vertex_stream( HTTPException: If an error occurs while building the vertex. 
""" try: - flow_id_str = str(flow_id) - - async def stream_vertex(): - try: - cache = await chat_service.get_cache(flow_id_str) - if not cache: - # If there's no cache - raise ValueError(f"No cache found for {flow_id_str}.") - else: - graph = cache.get("result") - - vertex: "InterfaceVertex" = graph.get_vertex(vertex_id) - if not hasattr(vertex, "stream"): - raise ValueError(f"Vertex {vertex_id} does not support streaming") - if isinstance(vertex._built_result, str) and vertex._built_result: - stream_data = StreamData( - event="message", - data={"message": f"Streaming vertex {vertex_id}"}, - ) - yield str(stream_data) - stream_data = StreamData( - event="message", - data={"chunk": vertex._built_result}, - ) - yield str(stream_data) - - elif not vertex.frozen or not vertex._built: - logger.debug(f"Streaming vertex {vertex_id}") - stream_data = StreamData( - event="message", - data={"message": f"Streaming vertex {vertex_id}"}, - ) - yield str(stream_data) - async for chunk in vertex.stream(): - stream_data = StreamData( - event="message", - data={"chunk": chunk}, - ) - yield str(stream_data) - elif vertex.result is not None: - stream_data = StreamData( - event="message", - data={"chunk": vertex._built_result}, - ) - yield str(stream_data) - else: - raise ValueError(f"No result found for vertex {vertex_id}") - - except Exception as exc: - logger.exception(f"Error building Component: {exc}") - exc_message = parse_exception(exc) - if exc_message == "The message must be an iterator or an async iterator.": - exc_message = "This stream has already been closed." - yield str(StreamData(event="error", data={"error": exc_message})) - finally: - logger.debug("Closing stream") - await chat_service.set_cache(flow_id_str, graph) - yield str(StreamData(event="close", data={"message": "Stream closed"})) - - return StreamingResponse(stream_vertex(), media_type="text/event-stream") + return StreamingResponse( + _stream_vertex(str(flow_id), vertex_id, get_chat_service()), media_type="text/event-stream" + ) except Exception as exc: raise HTTPException(status_code=500, detail="Error building Component") from exc diff --git a/src/backend/base/langflow/api/v1/endpoints.py b/src/backend/base/langflow/api/v1/endpoints.py index 926c7ecb6778..000618fb6280 100644 --- a/src/backend/base/langflow/api/v1/endpoints.py +++ b/src/backend/base/langflow/api/v1/endpoints.py @@ -1,21 +1,30 @@ +from __future__ import annotations + import time -from asyncio import Lock from http import HTTPStatus -from typing import TYPE_CHECKING, Annotated, List, Optional, Union +from typing import TYPE_CHECKING, Annotated from uuid import UUID import sqlalchemy as sa -from fastapi import APIRouter, BackgroundTasks, Body, Depends, HTTPException, Request, UploadFile, status +from fastapi import ( + APIRouter, + BackgroundTasks, + Body, + Depends, + HTTPException, + Request, + UploadFile, + status, +) from loguru import logger -from sqlmodel import Session, select +from sqlmodel import select -from langflow.api.utils import parse_value +from langflow.api.utils import CurrentActiveUser, DbSession, parse_value from langflow.api.v1.schemas import ( ConfigResponse, CustomComponentRequest, CustomComponentResponse, InputValueRequest, - ProcessResponse, RunResponse, SimplifiedAPIRequest, TaskStatusResponse, @@ -24,10 +33,11 @@ ) from langflow.custom.custom_component.component import Component from langflow.custom.utils import build_custom_component_template, get_instance_name -from langflow.exceptions.api import APIException, InvalidChatInputException 
+from langflow.exceptions.api import APIException, InvalidChatInputError from langflow.graph.graph.base import Graph from langflow.graph.schema import RunOutputs from langflow.helpers.flow import get_flow_by_id_or_endpoint_name +from langflow.helpers.user import get_user_by_flow_id_or_endpoint_name from langflow.interface.initialize.loading import update_params_with_load_from_db_fields from langflow.processing.process import process_tweaks, run_graph_internal from langflow.schema.graph import Tweaks @@ -35,50 +45,33 @@ from langflow.services.cache.utils import save_uploaded_file from langflow.services.database.models.flow import Flow from langflow.services.database.models.flow.model import FlowRead -from langflow.services.database.models.flow.utils import get_all_webhook_components_in_flow -from langflow.services.database.models.user.model import User, UserRead -from langflow.services.deps import ( - get_cache_service, - get_session, - get_session_service, - get_settings_service, - get_task_service, - get_telemetry_service, +from langflow.services.database.models.flow.utils import ( + get_all_webhook_components_in_flow, ) -from langflow.services.session.service import SessionService -from langflow.services.task.service import TaskService +from langflow.services.database.models.user.model import User, UserRead +from langflow.services.deps import get_session_service, get_settings_service, get_task_service, get_telemetry_service +from langflow.services.settings.feature_flags import FEATURE_FLAGS from langflow.services.telemetry.schema import RunPayload -from langflow.services.telemetry.service import TelemetryService from langflow.utils.version import get_version_info if TYPE_CHECKING: - from langflow.services.cache.base import CacheService from langflow.services.settings.service import SettingsService router = APIRouter(tags=["Base"]) @router.get("/all", dependencies=[Depends(get_current_active_user)]) -async def get_all( - settings_service=Depends(get_settings_service), - cache_service: "CacheService" = Depends(dependency=get_cache_service), - force_refresh: bool = False, -): +async def get_all(): from langflow.interface.types import get_and_cache_all_types_dict try: - async with Lock() as lock: - all_types_dict = await get_and_cache_all_types_dict( - settings_service=settings_service, cache_service=cache_service, force_refresh=force_refresh, lock=lock - ) + return await get_and_cache_all_types_dict(settings_service=get_settings_service()) - return all_types_dict except Exception as exc: - logger.exception(exc) raise HTTPException(status_code=500, detail=str(exc)) from exc -def validate_input_and_tweaks(input_request: SimplifiedAPIRequest): +def validate_input_and_tweaks(input_request: SimplifiedAPIRequest) -> None: # If the input_value is not None and the input_type is "chat" # then we need to check the tweaks if the ChatInput component is present # and if its input_value is not None @@ -91,38 +84,41 @@ def validate_input_and_tweaks(input_request: SimplifiedAPIRequest): has_input_value = value.get("input_value") is not None input_value_is_chat = input_request.input_value is not None and input_request.input_type == "chat" if has_input_value and input_value_is_chat: - raise InvalidChatInputException( - "If you pass an input_value to the chat input, you cannot pass a tweak with the same name." 
- ) - elif "Text Input" in key or "TextInput" in key: - if isinstance(value, dict): - has_input_value = value.get("input_value") is not None - input_value_is_text = input_request.input_value is not None and input_request.input_type == "text" - if has_input_value and input_value_is_text: - raise InvalidChatInputException( - "If you pass an input_value to the text input, you cannot pass a tweak with the same name." - ) + msg = "If you pass an input_value to the chat input, you cannot pass a tweak with the same name." + raise InvalidChatInputError(msg) + elif ("Text Input" in key or "TextInput" in key) and isinstance(value, dict): + has_input_value = value.get("input_value") is not None + input_value_is_text = input_request.input_value is not None and input_request.input_type == "text" + if has_input_value and input_value_is_text: + msg = "If you pass an input_value to the text input, you cannot pass a tweak with the same name." + raise InvalidChatInputError(msg) async def simple_run_flow( flow: Flow, input_request: SimplifiedAPIRequest, + *, stream: bool = False, - api_key_user: Optional[User] = None, + api_key_user: User | None = None, ): if input_request.input_value is not None and input_request.tweaks is not None: validate_input_and_tweaks(input_request) try: - task_result: List[RunOutputs] = [] + task_result: list[RunOutputs] = [] user_id = api_key_user.id if api_key_user else None flow_id_str = str(flow.id) if flow.data is None: - raise ValueError(f"Flow {flow_id_str} has no data") + msg = f"Flow {flow_id_str} has no data" + raise ValueError(msg) graph_data = flow.data.copy() graph_data = process_tweaks(graph_data, input_request.tweaks or {}, stream=stream) graph = Graph.from_payload(graph_data, flow_id=flow_id_str, user_id=str(user_id), flow_name=flow.name) inputs = [ - InputValueRequest(components=[], input_value=input_request.input_value, type=input_request.input_type) + InputValueRequest( + components=[], + input_value=input_request.input_value, + type=input_request.input_type, + ) ] if input_request.output_component: outputs = [input_request.output_component] @@ -133,7 +129,7 @@ async def simple_run_flow( if input_request.output_type == "debug" or ( vertex.is_output - and (input_request.output_type == "any" or input_request.output_type in vertex.id.lower()) # type: ignore + and (input_request.output_type == "any" or input_request.output_type in vertex.id.lower()) # type: ignore[operator] ) ] task_result, session_id = await run_graph_internal( @@ -154,60 +150,72 @@ async def simple_run_flow( async def simple_run_flow_task( flow: Flow, input_request: SimplifiedAPIRequest, + *, stream: bool = False, - api_key_user: Optional[User] = None, + api_key_user: User | None = None, ): - """ - Run a flow task as a BackgroundTask, therefore it should not throw exceptions. 
- """ + """Run a flow task as a BackgroundTask, therefore it should not throw exceptions.""" try: - result = await simple_run_flow( + return await simple_run_flow( flow=flow, input_request=input_request, stream=stream, api_key_user=api_key_user, ) - return result - except Exception as exc: - logger.exception(f"Error running flow {flow.id} task: {exc}") + except Exception: # noqa: BLE001 + logger.exception(f"Error running flow {flow.id} task") -@router.post("/run/{flow_id_or_name}", response_model=RunResponse, response_model_exclude_none=True) +@router.post("/run/{flow_id_or_name}", response_model_exclude_none=True) # noqa: RUF100, FAST003 async def simplified_run_flow( + *, background_tasks: BackgroundTasks, flow: Annotated[FlowRead | None, Depends(get_flow_by_id_or_endpoint_name)], - input_request: SimplifiedAPIRequest = SimplifiedAPIRequest(), + input_request: SimplifiedAPIRequest | None = None, stream: bool = False, - api_key_user: UserRead = Depends(api_key_security), - telemetry_service: "TelemetryService" = Depends(get_telemetry_service), -): - """ - Executes a specified flow by ID with input customization, performance enhancements through caching, and optional data streaming. + api_key_user: Annotated[UserRead, Depends(api_key_security)], +) -> RunResponse: + """Executes a specified flow by ID. + + Executes a specified flow by ID with input customization, performance enhancements through caching, + and optional data streaming. ### Parameters: - `db` (Session): Database session for executing queries. - `flow_id_or_name` (str): ID or endpoint name of the flow to run. - - `input_request` (SimplifiedAPIRequest): Request object containing input values, types, output selection, tweaks, and session ID. + - `input_request` (SimplifiedAPIRequest): Request object containing input values, types, output selection, tweaks, + and session ID. - `api_key_user` (User): User object derived from the provided API key, used for authentication. - `session_service` (SessionService): Service for managing flow sessions, essential for session reuse and caching. ### SimplifiedAPIRequest: - `input_value` (Optional[str], default=""): Input value to pass to the flow. - - `input_type` (Optional[Literal["chat", "text", "any"]], default="chat"): Type of the input value, determining how the input is interpreted. - - `output_type` (Optional[Literal["chat", "text", "any", "debug"]], default="chat"): Desired type of output, affecting which components' outputs are included in the response. If set to "debug", all outputs are returned. - - `output_component` (Optional[str], default=None): Specific component output to retrieve. If provided, only the output of the specified component is returned. This overrides the `output_type` parameter. - - `tweaks` (Optional[Tweaks], default=None): Adjustments to the flow's behavior, allowing for custom execution parameters. - - `session_id` (Optional[str], default=None): An identifier for reusing session data, aiding in performance for subsequent requests. + - `input_type` (Optional[Literal["chat", "text", "any"]], default="chat"): Type of the input value, + determining how the input is interpreted. + - `output_type` (Optional[Literal["chat", "text", "any", "debug"]], default="chat"): Desired type of output, + affecting which components' outputs are included in the response. If set to "debug", all outputs are returned. + - `output_component` (Optional[str], default=None): Specific component output to retrieve. If provided, + only the output of the specified component is returned. 
This overrides the `output_type` parameter. + - `tweaks` (Optional[Tweaks], default=None): Adjustments to the flow's behavior, allowing for custom execution + parameters. + - `session_id` (Optional[str], default=None): An identifier for reusing session data, aiding in performance for + subsequent requests. ### Tweaks - A dictionary of tweaks to customize the flow execution. The tweaks can be used to modify the flow's parameters and components. Tweaks can be overridden by the input values. - You can use Component's `id` or Display Name as key to tweak a specific component (e.g., `{"Component Name": {"parameter_name": "value"}}`). - You can also use the parameter name as key to tweak all components with that parameter (e.g., `{"parameter_name": "value"}`). + A dictionary of tweaks to customize the flow execution. + The tweaks can be used to modify the flow's parameters and components. + Tweaks can be overridden by the input values. + You can use Component's `id` or Display Name as key to tweak a specific component + (e.g., `{"Component Name": {"parameter_name": "value"}}`). + You can also use the parameter name as key to tweak all components with that parameter + (e.g., `{"parameter_name": "value"}`). ### Returns: - - A `RunResponse` object containing the execution results, including selected (or all, based on `output_type`) outputs of the flow and the session ID, facilitating result retrieval and further interactions in a session context. + - A `RunResponse` object containing the execution results, including selected (or all, based on `output_type`) + outputs of the flow and the session ID, facilitating result retrieval and further interactions in a session + context. ### Raises: - HTTPException: 404 if the specified flow ID curl -X 'POST' \ @@ -228,8 +236,12 @@ async def simplified_run_flow( }' ``` - This endpoint provides a powerful interface for executing flows with enhanced flexibility and efficiency, supporting a wide range of applications by allowing for dynamic input and output configuration along with performance optimizations through session management and caching. + This endpoint provides a powerful interface for executing flows with enhanced flexibility and efficiency, + supporting a wide range of applications by allowing for dynamic input and output configuration along with + performance optimizations through session management and caching. 
""" + telemetry_service = get_telemetry_service() + input_request = input_request if input_request is not None else SimplifiedAPIRequest() if flow is None: raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Flow not found") start_time = time.perf_counter() @@ -243,18 +255,22 @@ async def simplified_run_flow( end_time = time.perf_counter() background_tasks.add_task( telemetry_service.log_package_run, - RunPayload(runIsWebhook=False, runSeconds=int(end_time - start_time), runSuccess=True, runErrorMessage=""), + RunPayload( + run_is_webhook=False, + run_seconds=int(end_time - start_time), + run_success=True, + run_error_message="", + ), ) - return result except ValueError as exc: background_tasks.add_task( telemetry_service.log_package_run, RunPayload( - runIsWebhook=False, - runSeconds=int(time.perf_counter() - start_time), - runSuccess=False, - runErrorMessage=str(exc), + run_is_webhook=False, + run_seconds=int(time.perf_counter() - start_time), + run_success=False, + run_error_message=str(exc), ), ) if "badly formed hexadecimal UUID string" in str(exc): @@ -262,43 +278,38 @@ async def simplified_run_flow( raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(exc)) from exc if "not found" in str(exc): raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(exc)) from exc - else: - logger.exception(exc) - raise APIException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, exception=exc, flow=flow) from exc - except InvalidChatInputException as exc: - logger.error(exc) + raise APIException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, exception=exc, flow=flow) from exc + except InvalidChatInputError as exc: raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(exc)) from exc except Exception as exc: - logger.exception(exc) background_tasks.add_task( telemetry_service.log_package_run, RunPayload( - runIsWebhook=False, - runSeconds=int(time.perf_counter() - start_time), - runSuccess=False, - runErrorMessage=str(exc), + run_is_webhook=False, + run_seconds=int(time.perf_counter() - start_time), + run_success=False, + run_error_message=str(exc), ), ) - logger.exception(exc) raise APIException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, exception=exc, flow=flow) from exc + return result -@router.post("/webhook/{flow_id_or_name}", response_model=dict, status_code=HTTPStatus.ACCEPTED) + +@router.post("/webhook/{flow_id_or_name}", response_model=dict, status_code=HTTPStatus.ACCEPTED) # noqa: RUF100, FAST003 async def webhook_run_flow( flow: Annotated[Flow, Depends(get_flow_by_id_or_endpoint_name)], + user: Annotated[User, Depends(get_user_by_flow_id_or_endpoint_name)], request: Request, background_tasks: BackgroundTasks, - telemetry_service: "TelemetryService" = Depends(get_telemetry_service), ): - """ - Run a flow using a webhook request. + """Run a flow using a webhook request. Args: - db (Session): The database session. + flow (Flow, optional): The flow to be executed. Defaults to Depends(get_flow_by_id). + user (User): The flow user. request (Request): The incoming HTTP request. background_tasks (BackgroundTasks): The background tasks manager. - session_service (SessionService, optional): The session service. Defaults to Depends(get_session_service). - flow (Flow, optional): The flow to be executed. Defaults to Depends(get_flow_by_id). Returns: dict: A dictionary containing the status of the task. 
@@ -306,89 +317,102 @@ async def webhook_run_flow( Raises: HTTPException: If the flow is not found or if there is an error processing the request. """ + telemetry_service = get_telemetry_service() + start_time = time.perf_counter() + logger.debug("Received webhook request") + error_msg = "" try: - start_time = time.perf_counter() - logger.debug("Received webhook request") - data = await request.body() + try: + data = await request.body() + except Exception as exc: + error_msg = str(exc) + raise HTTPException(status_code=500, detail=error_msg) from exc + if not data: - logger.error("Request body is empty") - raise ValueError( - "Request body is empty. You should provide a JSON payload containing the flow ID.", + error_msg = "Request body is empty. You should provide a JSON payload containing the flow ID." + raise HTTPException(status_code=400, detail=error_msg) + + try: + # get all webhook components in the flow + webhook_components = get_all_webhook_components_in_flow(flow.data) + tweaks = {} + + for component in webhook_components: + tweaks[component["id"]] = {"data": data.decode() if isinstance(data, bytes) else data} + input_request = SimplifiedAPIRequest( + input_value="", + input_type="chat", + output_type="chat", + tweaks=tweaks, + session_id=None, ) - # get all webhook components in the flow - webhook_components = get_all_webhook_components_in_flow(flow.data) - tweaks = {} - - for component in webhook_components: - tweaks[component["id"]] = {"data": data.decode() if isinstance(data, bytes) else data} - input_request = SimplifiedAPIRequest( - input_value="", - input_type="chat", - output_type="chat", - tweaks=tweaks, - session_id=None, - ) - logger.debug("Starting background task") - background_tasks.add_task( # type: ignore - simple_run_flow_task, - flow=flow, - input_request=input_request, - ) - background_tasks.add_task( - telemetry_service.log_package_run, - RunPayload( - runIsWebhook=True, runSeconds=int(time.perf_counter() - start_time), runSuccess=True, runErrorMessage="" - ), - ) - return {"message": "Task started in the background", "status": "in progress"} - except Exception as exc: + logger.debug("Starting background task") + background_tasks.add_task( + simple_run_flow_task, + flow=flow, + input_request=input_request, + api_key_user=user, + ) + except Exception as exc: + error_msg = str(exc) + raise HTTPException(status_code=500, detail=error_msg) from exc + finally: background_tasks.add_task( telemetry_service.log_package_run, RunPayload( - runIsWebhook=True, - runSeconds=int(time.perf_counter() - start_time), - runSuccess=False, - runErrorMessage=str(exc), + run_is_webhook=True, + run_seconds=int(time.perf_counter() - start_time), + run_success=not error_msg, + run_error_message=error_msg, ), ) - if "Flow ID is required" in str(exc) or "Request body is empty" in str(exc): - raise HTTPException(status_code=400, detail=str(exc)) from exc - logger.exception(exc) - raise HTTPException(status_code=500, detail=str(exc)) from exc + return {"message": "Task started in the background", "status": "in progress"} -@router.post("/run/advanced/{flow_id}", response_model=RunResponse, response_model_exclude_none=True) + +@router.post( + "/run/advanced/{flow_id}", + response_model=RunResponse, + response_model_exclude_none=True, +) async def experimental_run_flow( - session: Annotated[Session, Depends(get_session)], + *, + session: DbSession, flow_id: UUID, - inputs: Optional[List[InputValueRequest]] = [InputValueRequest(components=[], input_value="")], - outputs: Optional[List[str]] = 
[], - tweaks: Annotated[Optional[Tweaks], Body(embed=True)] = None, # noqa: F821 - stream: Annotated[bool, Body(embed=True)] = False, # noqa: F821 - session_id: Annotated[Union[None, str], Body(embed=True)] = None, # noqa: F821 - api_key_user: UserRead = Depends(api_key_security), - session_service: SessionService = Depends(get_session_service), -): - """ - Executes a specified flow by ID with optional input values, output selection, tweaks, and streaming capability. + inputs: list[InputValueRequest] | None = None, + outputs: list[str] | None = None, + tweaks: Annotated[Tweaks | None, Body(embed=True)] = None, + stream: Annotated[bool, Body(embed=True)] = False, + session_id: Annotated[None | str, Body(embed=True)] = None, + api_key_user: Annotated[UserRead, Depends(api_key_security)], +) -> RunResponse: + """Executes a specified flow by ID with optional input values, output selection, tweaks, and streaming capability. + This endpoint supports running flows with caching to enhance performance and efficiency. ### Parameters: - `flow_id` (str): The unique identifier of the flow to be executed. - - `inputs` (List[InputValueRequest], optional): A list of inputs specifying the input values and components for the flow. Each input can target specific components and provide custom values. - - `outputs` (List[str], optional): A list of output names to retrieve from the executed flow. If not provided, all outputs are returned. - - `tweaks` (Optional[Tweaks], optional): A dictionary of tweaks to customize the flow execution. The tweaks can be used to modify the flow's parameters and components. Tweaks can be overridden by the input values. + - `inputs` (List[InputValueRequest], optional): A list of inputs specifying the input values and components + for the flow. Each input can target specific components and provide custom values. + - `outputs` (List[str], optional): A list of output names to retrieve from the executed flow. + If not provided, all outputs are returned. + - `tweaks` (Optional[Tweaks], optional): A dictionary of tweaks to customize the flow execution. + The tweaks can be used to modify the flow's parameters and components. + Tweaks can be overridden by the input values. - `stream` (bool, optional): Specifies whether the results should be streamed. Defaults to False. - - `session_id` (Union[None, str], optional): An optional session ID to utilize existing session data for the flow execution. + - `session_id` (Union[None, str], optional): An optional session ID to utilize existing session data for the flow + execution. - `api_key_user` (User): The user associated with the current API key. Automatically resolved from the API key. - - `session_service` (SessionService): The session service object for managing flow sessions. ### Returns: - A `RunResponse` object containing the selected outputs (or all if not specified) of the executed flow and the session ID. The structure of the response accommodates multiple inputs, providing a nested list of outputs for each input. + A `RunResponse` object containing the selected outputs (or all if not specified) of the executed flow + and the session ID. + The structure of the response accommodates multiple inputs, providing a nested list of outputs for each input. ### Raises: - HTTPException: Indicates issues with finding the specified flow, invalid input formats, or internal errors during flow execution. + HTTPException: Indicates issues with finding the specified flow, invalid input formats, or internal errors during + flow execution. 
### Example usage: ```json @@ -406,33 +430,57 @@ async def experimental_run_flow( } ``` - This endpoint facilitates complex flow executions with customized inputs, outputs, and configurations, catering to diverse application requirements. - """ - try: - flow_id_str = str(flow_id) - if outputs is None: - outputs = [] - - artifacts = {} - if session_id: + This endpoint facilitates complex flow executions with customized inputs, outputs, and configurations, + catering to diverse application requirements. + """ # noqa: E501 + session_service = get_session_service() + flow_id_str = str(flow_id) + if outputs is None: + outputs = [] + if inputs is None: + inputs = [InputValueRequest(components=[], input_value="")] + + if session_id: + try: session_data = await session_service.load_session(session_id, flow_id=flow_id_str) - graph, artifacts = session_data if session_data else (None, None) - if graph is None: - raise ValueError(f"Session {session_id} not found") - else: + except Exception as exc: + raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(exc)) from exc + graph, _artifacts = session_data or (None, None) + if graph is None: + msg = f"Session {session_id} not found" + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=msg) + else: + try: # Get the flow that matches the flow_id and belongs to the user # flow = session.query(Flow).filter(Flow.id == flow_id).filter(Flow.user_id == api_key_user.id).first() flow = session.exec( select(Flow).where(Flow.id == flow_id_str).where(Flow.user_id == api_key_user.id) ).first() - if flow is None: - raise ValueError(f"Flow {flow_id_str} not found") + except sa.exc.StatementError as exc: + # StatementError('(builtins.ValueError) badly formed hexadecimal UUID string') + if "badly formed hexadecimal UUID string" in str(exc): + logger.error(f"Flow ID {flow_id_str} is not a valid UUID") + # This means the Flow ID is not a valid UUID which means it can't find the flow + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(exc)) from exc + raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(exc)) from exc + except Exception as exc: + raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(exc)) from exc + + if flow is None: + msg = f"Flow {flow_id_str} not found" + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=msg) - if flow.data is None: - raise ValueError(f"Flow {flow_id_str} has no data") + if flow.data is None: + msg = f"Flow {flow_id_str} has no data" + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=msg) + try: graph_data = flow.data graph_data = process_tweaks(graph_data, tweaks or {}) graph = Graph.from_payload(graph_data, flow_id=flow_id_str) + except Exception as exc: + raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(exc)) from exc + + try: task_result, session_id = await run_graph_internal( graph=graph, flow_id=flow_id_str, @@ -441,56 +489,25 @@ async def experimental_run_flow( outputs=outputs, stream=stream, ) - - return RunResponse(outputs=task_result, session_id=session_id) - except sa.exc.StatementError as exc: - # StatementError('(builtins.ValueError) badly formed hexadecimal UUID string') - if "badly formed hexadecimal UUID string" in str(exc): - logger.error(f"Flow ID {flow_id_str} is not a valid UUID") - # This means the Flow ID is not a valid UUID which means it can't find the flow - raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, 
detail=str(exc)) from exc - except ValueError as exc: - if f"Flow {flow_id_str} not found" in str(exc): - logger.error(f"Flow {flow_id_str} not found") - raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(exc)) from exc - elif f"Session {session_id} not found" in str(exc): - logger.error(f"Session {session_id} not found") - raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(exc)) from exc - else: - logger.exception(exc) - raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(exc)) from exc except Exception as exc: - logger.exception(exc) raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(exc)) from exc + return RunResponse(outputs=task_result, session_id=session_id) + @router.post( "/predict/{flow_id}", - response_model=ProcessResponse, dependencies=[Depends(api_key_security)], ) @router.post( "/process/{flow_id}", - response_model=ProcessResponse, + dependencies=[Depends(api_key_security)], ) -async def process( - session: Annotated[Session, Depends(get_session)], - flow_id: str, - inputs: Optional[Union[List[dict], dict]] = None, - tweaks: Optional[dict] = None, - clear_cache: Annotated[bool, Body(embed=True)] = False, # noqa: F821 - session_id: Annotated[Union[None, str], Body(embed=True)] = None, # noqa: F821 - task_service: "TaskService" = Depends(get_task_service), - api_key_user: UserRead = Depends(api_key_security), - sync: Annotated[bool, Body(embed=True)] = True, # noqa: F821 - session_service: SessionService = Depends(get_session_service), -): - """ - Endpoint to process an input with a given flow_id. - """ +async def process() -> None: + """Endpoint to process an input with a given flow_id.""" # Raise a deprecation warning logger.warning( - "The /process endpoint is deprecated and will be removed in a future version. " "Please use /run instead." + "The /process endpoint is deprecated and will be removed in a future version. Please use /run instead."
) raise HTTPException( status_code=status.HTTP_400_BAD_REQUEST, @@ -499,8 +516,8 @@ async def process( ) -@router.get("/task/{task_id}", response_model=TaskStatusResponse) -async def get_task_status(task_id: str): +@router.get("/task/{task_id}") +async def get_task_status(task_id: str) -> TaskStatusResponse: task_service = get_task_service() task = task_service.get_task(task_id) result = None @@ -526,37 +543,36 @@ async def get_task_status(task_id: str): @router.post( "/upload/{flow_id}", - response_model=UploadFileResponse, status_code=HTTPStatus.CREATED, ) async def create_upload_file( file: UploadFile, flow_id: UUID, -): +) -> UploadFileResponse: try: flow_id_str = str(flow_id) file_path = save_uploaded_file(file, folder_name=flow_id_str) return UploadFileResponse( - flowId=flow_id_str, + flow_id=flow_id_str, file_path=file_path, ) except Exception as exc: - logger.error(f"Error saving file: {exc}") + logger.exception("Error saving file") raise HTTPException(status_code=500, detail=str(exc)) from exc # get endpoint to return version of langflow @router.get("/version") -def get_version(): +async def get_version(): return get_version_info() -@router.post("/custom_component", status_code=HTTPStatus.OK, response_model=CustomComponentResponse) +@router.post("/custom_component", status_code=HTTPStatus.OK) async def custom_component( raw_code: CustomComponentRequest, - user: User = Depends(get_current_active_user), -): + user: CurrentActiveUser, +) -> CustomComponentResponse: component = Component(_code=raw_code.code) built_frontend_node, component_instance = build_custom_component_template(component, user_id=user.id) @@ -570,13 +586,13 @@ async def custom_component( @router.post("/custom_component/update", status_code=HTTPStatus.OK) async def custom_component_update( code_request: UpdateCustomComponentRequest, - user: User = Depends(get_current_active_user), + user: CurrentActiveUser, ): - """ - Update a custom component with the provided code request. + """Update a custom component with the provided code request. This endpoint generates the CustomComponentFrontendNode normally but then runs the `update_build_config` method - on the latest version of the template. This ensures that every time it runs, it has the latest version of the template. + on the latest version of the template. + This ensures that every time it runs, it has the latest version of the template. Args: code_request (CustomComponentRequest): The code request containing the updated code for the custom component. 
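For reference, the `update_build_config` hook that this endpoint exercises lives on the component class. A hedged sketch, assuming the `(build_config, field_value, field_name)` parameters implied by the call site below, with made-up field names:

```python
from langflow.custom.custom_component.component import Component


class ExampleComponent(Component):
    def update_build_config(self, build_config: dict, field_value, field_name: str | None = None) -> dict:
        # Runs against the latest version of the template each time the user
        # edits a field, e.g. revealing a dependent option for one mode.
        if field_name == "mode":
            build_config["advanced_option"]["show"] = field_value == "advanced"
        return build_config
```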
@@ -617,20 +633,28 @@ async def custom_component_update( field_name=code_request.field, ) component_node["template"] = updated_build_config + if isinstance(cc_instance, Component): + cc_instance.run_and_validate_update_outputs( + frontend_node=component_node, + field_name=code_request.field, + field_value=code_request.field_value, + ) - return component_node except Exception as exc: - logger.exception(exc) raise HTTPException(status_code=400, detail=str(exc)) from exc + return component_node @router.get("/config", response_model=ConfigResponse) -def get_config(): +async def get_config(): try: from langflow.services.deps import get_settings_service - settings_service: "SettingsService" = get_settings_service() # type: ignore - return settings_service.settings.model_dump() + settings_service: SettingsService = get_settings_service() + + return { + "feature_flags": FEATURE_FLAGS, + **settings_service.settings.model_dump(), + } except Exception as exc: - logger.exception(exc) raise HTTPException(status_code=500, detail=str(exc)) from exc diff --git a/src/backend/base/langflow/api/v1/files.py b/src/backend/base/langflow/api/v1/files.py index 5d9c89970ee2..f04e773822de 100644 --- a/src/backend/base/langflow/api/v1/files.py +++ b/src/backend/base/langflow/api/v1/files.py @@ -1,18 +1,18 @@ -from datetime import datetime import hashlib +from datetime import datetime, timezone from http import HTTPStatus from io import BytesIO -from uuid import UUID from pathlib import Path +from typing import Annotated +from uuid import UUID from fastapi import APIRouter, Depends, HTTPException, UploadFile from fastapi.responses import StreamingResponse - +from langflow.api.utils import AsyncDbSession, CurrentActiveUser from langflow.api.v1.schemas import UploadFileResponse -from langflow.services.auth.utils import get_current_active_user from langflow.services.database.models.flow import Flow -from langflow.services.deps import get_session, get_storage_service +from langflow.services.deps import get_settings_service, get_storage_service from langflow.services.storage.service import StorageService from langflow.services.storage.utils import build_content_type_from_extension @@ -22,14 +22,14 @@ # Create dep that gets the flow_id from the request # then finds it in the database and returns it while # using the current user as the owner -def get_flow_id( +async def get_flow_id( flow_id: UUID, - current_user=Depends(get_current_active_user), - session=Depends(get_session), + current_user: CurrentActiveUser, + session: AsyncDbSession, ): flow_id_str = str(flow_id) # AttributeError: 'SelectOfScalar' object has no attribute 'first' - flow = session.get(Flow, flow_id_str) + flow = await session.get(Flow, flow_id_str) if not flow: raise HTTPException(status_code=404, detail="Flow not found") if flow.user_id != current_user.id: @@ -39,37 +39,62 @@ def get_flow_id( @router.post("/upload/{flow_id}", status_code=HTTPStatus.CREATED) async def upload_file( + *, file: UploadFile, - flow_id: UUID = Depends(get_flow_id), - storage_service: StorageService = Depends(get_storage_service), -): + flow_id: Annotated[UUID, Depends(get_flow_id)], + current_user: CurrentActiveUser, + session: AsyncDbSession, + storage_service: Annotated[StorageService, Depends(get_storage_service)], +) -> UploadFileResponse: + try: + max_file_size_upload = get_settings_service().settings.max_file_size_upload + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) from e + + if file.size > max_file_size_upload * 1024 * 1024: + raise 
HTTPException( + status_code=413, detail=f"File size is larger than the maximum file size {max_file_size_upload}MB." + ) + try: flow_id_str = str(flow_id) + flow = await session.get(Flow, flow_id_str) + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) from e + + if flow.user_id != current_user.id: + raise HTTPException(status_code=403, detail="You don't have access to this flow") + + try: file_content = await file.read() - timestamp = datetime.now().strftime("%Y-%m-%d_%H-%M-%S") + timestamp = datetime.now(tz=timezone.utc).astimezone().strftime("%Y-%m-%d_%H-%M-%S") file_name = file.filename or hashlib.sha256(file_content).hexdigest() full_file_name = f"{timestamp}_{file_name}" folder = flow_id_str await storage_service.save_file(flow_id=folder, file_name=full_file_name, data=file_content) - return UploadFileResponse(flowId=flow_id_str, file_path=f"{folder}/{full_file_name}") + return UploadFileResponse(flow_id=flow_id_str, file_path=f"{folder}/{full_file_name}") except Exception as e: - raise HTTPException(status_code=500, detail=str(e)) + raise HTTPException(status_code=500, detail=str(e)) from e @router.get("/download/{flow_id}/{file_name}") -async def download_file(file_name: str, flow_id: UUID, storage_service: StorageService = Depends(get_storage_service)): - try: - flow_id_str = str(flow_id) - extension = file_name.split(".")[-1] - - if not extension: - raise HTTPException(status_code=500, detail=f"Extension not found for file {file_name}") +async def download_file( + file_name: str, flow_id: UUID, storage_service: Annotated[StorageService, Depends(get_storage_service)] +): + flow_id_str = str(flow_id) + extension = file_name.split(".")[-1] + if not extension: + raise HTTPException(status_code=500, detail=f"Extension not found for file {file_name}") + try: content_type = build_content_type_from_extension(extension) + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) from e - if not content_type: - raise HTTPException(status_code=500, detail=f"Content type not found for extension {extension}") + if not content_type: + raise HTTPException(status_code=500, detail=f"Content type not found for extension {extension}") + try: file_content = await storage_service.get_file(flow_id=flow_id_str, file_name=file_name) headers = { "Content-Disposition": f"attachment; filename={file_name} filename*=UTF-8''{file_name}", @@ -78,90 +103,99 @@ async def download_file(file_name: str, flow_id: UUID, storage_service: StorageS } return StreamingResponse(BytesIO(file_content), media_type=content_type, headers=headers) except Exception as e: - raise HTTPException(status_code=500, detail=str(e)) + raise HTTPException(status_code=500, detail=str(e)) from e @router.get("/images/{flow_id}/{file_name}") -async def download_image(file_name: str, flow_id: UUID, storage_service: StorageService = Depends(get_storage_service)): - try: - extension = file_name.split(".")[-1] - flow_id_str = str(flow_id) - - if not extension: - raise HTTPException(status_code=500, detail=f"Extension not found for file {file_name}") +async def download_image(file_name: str, flow_id: UUID): + storage_service = get_storage_service() + extension = file_name.split(".")[-1] + flow_id_str = str(flow_id) + if not extension: + raise HTTPException(status_code=500, detail=f"Extension not found for file {file_name}") + try: content_type = build_content_type_from_extension(extension) + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) from e - if not content_type: - 
raise HTTPException(status_code=500, detail=f"Content type not found for extension {extension}") - elif not content_type.startswith("image"): - raise HTTPException(status_code=500, detail=f"Content type {content_type} is not an image") + if not content_type: + raise HTTPException(status_code=500, detail=f"Content type not found for extension {extension}") + if not content_type.startswith("image"): + raise HTTPException(status_code=500, detail=f"Content type {content_type} is not an image") + try: file_content = await storage_service.get_file(flow_id=flow_id_str, file_name=file_name) return StreamingResponse(BytesIO(file_content), media_type=content_type) except Exception as e: - raise HTTPException(status_code=500, detail=str(e)) + raise HTTPException(status_code=500, detail=str(e)) from e @router.get("/profile_pictures/{folder_name}/{file_name}") async def download_profile_picture( folder_name: str, file_name: str, - storage_service: StorageService = Depends(get_storage_service), ): try: + storage_service = get_storage_service() extension = file_name.split(".")[-1] - config_dir = get_storage_service().settings_service.settings.config_dir - config_path = Path(config_dir) # type: ignore + config_dir = storage_service.settings_service.settings.config_dir + config_path = Path(config_dir) # type: ignore[arg-type] folder_path = config_path / "profile_pictures" / folder_name content_type = build_content_type_from_extension(extension) - file_content = await storage_service.get_file(flow_id=folder_path, file_name=file_name) # type: ignore + file_content = await storage_service.get_file(flow_id=folder_path, file_name=file_name) # type: ignore[arg-type] return StreamingResponse(BytesIO(file_content), media_type=content_type) except Exception as e: - raise HTTPException(status_code=500, detail=str(e)) + raise HTTPException(status_code=500, detail=str(e)) from e @router.get("/profile_pictures/list") -async def list_profile_pictures(storage_service: StorageService = Depends(get_storage_service)): +async def list_profile_pictures(): try: - config_dir = get_storage_service().settings_service.settings.config_dir - config_path = Path(config_dir) # type: ignore + storage_service = get_storage_service() + config_dir = storage_service.settings_service.settings.config_dir + config_path = Path(config_dir) # type: ignore[arg-type] people_path = config_path / "profile_pictures/People" space_path = config_path / "profile_pictures/Space" - people = await storage_service.list_files(flow_id=people_path) # type: ignore - space = await storage_service.list_files(flow_id=space_path) # type: ignore + people = await storage_service.list_files(flow_id=people_path) # type: ignore[arg-type] + space = await storage_service.list_files(flow_id=space_path) # type: ignore[arg-type] - files = [f"People/{i}" for i in people] - files += [f"Space/{i}" for i in space] + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) from e - return {"files": files} + files = [f"People/{i}" for i in people] + files += [f"Space/{i}" for i in space] - except Exception as e: - raise HTTPException(status_code=500, detail=str(e)) + return {"files": files} @router.get("/list/{flow_id}") async def list_files( - flow_id: UUID = Depends(get_flow_id), storage_service: StorageService = Depends(get_storage_service) + flow_id: Annotated[UUID, Depends(get_flow_id)], + storage_service: Annotated[StorageService, Depends(get_storage_service)], ): try: flow_id_str = str(flow_id) files = await storage_service.list_files(flow_id=flow_id_str) 
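A note on the dependency style used throughout these rewrites (`get_flow_id` above, and every route that follows): the `... = Depends(...)` defaults are replaced by `Annotated` aliases such as `CurrentActiveUser` and `AsyncDbSession` imported from `langflow.api.utils`. The diff only shows their call sites, so the following is a plausible sketch of those aliases, assuming `get_session` yields an async session in this codebase:

```python
from typing import Annotated

from fastapi import Depends
from sqlmodel.ext.asyncio.session import AsyncSession

from langflow.services.auth.utils import get_current_active_user
from langflow.services.database.models.user.model import User
from langflow.services.deps import get_session  # assumed here to yield an AsyncSession

# FastAPI resolves the Depends() embedded in Annotated, so routes can declare
# plain keyword-only parameters without `= Depends(...)` default values.
CurrentActiveUser = Annotated[User, Depends(get_current_active_user)]
AsyncDbSession = Annotated[AsyncSession, Depends(get_session)]
```

With the aliases in place, `current_user: User = Depends(get_current_active_user)` shrinks to `current_user: CurrentActiveUser`, which is what makes the keyword-only `*,` signatures in these routes read cleanly.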
- return {"files": files} except Exception as e: - raise HTTPException(status_code=500, detail=str(e)) + raise HTTPException(status_code=500, detail=str(e)) from e + + return {"files": files} @router.delete("/delete/{flow_id}/{file_name}") async def delete_file( - file_name: str, flow_id: UUID = Depends(get_flow_id), storage_service: StorageService = Depends(get_storage_service) + file_name: str, + flow_id: Annotated[UUID, Depends(get_flow_id)], + storage_service: Annotated[StorageService, Depends(get_storage_service)], ): try: flow_id_str = str(flow_id) await storage_service.delete_file(flow_id=flow_id_str, file_name=file_name) - return {"message": f"File {file_name} deleted successfully"} except Exception as e: - raise HTTPException(status_code=500, detail=str(e)) + raise HTTPException(status_code=500, detail=str(e)) from e + + return {"message": f"File {file_name} deleted successfully"} diff --git a/src/backend/base/langflow/api/v1/flows.py b/src/backend/base/langflow/api/v1/flows.py index ce03013f1f8e..7a999dea9c73 100644 --- a/src/backend/base/langflow/api/v1/flows.py +++ b/src/backend/base/langflow/api/v1/flows.py @@ -1,56 +1,66 @@ +from __future__ import annotations + import io import json import re import zipfile from datetime import datetime, timezone -from typing import List +from typing import Annotated from uuid import UUID import orjson from fastapi import APIRouter, Depends, File, HTTPException, UploadFile from fastapi.encoders import jsonable_encoder from fastapi.responses import StreamingResponse -from loguru import logger -from sqlmodel import Session, and_, col, select - -from langflow.api.utils import remove_api_keys, validate_is_component +from fastapi_pagination import Page, Params, add_pagination +from fastapi_pagination.ext.sqlalchemy import paginate +from sqlmodel import and_, col, select +from sqlmodel.ext.asyncio.session import AsyncSession + +from langflow.api.utils import ( + AsyncDbSession, + CurrentActiveUser, + cascade_delete_flow, + remove_api_keys, + validate_is_component, +) from langflow.api.v1.schemas import FlowListCreate from langflow.initial_setup.setup import STARTER_FOLDER_NAME -from langflow.services.auth.utils import get_current_active_user from langflow.services.database.models.flow import Flow, FlowCreate, FlowRead, FlowUpdate -from langflow.services.database.models.flow.utils import delete_flow_by_id, get_webhook_component_in_flow +from langflow.services.database.models.flow.model import FlowHeader +from langflow.services.database.models.flow.utils import get_webhook_component_in_flow from langflow.services.database.models.folder.constants import DEFAULT_FOLDER_NAME from langflow.services.database.models.folder.model import Folder from langflow.services.database.models.transactions.crud import get_transactions_by_flow_id -from langflow.services.database.models.user.model import User from langflow.services.database.models.vertex_builds.crud import get_vertex_builds_by_flow_id -from langflow.services.deps import get_session, get_settings_service +from langflow.services.deps import get_settings_service from langflow.services.settings.service import SettingsService # build router router = APIRouter(prefix="/flows", tags=["Flows"]) -@router.post("/", response_model=FlowRead, status_code=201) -def create_flow( +async def _new_flow( *, - session: Session = Depends(get_session), + session: AsyncSession, flow: FlowCreate, - current_user: User = Depends(get_current_active_user), + user_id: UUID, ): try: """Create a new flow.""" if flow.user_id is None: - 
flow.user_id = current_user.id + flow.user_id = user_id # First check if the flow.name is unique # there might be flows with name like: "MyFlow", "MyFlow (1)", "MyFlow (2)" # so we need to check if the name is unique with `like` operator # if we find a flow with the same name, we add a number to the end of the name # based on the highest number found - if session.exec(select(Flow).where(Flow.name == flow.name).where(Flow.user_id == current_user.id)).first(): - flows = session.exec( - select(Flow).where(Flow.name.like(f"{flow.name} (%")).where(Flow.user_id == current_user.id) # type: ignore + if (await session.exec(select(Flow).where(Flow.name == flow.name).where(Flow.user_id == user_id))).first(): + flows = ( + await session.exec( + select(Flow).where(Flow.name.like(f"{flow.name} (%")).where(Flow.user_id == user_id) # type: ignore[attr-defined] + ) ).all() if flows: extract_number = re.compile(r"\((\d+)\)$") @@ -66,20 +76,24 @@ def create_flow( # Now check if the endpoint is unique if ( flow.endpoint_name - and session.exec( - select(Flow).where(Flow.endpoint_name == flow.endpoint_name).where(Flow.user_id == current_user.id) + and ( + await session.exec( + select(Flow).where(Flow.endpoint_name == flow.endpoint_name).where(Flow.user_id == user_id) + ) ).first() ): - flows = session.exec( - select(Flow) - .where(Flow.endpoint_name.like(f"{flow.endpoint_name}-%")) # type: ignore - .where(Flow.user_id == current_user.id) + flows = ( + await session.exec( + select(Flow) + .where(Flow.endpoint_name.like(f"{flow.endpoint_name}-%")) # type: ignore[union-attr] + .where(Flow.user_id == user_id) + ) ).all() if flows: - # The endpoitn name is like "my-endpoint","my-endpoint-1", "my-endpoint-2" + # The endpoint name is like "my-endpoint","my-endpoint-1", "my-endpoint-2" # so we need to get the highest number and add 1 # we need to get the last part of the endpoint name - numbers = [int(flow.endpoint_name.split("-")[-1]) for flow in flows] # type: ignore + numbers = [int(flow.endpoint_name.split("-")[-1]) for flow in flows] flow.endpoint_name = f"{flow.endpoint_name}-{max(numbers) + 1}" else: flow.endpoint_name = f"{flow.endpoint_name}-1" @@ -89,21 +103,37 @@ def create_flow( if db_flow.folder_id is None: # Make sure flows always have a folder - default_folder = session.exec( - select(Folder).where(Folder.name == DEFAULT_FOLDER_NAME, Folder.user_id == current_user.id) + default_folder = ( + await session.exec(select(Folder).where(Folder.name == DEFAULT_FOLDER_NAME, Folder.user_id == user_id)) ).first() if default_folder: db_flow.folder_id = default_folder.id session.add(db_flow) - session.commit() - session.refresh(db_flow) - return db_flow except Exception as e: # If it is a validation error, return the error message if hasattr(e, "errors"): raise HTTPException(status_code=400, detail=str(e)) from e - elif "UNIQUE constraint failed" in str(e): + if isinstance(e, HTTPException): + raise + raise HTTPException(status_code=500, detail=str(e)) from e + + return db_flow + + +@router.post("/", response_model=FlowRead, status_code=201) +async def create_flow( + *, + session: AsyncDbSession, + flow: FlowCreate, + current_user: CurrentActiveUser, +): + try: + db_flow = await _new_flow(session=session, flow=flow, user_id=current_user.id) + await session.commit() + await session.refresh(db_flow) + except Exception as e: + if "UNIQUE constraint failed" in str(e): # Get the name of the column that failed columns = str(e).split("UNIQUE constraint failed: ")[1].split(".")[1].split("\n")[0] # UNIQUE constraint failed: 
flow.user_id, flow.name @@ -114,70 +144,101 @@ def create_flow( raise HTTPException( status_code=400, detail=f"{column.capitalize().replace('_', ' ')} must be unique" ) from e - elif isinstance(e, HTTPException): - raise e - else: - raise HTTPException(status_code=500, detail=str(e)) from e + if isinstance(e, HTTPException): + raise + raise HTTPException(status_code=500, detail=str(e)) from e + return db_flow -@router.get("/", response_model=list[FlowRead], status_code=200) -def read_flows( +@router.get("/", response_model=list[FlowRead] | Page[FlowRead] | list[FlowHeader], status_code=200) +async def read_flows( *, - current_user: User = Depends(get_current_active_user), - session: Session = Depends(get_session), - settings_service: "SettingsService" = Depends(get_settings_service), + current_user: CurrentActiveUser, + session: AsyncDbSession, remove_example_flows: bool = False, + components_only: bool = False, + get_all: bool = True, + folder_id: UUID | None = None, + params: Annotated[Params, Depends()], + header_flows: bool = False, ): - """ - Retrieve a list of flows. + """Retrieve a list of flows with pagination support. Args: current_user (User): The current authenticated user. session (Session): The database session. settings_service (SettingsService): The settings service. - remove_example_flows (bool, optional): Whether to remove example flows. Defaults to False. + components_only (bool, optional): Whether to return only components. Defaults to False. + get_all (bool, optional): Whether to return all flows without pagination. Defaults to True. + **This field must remain True for backward compatibility with the frontend (Release 1.0.20).** + + folder_id (UUID, optional): The folder ID. Defaults to None. + params (Params): Pagination parameters. + remove_example_flows (bool, optional): Whether to remove example flows. Defaults to False. + header_flows (bool, optional): Whether to return only specific headers of the flows. Defaults to False. Returns: - List[Dict]: A list of flows in JSON format. + list[FlowRead] | Page[FlowRead] | list[FlowHeader] + A list of flows, a paginated page of flows, or a list of flow headers. """ - try: - auth_settings = settings_service.auth_settings + auth_settings = get_settings_service().auth_settings + + default_folder = (await session.exec(select(Folder).where(Folder.name == DEFAULT_FOLDER_NAME))).first() + default_folder_id = default_folder.id if default_folder else None + + starter_folder = (await session.exec(select(Folder).where(Folder.name == STARTER_FOLDER_NAME))).first() + starter_folder_id = starter_folder.id if starter_folder else None + + if not starter_folder and not default_folder: + raise HTTPException( + status_code=404, + detail="Starter folder and default folder not found. 
Please create a folder and add flows to it.", + ) + + if not folder_id: + folder_id = default_folder_id + if auth_settings.AUTO_LOGIN: - flows = session.exec( - select(Flow).where( - (Flow.user_id == None) | (Flow.user_id == current_user.id) # noqa - ) - ).all() + stmt = select(Flow).where( + (Flow.user_id == None) | (Flow.user_id == current_user.id) # noqa: E711 + ) else: - flows = current_user.flows - - flows = validate_is_component(flows) # type: ignore - flow_ids = [flow.id for flow in flows] - # with the session get the flows that DO NOT have a user_id - if not remove_example_flows: - try: - folder = session.exec(select(Folder).where(Folder.name == STARTER_FOLDER_NAME)).first() - - example_flows = folder.flows if folder else [] - for example_flow in example_flows: - if example_flow.id not in flow_ids: - flows.append(example_flow) # type: ignore - except Exception as e: - logger.error(e) + stmt = select(Flow).where(Flow.user_id == current_user.id) + + if remove_example_flows: + stmt = stmt.where(Flow.folder_id != starter_folder_id) + + if components_only: + stmt = stmt.where(Flow.is_component == True) # noqa: E712 + + if get_all: + flows = (await session.exec(stmt)).all() + flows = validate_is_component(flows) + if components_only: + flows = [flow for flow in flows if flow.is_component] + if remove_example_flows and starter_folder_id: + flows = [flow for flow in flows if flow.folder_id != starter_folder_id] + if header_flows: + return [ + {"id": flow.id, "name": flow.name, "folder_id": flow.folder_id, "is_component": flow.is_component} + for flow in flows + ] + return flows + + stmt = stmt.where(Flow.folder_id == folder_id) + return await paginate(session, stmt, params=params) + except Exception as e: raise HTTPException(status_code=500, detail=str(e)) from e - return [jsonable_encoder(flow) for flow in flows] -@router.get("/{flow_id}", response_model=FlowRead, status_code=200) -def read_flow( - *, - session: Session = Depends(get_session), +async def _read_flow( + session: AsyncSession, flow_id: UUID, - current_user: User = Depends(get_current_active_user), - settings_service: "SettingsService" = Depends(get_settings_service), + user_id: UUID, + settings_service: SettingsService, ): """Read a flow.""" auth_settings = settings_service.auth_settings @@ -186,33 +247,48 @@ def read_flow( # If auto login is enable user_id can be current_user.id or None # so write an OR stmt = stmt.where( - (Flow.user_id == current_user.id) | (Flow.user_id == None) # noqa - ) # noqa - if user_flow := session.exec(stmt).first(): + (Flow.user_id == user_id) | (Flow.user_id == None) # noqa: E711 + ) + return (await session.exec(stmt)).first() + + +@router.get("/{flow_id}", response_model=FlowRead, status_code=200) +async def read_flow( + *, + session: AsyncDbSession, + flow_id: UUID, + current_user: CurrentActiveUser, +): + """Read a flow.""" + if user_flow := await _read_flow(session, flow_id, current_user.id, get_settings_service()): return user_flow - else: - raise HTTPException(status_code=404, detail="Flow not found") + raise HTTPException(status_code=404, detail="Flow not found") @router.patch("/{flow_id}", response_model=FlowRead, status_code=200) -def update_flow( +async def update_flow( *, - session: Session = Depends(get_session), + session: AsyncDbSession, flow_id: UUID, flow: FlowUpdate, - current_user: User = Depends(get_current_active_user), - settings_service=Depends(get_settings_service), + current_user: CurrentActiveUser, ): """Update a flow.""" + settings_service = get_settings_service() try: 
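`read_flows` keeps `get_all=True` as the backward-compatible default and only paginates when the caller opts in. A minimal sketch of the fastapi-pagination wiring it relies on, reusing `Flow` and `FlowRead` from the diff plus the assumed `AsyncDbSession` alias sketched earlier:

```python
from typing import Annotated

from fastapi import APIRouter, Depends
from fastapi_pagination import Page, Params, add_pagination
from fastapi_pagination.ext.sqlalchemy import paginate
from sqlmodel import select

router = APIRouter()


@router.get("/flows", response_model=Page[FlowRead])
async def list_flows(params: Annotated[Params, Depends()], session: AsyncDbSession):
    # paginate() applies LIMIT/OFFSET from the ?page=&size= query parameters
    # declared by Params and issues a separate COUNT query for the total.
    stmt = select(Flow).order_by(Flow.updated_at.desc())
    return await paginate(session, stmt, params=params)


add_pagination(router)  # resolves the Page[...] response models, as at the end of flows.py
```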
- db_flow = read_flow( + db_flow = await _read_flow( session=session, flow_id=flow_id, - current_user=current_user, + user_id=current_user.id, settings_service=settings_service, ) - if not db_flow: - raise HTTPException(status_code=404, detail="Flow not found") + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) from e + + if not db_flow: + raise HTTPException(status_code=404, detail="Flow not found") + + try: flow_data = flow.model_dump(exclude_unset=True) if settings_service.settings.remove_api_keys: flow_data = remove_api_keys(flow_data) @@ -223,18 +299,17 @@ def update_flow( db_flow.updated_at = datetime.now(timezone.utc) if db_flow.folder_id is None: - default_folder = session.exec(select(Folder).where(Folder.name == DEFAULT_FOLDER_NAME)).first() + default_folder = (await session.exec(select(Folder).where(Folder.name == DEFAULT_FOLDER_NAME))).first() if default_folder: db_flow.folder_id = default_folder.id session.add(db_flow) - session.commit() - session.refresh(db_flow) - return db_flow + await session.commit() + await session.refresh(db_flow) except Exception as e: # If it is a validation error, return the error message if hasattr(e, "errors"): raise HTTPException(status_code=400, detail=str(e)) from e - elif "UNIQUE constraint failed" in str(e): + if "UNIQUE constraint failed" in str(e): # Get the name of the column that failed columns = str(e).split("UNIQUE constraint failed: ")[1].split(".")[1].split("\n")[0] # UNIQUE constraint failed: flow.user_id, flow.name @@ -245,40 +320,38 @@ def update_flow( raise HTTPException( status_code=400, detail=f"{column.capitalize().replace('_', ' ')} must be unique" ) from e - elif isinstance(e, HTTPException): - raise e - else: - raise HTTPException(status_code=500, detail=str(e)) from e + raise HTTPException(status_code=500, detail=str(e)) from e + + return db_flow @router.delete("/{flow_id}", status_code=200) -def delete_flow( +async def delete_flow( *, - session: Session = Depends(get_session), + session: AsyncDbSession, flow_id: UUID, - current_user: User = Depends(get_current_active_user), - settings_service=Depends(get_settings_service), + current_user: CurrentActiveUser, ): """Delete a flow.""" - flow = read_flow( + flow = await _read_flow( session=session, flow_id=flow_id, - current_user=current_user, - settings_service=settings_service, + user_id=current_user.id, + settings_service=get_settings_service(), ) if not flow: raise HTTPException(status_code=404, detail="Flow not found") - delete_flow_by_id(str(flow_id), session) - session.commit() + await cascade_delete_flow(session, flow.id) + await session.commit() return {"message": "Flow deleted successfully"} -@router.post("/batch/", response_model=List[FlowRead], status_code=201) -def create_flows( +@router.post("/batch/", response_model=list[FlowRead], status_code=201) +async def create_flows( *, - session: Session = Depends(get_session), + session: AsyncDbSession, flow_list: FlowListCreate, - current_user: User = Depends(get_current_active_user), + current_user: CurrentActiveUser, ): """Create multiple new flows.""" db_flows = [] @@ -287,82 +360,102 @@ def create_flows( db_flow = Flow.model_validate(flow, from_attributes=True) session.add(db_flow) db_flows.append(db_flow) - session.commit() + await session.commit() for db_flow in db_flows: - session.refresh(db_flow) + await session.refresh(db_flow) return db_flows -@router.post("/upload/", response_model=List[FlowRead], status_code=201) +@router.post("/upload/", response_model=list[FlowRead], 
status_code=201) async def upload_file( *, - session: Session = Depends(get_session), - file: UploadFile = File(...), - current_user: User = Depends(get_current_active_user), + session: AsyncDbSession, + file: Annotated[UploadFile, File(...)], + current_user: CurrentActiveUser, folder_id: UUID | None = None, ): """Upload flows from a file.""" contents = await file.read() data = orjson.loads(contents) response_list = [] - if "flows" in data: - flow_list = FlowListCreate(**data) - else: - flow_list = FlowListCreate(flows=[FlowCreate(**data)]) + flow_list = FlowListCreate(**data) if "flows" in data else FlowListCreate(flows=[FlowCreate(**data)]) # Now we set the user_id for all flows for flow in flow_list.flows: flow.user_id = current_user.id if folder_id: flow.folder_id = folder_id - response = create_flow(session=session, flow=flow, current_user=current_user) + response = await _new_flow(session=session, flow=flow, user_id=current_user.id) response_list.append(response) + try: + await session.commit() + for db_flow in response_list: + await session.refresh(db_flow) + except Exception as e: + if "UNIQUE constraint failed" in str(e): + # Get the name of the column that failed + columns = str(e).split("UNIQUE constraint failed: ")[1].split(".")[1].split("\n")[0] + # UNIQUE constraint failed: flow.user_id, flow.name + # or UNIQUE constraint failed: flow.name + # if the column has id in it, we want the other column + column = columns.split(",")[1] if "id" in columns.split(",")[0] else columns.split(",")[0] + + raise HTTPException( + status_code=400, detail=f"{column.capitalize().replace('_', ' ')} must be unique" + ) from e + if isinstance(e, HTTPException): + raise + raise HTTPException(status_code=500, detail=str(e)) from e + return response_list @router.delete("/") async def delete_multiple_flows( - flow_ids: List[UUID], user: User = Depends(get_current_active_user), db: Session = Depends(get_session) + flow_ids: list[UUID], + user: CurrentActiveUser, + db: AsyncDbSession, ): - """ - Delete multiple flows by their IDs. + """Delete multiple flows by their IDs. Args: flow_ids (List[str]): The list of flow IDs to delete. user (User, optional): The user making the request. Defaults to the current active user. + db (AsyncSession): The async database session. Returns: dict: A dictionary containing the number of flows deleted. 
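The SQLite `UNIQUE constraint failed` parsing now appears verbatim in `create_flow`, `update_flow`, and `upload_file`. A sketch of how that duplicated branch could collapse into one helper; the logic is copied from the diff, and only the helper name `unique_violation_to_http` is hypothetical:

```python
from fastapi import HTTPException


def unique_violation_to_http(e: Exception) -> HTTPException:
    """Translate SQLite's 'UNIQUE constraint failed' message into a 400 response."""
    # e.g. "UNIQUE constraint failed: flow.user_id, flow.name" -> "user_id, flow"
    columns = str(e).split("UNIQUE constraint failed: ")[1].split(".")[1].split("\n")[0]
    # if the first column is an id column (e.g. user_id), report the other one
    column = columns.split(",")[1] if "id" in columns.split(",")[0] else columns.split(",")[0]
    return HTTPException(status_code=400, detail=f"{column.capitalize().replace('_', ' ')} must be unique")
```

Each `except` branch would then reduce to `raise unique_violation_to_http(e) from e` whenever the marker string is present.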
""" try: - flows_to_delete = db.exec(select(Flow).where(col(Flow.id).in_(flow_ids)).where(Flow.user_id == user.id)).all() + flows_to_delete = ( + await db.exec(select(Flow).where(col(Flow.id).in_(flow_ids)).where(Flow.user_id == user.id)) + ).all() for flow in flows_to_delete: - transactions_to_delete = get_transactions_by_flow_id(db, flow.id) + transactions_to_delete = await get_transactions_by_flow_id(db, flow.id) for transaction in transactions_to_delete: - db.delete(transaction) + await db.delete(transaction) - builds_to_delete = get_vertex_builds_by_flow_id(db, flow.id) + builds_to_delete = await get_vertex_builds_by_flow_id(db, flow.id) for build in builds_to_delete: - db.delete(build) + await db.delete(build) - db.delete(flow) + await db.delete(flow) - db.commit() + await db.commit() return {"deleted": len(flows_to_delete)} except Exception as exc: - logger.exception(exc) raise HTTPException(status_code=500, detail=str(exc)) from exc @router.post("/download/", status_code=200) async def download_multiple_file( - flow_ids: List[UUID], - user: User = Depends(get_current_active_user), - db: Session = Depends(get_session), + flow_ids: list[UUID], + user: CurrentActiveUser, + db: AsyncDbSession, ): """Download all flows as a zip file.""" - flows = db.exec(select(Flow).where(and_(Flow.user_id == user.id, Flow.id.in_(flow_ids)))).all() # type: ignore + flows = (await db.exec(select(Flow).where(and_(Flow.user_id == user.id, Flow.id.in_(flow_ids))))).all() # type: ignore[attr-defined] if not flows: raise HTTPException(status_code=404, detail="No flows found.") @@ -386,7 +479,7 @@ async def download_multiple_file( zip_stream.seek(0) # Generate the filename with the current datetime - current_time = datetime.now().strftime("%Y%m%d_%H%M%S") + current_time = datetime.now(tz=timezone.utc).astimezone().strftime("%Y%m%d_%H%M%S") filename = f"{current_time}_langflow_flows.zip" return StreamingResponse( @@ -394,5 +487,34 @@ async def download_multiple_file( media_type="application/x-zip-compressed", headers={"Content-Disposition": f"attachment; filename={filename}"}, ) - else: - return flows_without_api_keys[0] + return flows_without_api_keys[0] + + +@router.get("/basic_examples/", response_model=list[FlowRead], status_code=200) +async def read_basic_examples( + *, + session: AsyncDbSession, +): + """Retrieve a list of basic example flows. + + Args: + session (Session): The database session. + + Returns: + list[FlowRead]: A list of basic example flows. 
+ """ + try: + # Get the starter folder + starter_folder = (await session.exec(select(Folder).where(Folder.name == STARTER_FOLDER_NAME))).first() + + if not starter_folder: + return [] + + # Get all flows in the starter folder + return (await session.exec(select(Flow).where(Flow.folder_id == starter_folder.id))).all() + + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) from e + + +add_pagination(router) diff --git a/src/backend/base/langflow/api/v1/folders.py b/src/backend/base/langflow/api/v1/folders.py index d15d3a4ab011..910cfa19bbd4 100644 --- a/src/backend/base/langflow/api/v1/folders.py +++ b/src/backend/base/langflow/api/v1/folders.py @@ -1,15 +1,19 @@ -from typing import List +from typing import Annotated import orjson from fastapi import APIRouter, Depends, File, HTTPException, Response, UploadFile, status +from fastapi_pagination import Params +from fastapi_pagination.ext.sqlmodel import paginate from sqlalchemy import or_, update -from sqlmodel import Session, select +from sqlalchemy.orm import selectinload +from sqlmodel import select +from langflow.api.utils import AsyncDbSession, CurrentActiveUser, cascade_delete_flow, custom_params from langflow.api.v1.flows import create_flows from langflow.api.v1.schemas import FlowListCreate, FlowListReadWithFolderName from langflow.helpers.flow import generate_unique_flow_name from langflow.helpers.folders import generate_unique_folder_name -from langflow.services.auth.utils import get_current_active_user +from langflow.initial_setup.setup import STARTER_FOLDER_NAME from langflow.services.database.models.flow.model import Flow, FlowCreate, FlowRead from langflow.services.database.models.folder.constants import DEFAULT_FOLDER_NAME from langflow.services.database.models.folder.model import ( @@ -19,18 +23,17 @@ FolderReadWithFlows, FolderUpdate, ) -from langflow.services.database.models.user.model import User -from langflow.services.deps import get_session +from langflow.services.database.models.folder.pagination_model import FolderWithPaginatedFlows router = APIRouter(prefix="/folders", tags=["Folders"]) @router.post("/", response_model=FolderRead, status_code=201) -def create_folder( +async def create_folder( *, - session: Session = Depends(get_session), + session: AsyncDbSession, folder: FolderCreate, - current_user: User = Depends(get_current_active_user), + current_user: CurrentActiveUser, ): try: new_folder = Folder.model_validate(folder, from_attributes=True) @@ -40,12 +43,14 @@ def create_folder( # so we need to check if the name is unique with `like` operator # if we find a flow with the same name, we add a number to the end of the name # based on the highest number found - if session.exec( - statement=select(Folder).where(Folder.name == new_folder.name).where(Folder.user_id == current_user.id) + if ( + await session.exec( + statement=select(Folder).where(Folder.name == new_folder.name).where(Folder.user_id == current_user.id) + ) ).first(): - folder_results = session.exec( + folder_results = await session.exec( select(Folder).where( - Folder.name.like(f"{new_folder.name}%"), # type: ignore + Folder.name.like(f"{new_folder.name}%"), # type: ignore[attr-defined] Folder.user_id == current_user.id, ) ) @@ -58,165 +63,220 @@ def create_folder( new_folder.name = f"{new_folder.name} (1)" session.add(new_folder) - session.commit() - session.refresh(new_folder) + await session.commit() + await session.refresh(new_folder) if folder.components_list: update_statement_components = ( - 
update(Flow).where(Flow.id.in_(folder.components_list)).values(folder_id=new_folder.id) # type: ignore + update(Flow).where(Flow.id.in_(folder.components_list)).values(folder_id=new_folder.id) # type: ignore[attr-defined] ) - session.exec(update_statement_components) # type: ignore - session.commit() + await session.exec(update_statement_components) + await session.commit() if folder.flows_list: - update_statement_flows = update(Flow).where(Flow.id.in_(folder.flows_list)).values(folder_id=new_folder.id) # type: ignore - session.exec(update_statement_flows) # type: ignore - session.commit() + update_statement_flows = update(Flow).where(Flow.id.in_(folder.flows_list)).values(folder_id=new_folder.id) # type: ignore[attr-defined] + await session.exec(update_statement_flows) + await session.commit() - return new_folder except Exception as e: - raise HTTPException(status_code=500, detail=str(e)) + raise HTTPException(status_code=500, detail=str(e)) from e + + return new_folder -@router.get("/", response_model=List[FolderRead], status_code=200) -def read_folders( +@router.get("/", response_model=list[FolderRead], status_code=200) +async def read_folders( *, - session: Session = Depends(get_session), - current_user: User = Depends(get_current_active_user), + session: AsyncDbSession, + current_user: CurrentActiveUser, ): try: - folders = session.exec( - select(Folder).where( - or_(Folder.user_id == current_user.id, Folder.user_id == None) # type: ignore # noqa: E711 + folders = ( + await session.exec( + select(Folder).where( + or_(Folder.user_id == current_user.id, Folder.user_id == None) # noqa: E711 + ) ) ).all() - sorted_folders = sorted(folders, key=lambda x: x.name != DEFAULT_FOLDER_NAME) - return sorted_folders + folders = [folder for folder in folders if folder.name != STARTER_FOLDER_NAME] + return sorted(folders, key=lambda x: x.name != DEFAULT_FOLDER_NAME) except Exception as e: - raise HTTPException(status_code=500, detail=str(e)) + raise HTTPException(status_code=500, detail=str(e)) from e -@router.get("/{folder_id}", response_model=FolderReadWithFlows, status_code=200) -def read_folder( +@router.get("/{folder_id}", response_model=FolderWithPaginatedFlows | FolderReadWithFlows, status_code=200) +async def read_folder( *, - session: Session = Depends(get_session), + session: AsyncDbSession, folder_id: str, - current_user: User = Depends(get_current_active_user), + current_user: CurrentActiveUser, + params: Annotated[Params | None, Depends(custom_params)], + is_component: bool = False, + is_flow: bool = False, + search: str = "", ): try: - folder = session.exec(select(Folder).where(Folder.id == folder_id, Folder.user_id == current_user.id)).first() - if not folder: - raise HTTPException(status_code=404, detail="Folder not found") - flows_from_current_user_in_folder = [flow for flow in folder.flows if flow.user_id == current_user.id] - folder.flows = flows_from_current_user_in_folder - return folder + folder = ( + await session.exec( + select(Folder) + .options(selectinload(Folder.flows)) + .where(Folder.id == folder_id, Folder.user_id == current_user.id) + ) + ).first() except Exception as e: if "No result found" in str(e): - raise HTTPException(status_code=404, detail="Folder not found") - raise HTTPException(status_code=500, detail=str(e)) + raise HTTPException(status_code=404, detail="Folder not found") from e + raise HTTPException(status_code=500, detail=str(e)) from e + + if not folder: + raise HTTPException(status_code=404, detail="Folder not found") + + try: + if params and 
params.page and params.size: + stmt = select(Flow).where(Flow.folder_id == folder_id) + + if Flow.updated_at is not None: + stmt = stmt.order_by(Flow.updated_at.desc()) # type: ignore[attr-defined] + if is_component: + stmt = stmt.where(Flow.is_component == True) # noqa: E712 + if is_flow: + stmt = stmt.where(Flow.is_component == False) # noqa: E712 + if search: + stmt = stmt.where(Flow.name.like(f"%{search}%")) # type: ignore[attr-defined] + paginated_flows = await paginate(session, stmt, params=params) + + return FolderWithPaginatedFlows(folder=FolderRead.model_validate(folder), flows=paginated_flows) + + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) from e + + flows_from_current_user_in_folder = [flow for flow in folder.flows if flow.user_id == current_user.id] + folder.flows = flows_from_current_user_in_folder + return folder @router.patch("/{folder_id}", response_model=FolderRead, status_code=200) -def update_folder( +async def update_folder( *, - session: Session = Depends(get_session), + session: AsyncDbSession, folder_id: str, folder: FolderUpdate, # Assuming FolderUpdate is a Pydantic model defining updatable fields - current_user: User = Depends(get_current_active_user), + current_user: CurrentActiveUser, ): try: - existing_folder = session.exec( - select(Folder).where(Folder.id == folder_id, Folder.user_id == current_user.id) + existing_folder = ( + await session.exec(select(Folder).where(Folder.id == folder_id, Folder.user_id == current_user.id)) ).first() - if not existing_folder: - raise HTTPException(status_code=404, detail="Folder not found") + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) from e + + if not existing_folder: + raise HTTPException(status_code=404, detail="Folder not found") + + try: if folder.name and folder.name != existing_folder.name: existing_folder.name = folder.name session.add(existing_folder) - session.commit() - session.refresh(existing_folder) + await session.commit() + await session.refresh(existing_folder) return existing_folder folder_data = existing_folder.model_dump(exclude_unset=True) for key, value in folder_data.items(): - if key != "components" and key != "flows": + if key not in {"components", "flows"}: setattr(existing_folder, key, value) session.add(existing_folder) - session.commit() - session.refresh(existing_folder) + await session.commit() + await session.refresh(existing_folder) concat_folder_components = folder.components + folder.flows - flows_ids = session.exec(select(Flow.id).where(Flow.folder_id == existing_folder.id)).all() + flows_ids = (await session.exec(select(Flow.id).where(Flow.folder_id == existing_folder.id))).all() excluded_flows = list(set(flows_ids) - set(concat_folder_components)) - my_collection_folder = session.exec(select(Folder).where(Folder.name == DEFAULT_FOLDER_NAME)).first() + my_collection_folder = (await session.exec(select(Folder).where(Folder.name == DEFAULT_FOLDER_NAME))).first() if my_collection_folder: update_statement_my_collection = ( - update(Flow).where(Flow.id.in_(excluded_flows)).values(folder_id=my_collection_folder.id) # type: ignore + update(Flow).where(Flow.id.in_(excluded_flows)).values(folder_id=my_collection_folder.id) # type: ignore[attr-defined] ) - session.exec(update_statement_my_collection) # type: ignore - session.commit() + await session.exec(update_statement_my_collection) + await session.commit() if concat_folder_components: update_statement_components = ( - 
update(Flow).where(Flow.id.in_(concat_folder_components)).values(folder_id=existing_folder.id) # type: ignore + update(Flow).where(Flow.id.in_(concat_folder_components)).values(folder_id=existing_folder.id) # type: ignore[attr-defined] ) - session.exec(update_statement_components) # type: ignore - session.commit() - - return existing_folder + await session.exec(update_statement_components) + await session.commit() except Exception as e: - raise HTTPException(status_code=500, detail=str(e)) + raise HTTPException(status_code=500, detail=str(e)) from e + + return existing_folder @router.delete("/{folder_id}", status_code=204) -def delete_folder( +async def delete_folder( *, - session: Session = Depends(get_session), + session: AsyncDbSession, folder_id: str, - current_user: User = Depends(get_current_active_user), + current_user: CurrentActiveUser, ): try: - folder = session.exec(select(Folder).where(Folder.id == folder_id, Folder.user_id == current_user.id)).first() - if not folder: - raise HTTPException(status_code=404, detail="Folder not found") - session.delete(folder) - session.commit() - flows = session.exec(select(Flow).where(Flow.folder_id == folder_id, Folder.user_id == current_user.id)).all() - for flow in flows: - session.delete(flow) - session.commit() + flows = ( + await session.exec(select(Flow).where(Flow.folder_id == folder_id, Flow.user_id == current_user.id)) + ).all() + if len(flows) > 0: + for flow in flows: + await cascade_delete_flow(session, flow.id) + + folder = ( + await session.exec(select(Folder).where(Folder.id == folder_id, Folder.user_id == current_user.id)) + ).first() + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) from e + + if not folder: + raise HTTPException(status_code=404, detail="Folder not found") + + try: + await session.delete(folder) + await session.commit() return Response(status_code=status.HTTP_204_NO_CONTENT) except Exception as e: - raise HTTPException(status_code=500, detail=str(e)) + raise HTTPException(status_code=500, detail=str(e)) from e @router.get("/download/{folder_id}", response_model=FlowListReadWithFolderName, status_code=200) async def download_file( *, - session: Session = Depends(get_session), + session: AsyncDbSession, folder_id: str, - current_user: User = Depends(get_current_active_user), + current_user: CurrentActiveUser, ): """Download all flows from folder.""" try: - folder = session.exec(select(Folder).where(Folder.id == folder_id, Folder.user_id == current_user.id)).first() - return folder + folder = ( + await session.exec(select(Folder).where(Folder.id == folder_id, Folder.user_id == current_user.id)) + ).first() except Exception as e: if "No result found" in str(e): - raise HTTPException(status_code=404, detail="Folder not found") - raise HTTPException(status_code=500, detail=str(e)) + raise HTTPException(status_code=404, detail="Folder not found") from e + raise HTTPException(status_code=500, detail=str(e)) from e + + if not folder: + raise HTTPException(status_code=404, detail="Folder not found") + + return folder -@router.post("/upload/", response_model=List[FlowRead], status_code=201) +@router.post("/upload/", response_model=list[FlowRead], status_code=201) async def upload_file( *, - session: Session = Depends(get_session), - file: UploadFile = File(...), - current_user: User = Depends(get_current_active_user), + session: AsyncDbSession, + file: Annotated[UploadFile, File(...)], + current_user: CurrentActiveUser, ): """Upload flows from a file.""" contents = await file.read() @@ -225,7 
+285,7 @@ async def upload_file( if not data: raise HTTPException(status_code=400, detail="No flows found in the file") - folder_name = generate_unique_folder_name(data["folder_name"], current_user.id, session) + folder_name = await generate_unique_folder_name(data["folder_name"], current_user.id, session) data["folder_name"] = folder_name @@ -235,8 +295,8 @@ async def upload_file( new_folder.id = None new_folder.user_id = current_user.id session.add(new_folder) - session.commit() - session.refresh(new_folder) + await session.commit() + await session.refresh(new_folder) del data["folder_name"] del data["folder_description"] @@ -247,9 +307,9 @@ async def upload_file( raise HTTPException(status_code=400, detail="No flows found in the data") # Now we set the user_id for all flows for flow in flow_list.flows: - flow_name = generate_unique_flow_name(flow.name, current_user.id, session) + flow_name = await generate_unique_flow_name(flow.name, current_user.id, session) flow.name = flow_name flow.user_id = current_user.id flow.folder_id = new_folder.id - return create_flows(session=session, flow_list=flow_list, current_user=current_user) + return await create_flows(session=session, flow_list=flow_list, current_user=current_user) diff --git a/src/backend/base/langflow/api/v1/login.py b/src/backend/base/langflow/api/v1/login.py index a966ea446808..05d583e75067 100644 --- a/src/backend/base/langflow/api/v1/login.py +++ b/src/backend/base/langflow/api/v1/login.py @@ -1,8 +1,11 @@ +from __future__ import annotations + +from typing import Annotated + from fastapi import APIRouter, Depends, HTTPException, Request, Response, status from fastapi.security import OAuth2PasswordRequestForm -from langflow.services.database.models.user.crud import get_user_by_id -from sqlmodel import Session +from langflow.api.utils import DbSession from langflow.api.v1.schemas import Token from langflow.services.auth.utils import ( authenticate_user, @@ -11,28 +14,24 @@ create_user_tokens, ) from langflow.services.database.models.folder.utils import create_default_folder_if_it_doesnt_exist -from langflow.services.deps import get_session, get_settings_service, get_variable_service -from langflow.services.settings.service import SettingsService -from langflow.services.variable.service import VariableService +from langflow.services.database.models.user.crud import get_user_by_id +from langflow.services.deps import get_settings_service, get_variable_service router = APIRouter(tags=["Login"]) @router.post("/login", response_model=Token) -async def login_to_get_access_token( +def login_to_get_access_token( response: Response, - form_data: OAuth2PasswordRequestForm = Depends(), - db: Session = Depends(get_session), - # _: Session = Depends(get_current_active_user) - settings_service=Depends(get_settings_service), - variable_service: VariableService = Depends(get_variable_service), + form_data: Annotated[OAuth2PasswordRequestForm, Depends()], + db: DbSession, ): - auth_settings = settings_service.auth_settings + auth_settings = get_settings_service().auth_settings try: user = authenticate_user(form_data.username, form_data.password, db) except Exception as exc: if isinstance(exc, HTTPException): - raise exc + raise raise HTTPException( status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(exc), @@ -67,25 +66,22 @@ async def login_to_get_access_token( expires=None, # Set to None to make it a session cookie domain=auth_settings.COOKIE_DOMAIN, ) - variable_service.initialize_user_variables(user.id, db) + 
get_variable_service().initialize_user_variables(user.id, db) # Create default folder for user if it doesn't exist create_default_folder_if_it_doesnt_exist(db, user.id) return tokens - else: - raise HTTPException( - status_code=status.HTTP_401_UNAUTHORIZED, - detail="Incorrect username or password", - headers={"WWW-Authenticate": "Bearer"}, - ) + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Incorrect username or password", + headers={"WWW-Authenticate": "Bearer"}, + ) @router.get("/auto_login") -async def auto_login( - response: Response, db: Session = Depends(get_session), settings_service=Depends(get_settings_service) -): - auth_settings = settings_service.auth_settings +async def auto_login(response: Response, db: DbSession): + auth_settings = get_settings_service().auth_settings - if settings_service.auth_settings.AUTO_LOGIN: + if auth_settings.AUTO_LOGIN: user_id, tokens = create_user_longterm_token(db) response.set_cookie( "access_token_lf", @@ -128,10 +124,9 @@ async def auto_login( async def refresh_token( request: Request, response: Response, - settings_service: "SettingsService" = Depends(get_settings_service), - db: Session = Depends(get_session), + db: DbSession, ): - auth_settings = settings_service.auth_settings + auth_settings = get_settings_service().auth_settings token = request.cookies.get("refresh_token_lf") @@ -156,12 +151,11 @@ async def refresh_token( domain=auth_settings.COOKIE_DOMAIN, ) return tokens - else: - raise HTTPException( - status_code=status.HTTP_401_UNAUTHORIZED, - detail="Invalid refresh token", - headers={"WWW-Authenticate": "Bearer"}, - ) + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Invalid refresh token", + headers={"WWW-Authenticate": "Bearer"}, + ) @router.post("/logout") diff --git a/src/backend/base/langflow/api/v1/monitor.py b/src/backend/base/langflow/api/v1/monitor.py index 99e86c6c17c7..cf7e28a55bf4 100644 --- a/src/backend/base/langflow/api/v1/monitor.py +++ b/src/backend/base/langflow/api/v1/monitor.py @@ -1,58 +1,51 @@ -from typing import List, Optional +from typing import Annotated from uuid import UUID from fastapi import APIRouter, Depends, HTTPException, Query from sqlalchemy import delete -from sqlmodel import Session, col, select +from sqlmodel import col, select +from langflow.api.utils import AsyncDbSession, DbSession +from langflow.schema.message import MessageResponse from langflow.services.auth.utils import get_current_active_user from langflow.services.database.models.message.model import MessageRead, MessageTable, MessageUpdate from langflow.services.database.models.transactions.crud import get_transactions_by_flow_id from langflow.services.database.models.transactions.model import TransactionReadResponse -from langflow.services.database.models.user.model import User from langflow.services.database.models.vertex_builds.crud import ( - get_vertex_builds_by_flow_id, delete_vertex_builds_by_flow_id, + get_vertex_builds_by_flow_id, ) from langflow.services.database.models.vertex_builds.model import VertexBuildMapModel -from langflow.services.deps import get_session -from langflow.services.monitor.schema import MessageModelResponse router = APIRouter(prefix="/monitor", tags=["Monitor"]) -@router.get("/builds", response_model=VertexBuildMapModel) -async def get_vertex_builds( - flow_id: UUID = Query(), - session: Session = Depends(get_session), -): +@router.get("/builds") +async def get_vertex_builds(flow_id: Annotated[UUID, Query()], session: AsyncDbSession) -> 
VertexBuildMapModel: try: - vertex_builds = get_vertex_builds_by_flow_id(session, flow_id) + vertex_builds = await get_vertex_builds_by_flow_id(session, flow_id) return VertexBuildMapModel.from_list_of_dicts(vertex_builds) except Exception as e: - raise HTTPException(status_code=500, detail=str(e)) + raise HTTPException(status_code=500, detail=str(e)) from e @router.delete("/builds", status_code=204) -async def delete_vertex_builds( - flow_id: UUID = Query(), - session: Session = Depends(get_session), -): +def delete_vertex_builds(flow_id: Annotated[UUID, Query()], session: DbSession) -> None: try: delete_vertex_builds_by_flow_id(session, flow_id) except Exception as e: - raise HTTPException(status_code=500, detail=str(e)) + raise HTTPException(status_code=500, detail=str(e)) from e -@router.get("/messages", response_model=List[MessageModelResponse]) +@router.get("/messages") async def get_messages( - flow_id: Optional[str] = Query(None), - session_id: Optional[str] = Query(None), - sender: Optional[str] = Query(None), - sender_name: Optional[str] = Query(None), - order_by: Optional[str] = Query("timestamp"), - session: Session = Depends(get_session), -): + session: AsyncDbSession, + flow_id: Annotated[str | None, Query()] = None, + session_id: Annotated[str | None, Query()] = None, + sender: Annotated[str | None, Query()] = None, + sender_name: Annotated[str | None, Query()] = None, + order_by: Annotated[str | None, Query()] = "timestamp", +) -> list[MessageResponse]: try: stmt = select(MessageTable) if flow_id: @@ -66,72 +59,110 @@ async def get_messages( if order_by: col = getattr(MessageTable, order_by).asc() stmt = stmt.order_by(col) - messages = session.exec(stmt) - return [MessageModelResponse.model_validate(d, from_attributes=True) for d in messages] + messages = await session.exec(stmt) + return [MessageResponse.model_validate(d, from_attributes=True) for d in messages] except Exception as e: - raise HTTPException(status_code=500, detail=str(e)) + raise HTTPException(status_code=500, detail=str(e)) from e -@router.delete("/messages", status_code=204) -async def delete_messages( - message_ids: List[UUID], - session: Session = Depends(get_session), - current_user: User = Depends(get_current_active_user), -): +@router.delete("/messages", status_code=204, dependencies=[Depends(get_current_active_user)]) +async def delete_messages(message_ids: list[UUID], session: AsyncDbSession) -> None: try: - session.exec(delete(MessageTable).where(MessageTable.id.in_(message_ids))) # type: ignore - session.commit() + await session.exec(delete(MessageTable).where(MessageTable.id.in_(message_ids))) # type: ignore[attr-defined] + await session.commit() except Exception as e: - raise HTTPException(status_code=500, detail=str(e)) + raise HTTPException(status_code=500, detail=str(e)) from e -@router.put("/messages/{message_id}", response_model=MessageRead) +@router.put("/messages/{message_id}", dependencies=[Depends(get_current_active_user)], response_model=MessageRead) async def update_message( message_id: UUID, message: MessageUpdate, - session: Session = Depends(get_session), - user: User = Depends(get_current_active_user), + session: AsyncDbSession, ): try: - db_message = session.get(MessageTable, message_id) - if not db_message: - raise HTTPException(status_code=404, detail="Message not found") + db_message = await session.get(MessageTable, message_id) + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) from e + + if not db_message: + raise HTTPException(status_code=404, 
detail="Message not found") + + try: message_dict = message.model_dump(exclude_unset=True, exclude_none=True) + message_dict["edit"] = True db_message.sqlmodel_update(message_dict) session.add(db_message) - session.commit() - session.refresh(db_message) - return db_message - except HTTPException as e: - raise e + await session.commit() + await session.refresh(db_message) except Exception as e: - raise HTTPException(status_code=500, detail=str(e)) + raise HTTPException(status_code=500, detail=str(e)) from e + + return db_message + + +@router.patch( + "/messages/session/{old_session_id}", + dependencies=[Depends(get_current_active_user)], +) +async def update_session_id( + old_session_id: str, + new_session_id: Annotated[str, Query(..., description="The new session ID to update to")], + session: AsyncDbSession, +) -> list[MessageResponse]: + try: + # Get all messages with the old session ID + stmt = select(MessageTable).where(MessageTable.session_id == old_session_id) + messages = (await session.exec(stmt)).all() + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) from e + + if not messages: + raise HTTPException(status_code=404, detail="No messages found with the given session ID") + + try: + # Update all messages with the new session ID + for message in messages: + message.session_id = new_session_id + + session.add_all(messages) + + await session.commit() + message_responses = [] + for message in messages: + await session.refresh(message) + message_responses.append(MessageResponse.model_validate(message, from_attributes=True)) + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) from e + + return message_responses @router.delete("/messages/session/{session_id}", status_code=204) async def delete_messages_session( session_id: str, - session: Session = Depends(get_session), + session: AsyncDbSession, ): try: - session.exec( # type: ignore + await session.exec( delete(MessageTable) .where(col(MessageTable.session_id) == session_id) .execution_options(synchronize_session="fetch") ) - session.commit() - return {"message": "Messages deleted successfully"} + await session.commit() except Exception as e: - raise HTTPException(status_code=500, detail=str(e)) + raise HTTPException(status_code=500, detail=str(e)) from e + + return {"message": "Messages deleted successfully"} -@router.get("/transactions", response_model=List[TransactionReadResponse]) +@router.get("/transactions") async def get_transactions( - flow_id: UUID = Query(), - session: Session = Depends(get_session), -): + flow_id: Annotated[UUID, Query()], + session: AsyncDbSession, +) -> list[TransactionReadResponse]: try: - transactions = get_transactions_by_flow_id(session, flow_id) + transactions = await get_transactions_by_flow_id(session, flow_id) return [ TransactionReadResponse( transaction_id=t.id, @@ -147,4 +178,4 @@ async def get_transactions( for t in transactions ] except Exception as e: - raise HTTPException(status_code=500, detail=str(e)) + raise HTTPException(status_code=500, detail=str(e)) from e diff --git a/src/backend/base/langflow/api/v1/schemas.py b/src/backend/base/langflow/api/v1/schemas.py index da55fd5e9c51..9d9b21018ffd 100644 --- a/src/backend/base/langflow/api/v1/schemas.py +++ b/src/backend/base/langflow/api/v1/schemas.py @@ -1,10 +1,17 @@ from datetime import datetime, timezone from enum import Enum from pathlib import Path -from typing import Any, Dict, List, Optional, Union +from typing import Any from uuid import UUID -from pydantic import BaseModel, 
ConfigDict, Field, field_serializer, field_validator, model_serializer +from pydantic import ( + BaseModel, + ConfigDict, + Field, + field_serializer, + field_validator, + model_serializer, +) from langflow.graph.schema import RunOutputs from langflow.graph.utils import serialize_field @@ -15,7 +22,9 @@ from langflow.services.database.models.base import orjson_dumps from langflow.services.database.models.flow import FlowCreate, FlowRead from langflow.services.database.models.user import UserRead +from langflow.services.settings.feature_flags import FeatureFlags from langflow.services.tracing.schema import Log +from langflow.utils.util_strings import truncate_long_strings class BuildStatus(Enum): @@ -28,7 +37,7 @@ class BuildStatus(Enum): class TweaksRequest(BaseModel): - tweaks: Optional[Dict[str, Dict[str, Any]]] = Field(default_factory=dict) + tweaks: dict[str, dict[str, Any]] | None = Field(default_factory=dict) class UpdateTemplateRequest(BaseModel): @@ -38,25 +47,25 @@ class UpdateTemplateRequest(BaseModel): class TaskResponse(BaseModel): """Task response schema.""" - id: Optional[str] = Field(None) - href: Optional[str] = Field(None) + id: str | None = Field(None) + href: str | None = Field(None) class ProcessResponse(BaseModel): """Process response schema.""" result: Any - status: Optional[str] = None - task: Optional[TaskResponse] = None - session_id: Optional[str] = None - backend: Optional[str] = None + status: str | None = None + task: TaskResponse | None = None + session_id: str | None = None + backend: str | None = None class RunResponse(BaseModel): """Run response schema.""" - outputs: Optional[List[RunOutputs]] = [] - session_id: Optional[str] = None + outputs: list[RunOutputs] | None = [] + session_id: str | None = None @model_serializer(mode="plain") def serialize(self): @@ -76,23 +85,23 @@ def serialize(self): class PreloadResponse(BaseModel): """Preload response schema.""" - session_id: Optional[str] = None - is_clear: Optional[bool] = None + session_id: str | None = None + is_clear: bool | None = None class TaskStatusResponse(BaseModel): """Task status response schema.""" status: str - result: Optional[Any] = None + result: Any | None = None class ChatMessage(BaseModel): """Chat message schema.""" is_bot: bool = False - message: Union[str, None, dict] = None - chatKey: Optional[str] = None + message: str | None | dict = None + chat_key: str | None = Field(None, serialization_alias="chatKey") type: str = "human" @@ -108,8 +117,9 @@ class ChatResponse(ChatMessage): @field_validator("type") @classmethod def validate_message_type(cls, v): - if v not in ["start", "stream", "end", "error", "info", "file"]: - raise ValueError("type must be start, stream, end, error, info, or file") + if v not in {"start", "stream", "end", "error", "info", "file"}: + msg = "type must be start, stream, end, error, info, or file" + raise ValueError(msg) return v @@ -132,31 +142,32 @@ class FileResponse(ChatMessage): @field_validator("data_type") @classmethod def validate_data_type(cls, v): - if v not in ["image", "csv"]: - raise ValueError("data_type must be image or csv") + if v not in {"image", "csv"}: + msg = "data_type must be image or csv" + raise ValueError(msg) return v class FlowListCreate(BaseModel): - flows: List[FlowCreate] + flows: list[FlowCreate] class FlowListIds(BaseModel): - flow_ids: List[str] + flow_ids: list[str] class FlowListRead(BaseModel): - flows: List[FlowRead] + flows: list[FlowRead] class FlowListReadWithFolderName(BaseModel): - flows: List[FlowRead] + flows: 
list[FlowRead] name: str description: str class InitResponse(BaseModel): - flowId: str + flow_id: str = Field(serialization_alias="flowId") class BuiltResponse(BaseModel): @@ -166,7 +177,7 @@ class BuiltResponse(BaseModel): class UploadFileResponse(BaseModel): """Upload file response schema.""" - flowId: str + flow_id: str = Field(serialization_alias="flowId") file_path: Path @@ -181,7 +192,7 @@ def __str__(self) -> str: class CustomComponentRequest(BaseModel): model_config = ConfigDict(arbitrary_types_allowed=True) code: str - frontend_node: Optional[dict] = None + frontend_node: dict | None = None class CustomComponentResponse(BaseModel): @@ -191,7 +202,7 @@ class CustomComponentResponse(BaseModel): class UpdateCustomComponentRequest(CustomComponentRequest): field: str - field_value: Optional[Union[str, int, float, bool, dict, list]] = None + field_value: str | int | float | bool | dict | list | None = None template: dict def get_template(self): @@ -204,16 +215,16 @@ class CustomComponentResponseError(BaseModel): class ComponentListCreate(BaseModel): - flows: List[FlowCreate] + flows: list[FlowCreate] class ComponentListRead(BaseModel): - flows: List[FlowRead] + flows: list[FlowRead] class UsersResponse(BaseModel): total_count: int - users: List[UserRead] + users: list[UserRead] class ApiKeyResponse(BaseModel): @@ -227,7 +238,7 @@ class ApiKeyResponse(BaseModel): class ApiKeysResponse(BaseModel): total_count: int user_id: UUID - api_keys: List[ApiKeyRead] + api_keys: list[ApiKeyRead] class CreateApiKeyRequest(BaseModel): @@ -245,20 +256,20 @@ class ApiKeyCreateRequest(BaseModel): class VerticesOrderResponse(BaseModel): - ids: List[str] + ids: list[str] run_id: UUID - vertices_to_run: List[str] + vertices_to_run: list[str] class ResultDataResponse(BaseModel): - results: Optional[Any] = Field(default_factory=dict) + results: Any | None = Field(default_factory=dict) outputs: dict[str, OutputValue] = Field(default_factory=dict) logs: dict[str, list[Log]] = Field(default_factory=dict) - message: Optional[Any] = Field(default_factory=dict) - artifacts: Optional[Any] = Field(default_factory=dict) - timedelta: Optional[float] = None - duration: Optional[str] = None - used_frozen_result: Optional[bool] = False + message: Any | None = Field(default_factory=dict) + artifacts: Any | None = Field(default_factory=dict) + timedelta: float | None = None + duration: str | None = None + used_frozen_result: bool | None = False @field_serializer("results") @classmethod @@ -269,29 +280,36 @@ def serialize_results(cls, v): class VertexBuildResponse(BaseModel): - id: Optional[str] = None - inactivated_vertices: Optional[List[str]] = None - next_vertices_ids: Optional[List[str]] = None - top_level_vertices: Optional[List[str]] = None + id: str | None = None + inactivated_vertices: list[str] | None = None + next_vertices_ids: list[str] | None = None + top_level_vertices: list[str] | None = None valid: bool - params: Optional[Any] = Field(default_factory=dict) + params: Any | None = Field(default_factory=dict) """JSON string of the params.""" data: ResultDataResponse """Mapping of vertex ids to result dict containing the param name and result value.""" - timestamp: Optional[datetime] = Field(default_factory=lambda: datetime.now(timezone.utc)) + timestamp: datetime | None = Field(default_factory=lambda: datetime.now(timezone.utc)) """Timestamp of the build.""" + @field_serializer("data") + def serialize_data(self, data: ResultDataResponse) -> dict: + data_dict = data.model_dump() if isinstance(data, BaseModel) 
else data + return truncate_long_strings(data_dict) + class VerticesBuiltResponse(BaseModel): - vertices: List[VertexBuildResponse] + vertices: list[VertexBuildResponse] class InputValueRequest(BaseModel): - components: Optional[List[str]] = [] - input_value: Optional[str] = None - type: Optional[InputType] = Field( + components: list[str] | None = [] + input_value: str | None = None + session: str | None = None + type: InputType | None = Field( "any", - description="Defines on which components the input value should be applied. 'any' applies to all input components.", + description="Defines on which components the input value should be applied. " + "'any' applies to all input components.", ) # add an example @@ -301,9 +319,16 @@ class InputValueRequest(BaseModel): { "components": ["components_id", "Component Name"], "input_value": "input_value", + "session": "session_id", }, {"components": ["Component Name"], "input_value": "input_value"}, {"input_value": "input_value"}, + { + "components": ["Component Name"], + "input_value": "input_value", + "session": "session_id", + }, + {"input_value": "input_value", "session": "session_id"}, {"type": "chat", "input_value": "input_value"}, {"type": "json", "input_value": '{"key": "value"}'}, ] @@ -313,15 +338,15 @@ class InputValueRequest(BaseModel): class SimplifiedAPIRequest(BaseModel): - input_value: Optional[str] = Field(default=None, description="The input value") - input_type: Optional[InputType] = Field(default="chat", description="The input type") - output_type: Optional[OutputType] = Field(default="chat", description="The output type") - output_component: Optional[str] = Field( + input_value: str | None = Field(default=None, description="The input value") + input_type: InputType | None = Field(default="chat", description="The input type") + output_type: OutputType | None = Field(default="chat", description="The output type") + output_component: str | None = Field( default="", description="If there are multiple output components, you can specify the component to get the output from.", ) - tweaks: Optional[Tweaks] = Field(default=None, description="The tweaks") - session_id: Optional[str] = Field(default=None, description="The session id") + tweaks: Tweaks | None = Field(default=None, description="The tweaks") + session_id: str | None = Field(default=None, description="The session id") # (alias) type ReactFlowJsonObject = { @@ -331,13 +356,15 @@ class SimplifiedAPIRequest(BaseModel): # } # import ReactFlowJsonObject class FlowDataRequest(BaseModel): - nodes: List[dict] - edges: List[dict] - viewport: Optional[dict] = None + nodes: list[dict] + edges: list[dict] + viewport: dict | None = None class ConfigResponse(BaseModel): + feature_flags: FeatureFlags frontend_timeout: int auto_saving: bool auto_saving_interval: int health_check_max_retries: int + max_file_size_upload: int diff --git a/src/backend/base/langflow/api/v1/starter_projects.py b/src/backend/base/langflow/api/v1/starter_projects.py index c429759c6414..8e8b99a84ade 100644 --- a/src/backend/base/langflow/api/v1/starter_projects.py +++ b/src/backend/base/langflow/api/v1/starter_projects.py @@ -1,26 +1,17 @@ -from typing import List - from fastapi import APIRouter, Depends, HTTPException -from loguru import logger from langflow.graph.graph.schema import GraphDump from langflow.services.auth.utils import get_current_active_user -from langflow.services.database.models.user.model import User router = APIRouter(prefix="/starter-projects", tags=["Flows"]) -@router.get("/", 
response_model=List[GraphDump], status_code=200) -def get_starter_projects( - *, - current_user: User = Depends(get_current_active_user), -): +@router.get("/", dependencies=[Depends(get_current_active_user)], status_code=200) +async def get_starter_projects() -> list[GraphDump]: """Get a list of starter projects.""" from langflow.initial_setup.load import get_starter_projects_dump try: - flows = get_starter_projects_dump() - return flows + return get_starter_projects_dump() except Exception as exc: - logger.error(exc) raise HTTPException(status_code=500, detail=str(exc)) from exc diff --git a/src/backend/base/langflow/api/v1/store.py b/src/backend/base/langflow/api/v1/store.py index d645480d096b..0466f55143e9 100644 --- a/src/backend/base/langflow/api/v1/store.py +++ b/src/backend/base/langflow/api/v1/store.py @@ -1,14 +1,14 @@ -from typing import Annotated, List, Optional, Union +import asyncio +from typing import Annotated from uuid import UUID from fastapi import APIRouter, Depends, HTTPException, Query from loguru import logger -from langflow.api.utils import check_langflow_version +from langflow.api.utils import CurrentActiveUser, check_langflow_version from langflow.services.auth import utils as auth_utils -from langflow.services.database.models.user.model import User from langflow.services.deps import get_settings_service, get_store_service -from langflow.services.store.exceptions import CustomException +from langflow.services.store.exceptions import CustomError from langflow.services.store.schema import ( CreateComponentResponse, DownloadComponentResponse, @@ -17,59 +17,47 @@ TagResponse, UsersLikesResponse, ) -from langflow.services.store.service import StoreService router = APIRouter(prefix="/store", tags=["Components Store"]) -def get_user_store_api_key( - user: User = Depends(auth_utils.get_current_active_user), - settings_service=Depends(get_settings_service), -): +def get_user_store_api_key(user: CurrentActiveUser): if not user.store_api_key: raise HTTPException(status_code=400, detail="You must have a store API key set.") try: - decrypted = auth_utils.decrypt_api_key(user.store_api_key, settings_service) - return decrypted + return auth_utils.decrypt_api_key(user.store_api_key, get_settings_service()) except Exception as e: raise HTTPException(status_code=500, detail="Failed to decrypt API key. 
Please set a new one.") from e -def get_optional_user_store_api_key( - user: User = Depends(auth_utils.get_current_active_user), - settings_service=Depends(get_settings_service), -): +def get_optional_user_store_api_key(user: CurrentActiveUser): if not user.store_api_key: return None try: - decrypted = auth_utils.decrypt_api_key(user.store_api_key, settings_service) - return decrypted - except Exception as e: - logger.error(f"Failed to decrypt API key: {e}") + return auth_utils.decrypt_api_key(user.store_api_key, get_settings_service()) + except Exception: # noqa: BLE001 + logger.exception("Failed to decrypt API key") return user.store_api_key @router.get("/check/") -def check_if_store_is_enabled( - settings_service=Depends(get_settings_service), -): +async def check_if_store_is_enabled(): return { - "enabled": settings_service.settings.store, + "enabled": get_settings_service().settings.store, } @router.get("/check/api_key") async def check_if_store_has_api_key( - api_key: Optional[str] = Depends(get_optional_user_store_api_key), - store_service: StoreService = Depends(get_store_service), + api_key: Annotated[str | None, Depends(get_optional_user_store_api_key)], ): if api_key is None: return {"has_api_key": False, "is_valid": False} try: - is_valid = await store_service.check_api_key(api_key) + is_valid = await get_store_service().check_api_key(api_key) except Exception as e: - raise HTTPException(status_code=400, detail=str(e)) + raise HTTPException(status_code=400, detail=str(e)) from e return {"has_api_key": api_key is not None, "is_valid": is_valid} @@ -77,50 +65,46 @@ async def check_if_store_has_api_key( @router.post("/components/", response_model=CreateComponentResponse, status_code=201) async def share_component( component: StoreComponentCreate, - store_service: StoreService = Depends(get_store_service), - store_api_key: str = Depends(get_user_store_api_key), -): + store_api_key: Annotated[str, Depends(get_user_store_api_key)], +) -> CreateComponentResponse: try: - await check_langflow_version(component) - result = await store_service.upload(store_api_key, component) - return result + await asyncio.to_thread(check_langflow_version, component) + return await get_store_service().upload(store_api_key, component) except Exception as exc: - raise HTTPException(status_code=400, detail=str(exc)) + raise HTTPException(status_code=400, detail=str(exc)) from exc -@router.patch("/components/{component_id}", response_model=CreateComponentResponse, status_code=201) +@router.patch("/components/{component_id}", status_code=201) async def update_shared_component( component_id: UUID, component: StoreComponentCreate, - store_service: StoreService = Depends(get_store_service), - store_api_key: str = Depends(get_user_store_api_key), -): + store_api_key: Annotated[str, Depends(get_user_store_api_key)], +) -> CreateComponentResponse: try: - await check_langflow_version(component) - result = await store_service.update(store_api_key, component_id, component) - return result + await asyncio.to_thread(check_langflow_version, component) + return await get_store_service().update(store_api_key, component_id, component) except Exception as exc: - raise HTTPException(status_code=400, detail=str(exc)) + raise HTTPException(status_code=400, detail=str(exc)) from exc -@router.get("/components/", response_model=ListComponentResponseModel) +@router.get("/components/") async def get_components( - component_id: Annotated[Optional[str], Query()] = None, - search: Annotated[Optional[str], Query()] = None, - private: 
Annotated[Optional[bool], Query()] = None, - is_component: Annotated[Optional[bool], Query()] = None, - tags: Annotated[Optional[list[str]], Query()] = None, - sort: Annotated[Union[list[str], None], Query()] = None, + *, + component_id: Annotated[str | None, Query()] = None, + search: Annotated[str | None, Query()] = None, + private: Annotated[bool | None, Query()] = None, + is_component: Annotated[bool | None, Query()] = None, + tags: Annotated[list[str] | None, Query()] = None, + sort: Annotated[list[str] | None, Query()] = None, liked: Annotated[bool, Query()] = False, filter_by_user: Annotated[bool, Query()] = False, - fields: Annotated[Optional[list[str]], Query()] = None, + fields: Annotated[list[str] | None, Query()] = None, page: int = 1, limit: int = 10, - store_service: StoreService = Depends(get_store_service), - store_api_key: Optional[str] = Depends(get_optional_user_store_api_key), -): + store_api_key: Annotated[str | None, Depends(get_optional_user_store_api_key)], +) -> ListComponentResponseModel: try: - return await store_service.get_list_component_response_model( + return await get_store_service().get_list_component_response_model( component_id=component_id, search=search, private=private, @@ -134,7 +118,7 @@ async def get_components( limit=limit, store_api_key=store_api_key, ) - except CustomException as exc: + except CustomError as exc: raise HTTPException(status_code=exc.status_code, detail=str(exc)) from exc except Exception as exc: raise HTTPException(status_code=500, detail=str(exc)) from exc @@ -143,12 +127,11 @@ async def get_components( @router.get("/components/{component_id}", response_model=DownloadComponentResponse) async def download_component( component_id: UUID, - store_service: StoreService = Depends(get_store_service), - store_api_key: str = Depends(get_user_store_api_key), -): + store_api_key: Annotated[str, Depends(get_user_store_api_key)], +) -> DownloadComponentResponse: try: - component = await store_service.download(store_api_key, component_id) - except CustomException as exc: + component = await get_store_service().download(store_api_key, component_id) + except CustomError as exc: raise HTTPException(status_code=400, detail=str(exc)) from exc except Exception as exc: raise HTTPException(status_code=500, detail=str(exc)) from exc @@ -159,43 +142,40 @@ async def download_component( return component -@router.get("/tags", response_model=List[TagResponse]) -async def get_tags( - store_service: StoreService = Depends(get_store_service), -): +@router.get("/tags", response_model=list[TagResponse]) +async def get_tags(): try: - return await store_service.get_tags() - except CustomException as exc: + return await get_store_service().get_tags() + except CustomError as exc: raise HTTPException(status_code=400, detail=str(exc)) from exc except Exception as exc: - raise HTTPException(status_code=500, detail=str(exc)) + raise HTTPException(status_code=500, detail=str(exc)) from exc -@router.get("/users/likes", response_model=List[UsersLikesResponse]) +@router.get("/users/likes", response_model=list[UsersLikesResponse]) async def get_list_of_components_liked_by_user( - store_service: StoreService = Depends(get_store_service), - store_api_key: str = Depends(get_user_store_api_key), + store_api_key: Annotated[str, Depends(get_user_store_api_key)], ): try: - return await store_service.get_user_likes(store_api_key) - except CustomException as exc: + return await get_store_service().get_user_likes(store_api_key) + except CustomError as exc: raise 
HTTPException(status_code=400, detail=str(exc)) from exc except Exception as exc: - raise HTTPException(status_code=500, detail=str(exc)) + raise HTTPException(status_code=500, detail=str(exc)) from exc -@router.post("/users/likes/{component_id}", response_model=UsersLikesResponse) +@router.post("/users/likes/{component_id}") async def like_component( component_id: UUID, - store_service: StoreService = Depends(get_store_service), - store_api_key: str = Depends(get_user_store_api_key), -): + store_api_key: Annotated[str, Depends(get_user_store_api_key)], +) -> UsersLikesResponse: try: + store_service = get_store_service() result = await store_service.like_component(store_api_key, str(component_id)) likes_count = await store_service.get_component_likes_count(str(component_id), store_api_key) return UsersLikesResponse(likes_count=likes_count, liked_by_user=result) - except CustomException as exc: + except CustomError as exc: raise HTTPException(status_code=exc.status_code, detail=str(exc)) from exc except Exception as exc: - raise HTTPException(status_code=500, detail=str(exc)) + raise HTTPException(status_code=500, detail=str(exc)) from exc diff --git a/src/backend/base/langflow/api/v1/users.py b/src/backend/base/langflow/api/v1/users.py index 56f8f443ff76..a2fcbc42c1b0 100644 --- a/src/backend/base/langflow/api/v1/users.py +++ b/src/backend/base/langflow/api/v1/users.py @@ -1,39 +1,37 @@ +from typing import Annotated from uuid import UUID from fastapi import APIRouter, Depends, HTTPException from sqlalchemy import func from sqlalchemy.exc import IntegrityError -from sqlmodel import Session, select +from sqlmodel import select from sqlmodel.sql.expression import SelectOfScalar +from langflow.api.utils import CurrentActiveUser, DbSession from langflow.api.v1.schemas import UsersResponse from langflow.services.auth.utils import ( get_current_active_superuser, - get_current_active_user, get_password_hash, verify_password, ) from langflow.services.database.models.folder.utils import create_default_folder_if_it_doesnt_exist from langflow.services.database.models.user import User, UserCreate, UserRead, UserUpdate from langflow.services.database.models.user.crud import get_user_by_id, update_user -from langflow.services.deps import get_session, get_settings_service +from langflow.services.deps import get_settings_service router = APIRouter(tags=["Users"], prefix="/users") @router.post("/", response_model=UserRead, status_code=201) -def add_user( +async def add_user( user: UserCreate, - session: Session = Depends(get_session), - settings_service=Depends(get_settings_service), + session: DbSession, ) -> User: - """ - Add a new user to the database. - """ + """Add a new user to the database.""" new_user = User.model_validate(user, from_attributes=True) try: new_user.password = get_password_hash(user.password) - new_user.is_active = settings_service.auth_settings.NEW_USER_IS_ACTIVE + new_user.is_active = get_settings_service().auth_settings.NEW_USER_IS_ACTIVE session.add(new_user) session.commit() session.refresh(new_user) @@ -48,49 +46,42 @@ def add_user( @router.get("/whoami", response_model=UserRead) -def read_current_user( - current_user: User = Depends(get_current_active_user), +async def read_current_user( + current_user: CurrentActiveUser, ) -> User: - """ - Retrieve the current user's data. 
- """ + """Retrieve the current user's data.""" return current_user -@router.get("/", response_model=UsersResponse) -def read_all_users( +@router.get("/", dependencies=[Depends(get_current_active_superuser)]) +async def read_all_users( + *, skip: int = 0, limit: int = 10, - _: Session = Depends(get_current_active_superuser), - session: Session = Depends(get_session), + session: DbSession, ) -> UsersResponse: - """ - Retrieve a list of users from the database with pagination. - """ + """Retrieve a list of users from the database with pagination.""" query: SelectOfScalar = select(User).offset(skip).limit(limit) users = session.exec(query).fetchall() - count_query = select(func.count()).select_from(User) # type: ignore + count_query = select(func.count()).select_from(User) total_count = session.exec(count_query).first() return UsersResponse( - total_count=total_count, # type: ignore + total_count=total_count, users=[UserRead(**user.model_dump()) for user in users], ) @router.patch("/{user_id}", response_model=UserRead) -def patch_user( +async def patch_user( user_id: UUID, user_update: UserUpdate, - user: User = Depends(get_current_active_user), - session: Session = Depends(get_session), + user: CurrentActiveUser, + session: DbSession, ) -> User: - """ - Update an existing user's data. - """ - - update_password = user_update.password is not None and user_update.password != "" + """Update an existing user's data.""" + update_password = bool(user_update.password) if not user.is_superuser and user_update.is_superuser: raise HTTPException(status_code=403, detail="Permission denied") @@ -106,20 +97,17 @@ def patch_user( if not update_password: user_update.password = user_db.password return update_user(user_db, user_update, session) - else: - raise HTTPException(status_code=404, detail="User not found") + raise HTTPException(status_code=404, detail="User not found") @router.patch("/{user_id}/reset-password", response_model=UserRead) -def reset_password( +async def reset_password( user_id: UUID, user_update: UserUpdate, - user: User = Depends(get_current_active_user), - session: Session = Depends(get_session), + user: CurrentActiveUser, + session: DbSession, ) -> User: - """ - Reset a user's password. - """ + """Reset a user's password.""" if user_id != user.id: raise HTTPException(status_code=400, detail="You can't change another user's password") @@ -135,18 +123,16 @@ def reset_password( return user -@router.delete("/{user_id}", response_model=dict) -def delete_user( +@router.delete("/{user_id}") +async def delete_user( user_id: UUID, - current_user: User = Depends(get_current_active_superuser), - session: Session = Depends(get_session), + current_user: Annotated[User, Depends(get_current_active_superuser)], + session: DbSession, ) -> dict: - """ - Delete a user from the database. 
- """ + """Delete a user from the database.""" if current_user.id == user_id: raise HTTPException(status_code=400, detail="You can't delete your own user account") - elif not current_user.is_superuser: + if not current_user.is_superuser: raise HTTPException(status_code=403, detail="Permission denied") user_db = session.exec(select(User).where(User.id == user_id)).first() diff --git a/src/backend/base/langflow/api/v1/validate.py b/src/backend/base/langflow/api/v1/validate.py index bd36d95f5890..06617152d71e 100644 --- a/src/backend/base/langflow/api/v1/validate.py +++ b/src/backend/base/langflow/api/v1/validate.py @@ -9,8 +9,8 @@ router = APIRouter(prefix="/validate", tags=["Validate"]) -@router.post("/code", status_code=200, response_model=CodeValidationResponse) -def post_validate_code(code: Code): +@router.post("/code", status_code=200) +async def post_validate_code(code: Code) -> CodeValidationResponse: try: errors = validate_code(code.code) return CodeValidationResponse( @@ -18,11 +18,12 @@ def post_validate_code(code: Code): function=errors.get("function", {}), ) except Exception as e: - return HTTPException(status_code=500, detail=str(e)) + logger.opt(exception=True).debug("Error validating code") + raise HTTPException(status_code=500, detail=str(e)) from e -@router.post("/prompt", status_code=200, response_model=PromptValidationResponse) -def post_validate_prompt(prompt_request: ValidatePromptRequest): +@router.post("/prompt", status_code=200) +async def post_validate_prompt(prompt_request: ValidatePromptRequest) -> PromptValidationResponse: try: if not prompt_request.frontend_node: return PromptValidationResponse( @@ -43,5 +44,4 @@ def post_validate_prompt(prompt_request: ValidatePromptRequest): frontend_node=prompt_request.frontend_node, ) except Exception as e: - logger.exception(e) raise HTTPException(status_code=500, detail=str(e)) from e diff --git a/src/backend/base/langflow/api/v1/variable.py b/src/backend/base/langflow/api/v1/variable.py index d8fe33957035..5b3e3e6e839d 100644 --- a/src/backend/base/langflow/api/v1/variable.py +++ b/src/backend/base/langflow/api/v1/variable.py @@ -1,42 +1,38 @@ from uuid import UUID -from fastapi import APIRouter, Depends, HTTPException +from fastapi import APIRouter, HTTPException from sqlalchemy.exc import NoResultFound -from sqlmodel import Session -from langflow.services.auth.utils import get_current_active_user -from langflow.services.database.models.user.model import User +from langflow.api.utils import CurrentActiveUser, DbSession from langflow.services.database.models.variable import VariableCreate, VariableRead, VariableUpdate -from langflow.services.deps import get_session, get_settings_service, get_variable_service -from langflow.services.variable.base import VariableService -from langflow.services.variable.service import GENERIC_TYPE, DatabaseVariableService +from langflow.services.deps import get_variable_service +from langflow.services.variable.constants import GENERIC_TYPE +from langflow.services.variable.service import DatabaseVariableService router = APIRouter(prefix="/variables", tags=["Variables"]) @router.post("/", response_model=VariableRead, status_code=201) -def create_variable( +async def create_variable( *, - session: Session = Depends(get_session), + session: DbSession, variable: VariableCreate, - current_user: User = Depends(get_current_active_user), - settings_service=Depends(get_settings_service), - variable_service: DatabaseVariableService = Depends(get_variable_service), + current_user: CurrentActiveUser, ): 
"""Create a new variable.""" - try: - if not variable.name and not variable.value: - raise HTTPException(status_code=400, detail="Variable name and value cannot be empty") - - if not variable.name: - raise HTTPException(status_code=400, detail="Variable name cannot be empty") + variable_service = get_variable_service() + if not variable.name and not variable.value: + raise HTTPException(status_code=400, detail="Variable name and value cannot be empty") - if not variable.value: - raise HTTPException(status_code=400, detail="Variable value cannot be empty") + if not variable.name: + raise HTTPException(status_code=400, detail="Variable name cannot be empty") - if variable.name in variable_service.list_variables(user_id=current_user.id, session=session): - raise HTTPException(status_code=400, detail="Variable name already exists") + if not variable.value: + raise HTTPException(status_code=400, detail="Variable value cannot be empty") + if variable.name in variable_service.list_variables(user_id=current_user.id, session=session): + raise HTTPException(status_code=400, detail="Variable name already exists") + try: return variable_service.create_variable( user_id=current_user.id, name=variable.name, @@ -47,35 +43,40 @@ def create_variable( ) except Exception as e: if isinstance(e, HTTPException): - raise e + raise raise HTTPException(status_code=500, detail=str(e)) from e @router.get("/", response_model=list[VariableRead], status_code=200) -def read_variables( +async def read_variables( *, - session: Session = Depends(get_session), - current_user: User = Depends(get_current_active_user), - variable_service: DatabaseVariableService = Depends(get_variable_service), + session: DbSession, + current_user: CurrentActiveUser, ): """Read all variables.""" + variable_service = get_variable_service() + if not isinstance(variable_service, DatabaseVariableService): + msg = "Variable service is not an instance of DatabaseVariableService" + raise TypeError(msg) try: return variable_service.get_all(user_id=current_user.id, session=session) - except Exception as e: raise HTTPException(status_code=500, detail=str(e)) from e @router.patch("/{variable_id}", response_model=VariableRead, status_code=200) -def update_variable( +async def update_variable( *, - session: Session = Depends(get_session), + session: DbSession, variable_id: UUID, variable: VariableUpdate, - current_user: User = Depends(get_current_active_user), - variable_service: DatabaseVariableService = Depends(get_variable_service), + current_user: CurrentActiveUser, ): """Update a variable.""" + variable_service = get_variable_service() + if not isinstance(variable_service, DatabaseVariableService): + msg = "Variable service is not an instance of DatabaseVariableService" + raise TypeError(msg) try: return variable_service.update_variable_fields( user_id=current_user.id, @@ -83,22 +84,22 @@ def update_variable( variable=variable, session=session, ) - except NoResultFound: - raise HTTPException(status_code=404, detail="Variable not found") + except NoResultFound as e: + raise HTTPException(status_code=404, detail="Variable not found") from e except Exception as e: raise HTTPException(status_code=500, detail=str(e)) from e @router.delete("/{variable_id}", status_code=204) -def delete_variable( +async def delete_variable( *, - session: Session = Depends(get_session), + session: DbSession, variable_id: UUID, - current_user: User = Depends(get_current_active_user), - variable_service: VariableService = Depends(get_variable_service), -): + current_user: 
CurrentActiveUser, +) -> None: """Delete a variable.""" + variable_service = get_variable_service() try: variable_service.delete_variable_by_id(user_id=current_user.id, variable_id=variable_id, session=session) except Exception as e: diff --git a/src/backend/base/langflow/base/agents/agent.py b/src/backend/base/langflow/base/agents/agent.py index 834ec376d51d..77ffe1980e60 100644 --- a/src/backend/base/langflow/base/agents/agent.py +++ b/src/backend/base/langflow/base/agents/agent.py @@ -1,44 +1,68 @@ +import asyncio from abc import abstractmethod -from typing import List, Optional, Union, cast +from typing import TYPE_CHECKING, cast from langchain.agents import AgentExecutor, BaseMultiActionAgent, BaseSingleActionAgent from langchain.agents.agent import RunnableAgent -from langchain_core.messages import BaseMessage from langchain_core.runnables import Runnable from langflow.base.agents.callback import AgentAsyncHandler +from langflow.base.agents.events import ExceptionWithMessageError, process_agent_events from langflow.base.agents.utils import data_to_messages from langflow.custom import Component -from langflow.field_typing import Text -from langflow.inputs.inputs import InputTypes +from langflow.custom.custom_component.component import _get_component_toolkit +from langflow.field_typing import Tool +from langflow.inputs.inputs import InputTypes, MultilineInput from langflow.io import BoolInput, HandleInput, IntInput, MessageTextInput +from langflow.memory import delete_message from langflow.schema import Data +from langflow.schema.content_block import ContentBlock +from langflow.schema.log import SendMessageFunctionType from langflow.schema.message import Message from langflow.template import Output from langflow.utils.constants import MESSAGE_SENDER_AI +if TYPE_CHECKING: + from langchain_core.messages import BaseMessage + + +DEFAULT_TOOLS_DESCRIPTION = "A helpful assistant with access to the following tools:" +DEFAULT_AGENT_NAME = "Agent ({tools_names})" + class LCAgentComponent(Component): trace_type = "agent" - _base_inputs: List[InputTypes] = [ - MessageTextInput(name="input_value", display_name="Input"), + _base_inputs: list[InputTypes] = [ + MessageTextInput( + name="input_value", + display_name="Input", + info="The input provided by the user for the agent to process.", + tool_mode=True, + ), BoolInput( name="handle_parsing_errors", display_name="Handle Parse Errors", value=True, advanced=True, + info="Should the Agent fix errors when reading user input for better processing?", ), - BoolInput( - name="verbose", - display_name="Verbose", - value=True, - advanced=True, - ), + BoolInput(name="verbose", display_name="Verbose", value=True, advanced=True), IntInput( name="max_iterations", display_name="Max Iterations", value=15, advanced=True, + info="The maximum number of attempts the agent can make to complete its task before it stops.", + ), + MultilineInput( + name="agent_description", + display_name="Agent Description", + info=( + "The description of the agent. This is only used when in Tool Mode. " + f"Defaults to '{DEFAULT_TOOLS_DESCRIPTION}' and tools are added dynamically." 
+ ), + advanced=True, + value=DEFAULT_TOOLS_DESCRIPTION, ), ] @@ -50,29 +74,27 @@ class LCAgentComponent(Component): @abstractmethod def build_agent(self) -> AgentExecutor: """Create the agent.""" - pass async def message_response(self) -> Message: """Run the agent and return the response.""" agent = self.build_agent() - result = await self.run_agent(agent=agent) + message = await self.run_agent(agent=agent) - if isinstance(result, list): - result = "\n".join([result_dict["text"] for result_dict in result]) - message = Message(text=result, sender=MESSAGE_SENDER_AI) self.status = message return message - def _validate_outputs(self): + def _validate_outputs(self) -> None: required_output_methods = ["build_agent"] output_names = [output.name for output in self.outputs] for method_name in required_output_methods: if method_name not in output_names: - raise ValueError(f"Output with name '{method_name}' must be defined.") - elif not hasattr(self, method_name): - raise ValueError(f"Method '{method_name}' must be defined.") + msg = f"Output with name '{method_name}' must be defined." + raise ValueError(msg) + if not hasattr(self, method_name): + msg = f"Method '{method_name}' must be defined." + raise ValueError(msg) - def get_agent_kwargs(self, flatten: bool = False) -> dict: + def get_agent_kwargs(self, *, flatten: bool = False) -> dict: base = { "handle_parsing_errors": self.handle_parsing_errors, "verbose": self.verbose, @@ -89,33 +111,82 @@ def get_agent_kwargs(self, flatten: bool = False) -> dict: } return {**base, "agent_executor_kwargs": agent_kwargs} - def get_chat_history_data(self) -> Optional[List[Data]]: + def get_chat_history_data(self) -> list[Data] | None: # might be overridden in subclasses return None - async def run_agent(self, agent: AgentExecutor) -> Text: + async def run_agent( + self, + agent: Runnable | BaseSingleActionAgent | BaseMultiActionAgent | AgentExecutor, + ) -> Message: + if isinstance(agent, AgentExecutor): + runnable = agent + else: + if not self.tools: + msg = "Tools are required to run the agent." 
+ raise ValueError(msg) + runnable = AgentExecutor.from_agent_and_tools( + agent=agent, + tools=self.tools, + handle_parsing_errors=self.handle_parsing_errors, + verbose=self.verbose, + max_iterations=self.max_iterations, + ) input_dict: dict[str, str | list[BaseMessage]] = {"input": self.input_value} - self.chat_history = self.get_chat_history_data() if self.chat_history: input_dict["chat_history"] = data_to_messages(self.chat_history) - result = await agent.ainvoke( - input_dict, config={"callbacks": [AgentAsyncHandler(self.log)] + self.get_langchain_callbacks()} + + if hasattr(self, "graph"): + session_id = self.graph.session_id + elif hasattr(self, "_session_id"): + session_id = self._session_id + else: + session_id = None + + agent_message = Message( + sender=MESSAGE_SENDER_AI, + sender_name=self.display_name or "Agent", + properties={"icon": "Bot", "state": "partial"}, + content_blocks=[ContentBlock(title="Agent Steps", contents=[])], + session_id=session_id, ) + try: + result = await process_agent_events( + runnable.astream_events( + input_dict, + config={"callbacks": [AgentAsyncHandler(self.log), *self.get_langchain_callbacks()]}, + version="v2", + ), + agent_message, + cast(SendMessageFunctionType, self.send_message), + ) + except ExceptionWithMessageError as e: + msg_id = e.agent_message.id + await asyncio.to_thread(delete_message, id_=msg_id) + self._send_message_event(e.agent_message, category="remove_message") + raise + except Exception: + raise + self.status = result - if "output" not in result: - raise ValueError("Output key not found in result. Tried 'output'.") + return result - return cast(str, result.get("output")) + @abstractmethod + def create_agent_runnable(self) -> Runnable: + """Create the agent.""" class LCToolsAgentComponent(LCAgentComponent): - _base_inputs = LCAgentComponent._base_inputs + [ + _base_inputs = [ HandleInput( name="tools", display_name="Tools", - input_types=["Tool", "BaseTool"], + input_types=["Tool", "BaseTool", "StructuredTool"], is_list=True, + required=False, + info="These are the tools that the agent can use to help with tasks.", ), + *LCAgentComponent._base_inputs, ] def build_agent(self) -> AgentExecutor: @@ -126,34 +197,31 @@ def build_agent(self) -> AgentExecutor: **self.get_agent_kwargs(flatten=True), ) - async def run_agent( - self, - agent: Union[Runnable, BaseSingleActionAgent, BaseMultiActionAgent, AgentExecutor], - ) -> Text: - if isinstance(agent, AgentExecutor): - runnable = agent - else: - runnable = AgentExecutor.from_agent_and_tools( - agent=agent, # type: ignore - tools=self.tools, - handle_parsing_errors=self.handle_parsing_errors, - verbose=self.verbose, - max_iterations=self.max_iterations, - ) - input_dict: dict[str, str | list[BaseMessage]] = {"input": self.input_value} - if self.chat_history: - input_dict["chat_history"] = data_to_messages(self.chat_history) - - result = await runnable.ainvoke( - input_dict, config={"callbacks": [AgentAsyncHandler(self.log)] + self.get_langchain_callbacks()} - ) - self.status = result - if "output" not in result: - raise ValueError("Output key not found in result. 
Tried 'output'.") - - return cast(str, result.get("output")) - @abstractmethod def create_agent_runnable(self) -> Runnable: """Create the agent.""" - pass + + def get_tool_name(self) -> str: + return self.display_name or "Agent" + + def get_tool_description(self) -> str: + return self.agent_description or DEFAULT_TOOLS_DESCRIPTION + + def _build_tools_names(self): + tools_names = "" + if self.tools: + tools_names = ", ".join([tool.name for tool in self.tools]) + return tools_names + + def to_toolkit(self) -> list[Tool]: + component_toolkit = _get_component_toolkit() + tools_names = self._build_tools_names() + agent_description = self.get_tool_description() + # Check if tools_description is the default value + if agent_description == DEFAULT_TOOLS_DESCRIPTION: + description = f"{agent_description}{tools_names}" + else: + description = agent_description + return component_toolkit(component=self).get_tools( + tool_name=self.get_tool_name(), tool_description=description, callbacks=self.get_langchain_callbacks() + ) diff --git a/src/backend/base/langflow/base/agents/callback.py b/src/backend/base/langflow/base/agents/callback.py index 5d7260b6cf74..1ff6d2c0424e 100644 --- a/src/backend/base/langflow/base/agents/callback.py +++ b/src/backend/base/langflow/base/agents/callback.py @@ -1,4 +1,4 @@ -from typing import Any, Dict, List +from typing import Any from uuid import UUID from langchain.callbacks.base import AsyncCallbackHandler @@ -13,16 +13,43 @@ class AgentAsyncHandler(AsyncCallbackHandler): def __init__(self, log_function: LogFunctionType | None = None): self.log_function = log_function + async def on_chain_start( + self, + serialized: dict[str, Any], + inputs: dict[str, Any], + *, + run_id: UUID, + parent_run_id: UUID | None = None, + tags: list[str] | None = None, + metadata: dict[str, Any] | None = None, + **kwargs: Any, + ) -> None: + if self.log_function is None: + return + self.log_function( + { + "type": "chain_start", + "serialized": serialized, + "inputs": inputs, + "run_id": run_id, + "parent_run_id": parent_run_id, + "tags": tags, + "metadata": metadata, + **kwargs, + }, + name="Chain Start", + ) + async def on_tool_start( self, - serialized: Dict[str, Any], + serialized: dict[str, Any], input_str: str, *, run_id: UUID, parent_run_id: UUID | None = None, - tags: List[str] | None = None, - metadata: Dict[str, Any] | None = None, - inputs: Dict[str, Any] | None = None, + tags: list[str] | None = None, + metadata: dict[str, Any] | None = None, + inputs: dict[str, Any] | None = None, **kwargs: Any, ) -> None: if self.log_function is None: @@ -62,7 +89,7 @@ async def on_agent_action( *, run_id: UUID, parent_run_id: UUID | None = None, - tags: List[str] | None = None, + tags: list[str] | None = None, **kwargs: Any, ) -> None: if self.log_function is None: @@ -85,7 +112,7 @@ async def on_agent_finish( *, run_id: UUID, parent_run_id: UUID | None = None, - tags: List[str] | None = None, + tags: list[str] | None = None, **kwargs: Any, ) -> None: if self.log_function is None: diff --git a/src/backend/base/langflow/base/agents/context.py b/src/backend/base/langflow/base/agents/context.py new file mode 100644 index 000000000000..8e4961ecc579 --- /dev/null +++ b/src/backend/base/langflow/base/agents/context.py @@ -0,0 +1,109 @@ +from datetime import datetime, timezone +from typing import Any + +from langchain_core.language_models import BaseLanguageModel, BaseLLM +from langchain_core.language_models.chat_models import BaseChatModel +from pydantic import BaseModel, Field, field_validator, 
model_serializer
+
+from langflow.field_typing import LanguageModel
+from langflow.schema.data import Data
+
+
+class AgentContext(BaseModel):
+    tools: dict[str, Any]
+    llm: Any
+    context: str = ""
+    iteration: int = 0
+    max_iterations: int = 5
+    thought: str = ""
+    last_action: Any = None
+    last_action_result: Any = None
+    final_answer: Any = ""
+    context_history: list[tuple[str, str, str]] = Field(default_factory=list)
+
+    @model_serializer(mode="plain")
+    def serialize_agent_context(self):
+        serialized_llm = self.llm.to_json() if hasattr(self.llm, "to_json") else str(self.llm)
+        serialized_tools = {k: v.to_json() if hasattr(v, "to_json") else str(v) for k, v in self.tools.items()}
+        return {
+            "tools": serialized_tools,
+            "llm": serialized_llm,
+            "context": self.context,
+            "iteration": self.iteration,
+            "max_iterations": self.max_iterations,
+            "thought": self.thought,
+            "last_action": self.last_action.to_json()
+            if hasattr(self.last_action, "to_json")
+            else str(self.last_action),
+            "action_result": self.last_action_result.to_json()
+            if hasattr(self.last_action_result, "to_json")
+            else str(self.last_action_result),
+            "final_answer": self.final_answer,
+            "context_history": self.context_history,
+        }
+
+    @field_validator("llm", mode="before")
+    @classmethod
+    def validate_llm(cls, v) -> LanguageModel:
+        if not isinstance(v, BaseLLM | BaseChatModel | BaseLanguageModel):
+            msg = "llm must be an instance of LanguageModel"
+            raise TypeError(msg)
+        return v
+
+    def to_data_repr(self):
+        data_objs = []
+        for name, val, time_str in self.context_history:
+            content = val.content if hasattr(val, "content") else val
+            data_objs.append(Data(name=name, value=content, timestamp=time_str))
+
+        sorted_data_objs = sorted(data_objs, key=lambda x: datetime.fromisoformat(x.timestamp), reverse=True)
+
+        sorted_data_objs.append(
+            Data(
+                name="Formatted Context",
+                value=self.get_full_context(),
+            )
+        )
+        return sorted_data_objs
+
+    def _build_tools_context(self):
+        tool_context = ""
+        for tool_name, tool_obj in self.tools.items():
+            tool_context += f"{tool_name}: {tool_obj.description}\n"
+        return tool_context
+
+    def _build_init_context(self):
+        return f"""
+{self.context}
+
+"""
+
+    def model_post_init(self, _context: Any) -> None:
+        if hasattr(self.llm, "bind_tools"):
+            self.llm = self.llm.bind_tools(self.tools.values())
+        if self.context:
+            self.update_context("Initial Context", self.context)
+
+    def update_context(self, key: str, value: str):
+        self.context_history.insert(0, (key, value, datetime.now(tz=timezone.utc).astimezone().isoformat()))
+
+    def _serialize_context_history_tuple(self, context_history_tuple: tuple[str, str, str]) -> str:
+        name, value, _ = context_history_tuple
+        if hasattr(value, "content"):
+            value = value.content
+        elif hasattr(value, "log"):
+            value = value.log
+        return f"{name}: {value}"
+
+    def get_full_context(self) -> str:
+        context_history_reversed = self.context_history[::-1]
+        context_formatted = "\n".join(
+            [
+                self._serialize_context_history_tuple(context_history_tuple)
+                for context_history_tuple in context_history_reversed
+            ]
+        )
+        return f"""
+Context:
+{context_formatted}
+"""
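An editor's aside on the new `AgentContext` above: it accumulates `(name, value, timestamp)` triples and renders them oldest-first via `get_full_context`. A minimal usage sketch, assuming `chat_model` is a real LangChain chat-model instance (the `llm` validator rejects anything else) and `search_tool` is a tool object with a `description` attribute; both names are hypothetical placeholders, not part of this changeset:

    ctx = AgentContext(tools={"search": search_tool}, llm=chat_model, context="You are a helpful agent.")
    # model_post_init binds the tools to the LLM (when supported) and records "Initial Context"
    ctx.update_context("Thought", "I should call the search tool first.")
    print(ctx.get_full_context())  # entries render oldest-first, one "name: value" line each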
diff --git a/src/backend/base/langflow/base/agents/crewai/crew.py b/src/backend/base/langflow/base/agents/crewai/crew.py
index 359b87591fd4..8a78199558a5 100644
--- a/src/backend/base/langflow/base/agents/crewai/crew.py
+++ b/src/backend/base/langflow/base/agents/crewai/crew.py
@@ -1,7 +1,8 @@
-from typing import Callable, List, Tuple, Union, cast
+from collections.abc import Callable
+from typing import cast
 
-from crewai import Agent, Crew, Process, Task  # type: ignore
-from crewai.task import TaskOutput  # type: ignore
+from crewai import Agent, Crew, Process, Task
+from crewai.task import TaskOutput
 from langchain_core.agents import AgentAction, AgentFinish
 
 from langflow.custom import Component
@@ -45,16 +46,14 @@ def get_tasks_and_agents(self) -> tuple[list[Task], list[Agent]]:
         return self.tasks, self.agents
 
     def build_crew(self) -> Crew:
-        raise NotImplementedError("build_crew must be implemented in subclasses")
+        msg = "build_crew must be implemented in subclasses"
+        raise NotImplementedError(msg)
 
     def get_task_callback(
         self,
     ) -> Callable:
-        def task_callback(task_output: TaskOutput):
-            if self._vertex:
-                vertex_id = self._vertex.id
-            else:
-                vertex_id = self.display_name or self.__class__.__name__
+        def task_callback(task_output: TaskOutput) -> None:
+            vertex_id = self._vertex.id if self._vertex else self.display_name or self.__class__.__name__
             self.log(task_output.model_dump(), name=f"Task (Agent: {task_output.agent}) - {vertex_id}")
 
         return task_callback
@@ -62,7 +61,7 @@ def task_callback(task_output: TaskOutput):
     def get_step_callback(
         self,
     ) -> Callable:
-        def step_callback(agent_output: Union[AgentFinish, List[Tuple[AgentAction, str]]]):
+        def step_callback(agent_output: AgentFinish | list[tuple[AgentAction, str]]) -> None:
             _id = self._vertex.id if self._vertex else self.display_name
             if isinstance(agent_output, AgentFinish):
                 messages = agent_output.messages
@@ -79,6 +78,6 @@ def step_callback(agent_output: Union[AgentFinish, List[Tuple[AgentAction, str]]
     async def build_output(self) -> Message:
         crew = self.build_crew()
         result = await crew.kickoff_async()
-        message = Message(text=result, sender=MESSAGE_SENDER_AI)
+        message = Message(text=result.raw, sender=MESSAGE_SENDER_AI)
         self.status = message
         return message
diff --git a/src/backend/base/langflow/base/agents/crewai/tasks.py b/src/backend/base/langflow/base/agents/crewai/tasks.py
index 8e6f6cb3979a..b2f5a5a27aca 100644
--- a/src/backend/base/langflow/base/agents/crewai/tasks.py
+++ b/src/backend/base/langflow/base/agents/crewai/tasks.py
@@ -1,4 +1,4 @@
-from crewai import Task  # type: ignore
+from crewai import Task
 
 
 class SequentialTask(Task):
diff --git a/src/backend/base/langflow/base/agents/default_prompts.py b/src/backend/base/langflow/base/agents/default_prompts.py
index 04e3948130c4..49d184a2c7d6 100644
--- a/src/backend/base/langflow/base/agents/default_prompts.py
+++ b/src/backend/base/langflow/base/agents/default_prompts.py
@@ -20,4 +20,4 @@
 {chat_history}
 
 Question: {input}
-{agent_scratchpad}"""
+{agent_scratchpad}"""  # noqa: E501
diff --git a/src/backend/base/langflow/base/agents/events.py b/src/backend/base/langflow/base/agents/events.py
new file mode 100644
index 000000000000..0c409ab6543d
--- /dev/null
+++ b/src/backend/base/langflow/base/agents/events.py
@@ -0,0 +1,270 @@
+# Add helper functions for each event type
+from collections.abc import AsyncIterator
+from time import perf_counter
+from typing import Any, Protocol
+
+from langchain_core.agents import AgentFinish
+from langchain_core.messages import BaseMessage
+from typing_extensions import TypedDict
+
+from langflow.schema.content_block import ContentBlock
+from langflow.schema.content_types import TextContent, ToolContent
+from langflow.schema.log import SendMessageFunctionType
+from langflow.schema.message import Message
+
+
+class ExceptionWithMessageError(Exception):
+    def __init__(self, agent_message: Message):
+        self.agent_message =
agent_message + super().__init__() + + +class InputDict(TypedDict): + input: str + chat_history: list[BaseMessage] + + +def _build_agent_input_text_content(agent_input_dict: InputDict) -> str: + chat_history = agent_input_dict.get("chat_history", []) + messages = [ + f"**{message.type.upper()}**: {message.content}" + for message in chat_history + if isinstance(message, BaseMessage) and message.content + ] + final_input = agent_input_dict.get("input", "") + if messages and final_input not in messages[-1]: + messages.append(f"**HUMAN**: {final_input}") + return " \n".join(messages) + + +def _calculate_duration(start_time: float) -> int: + """Calculate duration in milliseconds from start time to now.""" + # Handle the calculation + current_time = perf_counter() + if isinstance(start_time, int): + # If we got an integer, treat it as milliseconds + duration = current_time - (start_time / 1000) + result = int(duration * 1000) + else: + # If we got a float, treat it as perf_counter time + result = int((current_time - start_time) * 1000) + + return result + + +def handle_on_chain_start( + event: dict[str, Any], agent_message: Message, send_message_method: SendMessageFunctionType, start_time: float +) -> tuple[Message, float]: + # Create content blocks if they don't exist + if not agent_message.content_blocks: + agent_message.content_blocks = [ContentBlock(title="Agent Steps", contents=[])] + + if event["data"].get("input"): + input_data = event["data"].get("input") + if isinstance(input_data, dict) and "input" in input_data: + # Cast the input_data to InputDict + input_dict: InputDict = { + "input": str(input_data.get("input", "")), + "chat_history": input_data.get("chat_history", []), + } + text_content = TextContent( + type="text", + text=_build_agent_input_text_content(input_dict), + duration=_calculate_duration(start_time), + header={"title": "Input", "icon": "MessageSquare"}, + ) + agent_message.content_blocks[0].contents.append(text_content) + agent_message = send_message_method(message=agent_message) + start_time = perf_counter() + return agent_message, start_time + + +def handle_on_chain_end( + event: dict[str, Any], agent_message: Message, send_message_method: SendMessageFunctionType, start_time: float +) -> tuple[Message, float]: + data_output = event["data"].get("output") + if data_output and isinstance(data_output, AgentFinish) and data_output.return_values.get("output"): + agent_message.text = data_output.return_values.get("output") + agent_message.properties.state = "complete" + # Add duration to the last content if it exists + if agent_message.content_blocks: + duration = _calculate_duration(start_time) + text_content = TextContent( + type="text", + text=agent_message.text, + duration=duration, + header={"title": "Output", "icon": "MessageSquare"}, + ) + agent_message.content_blocks[0].contents.append(text_content) + agent_message = send_message_method(message=agent_message) + start_time = perf_counter() + return agent_message, start_time + + +def handle_on_tool_start( + event: dict[str, Any], + agent_message: Message, + tool_blocks_map: dict[str, ToolContent], + send_message_method: SendMessageFunctionType, + start_time: float, +) -> tuple[Message, float]: + tool_name = event["name"] + tool_input = event["data"].get("input") + run_id = event.get("run_id", "") + tool_key = f"{tool_name}_{run_id}" + + # Create content blocks if they don't exist + if not agent_message.content_blocks: + agent_message.content_blocks = [ContentBlock(title="Agent Steps", contents=[])] + + duration = 
_calculate_duration(start_time) + new_start_time = perf_counter() # Get new start time for next operation + + # Create new tool content with the input exactly as received + tool_content = ToolContent( + type="tool_use", + name=tool_name, + input=tool_input, + output=None, + error=None, + header={"title": f"Accessing **{tool_name}**", "icon": "Hammer"}, + duration=duration, # Store the actual duration + ) + + # Store in map and append to message + tool_blocks_map[tool_key] = tool_content + agent_message.content_blocks[0].contents.append(tool_content) + + agent_message = send_message_method(message=agent_message) + tool_blocks_map[tool_key] = agent_message.content_blocks[0].contents[-1] + return agent_message, new_start_time + + +def handle_on_tool_end( + event: dict[str, Any], + agent_message: Message, + tool_blocks_map: dict[str, ToolContent], + send_message_method: SendMessageFunctionType, + start_time: float, +) -> tuple[Message, float]: + run_id = event.get("run_id", "") + tool_name = event.get("name", "") + tool_key = f"{tool_name}_{run_id}" + tool_content = tool_blocks_map.get(tool_key) + + if tool_content and isinstance(tool_content, ToolContent): + tool_content.output = event["data"].get("output") + duration = _calculate_duration(start_time) + tool_content.duration = duration + tool_content.header = {"title": f"Executed **{tool_content.name}**", "icon": "Hammer"} + + agent_message = send_message_method(message=agent_message) + new_start_time = perf_counter() # Get new start time for next operation + return agent_message, new_start_time + return agent_message, start_time + + +def handle_on_tool_error( + event: dict[str, Any], + agent_message: Message, + tool_blocks_map: dict[str, ToolContent], + send_message_method: SendMessageFunctionType, + start_time: float, +) -> tuple[Message, float]: + run_id = event.get("run_id", "") + tool_name = event.get("name", "") + tool_key = f"{tool_name}_{run_id}" + tool_content = tool_blocks_map.get(tool_key) + + if tool_content and isinstance(tool_content, ToolContent): + tool_content.error = event["data"].get("error", "Unknown error") + tool_content.duration = _calculate_duration(start_time) + tool_content.header = {"title": f"Error using **{tool_content.name}**", "icon": "Hammer"} + agent_message = send_message_method(message=agent_message) + start_time = perf_counter() + return agent_message, start_time + + +def handle_on_chain_stream( + event: dict[str, Any], + agent_message: Message, + send_message_method: SendMessageFunctionType, + start_time: float, +) -> tuple[Message, float]: + data_chunk = event["data"].get("chunk", {}) + if isinstance(data_chunk, dict) and data_chunk.get("output"): + agent_message.text = data_chunk.get("output") + agent_message.properties.state = "complete" + agent_message = send_message_method(message=agent_message) + start_time = perf_counter() + return agent_message, start_time + + +class ToolEventHandler(Protocol): + def __call__( + self, + event: dict[str, Any], + agent_message: Message, + tool_blocks_map: dict[str, ContentBlock], + send_message_method: SendMessageFunctionType, + start_time: float, + ) -> tuple[Message, float]: ... + + +class ChainEventHandler(Protocol): + def __call__( + self, + event: dict[str, Any], + agent_message: Message, + send_message_method: SendMessageFunctionType, + start_time: float, + ) -> tuple[Message, float]: ... 
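A reviewer's note on the dispatch design: the two `Protocol` classes above describe the handler signatures, and `process_agent_events` (defined below) routes each streamed event through the mappings that follow. A rough sketch of the call pattern, mirroring the invocation added in `LCAgentComponent.run_agent` earlier in this diff; `runnable`, `agent_message`, and `send` are placeholder names for the AgentExecutor, the partially built Message, and the send_message callback:

    # Sketch only -- illustrates the intended flow, not changeset content.
    events = runnable.astream_events(input_dict, version="v2")
    final_message = await process_agent_events(events, agent_message, send)
    # Each event dict carries an "event" key; names found in the
    # TOOL_EVENT_HANDLERS / CHAIN_EVENT_HANDLERS maps below are dispatched
    # to the matching handler defined above.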
+ + +EventHandler = ToolEventHandler | ChainEventHandler + +# Define separate mappings of event types to their respective handler functions +CHAIN_EVENT_HANDLERS: dict[str, ChainEventHandler] = { + "on_chain_start": handle_on_chain_start, + "on_chain_end": handle_on_chain_end, + "on_chain_stream": handle_on_chain_stream, +} + +TOOL_EVENT_HANDLERS: dict[str, ToolEventHandler] = { + "on_tool_start": handle_on_tool_start, + "on_tool_end": handle_on_tool_end, + "on_tool_error": handle_on_tool_error, +} + + +async def process_agent_events( + agent_executor: AsyncIterator[dict[str, Any]], + agent_message: Message, + send_message_method: SendMessageFunctionType, +) -> Message: + """Process agent events and return the final output.""" + if isinstance(agent_message.properties, dict): + agent_message.properties.update({"icon": "Bot", "state": "partial"}) + else: + agent_message.properties.icon = "Bot" + agent_message.properties.state = "partial" + # Store the initial message + agent_message = send_message_method(message=agent_message) + try: + # Create a mapping of run_ids to tool contents + tool_blocks_map: dict[str, ToolContent] = {} + start_time = perf_counter() + async for event in agent_executor: + if event["event"] in TOOL_EVENT_HANDLERS: + tool_handler = TOOL_EVENT_HANDLERS[event["event"]] + agent_message, start_time = tool_handler( + event, agent_message, tool_blocks_map, send_message_method, start_time + ) + elif event["event"] in CHAIN_EVENT_HANDLERS: + chain_handler = CHAIN_EVENT_HANDLERS[event["event"]] + agent_message, start_time = chain_handler(event, agent_message, send_message_method, start_time) + agent_message.properties.state = "complete" + except Exception as e: + raise ExceptionWithMessageError(agent_message) from e + + return Message(**agent_message.model_dump()) diff --git a/src/backend/base/langflow/base/agents/utils.py b/src/backend/base/langflow/base/agents/utils.py index 2651ecb4a267..29c0e52b0209 100644 --- a/src/backend/base/langflow/base/agents/utils.py +++ b/src/backend/base/langflow/base/agents/utils.py @@ -1,4 +1,5 @@ -from typing import Any, Callable, Dict, List, Optional, Sequence, Union +from collections.abc import Callable, Sequence +from typing import Any from langchain.agents import ( create_json_chat_agent, @@ -24,19 +25,18 @@ class AgentSpec(BaseModel): BaseLanguageModel, Sequence[BaseTool], BasePromptTemplate | ChatPromptTemplate, - Optional[Callable[[List[BaseTool]], str]], - Optional[Union[bool, List[str]]], + Callable[[list[BaseTool]], str] | None, + bool | list[str] | None, ], Any, ] - prompt: Optional[Any] = None - fields: List[str] - hub_repo: Optional[str] = None + prompt: Any | None = None + fields: list[str] + hub_repo: str | None = None -def data_to_messages(data: List[Data]) -> List[BaseMessage]: - """ - Convert a list of data to a list of messages. +def data_to_messages(data: list[Data]) -> list[BaseMessage]: + """Convert a list of data to a list of messages. Args: data (List[Data]): The data to convert. 
@@ -51,9 +51,9 @@ def validate_and_create_xml_agent( llm: BaseLanguageModel, tools: Sequence[BaseTool], prompt: BasePromptTemplate, - tools_renderer: Callable[[List[BaseTool]], str] = render_text_description, + tools_renderer: Callable[[list[BaseTool]], str] = render_text_description, *, - stop_sequence: Union[bool, List[str]] = True, + stop_sequence: bool | list[str] = True, ): return create_xml_agent( llm=llm, @@ -68,9 +68,9 @@ def validate_and_create_openai_tools_agent( llm: BaseLanguageModel, tools: Sequence[BaseTool], prompt: ChatPromptTemplate, - tools_renderer: Callable[[List[BaseTool]], str] = render_text_description, + _tools_renderer: Callable[[list[BaseTool]], str] = render_text_description, *, - stop_sequence: Union[bool, List[str]] = True, + _stop_sequence: bool | list[str] = True, ): return create_openai_tools_agent( llm=llm, @@ -83,9 +83,9 @@ def validate_and_create_tool_calling_agent( llm: BaseLanguageModel, tools: Sequence[BaseTool], prompt: ChatPromptTemplate, - tools_renderer: Callable[[List[BaseTool]], str] = render_text_description, + _tools_renderer: Callable[[list[BaseTool]], str] = render_text_description, *, - stop_sequence: Union[bool, List[str]] = True, + _stop_sequence: bool | list[str] = True, ): return create_tool_calling_agent( llm=llm, @@ -98,9 +98,9 @@ def validate_and_create_json_chat_agent( llm: BaseLanguageModel, tools: Sequence[BaseTool], prompt: ChatPromptTemplate, - tools_renderer: Callable[[List[BaseTool]], str] = render_text_description, + tools_renderer: Callable[[list[BaseTool]], str] = render_text_description, *, - stop_sequence: Union[bool, List[str]] = True, + stop_sequence: bool | list[str] = True, ): return create_json_chat_agent( llm=llm, @@ -111,7 +111,7 @@ def validate_and_create_json_chat_agent( ) -AGENTS: Dict[str, AgentSpec] = { +AGENTS: dict[str, AgentSpec] = { "Tool Calling Agent": AgentSpec( func=validate_and_create_tool_calling_agent, prompt=None, diff --git a/src/backend/base/langflow/base/astra_assistants/__init__.py b/src/backend/base/langflow/base/astra_assistants/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/src/backend/base/langflow/base/astra_assistants/util.py b/src/backend/base/langflow/base/astra_assistants/util.py new file mode 100644 index 000000000000..13a79e01d6dc --- /dev/null +++ b/src/backend/base/langflow/base/astra_assistants/util.py @@ -0,0 +1,66 @@ +import importlib +import inspect +import json +import os +import pkgutil +import threading +from json.decoder import JSONDecodeError + +import astra_assistants.tools as astra_assistants_tools +import requests +from astra_assistants import OpenAIWithDefaultKey, patch +from astra_assistants.tools.tool_interface import ToolInterface +from requests.exceptions import RequestException + +from langflow.services.cache.utils import CacheMiss + +client_lock = threading.Lock() +client = None + + +def get_patched_openai_client(shared_component_cache): + os.environ["ASTRA_ASSISTANTS_QUIET"] = "true" + client = shared_component_cache.get("client") + if isinstance(client, CacheMiss): + client = patch(OpenAIWithDefaultKey()) + shared_component_cache.set("client", client) + return client + + +url = "https://raw.githubusercontent.com/BerriAI/litellm/refs/heads/main/model_prices_and_context_window.json" +try: + response = requests.get(url, timeout=10) + response.raise_for_status() + data = json.loads(response.text) +except RequestException: + data = {} +except JSONDecodeError: + data = {} + +# Extract the model names into a Python list 
+litellm_model_names = [model for model in data if model != "sample_spec"] + + +# To store the class names that extend ToolInterface +tool_names = [] +tools_and_names = {} + + +def tools_from_package(your_package) -> None: + # Iterate over all modules in the package + package_name = your_package.__name__ + for module_info in pkgutil.iter_modules(your_package.__path__): + module_name = f"{package_name}.{module_info.name}" + + # Dynamically import the module + module = importlib.import_module(module_name) + + # Iterate over all members of the module + for name, obj in inspect.getmembers(module, inspect.isclass): + # Check if the class is a subclass of ToolInterface and is not ToolInterface itself + if issubclass(obj, ToolInterface) and obj is not ToolInterface: + tool_names.append(name) + tools_and_names[name] = obj + + +tools_from_package(astra_assistants_tools) diff --git a/src/backend/base/langflow/base/chains/model.py b/src/backend/base/langflow/base/chains/model.py index efbe4ea19225..4f9d190f5b35 100644 --- a/src/backend/base/langflow/base/chains/model.py +++ b/src/backend/base/langflow/base/chains/model.py @@ -7,11 +7,13 @@ class LCChainComponent(Component): outputs = [Output(display_name="Text", name="text", method="invoke_chain")] - def _validate_outputs(self): + def _validate_outputs(self) -> None: required_output_methods = ["invoke_chain"] output_names = [output.name for output in self.outputs] for method_name in required_output_methods: if method_name not in output_names: - raise ValueError(f"Output with name '{method_name}' must be defined.") - elif not hasattr(self, method_name): - raise ValueError(f"Method '{method_name}' must be defined.") + msg = f"Output with name '{method_name}' must be defined." + raise ValueError(msg) + if not hasattr(self, method_name): + msg = f"Method '{method_name}' must be defined." + raise ValueError(msg) diff --git a/src/backend/base/langflow/base/constants.py b/src/backend/base/langflow/base/constants.py index 319899847ab1..bb2126d72262 100644 --- a/src/backend/base/langflow/base/constants.py +++ b/src/backend/base/langflow/base/constants.py @@ -1,5 +1,4 @@ -""" -This module contains constants used in the Langflow base module. +"""This module contains constants used in the Langflow base module. Constants: - STREAM_INFO_TEXT: A string representing the information about streaming the response from the model. @@ -11,7 +10,16 @@ STREAM_INFO_TEXT = "Stream the response from the model. Streaming works only in Chat." -NODE_FORMAT_ATTRIBUTES = ["beta", "icon", "display_name", "description", "output_types", "edited"] +NODE_FORMAT_ATTRIBUTES = [ + "beta", + "legacy", + "icon", + "output_types", + "edited", + "metadata", + # remove display_name to prevent overwriting the display_name from the latest template + # "display_name", +] FIELD_FORMAT_ATTRIBUTES = [ diff --git a/src/backend/base/langflow/base/curl/parse.py b/src/backend/base/langflow/base/curl/parse.py index bb58e8fa2f65..93af7abe4226 100644 --- a/src/backend/base/langflow/base/curl/parse.py +++ b/src/backend/base/langflow/base/curl/parse.py @@ -1,41 +1,50 @@ -""" -This file contains a fix for the implementation of the `uncurl` library, which is available at https://github.com/spulec/uncurl.git. +r"""This file contains a fix for the implementation of the `uncurl` library, which is available at https://github.com/spulec/uncurl.git. -The `uncurl` library provides a way to parse and convert cURL commands into Python requests. 
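The `tools_from_package` helper above is a conventional pkgutil-plus-inspect plugin scan. The same pattern in isolation, generalized over any package and base class (the names here are illustrative, and it returns a dict rather than mutating module-level globals):

```python
import importlib
import inspect
import pkgutil

def discover_subclasses(package, base_class) -> dict[str, type]:
    """Collect subclasses of base_class defined in a package's top-level modules."""
    found: dict[str, type] = {}
    for module_info in pkgutil.iter_modules(package.__path__):
        # Import each submodule by its dotted path, then scan its classes.
        module = importlib.import_module(f"{package.__name__}.{module_info.name}")
        for name, obj in inspect.getmembers(module, inspect.isclass):
            if issubclass(obj, base_class) and obj is not base_class:
                found[name] = obj
    return found
```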
However, there are some issues with the original implementation that this file aims to fix. +The `uncurl` library provides a way to parse and convert cURL commands into Python requests. +However, there are some issues with the original implementation that this file aims to fix. -The `parse_context` function in this file takes a cURL command as input and returns a `ParsedContext` object, which contains the parsed information from the cURL command, such as the HTTP method, URL, headers, cookies, etc. +The `parse_context` function in this file takes a cURL command as input and returns a `ParsedContext` object, +which contains the parsed information from the cURL command, such as the HTTP method, URL, headers, cookies, etc. -The `normalize_newlines` function is a helper function that replaces the line continuation character ("\") followed by a newline with a space. +The `normalize_newlines` function is a helper function that replaces the line continuation character ("\") +followed by a newline with a space. """ import re import shlex -from collections import OrderedDict, namedtuple +from collections import OrderedDict from http.cookies import SimpleCookie - -ParsedArgs = namedtuple( - "ParsedArgs", - [ - "command", - "url", - "data", - "data_binary", - "method", - "headers", - "compressed", - "insecure", - "user", - "include", - "silent", - "proxy", - "proxy_user", - "cookies", - ], -) - -ParsedContext = namedtuple("ParsedContext", ["method", "url", "data", "headers", "cookies", "verify", "auth", "proxy"]) +from typing import NamedTuple + + +class ParsedArgs(NamedTuple): + command: str | None + url: str | None + data: str | None + data_binary: str | None + method: str + headers: list[str] + compressed: bool + insecure: bool + user: tuple[str, str] + include: bool + silent: bool + proxy: str | None + proxy_user: str | None + cookies: dict[str, str] + + +class ParsedContext(NamedTuple): + method: str + url: str + data: str | None + headers: dict[str, str] + cookies: dict[str, str] + verify: bool + auth: tuple[str, str] | None + proxy: dict[str, str] | None def normalize_newlines(multiline_text): @@ -44,9 +53,10 @@ def normalize_newlines(multiline_text): def parse_curl_command(curl_command): tokens = shlex.split(normalize_newlines(curl_command)) - tokens = [token for token in tokens if token and token != " "] + tokens = [token for token in tokens if token and token != " "] # noqa: S105 if tokens and "curl" not in tokens[0]: - raise ValueError("Invalid curl command") + msg = "Invalid curl command" + raise ValueError(msg) args_template = { "command": None, "url": None, @@ -68,34 +78,34 @@ def parse_curl_command(curl_command): i = 0 while i < len(tokens): token = tokens[i] - if token == "-X": + if token == "-X": # noqa: S105 i += 1 args["method"] = tokens[i].lower() method_on_curl = tokens[i].lower() - elif token in ("-d", "--data"): + elif token in {"-d", "--data"}: i += 1 args["data"] = tokens[i] - elif token in ("-b", "--data-binary", "--data-raw"): + elif token in {"-b", "--data-binary", "--data-raw"}: i += 1 args["data_binary"] = tokens[i] - elif token in ("-H", "--header"): + elif token in {"-H", "--header"}: i += 1 args["headers"].append(tokens[i]) - elif token == "--compressed": + elif token == "--compressed": # noqa: S105 args["compressed"] = True - elif token in ("-k", "--insecure"): + elif token in {"-k", "--insecure"}: args["insecure"] = True - elif token in ("-u", "--user"): + elif token in {"-u", "--user"}: i += 1 args["user"] = tuple(tokens[i].split(":")) - elif token in ("-I", 
"--include"): + elif token in {"-I", "--include"}: args["include"] = True - elif token in ("-s", "--silent"): + elif token in {"-s", "--silent"}: args["silent"] = True - elif token in ("-x", "--proxy"): + elif token in {"-x", "--proxy"}: i += 1 args["proxy"] = tokens[i] - elif token in ("-U", "--proxy-user"): + elif token in {"-U", "--proxy-user"}: i += 1 args["proxy_user"] = tokens[i] elif not token.startswith("-"): @@ -153,13 +163,13 @@ def parse_context(curl_command): # proxy_auth = parsed_args.proxy_user if parsed_args.proxy and parsed_args.proxy_user: proxies = { - "http": "http://{}@{}/".format(parsed_args.proxy_user, parsed_args.proxy), - "https": "http://{}@{}/".format(parsed_args.proxy_user, parsed_args.proxy), + "http": f"http://{parsed_args.proxy_user}@{parsed_args.proxy}/", + "https": f"http://{parsed_args.proxy_user}@{parsed_args.proxy}/", } elif parsed_args.proxy: proxies = { - "http": "http://{}/".format(parsed_args.proxy), - "https": "http://{}/".format(parsed_args.proxy), + "http": f"http://{parsed_args.proxy}/", + "https": f"http://{parsed_args.proxy}/", } return ParsedContext( diff --git a/src/backend/base/langflow/base/data/utils.py b/src/backend/base/langflow/base/data/utils.py index 7f8a41831fdd..4fb05041160c 100644 --- a/src/backend/base/langflow/base/data/utils.py +++ b/src/backend/base/langflow/base/data/utils.py @@ -1,12 +1,12 @@ import unicodedata -import xml.etree.ElementTree as ET +from collections.abc import Callable from concurrent import futures from pathlib import Path -from typing import Callable, List, Optional import chardet import orjson import yaml +from defusedxml import ElementTree from langflow.schema import Data @@ -44,16 +44,31 @@ def is_hidden(path: Path) -> bool: return path.name.startswith(".") +def format_directory_path(path: str) -> str: + """Format a directory path to ensure it's properly escaped and valid. + + Args: + path (str): The input path string. + + Returns: + str: A properly formatted path string. + """ + return path.replace("\n", "\\n") + + def retrieve_file_paths( path: str, + *, load_hidden: bool, recursive: bool, depth: int, - types: List[str] = TEXT_FILE_TYPES, -) -> List[str]: + types: list[str] = TEXT_FILE_TYPES, +) -> list[str]: + path = format_directory_path(path) path_obj = Path(path) if not path_obj.exists() or not path_obj.is_dir(): - raise ValueError(f"Path {path} must exist and be a directory.") + msg = f"Path {path} must exist and be a directory." 
+ raise ValueError(msg) def match_types(p: Path) -> bool: return any(p.suffix == f".{t}" for t in types) if types else True @@ -70,59 +85,56 @@ def walk_level(directory: Path, max_depth: int): glob = "**/*" if recursive else "*" paths = walk_level(path_obj, depth) if depth else path_obj.glob(glob) - file_paths = [str(p) for p in paths if p.is_file() and match_types(p) and is_not_hidden(p)] - - return file_paths + return [str(p) for p in paths if p.is_file() and match_types(p) and is_not_hidden(p)] -def partition_file_to_data(file_path: str, silent_errors: bool) -> Optional[Data]: +def partition_file_to_data(file_path: str, *, silent_errors: bool) -> Data | None: # Use the partition function to load the file - from unstructured.partition.auto import partition # type: ignore + from unstructured.partition.auto import partition try: elements = partition(file_path) except Exception as e: if not silent_errors: - raise ValueError(f"Error loading file {file_path}: {e}") from e + msg = f"Error loading file {file_path}: {e}" + raise ValueError(msg) from e return None # Create a Data text = "\n\n".join([str(el) for el in elements]) metadata = elements.metadata if hasattr(elements, "metadata") else {} metadata["file_path"] = file_path - record = Data(text=text, data=metadata) - return record + return Data(text=text, data=metadata) def read_text_file(file_path: str) -> str: - with open(file_path, "rb") as f: - raw_data = f.read() - result = chardet.detect(raw_data) - encoding = result["encoding"] + _file_path = Path(file_path) + raw_data = _file_path.read_bytes() + result = chardet.detect(raw_data) + encoding = result["encoding"] - if encoding in ["Windows-1252", "Windows-1254", "MacRoman"]: - encoding = "utf-8" + if encoding in {"Windows-1252", "Windows-1254", "MacRoman"}: + encoding = "utf-8" - with open(file_path, "r", encoding=encoding) as f: - return f.read() + return _file_path.read_text(encoding=encoding) def read_docx_file(file_path: str) -> str: - from docx import Document # type: ignore + from docx import Document doc = Document(file_path) return "\n\n".join([p.text for p in doc.paragraphs]) def parse_pdf_to_text(file_path: str) -> str: - from pypdf import PdfReader # type: ignore + from pypdf import PdfReader - with open(file_path, "rb") as f: + with Path(file_path).open("rb") as f: reader = PdfReader(f) return "\n\n".join([page.extract_text() for page in reader.pages]) -def parse_text_file_to_data(file_path: str, silent_errors: bool) -> Optional[Data]: +def parse_text_file_to_data(file_path: str, *, silent_errors: bool) -> Data | None: try: if file_path.endswith(".pdf"): text = parse_pdf_to_text(file_path) @@ -140,18 +152,18 @@ def parse_text_file_to_data(file_path: str, silent_errors: bool) -> Optional[Dat text = [normalize_text(item) if isinstance(item, str) else item for item in text] text = orjson.dumps(text).decode("utf-8") - elif file_path.endswith(".yaml") or file_path.endswith(".yml"): + elif file_path.endswith((".yaml", ".yml")): text = yaml.safe_load(text) elif file_path.endswith(".xml"): - xml_element = ET.fromstring(text) - text = ET.tostring(xml_element, encoding="unicode") + xml_element = ElementTree.fromstring(text) + text = ElementTree.tostring(xml_element, encoding="unicode") except Exception as e: if not silent_errors: - raise ValueError(f"Error loading file {file_path}: {e}") from e + msg = f"Error loading file {file_path}: {e}" + raise ValueError(msg) from e return None - record = Data(data={"file_path": file_path, "text": text}) - return record + return 
Data(data={"file_path": file_path, "text": text}) # ! Removing unstructured dependency until @@ -171,14 +183,15 @@ def parse_text_file_to_data(file_path: str, silent_errors: bool) -> Optional[Dat def parallel_load_data( - file_paths: List[str], + file_paths: list[str], + *, silent_errors: bool, max_concurrency: int, load_function: Callable = parse_text_file_to_data, -) -> List[Optional[Data]]: +) -> list[Data | None]: with futures.ThreadPoolExecutor(max_workers=max_concurrency) as executor: loaded_files = executor.map( - lambda file_path: load_function(file_path, silent_errors), + lambda file_path: load_function(file_path, silent_errors=silent_errors), file_paths, ) # loaded_files is an iterator, so we need to convert it to a list diff --git a/src/backend/base/langflow/base/document_transformers/__init__.py b/src/backend/base/langflow/base/document_transformers/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/src/backend/base/langflow/base/document_transformers/model.py b/src/backend/base/langflow/base/document_transformers/model.py new file mode 100644 index 000000000000..f4ecb30c6c19 --- /dev/null +++ b/src/backend/base/langflow/base/document_transformers/model.py @@ -0,0 +1,43 @@ +from abc import abstractmethod +from typing import Any + +from langchain_core.documents import BaseDocumentTransformer + +from langflow.custom import Component +from langflow.io import Output +from langflow.schema import Data +from langflow.utils.util import build_loader_repr_from_data + + +class LCDocumentTransformerComponent(Component): + trace_type = "document_transformer" + outputs = [ + Output(display_name="Data", name="data", method="transform_data"), + ] + + def transform_data(self) -> list[Data]: + data_input = self.get_data_input() + documents = [] + + if not isinstance(data_input, list): + data_input = [data_input] + + for _input in data_input: + if isinstance(_input, Data): + documents.append(_input.to_lc_document()) + else: + documents.append(_input) + + transformer = self.build_document_transformer() + docs = transformer.transform_documents(documents) + data = self.to_data(docs) + self.repr_value = build_loader_repr_from_data(data) + return data + + @abstractmethod + def get_data_input(self) -> Any: + """Get the data input.""" + + @abstractmethod + def build_document_transformer(self) -> BaseDocumentTransformer: + """Build the text splitter.""" diff --git a/src/backend/base/langflow/base/embeddings/aiml_embeddings.py b/src/backend/base/langflow/base/embeddings/aiml_embeddings.py new file mode 100644 index 000000000000..694e05c03fd9 --- /dev/null +++ b/src/backend/base/langflow/base/embeddings/aiml_embeddings.py @@ -0,0 +1,62 @@ +import concurrent.futures +import json + +import httpx +from langchain_core.pydantic_v1 import BaseModel, SecretStr +from loguru import logger + +from langflow.field_typing import Embeddings + + +class AIMLEmbeddingsImpl(BaseModel, Embeddings): + embeddings_completion_url: str = "https://api.aimlapi.com/v1/embeddings" + + api_key: SecretStr + model: str + + def embed_documents(self, texts: list[str]) -> list[list[float]]: + embeddings = [None] * len(texts) + headers = { + "Content-Type": "application/json", + "Authorization": f"Bearer {self.api_key.get_secret_value()}", + } + + with httpx.Client() as client, concurrent.futures.ThreadPoolExecutor() as executor: + futures = [] + for i, text in enumerate(texts): + futures.append((i, executor.submit(self._embed_text, client, headers, text))) + + for index, future in futures: + try: + result_data = 
future.result() + if len(result_data["data"]) != 1: + msg = f"Expected one embedding, got {len(result_data['data'])}" + raise ValueError(msg) + embeddings[index] = result_data["data"][0]["embedding"] + except ( + httpx.HTTPStatusError, + httpx.RequestError, + json.JSONDecodeError, + KeyError, + ValueError, + ): + logger.exception("Error occurred") + raise + + return embeddings # type: ignore[return-value] + + def _embed_text(self, client: httpx.Client, headers: dict, text: str) -> dict: + payload = { + "model": self.model, + "input": text, + } + response = client.post( + self.embeddings_completion_url, + headers=headers, + json=payload, + ) + response.raise_for_status() + return response.json() + + def embed_query(self, text: str) -> list[float]: + return self.embed_documents([text])[0] diff --git a/src/backend/base/langflow/base/embeddings/model.py b/src/backend/base/langflow/base/embeddings/model.py index f9059c608e07..182762bfbdc5 100644 --- a/src/backend/base/langflow/base/embeddings/model.py +++ b/src/backend/base/langflow/base/embeddings/model.py @@ -10,14 +10,17 @@ class LCEmbeddingsModel(Component): Output(display_name="Embeddings", name="embeddings", method="build_embeddings"), ] - def _validate_outputs(self): + def _validate_outputs(self) -> None: required_output_methods = ["build_embeddings"] output_names = [output.name for output in self.outputs] for method_name in required_output_methods: if method_name not in output_names: - raise ValueError(f"Output with name '{method_name}' must be defined.") - elif not hasattr(self, method_name): - raise ValueError(f"Method '{method_name}' must be defined.") + msg = f"Output with name '{method_name}' must be defined." + raise ValueError(msg) + if not hasattr(self, method_name): + msg = f"Method '{method_name}' must be defined." + raise ValueError(msg) def build_embeddings(self) -> Embeddings: - raise NotImplementedError("You must implement the build_embeddings method in your class.") + msg = "You must implement the build_embeddings method in your class." + raise NotImplementedError(msg) diff --git a/src/backend/base/langflow/base/flow_processing/utils.py b/src/backend/base/langflow/base/flow_processing/utils.py index 1505207ba9b4..f1e7fdc6c3d6 100644 --- a/src/backend/base/langflow/base/flow_processing/utils.py +++ b/src/backend/base/langflow/base/flow_processing/utils.py @@ -1,5 +1,3 @@ -from typing import List - from loguru import logger from langflow.graph.schema import ResultData, RunOutputs @@ -7,9 +5,8 @@ from langflow.schema.message import Message -def build_data_from_run_outputs(run_outputs: RunOutputs) -> List[Data]: - """ - Build a list of data from the given RunOutputs. +def build_data_from_run_outputs(run_outputs: RunOutputs) -> list[Data]: + """Build a list of data from the given RunOutputs. Args: run_outputs (RunOutputs): The RunOutputs object containing the output data. @@ -27,13 +24,11 @@ def build_data_from_run_outputs(run_outputs: RunOutputs) -> List[Data]: return data -def build_data_from_result_data(result_data: ResultData, get_final_results_only: bool = True) -> List[Data]: - """ - Build a list of data from the given ResultData. +def build_data_from_result_data(result_data: ResultData) -> list[Data]: + """Build a list of data from the given ResultData. Args: result_data (ResultData): The ResultData object containing the result data. - get_final_results_only (bool, optional): Whether to include only final results. Defaults to True. Returns: List[Data]: A list of data built from the ResultData. 
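`AIMLEmbeddingsImpl.embed_documents` above issues one request per text but keeps an `(index, future)` pair for each submission, so embeddings land in their original slots even when requests complete out of order. Stripped of the HTTP details, the ordering trick reduces to this sketch:

```python
import concurrent.futures
from collections.abc import Callable, Sequence

def fan_out_in_order(items: Sequence, work: Callable) -> list:
    # Pair every future with the index it was submitted under; filling the
    # results list by index keeps outputs aligned with the input order no
    # matter which worker finishes first. future.result() re-raises any
    # exception from the worker at the collection point.
    results: list = [None] * len(items)
    with concurrent.futures.ThreadPoolExecutor() as executor:
        futures = [(i, executor.submit(work, item)) for i, item in enumerate(items)]
        for i, future in futures:
            results[i] = future.result()
    return results

# fan_out_in_order(["a", "b", "c"], str.upper) -> ["A", "B", "C"]
```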
@@ -58,7 +53,7 @@ def build_data_from_result_data(result_data: ResultData, get_final_results_only: data.append(artifact) else: # Warn about unknown output type - logger.warning(f"Unable to build record output from unknown ResultData.artifact: {str(artifact)}") + logger.warning(f"Unable to build record output from unknown ResultData.artifact: {artifact}") # Chat or text output elif result_data.results: data.append(Data(data={"result": result_data.results}, text_key="result")) @@ -68,11 +63,8 @@ def build_data_from_result_data(result_data: ResultData, get_final_results_only: if isinstance(result_data.results, dict): for name, result in result_data.results.items(): - dataobj: Data | Message | None = None - if isinstance(result, Message): - dataobj = result - else: - dataobj = Data(data=result, text_key=name) + dataobj: Data | Message | None + dataobj = result if isinstance(result, Message) else Data(data=result, text_key=name) data.append(dataobj) else: @@ -80,9 +72,8 @@ def build_data_from_result_data(result_data: ResultData, get_final_results_only: return data -def format_flow_output_data(data: List[Data]) -> str: - """ - Format the flow output data into a string. +def format_flow_output_data(data: list[Data]) -> str: + """Format the flow output data into a string. Args: data (List[Data]): The list of data to format. diff --git a/src/backend/base/langflow/base/io/chat.py b/src/backend/base/langflow/base/io/chat.py index b9ff4f3d1e98..965854a76246 100644 --- a/src/backend/base/langflow/base/io/chat.py +++ b/src/backend/base/langflow/base/io/chat.py @@ -1,96 +1,65 @@ -from typing import Optional, Union +from typing import cast -from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES from langflow.custom import Component from langflow.memory import store_message from langflow.schema import Data from langflow.schema.message import Message -from langflow.utils.constants import MESSAGE_SENDER_USER, MESSAGE_SENDER_AI class ChatComponent(Component): display_name = "Chat Component" description = "Use as base for chat components." 
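In the `ChatComponent` rewrite that follows, `build_with_data` persists a message only when a `session_id` is present, and the flow id is now resolved defensively (`self.graph.flow_id if hasattr(self, "graph") else None`) so components detached from a graph no longer raise. Reduced to its control flow, with `store` standing in for `store_message`, the gating looks roughly like this (illustrative only):

```python
def maybe_store(message, store, *, session_id: str | None, flow_id=None):
    # Sketch of the persistence gate: skip storage for session-less
    # messages, and tolerate a missing graph by passing flow_id=None.
    if session_id and isinstance(message.text, str):
        return store(message, flow_id=flow_id)
    return []
```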
- def build_config(self): - return { - "input_value": { - "input_types": ["Text"], - "display_name": "Text", - "multiline": True, - }, - "sender": { - "options": [MESSAGE_SENDER_AI, MESSAGE_SENDER_USER], - "display_name": "Sender Type", - "advanced": True, - }, - "sender_name": {"display_name": "Sender Name", "advanced": True}, - "session_id": { - "display_name": "Session ID", - "info": "If provided, the message will be stored in the memory.", - "advanced": True, - }, - "return_message": { - "display_name": "Return Message", - "info": "Return the message as a Message containing the sender, sender_name, and session_id.", - "advanced": True, - }, - "data_template": { - "display_name": "Data Template", - "multiline": True, - "info": "In case of Message being a Data, this template will be used to convert it to text.", - "advanced": True, - }, - "files": { - "field_type": "file", - "display_name": "Files", - "file_types": TEXT_FILE_TYPES + IMG_FILE_TYPES, - "info": "Files to be sent with the message.", - "advanced": True, - }, - } - - # Keep this method for backward compatibility - def store_message( - self, - message: Message, - ) -> list[Message]: - messages = store_message( - message, - flow_id=self.graph.flow_id, - ) - - self.status = messages - return messages - def build_with_data( self, - sender: Optional[str] = "User", - sender_name: Optional[str] = "User", - input_value: Optional[Union[str, Data, Message]] = None, - files: Optional[list[str]] = None, - session_id: Optional[str] = None, - return_message: Optional[bool] = False, - ) -> Message: - message: Message | None = None - - if isinstance(input_value, Data): - # Update the data of the record - message = Message.from_data(input_value) - else: - message = Message( - text=input_value, sender=sender, sender_name=sender_name, files=files, session_id=session_id - ) - if not return_message: - message_text = message.text - else: - message_text = message # type: ignore + *, + sender: str | None = "User", + sender_name: str | None = "User", + input_value: str | Data | Message | None = None, + files: list[str] | None = None, + session_id: str | None = None, + return_message: bool = False, + ) -> str | Message: + message = self._create_message(input_value, sender, sender_name, files, session_id) + message_text = message.text if not return_message else message self.status = message_text if session_id and isinstance(message, Message) and isinstance(message.text, str): - messages = store_message( - message, - flow_id=self.graph.flow_id, - ) + flow_id = self.graph.flow_id if hasattr(self, "graph") else None + messages = store_message(message, flow_id=flow_id) self.status = messages - return message_text # type: ignore + self._send_messages_events(messages) + + return cast(str | Message, message_text) + + def _create_message(self, input_value, sender, sender_name, files, session_id) -> Message: + if isinstance(input_value, Data): + return Message.from_data(input_value) + return Message( + text=input_value, + sender=sender, + sender_name=sender_name, + files=files, + session_id=session_id, + category="message", + ) + + def _send_messages_events(self, messages) -> None: + if hasattr(self, "_event_manager") and self._event_manager: + for stored_message in messages: + id_ = stored_message.id + self._send_message_event(message=stored_message, id_=id_) + + def get_properties_from_source_component(self): + if hasattr(self, "_vertex") and hasattr(self._vertex, "incoming_edges") and self._vertex.incoming_edges: + source_id = 
self._vertex.incoming_edges[0].source_id + _source_vertex = self.graph.get_vertex(source_id) + component = _source_vertex.custom_component + source = component.display_name + icon = component.icon + possible_attributes = ["model_name", "model_id", "model"] + for attribute in possible_attributes: + if hasattr(component, attribute) and getattr(component, attribute): + return getattr(component, attribute), icon, source, component._id + return source, icon, component.display_name, component._id + return None, None, None, None diff --git a/src/backend/base/langflow/base/io/text.py b/src/backend/base/langflow/base/io/text.py index d7af7b0c941e..718bef88bee5 100644 --- a/src/backend/base/langflow/base/io/text.py +++ b/src/backend/base/langflow/base/io/text.py @@ -15,7 +15,8 @@ def build_config(self): "data_template": { "display_name": "Data Template", "multiline": True, - "info": "Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.", + "info": "Template to convert Data to Text. " + "If left empty, it will be dynamically set to the Data's text key.", "advanced": True, }, } diff --git a/src/backend/base/langflow/base/langchain_utilities/model.py b/src/backend/base/langflow/base/langchain_utilities/model.py index e2c1c0321508..b8fca27510b1 100644 --- a/src/backend/base/langflow/base/langchain_utilities/model.py +++ b/src/backend/base/langflow/base/langchain_utilities/model.py @@ -1,5 +1,5 @@ from abc import abstractmethod -from typing import Sequence, Union +from collections.abc import Sequence from langflow.custom import Component from langflow.field_typing import Tool @@ -14,25 +14,21 @@ class LCToolComponent(Component): Output(name="api_build_tool", display_name="Tool", method="build_tool"), ] - def _validate_outputs(self): + def _validate_outputs(self) -> None: required_output_methods = ["run_model", "build_tool"] output_names = [output.name for output in self.outputs] for method_name in required_output_methods: if method_name not in output_names: - raise ValueError(f"Output with name '{method_name}' must be defined.") - elif not hasattr(self, method_name): - raise ValueError(f"Method '{method_name}' must be defined.") + msg = f"Output with name '{method_name}' must be defined." + raise ValueError(msg) + if not hasattr(self, method_name): + msg = f"Method '{method_name}' must be defined." + raise ValueError(msg) @abstractmethod - def run_model(self) -> Union[Data, list[Data]]: - """ - Run model and return the output. - """ - pass + def run_model(self) -> Data | list[Data]: + """Run model and return the output.""" @abstractmethod def build_tool(self) -> Tool | Sequence[Tool]: - """ - Build the tool. - """ - pass + """Build the tool.""" diff --git a/src/backend/base/langflow/base/memory/memory.py b/src/backend/base/langflow/base/memory/memory.py index e55301a5f28f..2b6f0c59bec2 100644 --- a/src/backend/base/langflow/base/memory/memory.py +++ b/src/backend/base/langflow/base/memory/memory.py @@ -1,5 +1,3 @@ -from typing import Optional - from langflow.custom import CustomComponent from langflow.schema import Data from langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_USER @@ -36,7 +34,8 @@ def build_config(self): "data_template": { "display_name": "Data Template", "multiline": True, - "info": "Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.", + "info": "Template to convert Data to Text. 
" + "If left empty, it will be dynamically set to the Data's text key.", "advanced": True, }, } @@ -45,6 +44,6 @@ def get_messages(self, **kwargs) -> list[Data]: raise NotImplementedError def add_message( - self, sender: str, sender_name: str, text: str, session_id: str, metadata: Optional[dict] = None, **kwargs - ): + self, sender: str, sender_name: str, text: str, session_id: str, metadata: dict | None = None, **kwargs + ) -> None: raise NotImplementedError diff --git a/src/backend/base/langflow/base/memory/model.py b/src/backend/base/langflow/base/memory/model.py index a940dd0a774c..a33c7894ac86 100644 --- a/src/backend/base/langflow/base/memory/model.py +++ b/src/backend/base/langflow/base/memory/model.py @@ -1,9 +1,10 @@ from abc import abstractmethod +from langchain.memory import ConversationBufferMemory + from langflow.custom import Component -from langflow.field_typing import BaseChatMessageHistory, BaseChatMemory +from langflow.field_typing import BaseChatMemory, BaseChatMessageHistory from langflow.template import Output -from langchain.memory import ConversationBufferMemory class LCChatMemoryComponent(Component): @@ -16,20 +17,20 @@ class LCChatMemoryComponent(Component): ) ] - def _validate_outputs(self): + def _validate_outputs(self) -> None: required_output_methods = ["build_message_history"] output_names = [output.name for output in self.outputs] for method_name in required_output_methods: if method_name not in output_names: - raise ValueError(f"Output with name '{method_name}' must be defined.") - elif not hasattr(self, method_name): - raise ValueError(f"Method '{method_name}' must be defined.") + msg = f"Output with name '{method_name}' must be defined." + raise ValueError(msg) + if not hasattr(self, method_name): + msg = f"Method '{method_name}' must be defined." + raise ValueError(msg) def build_base_memory(self) -> BaseChatMemory: return ConversationBufferMemory(chat_memory=self.build_message_history()) @abstractmethod def build_message_history(self) -> BaseChatMessageHistory: - """ - Builds the chat message history memory. 
- """ + """Builds the chat message history memory.""" diff --git a/src/backend/base/langflow/base/models/aiml_constants.py b/src/backend/base/langflow/base/models/aiml_constants.py index a962190101a4..48fa89ce4c81 100644 --- a/src/backend/base/langflow/base/models/aiml_constants.py +++ b/src/backend/base/langflow/base/models/aiml_constants.py @@ -1,80 +1,252 @@ AIML_CHAT_MODELS = [ - "zero-one-ai/Yi-34B-Chat", + "#g1_aura-angus-en", + "#g1_aura-arcas-en", + "#g1_aura-asteria-en", + "#g1_aura-athena-en", + "#g1_aura-helios-en", + "#g1_aura-hera-en", + "#g1_aura-luna-en", + "#g1_aura-orion-en", + "#g1_aura-orpheus-en", + "#g1_aura-perseus-en", + "#g1_aura-stella-en", + "#g1_aura-zeus-en", + "#g1_nova-2-automotive", + "#g1_nova-2-conversationalai", + "#g1_nova-2-drivethru", + "#g1_nova-2-finance", + "#g1_nova-2-general", + "#g1_nova-2-medical", + "#g1_nova-2-meeting", + "#g1_nova-2-phonecall", + "#g1_nova-2-video", + "#g1_nova-2-voicemail", + "#g1_redaction", + "#g1_whisper-base", + "#g1_whisper-large", + "#g1_whisper-medium", + "#g1_whisper-small", + "#g1_whisper-tiny", + "Austism/chronos-hermes-13b", + "BAAI/bge-base-en-v1.5", + "BAAI/bge-large-en-v1.5", + "EleutherAI/llemma_7b", + "Gryphe/MythoMax-L2-13b", + "HuggingFaceH4/zephyr-7b-beta", + "Meta-Llama/Llama-Guard-7b", + "Nexusflow/NexusRaven-V2-13B", + "NousResearch/Hermes-2-Theta-Llama-3-70B", + "NousResearch/Nous-Capybara-7B-V1p9", + "NousResearch/Nous-Hermes-13b", + "NousResearch/Nous-Hermes-2-Mistral-7B-DPO", + "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO", + "NousResearch/Nous-Hermes-2-Mixtral-8x7B-SFT", + "NousResearch/Nous-Hermes-2-Yi-34B", + "NousResearch/Nous-Hermes-Llama2-13b", + "NousResearch/Nous-Hermes-Llama2-70b", + "NousResearch/Nous-Hermes-llama-2-7b", + "NumbersStation/nsql-llama-2-7B", + "Open-Orca/Mistral-7B-OpenOrca", + "Phind/Phind-CodeLlama-34B-Python-v1", + "Phind/Phind-CodeLlama-34B-v2", + "Qwen/Qwen1.5-0.5B", + "Qwen/Qwen1.5-0.5B-Chat", + "Qwen/Qwen1.5-1.8B", + "Qwen/Qwen1.5-1.8B-Chat", + "Qwen/Qwen1.5-110B-Chat", + "Qwen/Qwen1.5-14B", + "Qwen/Qwen1.5-14B-Chat", + "Qwen/Qwen1.5-32B", + "Qwen/Qwen1.5-32B-Chat", + "Qwen/Qwen1.5-4B", + "Qwen/Qwen1.5-4B-Chat", + "Qwen/Qwen1.5-72B", + "Qwen/Qwen1.5-72B-Chat", + "Qwen/Qwen1.5-7B", + "Qwen/Qwen1.5-7B-Chat", + "Qwen/Qwen2-1.5B", + "Qwen/Qwen2-1.5B-Instruct", + "Qwen/Qwen2-72B", + "Qwen/Qwen2-72B-Instruct", + "Qwen/Qwen2-7B", + "Qwen/Qwen2-7B-Instruct", + "SG161222/Realistic_Vision_V3.0_VAE", + "Snowflake/snowflake-arctic-instruct", + "Undi95/ReMM-SLERP-L2-13B", + "Undi95/Toppy-M-7B", + "WhereIsAI/UAE-Large-V1", + "WizardLM/WizardCoder-Python-34B-V1.0", + "WizardLM/WizardLM-13B-V1.2", + "WizardLM/WizardLM-70B-V1.0", + "allenai/OLMo-7B", "allenai/OLMo-7B-Instruct", "allenai/OLMo-7B-Twin-2T", - "allenai/OLMo-7B", - "Austism/chronos-hermes-13b", + "bert-base-uncased", + "carson/ml318br", + "chatgpt-4o-latest", + "claude-3-5-sonnet-20240620", + "claude-3-haiku-20240307", + "claude-3-opus-20240229", + "claude-3-sonnet-20240229", + "codellama/CodeLlama-13b-Instruct-hf", + "codellama/CodeLlama-13b-Python-hf", + "codellama/CodeLlama-13b-hf", + "codellama/CodeLlama-34b-Instruct-hf", + "codellama/CodeLlama-34b-Python-hf", + "codellama/CodeLlama-34b-hf", + "codellama/CodeLlama-70b-Instruct-hf", + "codellama/CodeLlama-70b-Python-hf", + "codellama/CodeLlama-70b-hf", + "codellama/CodeLlama-7b-Instruct-hf", + "codellama/CodeLlama-7b-Python-hf", + "codellama/CodeLlama-7b-hf", "cognitivecomputations/dolphin-2.5-mixtral-8x7b", + "dall-e-2", + "dall-e-3", + "databricks/dbrx-instruct", 
"deepseek-ai/deepseek-coder-33b-instruct", "deepseek-ai/deepseek-llm-67b-chat", + "flux-pro", + "flux-realism", + "flux/dev", + "flux/dev/image-to-image", + "flux/schnell", "garage-bAInd/Platypus2-70B-instruct", + "gemini-1.5-flash", + "gemini-1.5-pro", + "gemini-pro", + "google/gemma-2-27b-it", + "google/gemma-2-9b-it", + "google/gemma-2b", "google/gemma-2b-it", + "google/gemma-7b", "google/gemma-7b-it", - "Gryphe/MythoMax-L2-13b", + "gpt-3.5-turbo-0125", + "gpt-3.5-turbo-0301", + "gpt-3.5-turbo-0613", + "gpt-3.5-turbo-1106", + "gpt-3.5-turbo-16k-0613", + "gpt-3.5-turbo-instruct", + "gpt-4", + "gpt-4-0125-preview", + "gpt-4-1106-preview", + "gpt-4-32k", + "gpt-4-turbo", + "gpt-4-turbo-2024-04-09", + "gpt-4-vision-preview", + "gpt-4o", + "gpt-4o-2024-05-13", + "gpt-4o-2024-08-06", + "gpt-4o-mini", + "gpt-4o-mini-2024-07-18", + "gradientai/Llama-3-70B-Instruct-Gradient-1048k", + "huggyllama/llama-13b", + "huggyllama/llama-30b", + "huggyllama/llama-65b", + "huggyllama/llama-7b", + "lmsys/vicuna-13b-v1.3", "lmsys/vicuna-13b-v1.5", + "lmsys/vicuna-13b-v1.5-16k", + "lmsys/vicuna-7b-v1.3", "lmsys/vicuna-7b-v1.5", - "codellama/CodeLlama-13b-Instruct-hf", - "codellama/CodeLlama-34b-Instruct-hf", - "codellama/CodeLlama-70b-Instruct-hf", - "codellama/CodeLlama-7b-Instruct-hf", + "meta-llama/Llama-2-13b-chat-hf", + "meta-llama/Llama-2-13b-hf", + "meta-llama/Llama-2-70b-chat-hf", + "meta-llama/Llama-2-70b-hf", + "meta-llama/Llama-2-7b-chat-hf", + "meta-llama/Llama-2-7b-hf", + "meta-llama/Llama-3-70b-chat-hf", + "meta-llama/Llama-3-70b-hf", + "meta-llama/Llama-3-8b-chat-hf", + "meta-llama/Llama-3-8b-hf", + "meta-llama/LlamaGuard-2-8b", + "meta-llama/Meta-Llama-3-70B", + "meta-llama/Meta-Llama-3-70B-Instruct", + "meta-llama/Meta-Llama-3-70B-Instruct-Lite", + "meta-llama/Meta-Llama-3-70B-Instruct-Turbo", + "meta-llama/Meta-Llama-3-8B", + "meta-llama/Meta-Llama-3-8B-Instruct", + "meta-llama/Meta-Llama-3-8B-Instruct-Lite", + "meta-llama/Meta-Llama-3-8B-Instruct-Turbo", "meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo", + "meta-llama/Meta-Llama-3.1-70B-Instruct-Reference", "meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo", + "meta-llama/Meta-Llama-3.1-70B-Reference", "meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo", - "meta-llama/Llama-2-70b-chat-hf", - "meta-llama/Llama-2-13b-chat-hf", - "meta-llama/Llama-2-7b-chat-hf", + "meta-llama/Meta-Llama-3.1-8B-Reference", + "meta-llama/Meta-Llama-Guard-3-8B", + "microsoft/WizardLM-2-8x22B", + "microsoft/phi-2", "mistralai/Mistral-7B-Instruct-v0.1", "mistralai/Mistral-7B-Instruct-v0.2", + "mistralai/Mistral-7B-Instruct-v0.3", + "mistralai/Mistral-7B-v0.1", + "mistralai/Mixtral-8x22B", + "mistralai/Mixtral-8x22B-Instruct-v0.1", "mistralai/Mixtral-8x7B-Instruct-v0.1", - "NousResearch/Nous-Capybara-7B-V1p9", - "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO", - "NousResearch/Nous-Hermes-2-Mixtral-8x7B-SFT", - "NousResearch/Nous-Hermes-llama-2-7b", - "NousResearch/Nous-Hermes-Llama2-13b", - "NousResearch/Nous-Hermes-2-Yi-34B", + "mistralai/Mixtral-8x7B-v0.1", "openchat/openchat-3.5-1210", - "Open-Orca/Mistral-7B-OpenOrca", - "togethercomputer/Qwen-7B-Chat", - "Qwen/Qwen1.5-0.5B-Chat", - "Qwen/Qwen1.5-1.8B-Chat", - "Qwen/Qwen1.5-4B-Chat", - "Qwen/Qwen1.5-7B-Chat", - "Qwen/Qwen1.5-14B-Chat", - "Qwen/Qwen1.5-72B-Chat", + "prompthero/openjourney", + "runwayml/stable-diffusion-v1-5", + "sentence-transformers/msmarco-bert-base-dot-v5", "snorkelai/Snorkel-Mistral-PairRM-DPO", - "togethercomputer/alpaca-7b", + "stabilityai/stable-diffusion-2-1", + 
"stabilityai/stable-diffusion-xl-base-1.0", + "stable-diffusion-v3-medium", "teknium/OpenHermes-2-Mistral-7B", "teknium/OpenHermes-2p5-Mistral-7B", - "togethercomputer/falcon-40b-instruct", - "togethercomputer/falcon-7b-instruct", + "togethercomputer/CodeLlama-13b-Instruct", + "togethercomputer/CodeLlama-13b-Python", + "togethercomputer/CodeLlama-34b", + "togethercomputer/CodeLlama-34b-Instruct", + "togethercomputer/CodeLlama-34b-Python", + "togethercomputer/CodeLlama-7b-Instruct", + "togethercomputer/CodeLlama-7b-Python", + "togethercomputer/Koala-13B", + "togethercomputer/Koala-7B", + "togethercomputer/LLaMA-2-7B-32K", "togethercomputer/Llama-2-7B-32K-Instruct", - "togethercomputer/RedPajama-INCITE-Chat-3B-v1", - "togethercomputer/RedPajama-INCITE-7B-Chat", + "togethercomputer/Llama-3-8b-chat-hf-int4", + "togethercomputer/Llama-3-8b-chat-hf-int8", + "togethercomputer/SOLAR-10.7B-Instruct-v1.0-int4", + "togethercomputer/StripedHyena-Hessian-7B", "togethercomputer/StripedHyena-Nous-7B", - "Undi95/ReMM-SLERP-L2-13B", - "Undi95/Toppy-M-7B", - "WizardLM/WizardLM-13B-V1.2", + "togethercomputer/alpaca-7b", + "togethercomputer/evo-1-131k-base", + "togethercomputer/evo-1-8k-base", + "togethercomputer/guanaco-13b", + "togethercomputer/guanaco-33b", + "togethercomputer/guanaco-65b", + "togethercomputer/guanaco-7b", + "togethercomputer/llama-2-13b", + "togethercomputer/llama-2-13b-chat", + "togethercomputer/llama-2-70b", + "togethercomputer/llama-2-70b-chat", + "togethercomputer/llama-2-7b", + "togethercomputer/llama-2-7b-chat", + "togethercomputer/m2-bert-80M-2k-retrieval", + "togethercomputer/m2-bert-80M-32k-retrieval", + "togethercomputer/m2-bert-80M-8k-retrieval", "upstage/SOLAR-10.7B-Instruct-v1.0", - "gpt-4", - "gpt-4-turbo", - "gpt-4-0613", - "gpt-4-32k", - "gpt-4-32k-0613", - "gpt-3.5-turbo-0125", - "gpt-3.5-turbo", - "gpt-3.5-turbo-1106", - "gpt-3.5-turbo-instruct", - "gpt-3.5-turbo-16k", - "gpt-3.5-turbo-0613", - "gpt-3.5-turbo-16k-0613", - "gpt-4o", - "claude-3-opus-20240229", - "claude-3-sonnet-20240229", - "claude-3-haiku-20240307", + "voyage-2", + "voyage-code-2", + "voyage-finance-2", + "voyage-large-2", + "voyage-large-2-instruct", + "voyage-law-2", + "voyage-multilingual-2", + "wavymulder/Analog-Diffusion", + "zero-one-ai/Yi-34B", + "zero-one-ai/Yi-34B-Chat", + "zero-one-ai/Yi-6B", ] AIML_EMBEDDING_MODELS = [ - "text-embedding-3-small", "text-embedding-3-large", + "text-embedding-3-small", "text-embedding-ada-002", + "text-multilingual-embedding-002", + "textembedding-gecko-multilingual@001", + "textembedding-gecko@001", + "textembedding-gecko@003", ] diff --git a/src/backend/base/langflow/base/models/aws_constants.py b/src/backend/base/langflow/base/models/aws_constants.py new file mode 100644 index 000000000000..23fd90dce767 --- /dev/null +++ b/src/backend/base/langflow/base/models/aws_constants.py @@ -0,0 +1,91 @@ +AWS_MODEL_IDs = [ + # Amazon Titan Models + "amazon.titan-text-express-v1", + "amazon.titan-text-lite-v1", + "amazon.titan-text-premier-v1:0", + # Anthropic Models + "anthropic.claude-v2", + "anthropic.claude-v2:1", + "anthropic.claude-3-sonnet-20240229-v1:0", + "anthropic.claude-3-5-sonnet-20240620-v1:0", + "anthropic.claude-3-5-sonnet-20241022-v2:0", + "anthropic.claude-3-haiku-20240307-v1:0", + "anthropic.claude-3-5-haiku-20241022-v1:0", + "anthropic.claude-3-opus-20240229-v1:0", + "anthropic.claude-instant-v1", + # AI21 Labs Models + "ai21.jamba-instruct-v1:0", + "ai21.j2-mid-v1", + "ai21.j2-ultra-v1", + "ai21.jamba-1-5-large-v1:0", + "ai21.jamba-1-5-mini-v1:0", + 
# Cohere Models + "cohere.command-text-v14", + "cohere.command-light-text-v14", + "cohere.command-r-v1:0", + "cohere.command-r-plus-v1:0", + # Meta Models + "meta.llama2-13b-chat-v1", + "meta.llama2-70b-chat-v1", + "meta.llama3-8b-instruct-v1:0", + "meta.llama3-70b-instruct-v1:0", + "meta.llama3-1-8b-instruct-v1:0", + "meta.llama3-1-70b-instruct-v1:0", + "meta.llama3-1-405b-instruct-v1:0", + "meta.llama3-2-1b-instruct-v1:0", + "meta.llama3-2-3b-instruct-v1:0", + "meta.llama3-2-11b-instruct-v1:0", + "meta.llama3-2-90b-instruct-v1:0", + # Mistral AI Models + "mistral.mistral-7b-instruct-v0:2", + "mistral.mixtral-8x7b-instruct-v0:1", + "mistral.mistral-large-2402-v1:0", + "mistral.mistral-large-2407-v1:0", + "mistral.mistral-small-2402-v1:0", +] + +AWS_EMBEDDING_MODEL_IDS = [ + # Amazon Titan Embedding Models + "amazon.titan-embed-text-v1", + "amazon.titan-embed-text-v2:0", + # Cohere Embedding Models + "cohere.embed-english-v3", + "cohere.embed-multilingual-v3", +] + +AWS_REGIONS = [ + "us-west-2", + "us-west-1", + "us-gov-west-1", + "us-gov-east-1", + "us-east-2", + "us-east-1", + "sa-east-1", + "me-south-1", + "me-central-1", + "il-central-1", + "eu-west-3", + "eu-west-2", + "eu-west-1", + "eu-south-2", + "eu-south-1", + "eu-north-1", + "eu-central-2", + "eu-central-1", + "cn-northwest-1", + "cn-north-1", + "ca-west-1", + "ca-central-1", + "ap-southeast-5", + "ap-southeast-4", + "ap-southeast-3", + "ap-southeast-2", + "ap-southeast-1", + "ap-south-2", + "ap-south-1", + "ap-northeast-3", + "ap-northeast-2", + "ap-northeast-1", + "ap-east-1", + "af-south-1", +] diff --git a/src/backend/base/langflow/base/models/chat_result.py b/src/backend/base/langflow/base/models/chat_result.py new file mode 100644 index 000000000000..288f2410dac5 --- /dev/null +++ b/src/backend/base/langflow/base/models/chat_result.py @@ -0,0 +1,76 @@ +import warnings + +from langchain_core.messages import BaseMessage, HumanMessage, SystemMessage + +from langflow.field_typing.constants import LanguageModel +from langflow.schema.message import Message + + +def build_messages_and_runnable( + input_value: str | Message, system_message: str | None, original_runnable: LanguageModel +) -> tuple[list[BaseMessage], LanguageModel]: + messages: list[BaseMessage] = [] + system_message_added = False + runnable = original_runnable + + if input_value: + if isinstance(input_value, Message): + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + if "prompt" in input_value: + prompt = input_value.load_lc_prompt() + if system_message: + prompt.messages = [ + SystemMessage(content=system_message), + *prompt.messages, # type: ignore[has-type] + ] + system_message_added = True + runnable = prompt | runnable + else: + messages.append(input_value.to_lc_message()) + else: + messages.append(HumanMessage(content=input_value)) + + if system_message and not system_message_added: + messages.insert(0, SystemMessage(content=system_message)) + + return messages, runnable + + +def get_chat_result( + runnable: LanguageModel, + input_value: str | Message, + system_message: str | None = None, + config: dict | None = None, + *, + stream: bool = False, +): + if not input_value and not system_message: + msg = "The message you want to send to the model is empty." 
+ raise ValueError(msg) + + messages, runnable = build_messages_and_runnable( + input_value=input_value, system_message=system_message, original_runnable=runnable + ) + + inputs: list | dict = messages or {} + try: + if config and config.get("output_parser") is not None: + runnable |= config["output_parser"] + + if config: + runnable = runnable.with_config( + { + "run_name": config.get("display_name", ""), + "project_name": config.get("get_project_name", lambda: "")(), + "callbacks": config.get("get_langchain_callbacks", list)(), + } + ) + if stream: + return runnable.stream(inputs) + message = runnable.invoke(inputs) + return message.content if hasattr(message, "content") else message + except Exception as e: + if config and config.get("_get_exception_message") and (message := config["_get_exception_message"](e)): + raise ValueError(message) from e + raise diff --git a/src/backend/base/langflow/base/models/model.py b/src/backend/base/langflow/base/models/model.py index b32540ef6648..ad24f7c25fd8 100644 --- a/src/backend/base/langflow/base/models/model.py +++ b/src/backend/base/langflow/base/models/model.py @@ -1,10 +1,11 @@ +import importlib import json import warnings from abc import abstractmethod -from typing import List, Optional, Union from langchain_core.language_models.llms import LLM from langchain_core.messages import AIMessage, BaseMessage, HumanMessage, SystemMessage +from langchain_core.output_parsers import BaseOutputParser from langflow.base.constants import STREAM_INFO_TEXT from langflow.custom import Component @@ -20,15 +21,18 @@ class LCModelComponent(Component): description: str = "Model Description" trace_type = "llm" - _base_inputs: List[InputTypes] = [ + # Optional output parser to pass to the runnable. Subclasses may allow the user to input an `output_parser` + output_parser: BaseOutputParser | None = None + + _base_inputs: list[InputTypes] = [ MessageInput(name="input_value", display_name="Input"), MessageTextInput( name="system_message", display_name="System Message", info="System message to pass to the model.", - advanced=True, + advanced=False, ), - BoolInput(name="stream", display_name="Stream", info=STREAM_INFO_TEXT, advanced=True), + BoolInput(name="stream", display_name="Stream", info=STREAM_INFO_TEXT, advanced=False), ] outputs = [ @@ -39,31 +43,34 @@ class LCModelComponent(Component): def _get_exception_message(self, e: Exception): return str(e) - def _validate_outputs(self): + def _validate_outputs(self) -> None: # At least these two outputs must be defined required_output_methods = ["text_response", "build_model"] output_names = [output.name for output in self.outputs] for method_name in required_output_methods: if method_name not in output_names: - raise ValueError(f"Output with name '{method_name}' must be defined.") - elif not hasattr(self, method_name): - raise ValueError(f"Method '{method_name}' must be defined.") + msg = f"Output with name '{method_name}' must be defined." + raise ValueError(msg) + if not hasattr(self, method_name): + msg = f"Method '{method_name}' must be defined." 
+ raise ValueError(msg) def text_response(self) -> Message: input_value = self.input_value stream = self.stream system_message = self.system_message output = self.build_model() - result = self.get_chat_result(output, stream, input_value, system_message) + result = self.get_chat_result( + runnable=output, stream=stream, input_value=input_value, system_message=system_message + ) self.status = result return result - def get_result(self, runnable: LLM, stream: bool, input_value: str): - """ - Retrieves the result from the output of a Runnable object. + def get_result(self, *, runnable: LLM, stream: bool, input_value: str): + """Retrieves the result from the output of a Runnable object. Args: - output (Runnable): The output object to retrieve the result from. + runnable (Runnable): The runnable to retrieve the result from. stream (bool): Indicates whether to use streaming or invocation mode. input_value (str): The input value to pass to the output object. @@ -77,15 +84,15 @@ def get_result(self, runnable: LLM, stream: bool, input_value: str): message = runnable.invoke(input_value) result = message.content if hasattr(message, "content") else message self.status = result - return result except Exception as e: if message := self._get_exception_message(e): raise ValueError(message) from e - raise e + raise + + return result def build_status_message(self, message: AIMessage): - """ - Builds a status message from an AIMessage object. + """Builds a status message from an AIMessage object. Args: message (AIMessage): The AIMessage object to build the status message from. @@ -128,21 +135,23 @@ def build_status_message(self, message: AIMessage): } } else: - status_message = f"Response: {content}" # type: ignore + status_message = f"Response: {content}" # type: ignore[assignment] else: - status_message = f"Response: {message.content}" # type: ignore + status_message = f"Response: {message.content}" # type: ignore[assignment] return status_message def get_chat_result( self, + *, runnable: LanguageModel, stream: bool, input_value: str | Message, - system_message: Optional[str] = None, + system_message: str | None = None, ): - messages: list[Union[BaseMessage]] = [] + messages: list[BaseMessage] = [] if not input_value and not system_message: - raise ValueError("The message you want to send to the model is empty.") + msg = "The message you want to send to the model is empty." 
+ raise ValueError(msg) system_message_added = False if input_value: if isinstance(input_value, Message): @@ -151,7 +160,10 @@ def get_chat_result( if "prompt" in input_value: prompt = input_value.load_lc_prompt() if system_message: - prompt.messages = [SystemMessage(content=system_message)] + prompt.messages + prompt.messages = [ + SystemMessage(content=system_message), + *prompt.messages, # type: ignore[has-type] + ] system_message_added = True runnable = prompt | runnable else: @@ -160,10 +172,13 @@ def get_chat_result( messages.append(HumanMessage(content=input_value)) if system_message and not system_message_added: - messages.append(SystemMessage(content=system_message)) - inputs: Union[list, dict] = messages or {} + messages.insert(0, SystemMessage(content=system_message)) + inputs: list | dict = messages or {} try: - runnable = runnable.with_config( # type: ignore + if self.output_parser is not None: + runnable |= self.output_parser + + runnable = runnable.with_config( { "run_name": self.display_name, "project_name": self.get_project_name(), @@ -171,26 +186,84 @@ def get_chat_result( } ) if stream: - return runnable.stream(inputs) # type: ignore + return runnable.stream(inputs) + message = runnable.invoke(inputs) + result = message.content if hasattr(message, "content") else message + if isinstance(message, AIMessage): + status_message = self.build_status_message(message) + self.status = status_message + elif isinstance(result, dict): + result = json.dumps(message, indent=4) + self.status = result else: - message = runnable.invoke(inputs) # type: ignore - result = message.content if hasattr(message, "content") else message - if isinstance(message, AIMessage): - status_message = self.build_status_message(message) - self.status = status_message - elif isinstance(result, dict): - result = json.dumps(message, indent=4) - self.status = result - else: - self.status = result - return result + self.status = result except Exception as e: if message := self._get_exception_message(e): raise ValueError(message) from e - raise e + raise + + return result @abstractmethod def build_model(self) -> LanguageModel: # type: ignore[type-var] + """Implement this method to build the model.""" + + def get_llm(self, provider_name: str, model_info: dict[str, dict[str, str | list[InputTypes]]]) -> LanguageModel: + """Get LLM model based on provider name and inputs. + + Args: + provider_name: Name of the model provider (e.g., "OpenAI", "Azure OpenAI") + inputs: Dictionary of input parameters for the model + model_info: Dictionary of model information + + Returns: + Built LLM model instance """ - Implement this method to build the model. 
+ try: + if provider_name not in [model.get("display_name") for model in model_info.values()]: + msg = f"Unknown model provider: {provider_name}" + raise ValueError(msg) + + # Find the component class name from MODEL_INFO in a single iteration + component_info, module_name = next( + ((info, key) for key, info in model_info.items() if info.get("display_name") == provider_name), + (None, None), + ) + if not component_info: + msg = f"Component information not found for {provider_name}" + raise ValueError(msg) + component_inputs = component_info.get("inputs", []) + # Get the component class from the models module + # Ensure component_inputs is a list of the expected types + if not isinstance(component_inputs, list): + component_inputs = [] + models_module = importlib.import_module("langflow.components.models") + component_class = getattr(models_module, str(module_name)) + component = component_class() + + return self.build_llm_model_from_inputs(component, component_inputs) + except Exception as e: + msg = f"Error building {provider_name} language model" + raise ValueError(msg) from e + + def build_llm_model_from_inputs( + self, component: Component, inputs: list[InputTypes], prefix: str = "" + ) -> LanguageModel: + """Build LLM model from component and inputs. + + Args: + component: LLM component instance + inputs: Dictionary of input parameters for the model + prefix: Prefix for the input names + Returns: + Built LLM model instance """ + # Ensure prefix is a string + prefix = prefix or "" + # Filter inputs to only include valid component input names + input_data = { + str(component_input.name): getattr(self, f"{prefix}{component_input.name}", None) + for component_input in inputs + } + + return component.set(**input_data).build_model() diff --git a/src/backend/base/langflow/base/models/model_input_constants.py b/src/backend/base/langflow/base/models/model_input_constants.py new file mode 100644 index 000000000000..1753c4a4d0bd --- /dev/null +++ b/src/backend/base/langflow/base/models/model_input_constants.py @@ -0,0 +1,79 @@ +from langflow.base.models.model import LCModelComponent +from langflow.components.models.amazon_bedrock import AmazonBedrockComponent +from langflow.components.models.anthropic import AnthropicModelComponent +from langflow.components.models.azure_openai import AzureChatOpenAIComponent +from langflow.components.models.groq import GroqModel +from langflow.components.models.nvidia import NVIDIAModelComponent +from langflow.components.models.openai import OpenAIModelComponent + + +def get_filtered_inputs(component_class): + base_input_names = {field.name for field in LCModelComponent._base_inputs} + return [ + set_advanced_true(input_) if input_.name == "temperature" else input_ + for input_ in component_class().inputs + if input_.name not in base_input_names + ] + + +def set_advanced_true(component_input): + component_input.advanced = True + return component_input + + +def create_input_fields_dict(inputs, prefix): + return {f"{prefix}{input_.name}": input_ for input_ in inputs} + + +OPENAI_INPUTS = get_filtered_inputs(OpenAIModelComponent) +AZURE_INPUTS = get_filtered_inputs(AzureChatOpenAIComponent) +GROQ_INPUTS = get_filtered_inputs(GroqModel) +ANTHROPIC_INPUTS = get_filtered_inputs(AnthropicModelComponent) +NVIDIA_INPUTS = get_filtered_inputs(NVIDIAModelComponent) +AMAZON_BEDROCK_INPUTS = get_filtered_inputs(AmazonBedrockComponent) + +OPENAI_FIELDS = {input_.name: input_ for input_ in OPENAI_INPUTS} + + +AZURE_FIELDS = create_input_fields_dict(AZURE_INPUTS, "") 
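The provider registry continues just below; each entry pairs a `fields` dict, the raw `inputs`, an optional name `prefix` (only Groq uses one), and a component instance. The filter-then-prefix step that produces those field dicts can be condensed into one illustrative helper (not the module's actual API, and it omits the `set_advanced_true` tweak applied to `temperature` above):

```python
def provider_fields(component_class, base_names: set[str], prefix: str = "") -> dict:
    # Sketch of get_filtered_inputs + create_input_fields_dict combined:
    # drop the inputs every LCModelComponent already declares, then key the
    # provider-specific remainder by an optional prefix such as "groq_".
    return {
        f"{prefix}{inp.name}": inp
        for inp in component_class().inputs
        if inp.name not in base_names
    }
```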
+GROQ_FIELDS = create_input_fields_dict(GROQ_INPUTS, "") +ANTHROPIC_FIELDS = create_input_fields_dict(ANTHROPIC_INPUTS, "") +NVIDIA_FIELDS = create_input_fields_dict(NVIDIA_INPUTS, "") +AMAZON_BEDROCK_FIELDS = create_input_fields_dict(AMAZON_BEDROCK_INPUTS, "") + +MODEL_PROVIDERS = ["Azure OpenAI", "OpenAI", "Groq", "Anthropic", "NVIDIA", "Amazon Bedrock"] + +MODEL_PROVIDERS_DICT = { + "Azure OpenAI": { + "fields": AZURE_FIELDS, + "inputs": AZURE_INPUTS, + "prefix": "", + "component_class": AzureChatOpenAIComponent(), + }, + "OpenAI": { + "fields": OPENAI_FIELDS, + "inputs": OPENAI_INPUTS, + "prefix": "", + "component_class": OpenAIModelComponent(), + }, + "Groq": {"fields": GROQ_FIELDS, "inputs": GROQ_INPUTS, "prefix": "groq_", "component_class": GroqModel()}, + "Anthropic": { + "fields": ANTHROPIC_FIELDS, + "inputs": ANTHROPIC_INPUTS, + "prefix": "", + "component_class": AnthropicModelComponent(), + }, + "NVIDIA": { + "fields": NVIDIA_FIELDS, + "inputs": NVIDIA_INPUTS, + "prefix": "", + "component_class": NVIDIAModelComponent(), + }, + "Amazon Bedrock": { + "fields": AMAZON_BEDROCK_FIELDS, + "inputs": AMAZON_BEDROCK_INPUTS, + "prefix": "", + "component_class": AmazonBedrockComponent(), + }, +} +ALL_PROVIDER_FIELDS: list[str] = [field for provider in MODEL_PROVIDERS_DICT.values() for field in provider["fields"]] diff --git a/src/backend/base/langflow/base/models/model_utils.py b/src/backend/base/langflow/base/models/model_utils.py new file mode 100644 index 000000000000..4f33ac69ca04 --- /dev/null +++ b/src/backend/base/langflow/base/models/model_utils.py @@ -0,0 +1,8 @@ +def get_model_name(llm, display_name: str | None = "Custom"): + attributes_to_check = ["model_name", "model", "model_id", "deployment_name"] + + # Use a generator expression with next() to find the first matching attribute + model_name = next((getattr(llm, attr) for attr in attributes_to_check if hasattr(llm, attr)), None) + + # If no matching attribute is found, return the provided display name as a fallback + return model_name if model_name is not None else display_name diff --git a/src/backend/base/langflow/base/prompts/api_utils.py b/src/backend/base/langflow/base/prompts/api_utils.py index fd5ddd9941e2..011135b3415b 100644 --- a/src/backend/base/langflow/base/prompts/api_utils.py +++ b/src/backend/base/langflow/base/prompts/api_utils.py @@ -1,13 +1,12 @@ from collections import defaultdict -from typing import Any, Dict, List, Optional +from typing import Any from fastapi import HTTPException from langchain_core.prompts import PromptTemplate from loguru import logger -from langflow.interface.utils import extract_input_variables_from_prompt from langflow.inputs.inputs import DefaultPromptField - +from langflow.interface.utils import extract_input_variables_from_prompt _INVALID_CHARACTERS = { " ", @@ -63,7 +62,7 @@ def _fix_variable(var, invalid_chars, wrong_variables): new_var, invalid_chars, wrong_variables = _fix_variable(var[1:], invalid_chars, wrong_variables) # Temporarily replace {{ and }} to avoid treating them as invalid - new_var = new_var.replace("{{", "ᴛᴇᴍᴘᴏᴘᴇɴ").replace("}}", "ᴛᴇᴍᴘᴄʟᴏsᴇ") + new_var = new_var.replace("{{", "ᴛᴇᴍᴘᴏᴘᴇɴ").replace("}}", "ᴛᴇᴍᴘᴄʟᴏsᴇ") # noqa: RUF001 # Remove invalid characters for char in new_var: @@ -74,7 +73,7 @@ def _fix_variable(var, invalid_chars, wrong_variables): wrong_variables.append(var) # Restore {{ and }} - new_var = new_var.replace("ᴛᴇᴍᴘᴏᴘᴇɴ", "{{").replace("ᴛᴇᴍᴘᴄʟᴏsᴇ", "}}") + new_var = new_var.replace("ᴛᴇᴍᴘᴏᴘᴇɴ", "{{").replace("ᴛᴇᴍᴘᴄʟᴏsᴇ", "}}") # noqa: RUF001 return new_var,
invalid_chars, wrong_variables @@ -87,7 +86,7 @@ def _check_variable(var, invalid_chars, wrong_variables, empty_variables): return wrong_variables, empty_variables -def _check_for_errors(input_variables, fixed_variables, wrong_variables, empty_variables): +def _check_for_errors(input_variables, fixed_variables, wrong_variables, empty_variables) -> None: if any(var for var in input_variables if var not in fixed_variables): error_message = ( f"Error: Input variables contain invalid characters or formats. \n" @@ -122,20 +121,22 @@ def _check_input_variables(input_variables): return fixed_variables -def validate_prompt(prompt_template: str, silent_errors: bool = False) -> list[str]: +def validate_prompt(prompt_template: str, *, silent_errors: bool = False) -> list[str]: input_variables = extract_input_variables_from_prompt(prompt_template) # Check if there are invalid characters in the input_variables input_variables = _check_input_variables(input_variables) if any(var in _INVALID_NAMES for var in input_variables): - raise ValueError(f"Invalid input variables. None of the variables can be named {', '.join(input_variables)}. ") + msg = f"Invalid input variables. None of the variables can be named {', '.join(input_variables)}. " + raise ValueError(msg) try: PromptTemplate(template=prompt_template, input_variables=input_variables) except Exception as exc: - logger.error(f"Invalid prompt: {exc}") + msg = f"Invalid prompt: {exc}" + logger.exception(msg) if not silent_errors: - raise ValueError(f"Invalid prompt: {exc}") from exc + raise ValueError(msg) from exc return input_variables @@ -145,7 +146,7 @@ def get_old_custom_fields(custom_fields, name): if len(custom_fields) == 1 and name == "": # If there is only one custom field and the name is empty string # then we are dealing with the first prompt request after the node was created - name = list(custom_fields.keys())[0] + name = next(iter(custom_fields.keys())) old_custom_fields = custom_fields[name] if not old_custom_fields: @@ -158,7 +159,7 @@ def get_old_custom_fields(custom_fields, name): return old_custom_fields -def add_new_variables_to_template(input_variables, custom_fields, template, name): +def add_new_variables_to_template(input_variables, custom_fields, template, name) -> None: for variable in input_variables: try: template_field = DefaultPromptField(name=variable, display_name=variable) @@ -173,11 +174,10 @@ def add_new_variables_to_template(input_variables, custom_fields, template, name custom_fields[name].append(variable) except Exception as exc: - logger.exception(exc) raise HTTPException(status_code=500, detail=str(exc)) from exc -def remove_old_variables_from_template(old_custom_fields, input_variables, custom_fields, template, name): +def remove_old_variables_from_template(old_custom_fields, input_variables, custom_fields, template, name) -> None: for variable in old_custom_fields: if variable not in input_variables: try: @@ -189,17 +189,16 @@ def remove_old_variables_from_template(old_custom_fields, input_variables, custo template.pop(variable, None) except Exception as exc: - logger.exception(exc) raise HTTPException(status_code=500, detail=str(exc)) from exc -def update_input_variables_field(input_variables, template): +def update_input_variables_field(input_variables, template) -> None: if "input_variables" in template: template["input_variables"]["value"] = input_variables def process_prompt_template( - template: str, name: str, custom_fields: Optional[Dict[str, List[str]]], frontend_node_template: Dict[str, Any] + 
template: str, name: str, custom_fields: dict[str, list[str]] | None, frontend_node_template: dict[str, Any] ): """Process and validate prompt template, update template and custom fields.""" # Validate the prompt template and extract input variables @@ -221,20 +220,4 @@ def process_prompt_template( # Update the input variables field in the template update_input_variables_field(input_variables, frontend_node_template) - # Optional: cleanup fields based on specific conditions - cleanup_prompt_template_fields(input_variables, frontend_node_template) - return input_variables - - -def cleanup_prompt_template_fields(input_variables, template): - """Removes unused fields if the conditions are met in the template.""" - prompt_fields = [ - key for key, field in template.items() if isinstance(field, dict) and field.get("type") == "prompt" - ] - - if len(prompt_fields) == 1: - for key in list(template.keys()): # Use list to copy keys - field = template.get(key, {}) - if isinstance(field, dict) and field.get("type") != "code" and key not in input_variables + prompt_fields: - del template[key] diff --git a/src/backend/base/langflow/base/prompts/utils.py b/src/backend/base/langflow/base/prompts/utils.py index 948d09954650..07bc692b6ebb 100644 --- a/src/backend/base/langflow/base/prompts/utils.py +++ b/src/backend/base/langflow/base/prompts/utils.py @@ -6,8 +6,7 @@ def data_to_string(record: Data) -> str: - """ - Convert a record to a string. + """Convert a record to a string. Args: record (Data): The record to convert. @@ -19,8 +18,7 @@ def data_to_string(record: Data) -> str: def dict_values_to_string(d: dict) -> dict: - """ - Converts the values of a dictionary to strings. + """Converts the values of a dictionary to strings. Args: d (dict): The dictionary whose values need to be converted. @@ -52,8 +50,7 @@ def dict_values_to_string(d: dict) -> dict: def document_to_string(document: Document) -> str: - """ - Convert a document to a string. + """Convert a document to a string. Args: document (Document): The document to convert. diff --git a/src/backend/base/langflow/base/textsplitters/model.py b/src/backend/base/langflow/base/textsplitters/model.py index 624222fb26e4..40d3b928136f 100644 --- a/src/backend/base/langflow/base/textsplitters/model.py +++ b/src/backend/base/langflow/base/textsplitters/model.py @@ -1,58 +1,28 @@ from abc import abstractmethod -from typing import Any +from langchain_core.documents import BaseDocumentTransformer from langchain_text_splitters import TextSplitter -from langflow.custom import Component -from langflow.io import Output -from langflow.schema import Data -from langflow.utils.util import build_loader_repr_from_data +from langflow.base.document_transformers.model import LCDocumentTransformerComponent -class LCTextSplitterComponent(Component): +class LCTextSplitterComponent(LCDocumentTransformerComponent): trace_type = "text_splitter" - outputs = [ - Output(display_name="Data", name="data", method="split_data"), - ] - def _validate_outputs(self): + def _validate_outputs(self) -> None: required_output_methods = ["text_splitter"] output_names = [output.name for output in self.outputs] for method_name in required_output_methods: if method_name not in output_names: - raise ValueError(f"Output with name '{method_name}' must be defined.") - elif not hasattr(self, method_name): - raise ValueError(f"Method '{method_name}' must be defined.") + msg = f"Output with name '{method_name}' must be defined." 
+ raise ValueError(msg) + if not hasattr(self, method_name): + msg = f"Method '{method_name}' must be defined." + raise ValueError(msg) - def split_data(self) -> list[Data]: - data_input = self.get_data_input() - documents = [] - - if not isinstance(data_input, list): - data_input = [data_input] - - for _input in data_input: - if isinstance(_input, Data): - documents.append(_input.to_lc_document()) - else: - documents.append(_input) - - splitter = self.build_text_splitter() - docs = splitter.split_documents(documents) - data = self.to_data(docs) - self.repr_value = build_loader_repr_from_data(data) - return data - - @abstractmethod - def get_data_input(self) -> Any: - """ - Get the data input. - """ - pass + def build_document_transformer(self) -> BaseDocumentTransformer: + return self.build_text_splitter() @abstractmethod def build_text_splitter(self) -> TextSplitter: - """ - Build the text splitter. - """ - pass + """Build the text splitter.""" diff --git a/src/backend/base/langflow/base/tools/base.py b/src/backend/base/langflow/base/tools/base.py index c9422e268ad0..e442ce4a666a 100644 --- a/src/backend/base/langflow/base/tools/base.py +++ b/src/backend/base/langflow/base/tools/base.py @@ -2,14 +2,14 @@ def build_status_from_tool(tool: Tool): - """ - Builds a status string representation of a tool. + """Builds a status string representation of a tool. Args: tool (Tool): The tool object to build the status for. Returns: - str: The status string representation of the tool, including its name, description, arguments (if any), and args_schema (if any). + str: The status string representation of the tool, including its name, description, arguments (if any), + and args_schema (if any). """ description_repr = repr(tool.description).strip("'") args_str = "\n".join( diff --git a/src/backend/base/langflow/base/tools/component_tool.py b/src/backend/base/langflow/base/tools/component_tool.py index 3f26cde5ef52..6e3855637beb 100644 --- a/src/backend/base/langflow/base/tools/component_tool.py +++ b/src/backend/base/langflow/base/tools/component_tool.py @@ -1,41 +1,240 @@ -from typing import Any +from __future__ import annotations -from langchain_core.tools import BaseTool, ToolException +import asyncio +import re +from typing import TYPE_CHECKING, Literal -from langflow.custom.custom_component.component import Component +from langchain_core.tools import ToolException +from langchain_core.tools.structured import StructuredTool +from loguru import logger +from pydantic import BaseModel +from langflow.base.tools.constants import TOOL_OUTPUT_NAME +from langflow.io.schema import create_input_schema +from langflow.schema.data import Data +from langflow.schema.message import Message -class ComponentTool(BaseTool): - name: str - description: str - component: "Component" +if TYPE_CHECKING: + from collections.abc import Callable - def __init__(self, component: "Component") -> None: - """Initialize the tool.""" - from langflow.io.schema import create_input_schema + from langchain_core.callbacks import Callbacks + from langchain_core.tools import BaseTool - name = component.name or component.__class__.__name__ - description = component.description or "" - args_schema = create_input_schema(component.inputs) - super().__init__(name=name, description=description, args_schema=args_schema, component=component) - # self.component = component + from langflow.custom.custom_component.component import Component + from langflow.events.event_manager import EventManager + from langflow.inputs.inputs import InputTypes + from 
langflow.io import Output + from langflow.schema.content_block import ContentBlock - @property - def args(self) -> dict: - schema = self.get_input_schema() - return schema.schema()["properties"] - def _run( - self, - *args: Any, - **kwargs: Any, - ) -> dict: - """Use the tool.""" +def _get_input_type(_input: InputTypes): + if _input.input_types: + if len(_input.input_types) == 1: + return _input.input_types[0] + return " | ".join(_input.input_types) + return _input.field_type + + +def build_description(component: Component, output: Output) -> str: + if not output.required_inputs: + logger.warning(f"Output {output.name} does not have required inputs defined") + + if output.required_inputs: + args = ", ".join( + sorted( + [ + f"{input_name}: {_get_input_type(component._inputs[input_name])}" + for input_name in output.required_inputs + ] + ) + ) + else: + args = "" + return f"{output.method}({args}) - {component.description}" + + +def send_message_noop( + message: Message, + text: str | None = None, # noqa: ARG001 + background_color: str | None = None, # noqa: ARG001 + text_color: str | None = None, # noqa: ARG001 + icon: str | None = None, # noqa: ARG001 + content_blocks: list[ContentBlock] | None = None, # noqa: ARG001 + format_type: Literal["default", "error", "warning", "info"] = "default", # noqa: ARG001 + id_: str | None = None, # noqa: ARG001 + *, + allow_markdown: bool = True, # noqa: ARG001 +) -> Message: + """No-op implementation of send_message.""" + return message + + +def patch_components_send_message(component: Component): + old_send_message = component.send_message + component.send_message = send_message_noop # type: ignore[method-assign, assignment] + return old_send_message + + +def _patch_send_message_decorator(component, func): + """Decorator to patch the send_message method of a component. + + This is useful when we want to use a component as a tool, but we don't want to + send any messages to the UI. With this only the Component calling the tool + will send messages to the UI. 
+ """ + + async def async_wrapper(*args, **kwargs): + original_send_message = component.send_message + component.send_message = send_message_noop + try: + return await func(*args, **kwargs) + finally: + component.send_message = original_send_message + + def sync_wrapper(*args, **kwargs): + original_send_message = component.send_message + component.send_message = send_message_noop + try: + return func(*args, **kwargs) + finally: + component.send_message = original_send_message + + return async_wrapper if asyncio.iscoroutinefunction(func) else sync_wrapper + + +def _build_output_function(component: Component, output_method: Callable, event_manager: EventManager | None = None): + def output_function(*args, **kwargs): + try: + if event_manager: + event_manager.on_build_start(data={"id": component._id}) + component.set(*args, **kwargs) + result = output_method() + if event_manager: + event_manager.on_build_end(data={"id": component._id}) + except Exception as e: + raise ToolException(e) from e + + if isinstance(result, Message): + return result.get_text() + if isinstance(result, Data): + return result.data + if isinstance(result, BaseModel): + return result.model_dump() + return result + + return _patch_send_message_decorator(component, output_function) + + +def _build_output_async_function( + component: Component, output_method: Callable, event_manager: EventManager | None = None +): + async def output_function(*args, **kwargs): try: - results, _ = self.component(**kwargs) - return results + if event_manager: + event_manager.on_build_start(data={"id": component._id}) + component.set(*args, **kwargs) + result = await output_method() + if event_manager: + event_manager.on_build_end(data={"id": component._id}) except Exception as e: - raise ToolException(f"Error running {self.name}: {e}") + raise ToolException(e) from e + if isinstance(result, Message): + return result.get_text() + if isinstance(result, Data): + return result.data + if isinstance(result, BaseModel): + return result.model_dump() + return result + + return _patch_send_message_decorator(component, output_function) + + +def _format_tool_name(name: str): + # format to '^[a-zA-Z0-9_-]+$'." + # to do that we must remove all non-alphanumeric characters + + return re.sub(r"[^a-zA-Z0-9_-]", "-", name) + + +class ComponentToolkit: + def __init__(self, component: Component): + self.component = component + + def get_tools( + self, tool_name: str | None = None, tool_description: str | None = None, callbacks: Callbacks | None = None + ) -> list[BaseTool]: + tools = [] + for output in self.component.outputs: + if output.name == TOOL_OUTPUT_NAME: + continue + if not output.method: + msg = f"Output {output.name} does not have a method defined" + raise ValueError(msg) -ComponentTool.update_forward_refs() + output_method: Callable = getattr(self.component, output.method) + args_schema = None + tool_mode_inputs = [_input for _input in self.component.inputs if getattr(_input, "tool_mode", False)] + if output.required_inputs: + inputs = [ + self.component._inputs[input_name] + for input_name in output.required_inputs + if getattr(self.component, input_name) is None + ] + # If any of the required inputs are not in tool mode, this means + # that when the tool is called it will raise an error. + # so we should raise an error here. 
+ if not all(getattr(_input, "tool_mode", False) for _input in inputs): + non_tool_mode_inputs = [ + input_.name + for input_ in inputs + if not getattr(input_, "tool_mode", False) and input_.name is not None + ] + non_tool_mode_inputs_str = ", ".join(non_tool_mode_inputs) + msg = ( + f"Output '{output.name}' requires inputs that are not in tool mode. " + f"The following inputs are not in tool mode: {non_tool_mode_inputs_str}. " + "Please ensure all required inputs are set to tool mode." + ) + raise ValueError(msg) + args_schema = create_input_schema(inputs) + elif tool_mode_inputs: + args_schema = create_input_schema(tool_mode_inputs) + else: + args_schema = create_input_schema(self.component.inputs) + name = f"{self.component.name}.{output.method}" + formatted_name = _format_tool_name(name) + event_manager = self.component._event_manager + if asyncio.iscoroutinefunction(output_method): + tools.append( + StructuredTool( + name=formatted_name, + description=build_description(self.component, output), + coroutine=_build_output_async_function(self.component, output_method, event_manager), + args_schema=args_schema, + handle_tool_error=True, + callbacks=callbacks, + ) + ) + else: + tools.append( + StructuredTool( + name=formatted_name, + description=build_description(self.component, output), + func=_build_output_function(self.component, output_method, event_manager), + args_schema=args_schema, + handle_tool_error=True, + callbacks=callbacks, + ) + ) + if len(tools) == 1 and (tool_name or tool_description): + tool = tools[0] + tool.name = tool_name or tool.name + tool.description = tool_description or tool.description + elif tool_name or tool_description: + msg = ( + "When passing a tool name or description, there must be only one tool, " + f"but {len(tools)} tools were found." 
+ ) + raise ValueError(msg) + return tools diff --git a/src/backend/base/langflow/base/tools/constants.py b/src/backend/base/langflow/base/tools/constants.py new file mode 100644 index 000000000000..4da1036af47a --- /dev/null +++ b/src/backend/base/langflow/base/tools/constants.py @@ -0,0 +1,2 @@ +TOOL_OUTPUT_NAME = "component_as_tool" +TOOL_OUTPUT_DISPLAY_NAME = "Toolset" diff --git a/src/backend/base/langflow/base/tools/flow_tool.py b/src/backend/base/langflow/base/tools/flow_tool.py index 03936ac5839b..53f96c27fcbd 100644 --- a/src/backend/base/langflow/base/tools/flow_tool.py +++ b/src/backend/base/langflow/base/tools/flow_tool.py @@ -1,23 +1,30 @@ -from typing import Any, List, Optional, Type +from __future__ import annotations + +from typing import TYPE_CHECKING, Any -from asyncer import syncify -from langchain_core.runnables import RunnableConfig from langchain_core.tools import BaseTool, ToolException -from pydantic.v1 import BaseModel +from loguru import logger +from typing_extensions import override from langflow.base.flow_processing.utils import build_data_from_result_data, format_flow_output_data -from langflow.graph.graph.base import Graph -from langflow.graph.vertex.base import Vertex +from langflow.graph.graph.base import Graph # cannot be a part of TYPE_CHECKING # noqa: TCH001 +from langflow.graph.vertex.base import Vertex # cannot be a part of TYPE_CHECKING # noqa: TCH001 from langflow.helpers.flow import build_schema_from_inputs, get_arg_names, get_flow_inputs, run_flow +from langflow.utils.async_helpers import run_until_complete + +if TYPE_CHECKING: + from langchain_core.runnables import RunnableConfig + from pydantic.v1 import BaseModel class FlowTool(BaseTool): name: str description: str - graph: Optional[Graph] = None - flow_id: Optional[str] = None - user_id: Optional[str] = None - inputs: List["Vertex"] = [] + graph: Graph | None = None + flow_id: str | None = None + user_id: str | None = None + session_id: str | None = None + inputs: list[Vertex] = [] get_final_results_only: bool = True @property @@ -25,14 +32,17 @@ def args(self) -> dict: schema = self.get_input_schema() return schema.schema()["properties"] - def get_input_schema(self, config: Optional[RunnableConfig] = None) -> Type[BaseModel]: + @override + def get_input_schema( # type: ignore[misc] + self, config: RunnableConfig | None = None + ) -> type[BaseModel]: """The tool's input schema.""" if self.args_schema is not None: return self.args_schema - elif self.graph is not None: + if self.graph is not None: return build_schema_from_inputs(self.name, get_flow_inputs(self.graph)) - else: - raise ToolException("No input schema available.") + msg = "No input schema available." + raise ToolException(msg) def _run( self, @@ -42,17 +52,20 @@ def _run( """Use the tool.""" args_names = get_arg_names(self.inputs) if len(args_names) == len(args): - kwargs = {arg["arg_name"]: arg_value for arg, arg_value in zip(args_names, args)} + kwargs = {arg["arg_name"]: arg_value for arg, arg_value in zip(args_names, args, strict=True)} elif len(args_names) != len(args) and len(args) != 0: - raise ToolException( - "Number of arguments does not match the number of inputs. Pass keyword arguments instead." - ) + msg = "Number of arguments does not match the number of inputs. Pass keyword arguments instead." 
+ raise ToolException(msg) tweaks = {arg["component_name"]: kwargs[arg["arg_name"]] for arg in args_names} - run_outputs = syncify(run_flow, raise_sync_error=False)( - tweaks={key: {"input_value": value} for key, value in tweaks.items()}, - flow_id=self.flow_id, - user_id=self.user_id, + run_outputs = run_until_complete( + run_flow( + graph=self.graph, + tweaks={key: {"input_value": value} for key, value in tweaks.items()}, + flow_id=self.flow_id, + user_id=self.user_id, + session_id=self.session_id, + ) ) if not run_outputs: return "No output" @@ -62,31 +75,29 @@ def _run( if run_output is not None: for output in run_output.outputs: if output: - data.extend(build_data_from_result_data(output, get_final_results_only=self.get_final_results_only)) + data.extend(build_data_from_result_data(output)) return format_flow_output_data(data) - def validate_inputs(self, args_names: List[dict[str, str]], args: Any, kwargs: Any): + def validate_inputs(self, args_names: list[dict[str, str]], args: Any, kwargs: Any): """Validate the inputs.""" - if len(args) > 0 and len(args) != len(args_names): - raise ToolException( - "Number of positional arguments does not match the number of inputs. Pass keyword arguments instead." - ) + msg = "Number of positional arguments does not match the number of inputs. Pass keyword arguments instead." + raise ToolException(msg) if len(args) == len(args_names): - kwargs = {arg_name["arg_name"]: arg_value for arg_name, arg_value in zip(args_names, args)} + kwargs = {arg_name["arg_name"]: arg_value for arg_name, arg_value in zip(args_names, args, strict=True)} missing_args = [arg["arg_name"] for arg in args_names if arg["arg_name"] not in kwargs] if missing_args: - raise ToolException(f"Missing required arguments: {', '.join(missing_args)}") + msg = f"Missing required arguments: {', '.join(missing_args)}" + raise ToolException(msg) return kwargs def build_tweaks_dict(self, args, kwargs): args_names = get_arg_names(self.inputs) kwargs = self.validate_inputs(args_names=args_names, args=args, kwargs=kwargs) - tweaks = {arg["component_name"]: kwargs[arg["arg_name"]] for arg in args_names} - return tweaks + return {arg["component_name"]: kwargs[arg["arg_name"]] for arg in args_names} async def _arun( self, @@ -95,10 +106,18 @@ async def _arun( ) -> str: """Use the tool asynchronously.""" tweaks = self.build_tweaks_dict(args, kwargs) + try: + run_id = self.graph.run_id if hasattr(self, "graph") and self.graph else None + except Exception: # noqa: BLE001 + logger.opt(exception=True).warning("Failed to set run_id") + run_id = None run_outputs = await run_flow( tweaks={key: {"input_value": value} for key, value in tweaks.items()}, flow_id=self.flow_id, user_id=self.user_id, + run_id=run_id, + session_id=self.session_id, + graph=self.graph, ) if not run_outputs: return "No output" @@ -108,5 +127,5 @@ async def _arun( if run_output is not None: for output in run_output.outputs: if output: - data.extend(build_data_from_result_data(output, get_final_results_only=self.get_final_results_only)) + data.extend(build_data_from_result_data(output)) return format_flow_output_data(data) diff --git a/src/backend/base/langflow/base/vectorstores/model.py b/src/backend/base/langflow/base/vectorstores/model.py index 415b3021d351..e68f826a9061 100644 --- a/src/backend/base/langflow/base/vectorstores/model.py +++ b/src/backend/base/langflow/base/vectorstores/model.py @@ -1,8 +1,7 @@ -from abc import ABC, ABCMeta, abstractmethod +from abc import abstractmethod from functools import wraps -from typing 
import List, cast +from typing import TYPE_CHECKING -from langchain_core.documents import Document from loguru import logger from langflow.custom import Component @@ -11,10 +10,17 @@ from langflow.io import Output from langflow.schema import Data +if TYPE_CHECKING: + from langchain_core.documents import Document + def check_cached_vector_store(f): - """ - Decorator to check for cached vector stores, and returns them if they exist. + """Decorator to check for cached vector stores, and returns them if they exist. + + Note: caching only occurs during the execution of a component - they do not persist + across separate invocations of the component. This method exists so that components with + multiple output methods share the same vector store during the same invocation of the + component. """ @wraps(f) @@ -26,34 +32,26 @@ def check_cached(self, *args, **kwargs): self._cached_vector_store = result return result - check_cached._is_cached_vector_store_checked = True + check_cached.is_cached_vector_store_checked = True return check_cached -class EnforceCacheDecoratorMeta(ABCMeta): - """ - Enforces that abstract methods marked with @check_cached_vector_store are implemented with the decorator. - """ - - def __init__(cls, name, bases, dct): - for name, value in dct.items(): - if hasattr(value, "__isabstractmethod__"): - cls._check_method_decorator(name, cls) - super().__init__(name, bases, dct) - - @staticmethod - def _check_method_decorator(name, cls): - method = getattr(cls, name) - - # Check if the method has been marked as decorated by `check_cached_vector_store` - if not getattr(method, "_is_cached_vector_store_checked", False): - raise TypeError(f"Concrete implementation of '{name}' must use '@check_cached_vector_store' decorator.") - - -class LCVectorStoreComponent(Component, ABC, metaclass=EnforceCacheDecoratorMeta): +class LCVectorStoreComponent(Component): # Used to ensure a single vector store is built for each run of the flow _cached_vector_store: VectorStore | None = None + def __init_subclass__(cls, **kwargs): + """Enforces the check cached decorator on all subclasses.""" + super().__init_subclass__(**kwargs) + if hasattr(cls, "build_vector_store"): + method = cls.build_vector_store + if not hasattr(method, "is_cached_vector_store_checked"): + msg = ( + f"The method 'build_vector_store' in class {cls.__name__} " + "must be decorated with @check_cached_vector_store" + ) + raise TypeError(msg) + trace_type = "retriever" outputs = [ Output( @@ -66,14 +64,9 @@ class LCVectorStoreComponent(Component, ABC, metaclass=EnforceCacheDecoratorMeta name="search_results", method="search_documents", ), - Output( - display_name="Vector Store", - name="vector_store", - method="cast_vector_store", - ), ] - def _validate_outputs(self): + def _validate_outputs(self) -> None: # At least these three outputs must be defined required_output_methods = [ "build_base_retriever", @@ -83,9 +76,11 @@ def _validate_outputs(self): output_names = [output.name for output in self.outputs] for method_name in required_output_methods: if method_name not in output_names: - raise ValueError(f"Output with name '{method_name}' must be defined.") - elif not hasattr(self, method_name): - raise ValueError(f"Method '{method_name}' must be defined.") + msg = f"Output with name '{method_name}' must be defined." + raise ValueError(msg) + if not hasattr(self, method_name): + msg = f"Method '{method_name}' must be defined." 
+ raise ValueError(msg) def search_with_vector_store( self, @@ -94,14 +89,15 @@ def search_with_vector_store( vector_store: VectorStore, k=10, **kwargs, - ) -> List[Data]: - """ - Search for data in the vector store based on the input value and search type. + ) -> list[Data]: + """Search for data in the vector store based on the input value and search type. Args: input_value (Text): The input value to search for. search_type (str): The type of search to perform. vector_store (VectorStore): The vector store to search in. + k (int): The number of results to return. + **kwargs: Additional keyword arguments to pass to the vector store search method. Returns: List[Data]: A list of data matching the search criteria. @@ -109,23 +105,18 @@ def search_with_vector_store( Raises: ValueError: If invalid inputs are provided. """ - - docs: List[Document] = [] + docs: list[Document] = [] if input_value and isinstance(input_value, str) and hasattr(vector_store, "search"): docs = vector_store.search(query=input_value, search_type=search_type.lower(), k=k, **kwargs) else: - raise ValueError("Invalid inputs provided.") + msg = "Invalid inputs provided." + raise ValueError(msg) data = docs_to_data(docs) self.status = data return data - def cast_vector_store(self) -> VectorStore: - return cast(VectorStore, self.build_vector_store()) - def build_base_retriever(self) -> Retriever: # type: ignore[type-var] - """ - Builds the BaseRetriever object. - """ + """Builds the BaseRetriever object.""" if self._cached_vector_store is not None: vector_store = self._cached_vector_store else: @@ -137,13 +128,11 @@ def build_base_retriever(self) -> Retriever: # type: ignore[type-var] if self.status is None: self.status = "Retriever built successfully." return retriever - else: - raise ValueError(f"Vector Store {vector_store.__class__.__name__} does not have an as_retriever method.") + msg = f"Vector Store {vector_store.__class__.__name__} does not have an as_retriever method." + raise ValueError(msg) - def search_documents(self) -> List[Data]: - """ - Search for documents in the vector store. - """ + def search_documents(self) -> list[Data]: + """Search for documents in the vector store.""" search_query: str = self.search_query if not search_query: self.status = "" @@ -166,15 +155,12 @@ def search_documents(self) -> List[Data]: return search_results def get_retriever_kwargs(self): - """ - Get the retriever kwargs. Implementations can override this method to provide custom retriever kwargs. - """ + """Get the retriever kwargs. Implementations can override this method to provide custom retriever kwargs.""" return {} @abstractmethod @check_cached_vector_store def build_vector_store(self) -> VectorStore: - """ - Builds the Vector Store object. - """ - raise NotImplementedError("build_vector_store method must be implemented.") + """Builds the Vector Store object.""" + msg = "build_vector_store method must be implemented." + raise NotImplementedError(msg) diff --git a/src/backend/base/langflow/base/vectorstores/utils.py b/src/backend/base/langflow/base/vectorstores/utils.py index c2af08702fcc..d64891edf8b8 100644 --- a/src/backend/base/langflow/base/vectorstores/utils.py +++ b/src/backend/base/langflow/base/vectorstores/utils.py @@ -2,8 +2,7 @@ def chroma_collection_to_data(collection_dict: dict): - """ - Converts a collection of chroma vectors into a list of data. + """Converts a collection of chroma vectors into a list of data. Args: collection_dict (dict): A dictionary containing the collection of chroma vectors. 
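
The `@check_cached_vector_store` decorator plus the `__init_subclass__` hook above replace the old metaclass-based enforcement. A self-contained sketch of how the two mechanisms interact, using toy classes rather than the real langflow types:

```python
from functools import wraps


def check_cached_vector_store(f):
    @wraps(f)
    def check_cached(self, *args, **kwargs):
        # Return the vector store cached on this instance, building it only once.
        if self._cached_vector_store is not None:
            return self._cached_vector_store
        result = f(self, *args, **kwargs)
        self._cached_vector_store = result
        return result

    check_cached.is_cached_vector_store_checked = True
    return check_cached


class LCVectorStoreComponent:
    _cached_vector_store = None

    def __init_subclass__(cls, **kwargs):
        # Reject subclasses whose build_vector_store was not wrapped.
        super().__init_subclass__(**kwargs)
        if hasattr(cls, "build_vector_store") and not hasattr(
            cls.build_vector_store, "is_cached_vector_store_checked"
        ):
            msg = f"{cls.__name__}.build_vector_store must use @check_cached_vector_store"
            raise TypeError(msg)


class GoodStore(LCVectorStoreComponent):
    @check_cached_vector_store
    def build_vector_store(self):
        return object()  # stand-in for a real VectorStore


store = GoodStore()
assert store.build_vector_store() is store.build_vector_store()  # cached per instance
# Defining a subclass without the decorator would raise TypeError at class creation.
```
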
@@ -18,7 +17,6 @@ def chroma_collection_to_data(collection_dict: dict): "text": doc, } if ("metadatas" in collection_dict) and collection_dict["metadatas"][i]: - for key, value in collection_dict["metadatas"][i].items(): - data_dict[key] = value + data_dict.update(collection_dict["metadatas"][i].items()) data.append(Data(**data_dict)) return data diff --git a/src/backend/base/langflow/components/Notion/__init__.py b/src/backend/base/langflow/components/Notion/__init__.py new file mode 100644 index 000000000000..bcbab1feb15f --- /dev/null +++ b/src/backend/base/langflow/components/Notion/__init__.py @@ -0,0 +1,19 @@ +from .add_content_to_page import AddContentToPage +from .create_page import NotionPageCreator +from .list_database_properties import NotionDatabaseProperties +from .list_pages import NotionListPages +from .list_users import NotionUserList +from .page_content_viewer import NotionPageContent +from .search import NotionSearch +from .update_page_property import NotionPageUpdate + +__all__ = [ + "AddContentToPage", + "NotionPageCreator", + "NotionDatabaseProperties", + "NotionListPages", + "NotionUserList", + "NotionPageContent", + "NotionSearch", + "NotionPageUpdate", +] diff --git a/src/backend/base/langflow/components/Notion/add_content_to_page.py b/src/backend/base/langflow/components/Notion/add_content_to_page.py new file mode 100644 index 000000000000..935f1e541f44 --- /dev/null +++ b/src/backend/base/langflow/components/Notion/add_content_to_page.py @@ -0,0 +1,269 @@ +import json +from typing import Any + +import requests +from bs4 import BeautifulSoup +from langchain.tools import StructuredTool +from loguru import logger +from markdown import markdown +from pydantic import BaseModel, Field + +from langflow.base.langchain_utilities.model import LCToolComponent +from langflow.field_typing import Tool +from langflow.inputs import MultilineInput, SecretStrInput, StrInput +from langflow.schema import Data + +MIN_ROWS_IN_TABLE = 3 + + +class AddContentToPage(LCToolComponent): + display_name: str = "Add Content to Page " + description: str = "Convert markdown text to Notion blocks and append them to a Notion page." 
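
The Notion components that follow all share one shape: a plain helper function exposed two ways, directly via `run_model` and as an agent tool via `build_tool`. A hedged, minimal sketch of that wiring (assuming `langchain` is installed; the echo tool and its schema are invented for illustration):

```python
from langchain.tools import StructuredTool
from pydantic import BaseModel, Field


class EchoSchema(BaseModel):
    text: str = Field(..., description="Text to echo back.")


def echo(text: str) -> str:
    return f"echo: {text}"


# One function, two surfaces: call echo() directly, or hand the tool to an agent.
echo_tool = StructuredTool.from_function(
    name="echo",
    description="Echo the input text.",
    func=echo,
    args_schema=EchoSchema,
)

print(echo_tool.invoke({"text": "hi"}))  # echo: hi
```
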
+ documentation: str = "https://developers.notion.com/reference/patch-block-children" + icon = "NotionDirectoryLoader" + + inputs = [ + MultilineInput( + name="markdown_text", + display_name="Markdown Text", + info="The markdown text to convert to Notion blocks.", + ), + StrInput( + name="block_id", + display_name="Page/Block ID", + info="The ID of the page/block to add the content.", + ), + SecretStrInput( + name="notion_secret", + display_name="Notion Secret", + info="The Notion integration token.", + required=True, + ), + ] + + class AddContentToPageSchema(BaseModel): + markdown_text: str = Field(..., description="The markdown text to convert to Notion blocks.") + block_id: str = Field(..., description="The ID of the page/block to add the content.") + + def run_model(self) -> Data: + result = self._add_content_to_page(self.markdown_text, self.block_id) + return Data(data=result, text=json.dumps(result)) + + def build_tool(self) -> Tool: + return StructuredTool.from_function( + name="add_content_to_notion_page", + description="Convert markdown text to Notion blocks and append them to a Notion page.", + func=self._add_content_to_page, + args_schema=self.AddContentToPageSchema, + ) + + def _add_content_to_page(self, markdown_text: str, block_id: str) -> dict[str, Any] | str: + try: + html_text = markdown(markdown_text) + soup = BeautifulSoup(html_text, "html.parser") + blocks = self.process_node(soup) + + url = f"https://api.notion.com/v1/blocks/{block_id}/children" + headers = { + "Authorization": f"Bearer {self.notion_secret}", + "Content-Type": "application/json", + "Notion-Version": "2022-06-28", + } + + data = { + "children": blocks, + } + + response = requests.patch(url, headers=headers, json=data, timeout=10) + response.raise_for_status() + + return response.json() + except requests.exceptions.RequestException as e: + error_message = f"Error: Failed to add content to Notion page. {e}" + if hasattr(e, "response") and e.response is not None: + error_message += f" Status code: {e.response.status_code}, Response: {e.response.text}" + return error_message + except Exception as e: # noqa: BLE001 + logger.opt(exception=True).debug("Error adding content to Notion page") + return f"Error: An unexpected error occurred while adding content to Notion page. 
{e}" + + def process_node(self, node): + blocks = [] + if isinstance(node, str): + text = node.strip() + if text: + if text.startswith("#"): + heading_level = text.count("#", 0, 6) + heading_text = text[heading_level:].strip() + if heading_level in range(3): + blocks.append(self.create_block(f"heading_{heading_level + 1}", heading_text)) + else: + blocks.append(self.create_block("paragraph", text)) + elif node.name == "h1": + blocks.append(self.create_block("heading_1", node.get_text(strip=True))) + elif node.name == "h2": + blocks.append(self.create_block("heading_2", node.get_text(strip=True))) + elif node.name == "h3": + blocks.append(self.create_block("heading_3", node.get_text(strip=True))) + elif node.name == "p": + code_node = node.find("code") + if code_node: + code_text = code_node.get_text() + language, code = self.extract_language_and_code(code_text) + blocks.append(self.create_block("code", code, language=language)) + elif self.is_table(str(node)): + blocks.extend(self.process_table(node)) + else: + blocks.append(self.create_block("paragraph", node.get_text(strip=True))) + elif node.name == "ul": + blocks.extend(self.process_list(node, "bulleted_list_item")) + elif node.name == "ol": + blocks.extend(self.process_list(node, "numbered_list_item")) + elif node.name == "blockquote": + blocks.append(self.create_block("quote", node.get_text(strip=True))) + elif node.name == "hr": + blocks.append(self.create_block("divider", "")) + elif node.name == "img": + blocks.append(self.create_block("image", "", image_url=node.get("src"))) + elif node.name == "a": + blocks.append(self.create_block("bookmark", node.get_text(strip=True), link_url=node.get("href"))) + elif node.name == "table": + blocks.extend(self.process_table(node)) + + for child in node.children: + if isinstance(child, str): + continue + blocks.extend(self.process_node(child)) + + return blocks + + def extract_language_and_code(self, code_text): + lines = code_text.split("\n") + language = lines[0].strip() + code = "\n".join(lines[1:]).strip() + return language, code + + def is_code_block(self, text): + return text.startswith("```") + + def extract_code_block(self, text): + lines = text.split("\n") + language = lines[0].strip("`").strip() + code = "\n".join(lines[1:]).strip("`").strip() + return language, code + + def is_table(self, text): + rows = text.split("\n") + if len(rows) < MIN_ROWS_IN_TABLE: + return False + + has_separator = False + for i, row in enumerate(rows): + if "|" in row: + cells = [cell.strip() for cell in row.split("|")] + cells = [cell for cell in cells if cell] # Remove empty cells + if i == 1 and all(set(cell) <= set("-|") for cell in cells): + has_separator = True + elif not cells: + return False + + return has_separator + + def process_list(self, node, list_type): + blocks = [] + for item in node.find_all("li"): + item_text = item.get_text(strip=True) + checked = item_text.startswith("[x]") + is_checklist = item_text.startswith("[ ]") or checked + + if is_checklist: + item_text = item_text.replace("[x]", "").replace("[ ]", "").strip() + blocks.append(self.create_block("to_do", item_text, checked=checked)) + else: + blocks.append(self.create_block(list_type, item_text)) + return blocks + + def process_table(self, node): + blocks = [] + header_row = node.find("thead").find("tr") if node.find("thead") else None + body_rows = node.find("tbody").find_all("tr") if node.find("tbody") else [] + + if header_row or body_rows: + table_width = max( + len(header_row.find_all(["th", "td"])) if header_row else 0, + 
*(len(row.find_all(["th", "td"])) for row in body_rows), + ) + + table_block = self.create_block("table", "", table_width=table_width, has_column_header=bool(header_row)) + blocks.append(table_block) + + if header_row: + header_cells = [cell.get_text(strip=True) for cell in header_row.find_all(["th", "td"])] + header_row_block = self.create_block("table_row", header_cells) + blocks.append(header_row_block) + + for row in body_rows: + cells = [cell.get_text(strip=True) for cell in row.find_all(["th", "td"])] + row_block = self.create_block("table_row", cells) + blocks.append(row_block) + + return blocks + + def create_block(self, block_type: str, content: str, **kwargs) -> dict[str, Any]: + block: dict[str, Any] = { + "object": "block", + "type": block_type, + block_type: {}, + } + + if block_type in { + "paragraph", + "heading_1", + "heading_2", + "heading_3", + "bulleted_list_item", + "numbered_list_item", + "quote", + }: + block[block_type]["rich_text"] = [ + { + "type": "text", + "text": { + "content": content, + }, + } + ] + elif block_type == "to_do": + block[block_type]["rich_text"] = [ + { + "type": "text", + "text": { + "content": content, + }, + } + ] + block[block_type]["checked"] = kwargs.get("checked", False) + elif block_type == "code": + block[block_type]["rich_text"] = [ + { + "type": "text", + "text": { + "content": content, + }, + } + ] + block[block_type]["language"] = kwargs.get("language", "plain text") + elif block_type == "image": + block[block_type] = {"type": "external", "external": {"url": kwargs.get("image_url", "")}} + elif block_type == "divider": + pass + elif block_type == "bookmark": + block[block_type]["url"] = kwargs.get("link_url", "") + elif block_type == "table": + block[block_type]["table_width"] = kwargs.get("table_width", 0) + block[block_type]["has_column_header"] = kwargs.get("has_column_header", False) + block[block_type]["has_row_header"] = kwargs.get("has_row_header", False) + elif block_type == "table_row": + block[block_type]["cells"] = [[{"type": "text", "text": {"content": cell}}] for cell in content] + + return block diff --git a/src/backend/base/langflow/components/Notion/create_page.py b/src/backend/base/langflow/components/Notion/create_page.py new file mode 100644 index 000000000000..d38a989887c8 --- /dev/null +++ b/src/backend/base/langflow/components/Notion/create_page.py @@ -0,0 +1,94 @@ +import json +from typing import Any + +import requests +from langchain.tools import StructuredTool +from pydantic import BaseModel, Field + +from langflow.base.langchain_utilities.model import LCToolComponent +from langflow.field_typing import Tool +from langflow.inputs import MultilineInput, SecretStrInput, StrInput +from langflow.schema import Data + + +class NotionPageCreator(LCToolComponent): + display_name: str = "Create Page " + description: str = "A component for creating Notion pages."
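
For reference, the payloads `create_block` assembles above reduce to small JSON shapes. A sketch of the paragraph and table-row cases, built locally with no API call (note that Notion expects one inner rich-text list per table cell):

```python
import json
from typing import Any


def paragraph_block(content: str) -> dict[str, Any]:
    return {
        "object": "block",
        "type": "paragraph",
        "paragraph": {"rich_text": [{"type": "text", "text": {"content": content}}]},
    }


def table_row_block(cells: list[str]) -> dict[str, Any]:
    # "cells" is a list of cells, each itself a list of rich-text objects.
    return {
        "object": "block",
        "type": "table_row",
        "table_row": {"cells": [[{"type": "text", "text": {"content": cell}}] for cell in cells]},
    }


print(json.dumps(table_row_block(["Name", "Status"]), indent=2))
```
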
+ documentation: str = "https://docs.langflow.org/integrations/notion/page-create" + icon = "NotionDirectoryLoader" + + inputs = [ + StrInput( + name="database_id", + display_name="Database ID", + info="The ID of the Notion database.", + ), + SecretStrInput( + name="notion_secret", + display_name="Notion Secret", + info="The Notion integration token.", + required=True, + ), + MultilineInput( + name="properties_json", + display_name="Properties (JSON)", + info="The properties of the new page as a JSON string.", + ), + ] + + class NotionPageCreatorSchema(BaseModel): + database_id: str = Field(..., description="The ID of the Notion database.") + properties_json: str = Field(..., description="The properties of the new page as a JSON string.") + + def run_model(self) -> Data: + result = self._create_notion_page(self.database_id, self.properties_json) + if isinstance(result, str): + # An error occurred, return it as text + return Data(text=result) + # Success, return the created page data + output = "Created page properties:\n" + for prop_name, prop_value in result.get("properties", {}).items(): + output += f"{prop_name}: {prop_value}\n" + return Data(text=output, data=result) + + def build_tool(self) -> Tool: + return StructuredTool.from_function( + name="create_notion_page", + description="Create a new page in a Notion database. " + "IMPORTANT: Use the tool to check the Database properties for more details before using this tool.", + func=self._create_notion_page, + args_schema=self.NotionPageCreatorSchema, + ) + + def _create_notion_page(self, database_id: str, properties_json: str) -> dict[str, Any] | str: + if not database_id or not properties_json: + return "Invalid input. Please provide 'database_id' and 'properties_json'." + + try: + properties = json.loads(properties_json) + except json.JSONDecodeError as e: + return f"Invalid properties format. Please provide a valid JSON string. Error: {e}" + + headers = { + "Authorization": f"Bearer {self.notion_secret}", + "Content-Type": "application/json", + "Notion-Version": "2022-06-28", + } + + data = { + "parent": {"database_id": database_id}, + "properties": properties, + } + + try: + response = requests.post("https://api.notion.com/v1/pages", headers=headers, json=data, timeout=10) + response.raise_for_status() + return response.json() + except requests.exceptions.RequestException as e: + error_message = f"Failed to create Notion page. Error: {e}" + if hasattr(e, "response") and e.response is not None: + error_message += f" Status code: {e.response.status_code}, Response: {e.response.text}" + return error_message + + def __call__(self, *args, **kwargs): + return self._create_notion_page(*args, **kwargs) diff --git a/src/backend/base/langflow/components/Notion/list_database_properties.py b/src/backend/base/langflow/components/Notion/list_database_properties.py new file mode 100644 index 000000000000..07e9cbb4f0ee --- /dev/null +++ b/src/backend/base/langflow/components/Notion/list_database_properties.py @@ -0,0 +1,68 @@ +import requests +from langchain.tools import StructuredTool +from loguru import logger +from pydantic import BaseModel, Field + +from langflow.base.langchain_utilities.model import LCToolComponent +from langflow.field_typing import Tool +from langflow.inputs import SecretStrInput, StrInput +from langflow.schema import Data + + +class NotionDatabaseProperties(LCToolComponent): + display_name: str = "List Database Properties " + description: str = "Retrieve properties of a Notion database." 
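
Worth noting: these components deliberately return error strings instead of raising, so `run_model` can wrap the failure in `Data` and an agent reading the tool output can see what went wrong. A minimal sketch of that convention (the URL and token here are placeholders, not real credentials):

```python
import requests


def fetch_json(url: str, token: str) -> dict | str:
    headers = {"Authorization": f"Bearer {token}", "Notion-Version": "2022-06-28"}
    try:
        response = requests.get(url, headers=headers, timeout=10)
        response.raise_for_status()
        return response.json()
    except requests.exceptions.RequestException as e:
        # Returned, not raised: the caller (or the LLM agent) sees readable text.
        return f"Error fetching data: {e}"
```
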
+ documentation: str = "https://docs.langflow.org/integrations/notion/list-database-properties" + icon = "NotionDirectoryLoader" + + inputs = [ + StrInput( + name="database_id", + display_name="Database ID", + info="The ID of the Notion database.", + ), + SecretStrInput( + name="notion_secret", + display_name="Notion Secret", + info="The Notion integration token.", + required=True, + ), + ] + + class NotionDatabasePropertiesSchema(BaseModel): + database_id: str = Field(..., description="The ID of the Notion database.") + + def run_model(self) -> Data: + result = self._fetch_database_properties(self.database_id) + if isinstance(result, str): + # An error occurred, return it as text + return Data(text=result) + # Success, return the properties + return Data(text=str(result), data=result) + + def build_tool(self) -> Tool: + return StructuredTool.from_function( + name="notion_database_properties", + description="Retrieve properties of a Notion database. Input should include the database ID.", + func=self._fetch_database_properties, + args_schema=self.NotionDatabasePropertiesSchema, + ) + + def _fetch_database_properties(self, database_id: str) -> dict | str: + url = f"https://api.notion.com/v1/databases/{database_id}" + headers = { + "Authorization": f"Bearer {self.notion_secret}", + "Notion-Version": "2022-06-28", # Use the latest supported version + } + try: + response = requests.get(url, headers=headers, timeout=10) + response.raise_for_status() + data = response.json() + return data.get("properties", {}) + except requests.exceptions.RequestException as e: + return f"Error fetching Notion database properties: {e}" + except ValueError as e: + return f"Error parsing Notion API response: {e}" + except Exception as e: # noqa: BLE001 + logger.opt(exception=True).debug("Error fetching Notion database properties") + return f"An unexpected error occurred: {e}" diff --git a/src/backend/base/langflow/components/Notion/list_pages.py b/src/backend/base/langflow/components/Notion/list_pages.py new file mode 100644 index 000000000000..b46b8cae2d2e --- /dev/null +++ b/src/backend/base/langflow/components/Notion/list_pages.py @@ -0,0 +1,122 @@ +import json +from typing import Any + +import requests +from langchain.tools import StructuredTool +from loguru import logger +from pydantic import BaseModel, Field + +from langflow.base.langchain_utilities.model import LCToolComponent +from langflow.field_typing import Tool +from langflow.inputs import MultilineInput, SecretStrInput, StrInput +from langflow.schema import Data + + +class NotionListPages(LCToolComponent): + display_name: str = "List Pages " + description: str = ( + "Query a Notion database with filtering and sorting. " + "The input should be a JSON string containing the 'filter' and 'sorts' objects. " + "Example input:\n" + '{"filter": {"property": "Status", "select": {"equals": "Done"}}, ' + '"sorts": [{"timestamp": "created_time", "direction": "descending"}]}' + ) + documentation: str = "https://docs.langflow.org/integrations/notion/list-pages" + icon = "NotionDirectoryLoader" + + inputs = [ + SecretStrInput( + name="notion_secret", + display_name="Notion Secret", + info="The Notion integration token.", + required=True, + ), + StrInput( + name="database_id", + display_name="Database ID", + info="The ID of the Notion database to query.", + ), + MultilineInput( + name="query_json", + display_name="Database query (JSON)", + info="A JSON string containing the filters and sorts that will be used for querying the database. 
" + "Leave empty for no filters or sorts.", + ), + ] + + class NotionListPagesSchema(BaseModel): + database_id: str = Field(..., description="The ID of the Notion database to query.") + query_json: str | None = Field( + default="", + description="A JSON string containing the filters and sorts for querying the database. " + "Leave empty for no filters or sorts.", + ) + + def run_model(self) -> list[Data]: + result = self._query_notion_database(self.database_id, self.query_json) + + if isinstance(result, str): + # An error occurred, return it as a single record + return [Data(text=result)] + + records = [] + combined_text = f"Pages found: {len(result)}\n\n" + + for page in result: + page_data = { + "id": page["id"], + "url": page["url"], + "created_time": page["created_time"], + "last_edited_time": page["last_edited_time"], + "properties": page["properties"], + } + + text = ( + f"id: {page['id']}\n" + f"url: {page['url']}\n" + f"created_time: {page['created_time']}\n" + f"last_edited_time: {page['last_edited_time']}\n" + f"properties: {json.dumps(page['properties'], indent=2)}\n\n" + ) + + combined_text += text + records.append(Data(text=text, **page_data)) + + self.status = records + return records + + def build_tool(self) -> Tool: + return StructuredTool.from_function( + name="notion_list_pages", + description=self.description, + func=self._query_notion_database, + args_schema=self.NotionListPagesSchema, + ) + + def _query_notion_database(self, database_id: str, query_json: str | None = None) -> list[dict[str, Any]] | str: + url = f"https://api.notion.com/v1/databases/{database_id}/query" + headers = { + "Authorization": f"Bearer {self.notion_secret}", + "Content-Type": "application/json", + "Notion-Version": "2022-06-28", + } + + query_payload = {} + if query_json and query_json.strip(): + try: + query_payload = json.loads(query_json) + except json.JSONDecodeError as e: + return f"Invalid JSON format for query: {e}" + + try: + response = requests.post(url, headers=headers, json=query_payload, timeout=10) + response.raise_for_status() + results = response.json() + return results["results"] + except requests.exceptions.RequestException as e: + return f"Error querying Notion database: {e}" + except KeyError: + return "Unexpected response format from Notion API" + except Exception as e: # noqa: BLE001 + logger.opt(exception=True).debug("Error querying Notion database") + return f"An unexpected error occurred: {e}" diff --git a/src/backend/base/langflow/components/Notion/list_users.py b/src/backend/base/langflow/components/Notion/list_users.py new file mode 100644 index 000000000000..d99a71e1cdfc --- /dev/null +++ b/src/backend/base/langflow/components/Notion/list_users.py @@ -0,0 +1,77 @@ +import requests +from langchain.tools import StructuredTool +from pydantic import BaseModel + +from langflow.base.langchain_utilities.model import LCToolComponent +from langflow.field_typing import Tool +from langflow.inputs import SecretStrInput +from langflow.schema import Data + + +class NotionUserList(LCToolComponent): + display_name = "List Users " + description = "Retrieve users from Notion." 
+ documentation = "https://docs.langflow.org/integrations/notion/list-users" + icon = "NotionDirectoryLoader" + + inputs = [ + SecretStrInput( + name="notion_secret", + display_name="Notion Secret", + info="The Notion integration token.", + required=True, + ), + ] + + class NotionUserListSchema(BaseModel): + pass + + def run_model(self) -> list[Data]: + users = self._list_users() + records = [] + combined_text = "" + + for user in users: + output = "User:\n" + for key, value in user.items(): + output += f"{key.replace('_', ' ').title()}: {value}\n" + output += "________________________\n" + + combined_text += output + records.append(Data(text=output, data=user)) + + self.status = records + return records + + def build_tool(self) -> Tool: + return StructuredTool.from_function( + name="notion_list_users", + description="Retrieve users from Notion.", + func=self._list_users, + args_schema=self.NotionUserListSchema, + ) + + def _list_users(self) -> list[dict]: + url = "https://api.notion.com/v1/users" + headers = { + "Authorization": f"Bearer {self.notion_secret}", + "Notion-Version": "2022-06-28", + } + + response = requests.get(url, headers=headers, timeout=10) + response.raise_for_status() + + data = response.json() + results = data["results"] + + users = [] + for user in results: + user_data = { + "id": user["id"], + "type": user["type"], + "name": user.get("name", ""), + "avatar_url": user.get("avatar_url", ""), + } + users.append(user_data) + + return users diff --git a/src/backend/base/langflow/components/Notion/page_content_viewer.py b/src/backend/base/langflow/components/Notion/page_content_viewer.py new file mode 100644 index 000000000000..f334ecded68d --- /dev/null +++ b/src/backend/base/langflow/components/Notion/page_content_viewer.py @@ -0,0 +1,93 @@ +import requests +from langchain.tools import StructuredTool +from loguru import logger +from pydantic import BaseModel, Field + +from langflow.base.langchain_utilities.model import LCToolComponent +from langflow.field_typing import Tool +from langflow.inputs import SecretStrInput, StrInput +from langflow.schema import Data + + +class NotionPageContent(LCToolComponent): + display_name = "Page Content Viewer " + description = "Retrieve the content of a Notion page as plain text." 
+ documentation = "https://docs.langflow.org/integrations/notion/page-content-viewer" + icon = "NotionDirectoryLoader" + + inputs = [ + StrInput( + name="page_id", + display_name="Page ID", + info="The ID of the Notion page to retrieve.", + ), + SecretStrInput( + name="notion_secret", + display_name="Notion Secret", + info="The Notion integration token.", + required=True, + ), + ] + + class NotionPageContentSchema(BaseModel): + page_id: str = Field(..., description="The ID of the Notion page to retrieve.") + + def run_model(self) -> Data: + result = self._retrieve_page_content(self.page_id) + if isinstance(result, str) and result.startswith("Error:"): + # An error occurred, return it as text + return Data(text=result) + # Success, return the content + return Data(text=result, data={"content": result}) + + def build_tool(self) -> Tool: + return StructuredTool.from_function( + name="notion_page_content", + description="Retrieve the content of a Notion page as plain text.", + func=self._retrieve_page_content, + args_schema=self.NotionPageContentSchema, + ) + + def _retrieve_page_content(self, page_id: str) -> str: + blocks_url = f"https://api.notion.com/v1/blocks/{page_id}/children?page_size=100" + headers = { + "Authorization": f"Bearer {self.notion_secret}", + "Notion-Version": "2022-06-28", + } + try: + blocks_response = requests.get(blocks_url, headers=headers, timeout=10) + blocks_response.raise_for_status() + blocks_data = blocks_response.json() + return self.parse_blocks(blocks_data.get("results", [])) + except requests.exceptions.RequestException as e: + error_message = f"Error: Failed to retrieve Notion page content. {e}" + if hasattr(e, "response") and e.response is not None: + error_message += f" Status code: {e.response.status_code}, Response: {e.response.text}" + return error_message + except Exception as e: # noqa: BLE001 + logger.opt(exception=True).debug("Error retrieving Notion page content") + return f"Error: An unexpected error occurred while retrieving Notion page content. 
{e}" + + def parse_blocks(self, blocks: list) -> str: + content = "" + for block in blocks: + block_type = block.get("type") + if block_type in {"paragraph", "heading_1", "heading_2", "heading_3", "quote"}: + content += self.parse_rich_text(block[block_type].get("rich_text", [])) + "\n\n" + elif block_type in {"bulleted_list_item", "numbered_list_item"}: + content += self.parse_rich_text(block[block_type].get("rich_text", [])) + "\n" + elif block_type == "to_do": + content += self.parse_rich_text(block["to_do"].get("rich_text", [])) + "\n" + elif block_type == "code": + content += self.parse_rich_text(block["code"].get("rich_text", [])) + "\n\n" + elif block_type == "image": + content += f"[Image: {block['image'].get('external', {}).get('url', 'No URL')}]\n\n" + elif block_type == "divider": + content += "---\n\n" + return content.strip() + + def parse_rich_text(self, rich_text: list) -> str: + return "".join(segment.get("plain_text", "") for segment in rich_text) + + def __call__(self, *args, **kwargs): + return self._retrieve_page_content(*args, **kwargs) diff --git a/src/backend/base/langflow/components/Notion/search.py b/src/backend/base/langflow/components/Notion/search.py new file mode 100644 index 000000000000..7d93a3fa714f --- /dev/null +++ b/src/backend/base/langflow/components/Notion/search.py @@ -0,0 +1,111 @@ +from typing import Any + +import requests +from langchain.tools import StructuredTool +from pydantic import BaseModel, Field + +from langflow.base.langchain_utilities.model import LCToolComponent +from langflow.field_typing import Tool +from langflow.inputs import DropdownInput, SecretStrInput, StrInput +from langflow.schema import Data + + +class NotionSearch(LCToolComponent): + display_name: str = "Search " + description: str = "Searches all pages and databases that have been shared with an integration." 
+ documentation: str = "https://docs.langflow.org/integrations/notion/search" + icon = "NotionDirectoryLoader" + + inputs = [ + SecretStrInput( + name="notion_secret", + display_name="Notion Secret", + info="The Notion integration token.", + required=True, + ), + StrInput( + name="query", + display_name="Search Query", + info="The text that the API compares page and database titles against.", + ), + DropdownInput( + name="filter_value", + display_name="Filter Type", + info="Limits the results to either only pages or only databases.", + options=["page", "database"], + value="page", + ), + DropdownInput( + name="sort_direction", + display_name="Sort Direction", + info="The direction to sort the results.", + options=["ascending", "descending"], + value="descending", + ), + ] + + class NotionSearchSchema(BaseModel): + query: str = Field(..., description="The search query text.") + filter_value: str = Field(default="page", description="Filter type: 'page' or 'database'.") + sort_direction: str = Field(default="descending", description="Sort direction: 'ascending' or 'descending'.") + + def run_model(self) -> list[Data]: + results = self._search_notion(self.query, self.filter_value, self.sort_direction) + records = [] + combined_text = f"Results found: {len(results)}\n\n" + + for result in results: + result_data = { + "id": result["id"], + "type": result["object"], + "last_edited_time": result["last_edited_time"], + } + + if result["object"] == "page": + result_data["title_or_url"] = result["url"] + text = f"id: {result['id']}\ntitle_or_url: {result['url']}\n" + elif result["object"] == "database": + if "title" in result and isinstance(result["title"], list) and len(result["title"]) > 0: + result_data["title_or_url"] = result["title"][0]["plain_text"] + text = f"id: {result['id']}\ntitle_or_url: {result['title'][0]['plain_text']}\n" + else: + result_data["title_or_url"] = "N/A" + text = f"id: {result['id']}\ntitle_or_url: N/A\n" + + text += f"type: {result['object']}\nlast_edited_time: {result['last_edited_time']}\n\n" + combined_text += text + records.append(Data(text=text, data=result_data)) + + self.status = records + return records + + def build_tool(self) -> Tool: + return StructuredTool.from_function( + name="notion_search", + description="Search Notion pages and databases. 
" + "Input should include the search query and optionally filter type and sort direction.", + func=self._search_notion, + args_schema=self.NotionSearchSchema, + ) + + def _search_notion( + self, query: str, filter_value: str = "page", sort_direction: str = "descending" + ) -> list[dict[str, Any]]: + url = "https://api.notion.com/v1/search" + headers = { + "Authorization": f"Bearer {self.notion_secret}", + "Content-Type": "application/json", + "Notion-Version": "2022-06-28", + } + + data = { + "query": query, + "filter": {"value": filter_value, "property": "object"}, + "sort": {"direction": sort_direction, "timestamp": "last_edited_time"}, + } + + response = requests.post(url, headers=headers, json=data, timeout=10) + response.raise_for_status() + + results = response.json() + return results["results"] diff --git a/src/backend/base/langflow/components/Notion/update_page_property.py b/src/backend/base/langflow/components/Notion/update_page_property.py new file mode 100644 index 000000000000..fe777dca9ff0 --- /dev/null +++ b/src/backend/base/langflow/components/Notion/update_page_property.py @@ -0,0 +1,114 @@ +import json +from typing import Any + +import requests +from langchain.tools import StructuredTool +from loguru import logger +from pydantic import BaseModel, Field + +from langflow.base.langchain_utilities.model import LCToolComponent +from langflow.field_typing import Tool +from langflow.inputs import MultilineInput, SecretStrInput, StrInput +from langflow.schema import Data + + +class NotionPageUpdate(LCToolComponent): + display_name: str = "Update Page Property " + description: str = "Update the properties of a Notion page." + documentation: str = "https://docs.langflow.org/integrations/notion/page-update" + icon = "NotionDirectoryLoader" + + inputs = [ + StrInput( + name="page_id", + display_name="Page ID", + info="The ID of the Notion page to update.", + ), + MultilineInput( + name="properties", + display_name="Properties", + info="The properties to update on the page (as a JSON string or a dictionary).", + ), + SecretStrInput( + name="notion_secret", + display_name="Notion Secret", + info="The Notion integration token.", + required=True, + ), + ] + + class NotionPageUpdateSchema(BaseModel): + page_id: str = Field(..., description="The ID of the Notion page to update.") + properties: str | dict[str, Any] = Field( + ..., description="The properties to update on the page (as a JSON string or a dictionary)." + ) + + def run_model(self) -> Data: + result = self._update_notion_page(self.page_id, self.properties) + if isinstance(result, str): + # An error occurred, return it as text + return Data(text=result) + # Success, return the updated page data + output = "Updated page properties:\n" + for prop_name, prop_value in result.get("properties", {}).items(): + output += f"{prop_name}: {prop_value}\n" + return Data(text=output, data=result) + + def build_tool(self) -> Tool: + return StructuredTool.from_function( + name="update_notion_page", + description="Update the properties of a Notion page. 
" + "IMPORTANT: Use the tool to check the Database properties for more details before using this tool.", + func=self._update_notion_page, + args_schema=self.NotionPageUpdateSchema, + ) + + def _update_notion_page(self, page_id: str, properties: str | dict[str, Any]) -> dict[str, Any] | str: + url = f"https://api.notion.com/v1/pages/{page_id}" + headers = { + "Authorization": f"Bearer {self.notion_secret}", + "Content-Type": "application/json", + "Notion-Version": "2022-06-28", # Use the latest supported version + } + + # Parse properties if it's a string + if isinstance(properties, str): + try: + parsed_properties = json.loads(properties) + except json.JSONDecodeError as e: + error_message = f"Invalid JSON format for properties: {e}" + logger.exception(error_message) + return error_message + + else: + parsed_properties = properties + + data = {"properties": parsed_properties} + + try: + logger.info(f"Sending request to Notion API: URL: {url}, Data: {json.dumps(data)}") + response = requests.patch(url, headers=headers, json=data, timeout=10) + response.raise_for_status() + updated_page = response.json() + + logger.info(f"Successfully updated Notion page. Response: {json.dumps(updated_page)}") + except requests.exceptions.HTTPError as e: + error_message = f"HTTP Error occurred: {e}" + if e.response is not None: + error_message += f"\nStatus code: {e.response.status_code}" + error_message += f"\nResponse body: {e.response.text}" + logger.exception(error_message) + return error_message + except requests.exceptions.RequestException as e: + error_message = f"An error occurred while making the request: {e}" + logger.exception(error_message) + return error_message + except Exception as e: # noqa: BLE001 + error_message = f"An unexpected error occurred: {e}" + logger.exception(error_message) + return error_message + + return updated_page + + def __call__(self, *args, **kwargs): + return self._update_notion_page(*args, **kwargs) diff --git a/src/backend/base/langflow/components/__init__.py b/src/backend/base/langflow/components/__init__.py index 7b30052ec6ce..e69de29bb2d1 100644 --- a/src/backend/base/langflow/components/__init__.py +++ b/src/backend/base/langflow/components/__init__.py @@ -1,37 +0,0 @@ -from . import ( - agents, - chains, - documentloaders, - embeddings, - helpers, - inputs, - memories, - models, - outputs, - prompts, - prototypes, - retrievers, - textsplitters, - toolkits, - tools, - vectorstores, -) - -__all__ = [ - "agents", - "chains", - "documentloaders", - "embeddings", - "prompts", - "prototypes", - "models", - "helpers", - "inputs", - "memories", - "outputs", - "retrievers", - "textsplitters", - "toolkits", - "tools", - "vectorstores", -] diff --git a/src/backend/base/langflow/components/agents/CSVAgent.py b/src/backend/base/langflow/components/agents/CSVAgent.py deleted file mode 100644 index f28691ac3ad1..000000000000 --- a/src/backend/base/langflow/components/agents/CSVAgent.py +++ /dev/null @@ -1,27 +0,0 @@ -from langchain_experimental.agents.agent_toolkits.csv.base import create_csv_agent - -from langflow.base.agents.agent import LCAgentComponent -from langflow.field_typing import AgentExecutor -from langflow.inputs import HandleInput, FileInput, DropdownInput - - -class CSVAgentComponent(LCAgentComponent): - display_name = "CSVAgent" - description = "Construct a CSV agent from a CSV and tools." 
- documentation = "https://python.langchain.com/docs/modules/agents/toolkits/csv" - name = "CSVAgent" - - inputs = LCAgentComponent._base_inputs + [ - HandleInput(name="llm", display_name="Language Model", input_types=["LanguageModel"], required=True), - FileInput(name="path", display_name="File Path", file_types=["csv"], required=True), - DropdownInput( - name="agent_type", - display_name="Agent Type", - advanced=True, - options=["zero-shot-react-description", "openai-functions", "openai-tools"], - value="openai-tools", - ), - ] - - def build_agent(self) -> AgentExecutor: - return create_csv_agent(llm=self.llm, path=self.path, agent_type=self.agent_type, **self.get_agent_kwargs()) diff --git a/src/backend/base/langflow/components/agents/CrewAIAgent.py b/src/backend/base/langflow/components/agents/CrewAIAgent.py deleted file mode 100644 index c92ebc27e029..000000000000 --- a/src/backend/base/langflow/components/agents/CrewAIAgent.py +++ /dev/null @@ -1,85 +0,0 @@ -from crewai import Agent # type: ignore - -from langflow.custom import Component -from langflow.io import BoolInput, DictInput, HandleInput, MultilineInput, Output - - -class CrewAIAgentComponent(Component): - display_name = "CrewAI Agent" - description = "Represents an agent of CrewAI." - documentation: str = "https://docs.crewai.com/how-to/LLM-Connections/" - icon = "CrewAI" - - inputs = [ - MultilineInput(name="role", display_name="Role", info="The role of the agent."), - MultilineInput(name="goal", display_name="Goal", info="The objective of the agent."), - MultilineInput(name="backstory", display_name="Backstory", info="The backstory of the agent."), - HandleInput( - name="tools", - display_name="Tools", - input_types=["Tool"], - is_list=True, - info="Tools at agents disposal", - value=[], - ), - HandleInput( - name="llm", - display_name="Language Model", - info="Language model that will run the agent.", - input_types=["LanguageModel"], - ), - BoolInput( - name="memory", - display_name="Memory", - info="Whether the agent should have memory or not", - advanced=True, - value=True, - ), - BoolInput( - name="verbose", - display_name="Verbose", - advanced=True, - value=False, - ), - BoolInput( - name="allow_delegation", - display_name="Allow Delegation", - info="Whether the agent is allowed to delegate tasks to other agents.", - value=True, - ), - BoolInput( - name="allow_code_execution", - display_name="Allow Code Execution", - info="Whether the agent is allowed to execute code.", - value=False, - advanced=True, - ), - DictInput( - name="kwargs", - display_name="kwargs", - info="kwargs of agent.", - is_list=True, - advanced=True, - ), - ] - - outputs = [ - Output(display_name="Agent", name="output", method="build_output"), - ] - - def build_output(self) -> Agent: - kwargs = self.kwargs if self.kwargs else {} - agent = Agent( - role=self.role, - goal=self.goal, - backstory=self.backstory, - llm=self.llm, - verbose=self.verbose, - memory=self.memory, - tools=self.tools if self.tools else [], - allow_delegation=self.allow_delegation, - allow_code_execution=self.allow_code_execution, - **kwargs, - ) - self.status = repr(agent) - return agent diff --git a/src/backend/base/langflow/components/agents/HierarchicalCrew.py b/src/backend/base/langflow/components/agents/HierarchicalCrew.py deleted file mode 100644 index a69a12554f62..000000000000 --- a/src/backend/base/langflow/components/agents/HierarchicalCrew.py +++ /dev/null @@ -1,39 +0,0 @@ -from crewai import Crew, Process # type: ignore - -from langflow.base.agents.crewai.crew import 
BaseCrewComponent -from langflow.io import HandleInput - - -class HierarchicalCrewComponent(BaseCrewComponent): - display_name: str = "Hierarchical Crew" - description: str = ( - "Represents a group of agents, defining how they should collaborate and the tasks they should perform." - ) - documentation: str = "https://docs.crewai.com/how-to/Hierarchical/" - icon = "CrewAI" - - inputs = BaseCrewComponent._base_inputs + [ - HandleInput(name="agents", display_name="Agents", input_types=["Agent"], is_list=True), - HandleInput(name="tasks", display_name="Tasks", input_types=["HierarchicalTask"], is_list=True), - HandleInput(name="manager_llm", display_name="Manager LLM", input_types=["LanguageModel"], required=False), - HandleInput(name="manager_agent", display_name="Manager Agent", input_types=["Agent"], required=False), - ] - - def build_crew(self) -> Crew: - tasks, agents = self.get_tasks_and_agents() - crew = Crew( - agents=agents, - tasks=tasks, - process=Process.hierarchical, - verbose=self.verbose, - memory=self.memory, - cache=self.use_cache, - max_rpm=self.max_rpm, - share_crew=self.share_crew, - function_calling_llm=self.function_calling_llm, - manager_agent=self.manager_agent, - manager_llm=self.manager_llm, - step_callback=self.get_step_callback(), - task_callback=self.get_task_callback(), - ) - return crew diff --git a/src/backend/base/langflow/components/agents/JsonAgent.py b/src/backend/base/langflow/components/agents/JsonAgent.py deleted file mode 100644 index d05cd09bfb39..000000000000 --- a/src/backend/base/langflow/components/agents/JsonAgent.py +++ /dev/null @@ -1,32 +0,0 @@ -from pathlib import Path - -import yaml -from langchain.agents import AgentExecutor -from langchain_community.agent_toolkits import create_json_agent -from langchain_community.agent_toolkits.json.toolkit import JsonToolkit -from langchain_community.tools.json.tool import JsonSpec - -from langflow.base.agents.agent import LCAgentComponent -from langflow.inputs import HandleInput, FileInput - - -class JsonAgentComponent(LCAgentComponent): - display_name = "JsonAgent" - description = "Construct a json agent from an LLM and tools." 
- name = "JsonAgent" - - inputs = LCAgentComponent._base_inputs + [ - HandleInput(name="llm", display_name="Language Model", input_types=["LanguageModel"], required=True), - FileInput(name="path", display_name="File Path", file_types=["json", "yaml", "yml"], required=True), - ] - - def build_agent(self) -> AgentExecutor: - if self.path.endswith("yaml") or self.path.endswith("yml"): - with open(self.path, "r") as file: - yaml_dict = yaml.load(file, Loader=yaml.FullLoader) - spec = JsonSpec(dict_=yaml_dict) - else: - spec = JsonSpec.from_file(Path(self.path)) - toolkit = JsonToolkit(spec=spec) - - return create_json_agent(llm=self.llm, toolkit=toolkit, **self.get_agent_kwargs()) diff --git a/src/backend/base/langflow/components/agents/OpenAIToolsAgent.py b/src/backend/base/langflow/components/agents/OpenAIToolsAgent.py deleted file mode 100644 index 0864463813fd..000000000000 --- a/src/backend/base/langflow/components/agents/OpenAIToolsAgent.py +++ /dev/null @@ -1,51 +0,0 @@ -from typing import Optional, List - -from langchain.agents import create_openai_tools_agent -from langchain_core.prompts import ChatPromptTemplate, PromptTemplate, HumanMessagePromptTemplate - -from langflow.base.agents.agent import LCToolsAgentComponent -from langflow.inputs import MultilineInput -from langflow.inputs.inputs import HandleInput, DataInput -from langflow.schema import Data - - -class OpenAIToolsAgentComponent(LCToolsAgentComponent): - display_name: str = "OpenAI Tools Agent" - description: str = "Agent that uses tools via openai-tools." - icon = "LangChain" - beta = True - name = "OpenAIToolsAgent" - - inputs = LCToolsAgentComponent._base_inputs + [ - HandleInput( - name="llm", - display_name="Language Model", - input_types=["LanguageModel", "ToolEnabledLanguageModel"], - required=True, - ), - MultilineInput( - name="system_prompt", - display_name="System Prompt", - info="System prompt for the agent.", - value="You are a helpful assistant", - ), - MultilineInput( - name="user_prompt", display_name="Prompt", info="This prompt must contain 'input' key.", value="{input}" - ), - DataInput(name="chat_history", display_name="Chat History", is_list=True, advanced=True), - ] - - def get_chat_history_data(self) -> Optional[List[Data]]: - return self.chat_history - - def create_agent_runnable(self): - if "input" not in self.user_prompt: - raise ValueError("Prompt must contain 'input' key.") - messages = [ - ("system", self.system_prompt), - ("placeholder", "{chat_history}"), - HumanMessagePromptTemplate(prompt=PromptTemplate(input_variables=["input"], template=self.user_prompt)), - ("placeholder", "{agent_scratchpad}"), - ] - prompt = ChatPromptTemplate.from_messages(messages) - return create_openai_tools_agent(self.llm, self.tools, prompt) diff --git a/src/backend/base/langflow/components/agents/OpenAPIAgent.py b/src/backend/base/langflow/components/agents/OpenAPIAgent.py deleted file mode 100644 index e1972b9ed393..000000000000 --- a/src/backend/base/langflow/components/agents/OpenAPIAgent.py +++ /dev/null @@ -1,46 +0,0 @@ -from pathlib import Path - -import yaml -from langchain.agents import AgentExecutor -from langchain_community.agent_toolkits import create_openapi_agent -from langchain_community.tools.json.tool import JsonSpec -from langchain_community.agent_toolkits.openapi.toolkit import OpenAPIToolkit - -from langflow.base.agents.agent import LCAgentComponent -from langflow.inputs import BoolInput, HandleInput, FileInput -from langchain_community.utilities.requests import TextRequestsWrapper - - -class 
OpenAPIAgentComponent(LCAgentComponent): - display_name = "OpenAPI Agent" - description = "Agent to interact with OpenAPI API." - name = "OpenAPIAgent" - - inputs = LCAgentComponent._base_inputs + [ - HandleInput(name="llm", display_name="Language Model", input_types=["LanguageModel"], required=True), - FileInput(name="path", display_name="File Path", file_types=["json", "yaml", "yml"], required=True), - BoolInput(name="allow_dangerous_requests", display_name="Allow Dangerous Requests", value=False, required=True), - ] - - def build_agent(self) -> AgentExecutor: - if self.path.endswith("yaml") or self.path.endswith("yml"): - with open(self.path, "r") as file: - yaml_dict = yaml.load(file, Loader=yaml.FullLoader) - spec = JsonSpec(dict_=yaml_dict) - else: - spec = JsonSpec.from_file(Path(self.path)) - requests_wrapper = TextRequestsWrapper() - toolkit = OpenAPIToolkit.from_llm( - llm=self.llm, - json_spec=spec, - requests_wrapper=requests_wrapper, - allow_dangerous_requests=self.allow_dangerous_requests, - ) - - agent_args = self.get_agent_kwargs() - - # This is bit weird - generally other create_*_agent functions have max_iterations in the - # `agent_executor_kwargs`, but openai has this parameter passed directly. - agent_args["max_iterations"] = agent_args["agent_executor_kwargs"]["max_iterations"] - del agent_args["agent_executor_kwargs"]["max_iterations"] - return create_openapi_agent(llm=self.llm, toolkit=toolkit, **agent_args) diff --git a/src/backend/base/langflow/components/agents/SQLAgent.py b/src/backend/base/langflow/components/agents/SQLAgent.py deleted file mode 100644 index 6653fbdfad23..000000000000 --- a/src/backend/base/langflow/components/agents/SQLAgent.py +++ /dev/null @@ -1,33 +0,0 @@ -from langchain.agents import AgentExecutor -from langchain_community.agent_toolkits import SQLDatabaseToolkit -from langchain_community.agent_toolkits.sql.base import create_sql_agent -from langchain_community.utilities import SQLDatabase - -from langflow.base.agents.agent import LCAgentComponent -from langflow.inputs import MessageTextInput, HandleInput - - -class SQLAgentComponent(LCAgentComponent): - display_name = "SQLAgent" - description = "Construct an SQL agent from an LLM and tools." 
- name = "SQLAgent" - - inputs = LCAgentComponent._base_inputs + [ - HandleInput(name="llm", display_name="Language Model", input_types=["LanguageModel"], required=True), - MessageTextInput(name="database_uri", display_name="Database URI", required=True), - HandleInput( - name="extra_tools", - display_name="Extra Tools", - input_types=["Tool", "BaseTool"], - is_list=True, - advanced=True, - ), - ] - - def build_agent(self) -> AgentExecutor: - db = SQLDatabase.from_uri(self.database_uri) - toolkit = SQLDatabaseToolkit(db=db, llm=self.llm) - agent_args = self.get_agent_kwargs() - agent_args["max_iterations"] = agent_args["agent_executor_kwargs"]["max_iterations"] - del agent_args["agent_executor_kwargs"]["max_iterations"] - return create_sql_agent(llm=self.llm, toolkit=toolkit, extra_tools=self.extra_tools or [], **agent_args) diff --git a/src/backend/base/langflow/components/agents/SequentialCrew.py b/src/backend/base/langflow/components/agents/SequentialCrew.py deleted file mode 100644 index 858be7e82340..000000000000 --- a/src/backend/base/langflow/components/agents/SequentialCrew.py +++ /dev/null @@ -1,36 +0,0 @@ -from crewai import Agent, Crew, Process, Task # type: ignore - -from langflow.base.agents.crewai.crew import BaseCrewComponent -from langflow.io import HandleInput -from langflow.schema.message import Message - - -class SequentialCrewComponent(BaseCrewComponent): - display_name: str = "Sequential Crew" - description: str = "Represents a group of agents with tasks that are executed sequentially." - documentation: str = "https://docs.crewai.com/how-to/Sequential/" - icon = "CrewAI" - - inputs = BaseCrewComponent._base_inputs + [ - HandleInput(name="tasks", display_name="Tasks", input_types=["SequentialTask"], is_list=True), - ] - - def get_tasks_and_agents(self) -> tuple[list[Task], list[Agent]]: - return self.tasks, [task.agent for task in self.tasks] - - def build_crew(self) -> Message: - tasks, agents = self.get_tasks_and_agents() - crew = Crew( - agents=agents, - tasks=tasks, - process=Process.sequential, - verbose=self.verbose, - memory=self.memory, - cache=self.use_cache, - max_rpm=self.max_rpm, - share_crew=self.share_crew, - function_calling_llm=self.function_calling_llm, - step_callback=self.get_step_callback(), - task_callback=self.get_task_callback(), - ) - return crew diff --git a/src/backend/base/langflow/components/agents/SequentialTaskAgent.py b/src/backend/base/langflow/components/agents/SequentialTaskAgent.py deleted file mode 100644 index 58062508c44e..000000000000 --- a/src/backend/base/langflow/components/agents/SequentialTaskAgent.py +++ /dev/null @@ -1,141 +0,0 @@ -from crewai import Agent, Task - -from langflow.base.agents.crewai.tasks import SequentialTask -from langflow.custom import Component -from langflow.io import BoolInput, DictInput, HandleInput, MultilineInput, Output - - -class SequentialTaskAgentComponent(Component): - display_name = "Sequential Task Agent" - description = "Creates a CrewAI Task and its associated Agent." 
- documentation = "https://docs.crewai.com/how-to/LLM-Connections/" - icon = "CrewAI" - - inputs = [ - # Agent inputs - MultilineInput(name="role", display_name="Role", info="The role of the agent."), - MultilineInput(name="goal", display_name="Goal", info="The objective of the agent."), - MultilineInput( - name="backstory", - display_name="Backstory", - info="The backstory of the agent.", - ), - HandleInput( - name="tools", - display_name="Tools", - input_types=["Tool"], - is_list=True, - info="Tools at agent's disposal", - value=[], - ), - HandleInput( - name="llm", - display_name="Language Model", - info="Language model that will run the agent.", - input_types=["LanguageModel"], - ), - BoolInput( - name="memory", - display_name="Memory", - info="Whether the agent should have memory or not", - advanced=True, - value=True, - ), - BoolInput( - name="verbose", - display_name="Verbose", - advanced=True, - value=True, - ), - BoolInput( - name="allow_delegation", - display_name="Allow Delegation", - info="Whether the agent is allowed to delegate tasks to other agents.", - value=False, - advanced=True, - ), - BoolInput( - name="allow_code_execution", - display_name="Allow Code Execution", - info="Whether the agent is allowed to execute code.", - value=False, - advanced=True, - ), - DictInput( - name="agent_kwargs", - display_name="Agent kwargs", - info="Additional kwargs for the agent.", - is_list=True, - advanced=True, - ), - # Task inputs - MultilineInput( - name="task_description", - display_name="Task Description", - info="Descriptive text detailing task's purpose and execution.", - ), - MultilineInput( - name="expected_output", - display_name="Expected Task Output", - info="Clear definition of expected task outcome.", - ), - BoolInput( - name="async_execution", - display_name="Async Execution", - value=False, - advanced=True, - info="Boolean flag indicating asynchronous task execution.", - ), - # Chaining input - HandleInput( - name="previous_task", - display_name="Previous Task", - input_types=["SequentialTask"], - info="The previous task in the sequence (for chaining).", - required=False, - ), - ] - - outputs = [ - Output( - display_name="Sequential Task", - name="task_output", - method="build_agent_and_task", - ), - ] - - def build_agent_and_task(self) -> list[SequentialTask]: - # Build the agent - agent_kwargs = self.agent_kwargs or {} - agent = Agent( - role=self.role, - goal=self.goal, - backstory=self.backstory, - llm=self.llm, - verbose=self.verbose, - memory=self.memory, - tools=self.tools if self.tools else [], - allow_delegation=self.allow_delegation, - allow_code_execution=self.allow_code_execution, - **agent_kwargs, - ) - - # Build the task - task = Task( - description=self.task_description, - expected_output=self.expected_output, - agent=agent, - async_execution=self.async_execution, - ) - - # If there's a previous task, create a list of tasks - if self.previous_task: - if isinstance(self.previous_task, list): - tasks = self.previous_task + [task] - else: - tasks = [self.previous_task, task] - else: - tasks = [task] - - self.status = f"Agent: {repr(agent)}\nTask: {repr(task)}" - return tasks diff --git a/src/backend/base/langflow/components/agents/ToolCallingAgent.py b/src/backend/base/langflow/components/agents/ToolCallingAgent.py deleted file mode 100644 index 8ab91d84cab0..000000000000 --- a/src/backend/base/langflow/components/agents/ToolCallingAgent.py +++ /dev/null @@ -1,45 +0,0 @@ -from typing import Optional, List - -from langchain.agents import 
create_tool_calling_agent -from langchain_core.prompts import ChatPromptTemplate, PromptTemplate, HumanMessagePromptTemplate -from langflow.base.agents.agent import LCToolsAgentComponent -from langflow.inputs import MultilineInput -from langflow.inputs.inputs import HandleInput, DataInput -from langflow.schema import Data - - -class ToolCallingAgentComponent(LCToolsAgentComponent): - display_name: str = "Tool Calling Agent" - description: str = "Agent that uses tools" - icon = "LangChain" - beta = True - name = "ToolCallingAgent" - - inputs = LCToolsAgentComponent._base_inputs + [ - HandleInput(name="llm", display_name="Language Model", input_types=["LanguageModel"], required=True), - MultilineInput( - name="system_prompt", - display_name="System Prompt", - info="System prompt for the agent.", - value="You are a helpful assistant", - ), - MultilineInput( - name="user_prompt", display_name="Prompt", info="This prompt must contain 'input' key.", value="{input}" - ), - DataInput(name="chat_history", display_name="Chat History", is_list=True, advanced=True), - ] - - def get_chat_history_data(self) -> Optional[List[Data]]: - return self.chat_history - - def create_agent_runnable(self): - if "input" not in self.user_prompt: - raise ValueError("Prompt must contain 'input' key.") - messages = [ - ("system", self.system_prompt), - ("placeholder", "{chat_history}"), - HumanMessagePromptTemplate(prompt=PromptTemplate(input_variables=["input"], template=self.user_prompt)), - ("placeholder", "{agent_scratchpad}"), - ] - prompt = ChatPromptTemplate.from_messages(messages) - return create_tool_calling_agent(self.llm, self.tools, prompt) diff --git a/src/backend/base/langflow/components/agents/VectorStoreAgent.py b/src/backend/base/langflow/components/agents/VectorStoreAgent.py deleted file mode 100644 index 9a66c08a68bd..000000000000 --- a/src/backend/base/langflow/components/agents/VectorStoreAgent.py +++ /dev/null @@ -1,19 +0,0 @@ -from langchain.agents import AgentExecutor, create_vectorstore_agent -from langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreToolkit -from langflow.base.agents.agent import LCAgentComponent -from langflow.inputs import HandleInput - - -class VectorStoreAgentComponent(LCAgentComponent): - display_name = "VectorStoreAgent" - description = "Construct an agent from a Vector Store." 
- name = "VectorStoreAgent" - - inputs = LCAgentComponent._base_inputs + [ - HandleInput(name="llm", display_name="Language Model", input_types=["LanguageModel"], required=True), - HandleInput(name="vectorstore", display_name="Vector Store", input_types=["VectorStoreInfo"], required=True), - ] - - def build_agent(self) -> AgentExecutor: - toolkit = VectorStoreToolkit(vectorstore_info=self.vectorstore, llm=self.llm) - return create_vectorstore_agent(llm=self.llm, toolkit=toolkit, **self.get_agent_kwargs()) diff --git a/src/backend/base/langflow/components/agents/VectorStoreRouterAgent.py b/src/backend/base/langflow/components/agents/VectorStoreRouterAgent.py deleted file mode 100644 index 727379fc9632..000000000000 --- a/src/backend/base/langflow/components/agents/VectorStoreRouterAgent.py +++ /dev/null @@ -1,27 +0,0 @@ -from langchain.agents import create_vectorstore_router_agent -from langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreRouterToolkit - -from langflow.base.agents.agent import LCAgentComponent -from langchain.agents import AgentExecutor -from langflow.inputs import HandleInput - - -class VectorStoreRouterAgentComponent(LCAgentComponent): - display_name = "VectorStoreRouterAgent" - description = "Construct an agent from a Vector Store Router." - name = "VectorStoreRouterAgent" - - inputs = LCAgentComponent._base_inputs + [ - HandleInput(name="llm", display_name="Language Model", input_types=["LanguageModel"], required=True), - HandleInput( - name="vectorstores", - display_name="Vector Stores", - input_types=["VectorStoreInfo"], - is_list=True, - required=True, - ), - ] - - def build_agent(self) -> AgentExecutor: - toolkit = VectorStoreRouterToolkit(vectorstores=self.vectorstores, llm=self.llm) - return create_vectorstore_router_agent(llm=self.llm, toolkit=toolkit, **self.get_agent_kwargs()) diff --git a/src/backend/base/langflow/components/agents/XMLAgent.py b/src/backend/base/langflow/components/agents/XMLAgent.py deleted file mode 100644 index 98464b627b0b..000000000000 --- a/src/backend/base/langflow/components/agents/XMLAgent.py +++ /dev/null @@ -1,54 +0,0 @@ -from langchain.agents import create_xml_agent -from langchain_core.prompts import ChatPromptTemplate, PromptTemplate, HumanMessagePromptTemplate - -from langflow.base.agents.agent import LCToolsAgentComponent -from langflow.inputs import MultilineInput -from langflow.inputs.inputs import HandleInput - - -class XMLAgentComponent(LCToolsAgentComponent): - display_name: str = "XML Agent" - description: str = "Agent that uses tools formatting instructions as xml to the Language Model." - icon = "LangChain" - beta = True - name = "XMLAgent" - - inputs = LCToolsAgentComponent._base_inputs + [ - HandleInput(name="llm", display_name="Language Model", input_types=["LanguageModel"], required=True), - MultilineInput( - name="user_prompt", - display_name="Prompt", - value=""" -You are a helpful assistant. Help the user answer any questions. - -You have access to the following tools: - -{tools} - -In order to use a tool, you can use and tags. You will then get back a response in the form - -For example, if you have a tool called 'search' that could run a google search, in order to search for the weather in SF you would respond: - -searchweather in SF - -64 degrees - -When you are done, respond with a final answer between . For example: - -The weather in SF is 64 degrees - -Begin! 
- -Question: {input} - -{agent_scratchpad} - """, - ), - ] - - def create_agent_runnable(self): - messages = [ - HumanMessagePromptTemplate(prompt=PromptTemplate(input_variables=["input"], template=self.user_prompt)) - ] - prompt = ChatPromptTemplate.from_messages(messages) - return create_xml_agent(self.llm, self.tools, prompt) diff --git a/src/backend/base/langflow/components/agents/__init__.py b/src/backend/base/langflow/components/agents/__init__.py index 8bd64bab03d9..a765520ffcbc 100644 --- a/src/backend/base/langflow/components/agents/__init__.py +++ b/src/backend/base/langflow/components/agents/__init__.py @@ -1,15 +1,3 @@ -from .CSVAgent import CSVAgentComponent -from .JsonAgent import JsonAgentComponent -from .SQLAgent import SQLAgentComponent -from .VectorStoreAgent import VectorStoreAgentComponent -from .VectorStoreRouterAgent import VectorStoreRouterAgentComponent -from .XMLAgent import XMLAgentComponent +from .agent import AgentComponent -__all__ = [ - "CSVAgentComponent", - "JsonAgentComponent", - "SQLAgentComponent", - "VectorStoreAgentComponent", - "VectorStoreRouterAgentComponent", - "XMLAgentComponent", -] +__all__ = ["AgentComponent"] diff --git a/src/backend/base/langflow/components/agents/agent.py b/src/backend/base/langflow/components/agents/agent.py new file mode 100644 index 000000000000..d10c898150a5 --- /dev/null +++ b/src/backend/base/langflow/components/agents/agent.py @@ -0,0 +1,213 @@ +from langchain_core.tools import StructuredTool + +from langflow.base.agents.agent import LCToolsAgentComponent +from langflow.base.models.model_input_constants import ALL_PROVIDER_FIELDS, MODEL_PROVIDERS_DICT +from langflow.base.models.model_utils import get_model_name +from langflow.components.helpers import CurrentDateComponent +from langflow.components.helpers.memory import MemoryComponent +from langflow.components.langchain_utilities.tool_calling import ToolCallingAgentComponent +from langflow.io import BoolInput, DropdownInput, MultilineInput, Output +from langflow.schema.dotdict import dotdict +from langflow.schema.message import Message + + +def set_advanced_true(component_input): + component_input.advanced = True + return component_input + + +class AgentComponent(ToolCallingAgentComponent): + display_name: str = "Agent" + description: str = "Define the agent's instructions, then enter a task to complete using tools." 
+ icon = "bot" + beta = False + name = "Agent" + + memory_inputs = [set_advanced_true(component_input) for component_input in MemoryComponent().inputs] + + inputs = [ + DropdownInput( + name="agent_llm", + display_name="Model Provider", + info="The provider of the language model that the agent will use to generate responses.", + options=[*sorted(MODEL_PROVIDERS_DICT.keys()), "Custom"], + value="OpenAI", + real_time_refresh=True, + input_types=[], + ), + *MODEL_PROVIDERS_DICT["OpenAI"]["inputs"], + MultilineInput( + name="system_prompt", + display_name="Agent Instructions", + info="System Prompt: Initial instructions and context provided to guide the agent's behavior.", + value="You are a helpful assistant that can use tools to answer questions and perform tasks.", + advanced=False, + ), + *LCToolsAgentComponent._base_inputs, + *memory_inputs, + BoolInput( + name="add_current_date_tool", + display_name="Add tool Current Date", + advanced=True, + info="If true, will add a tool to the agent that returns the current date.", + value=True, + ), + ] + outputs = [Output(name="response", display_name="Response", method="message_response")] + + async def message_response(self) -> Message: + llm_model, display_name = self.get_llm() + self.model_name = get_model_name(llm_model, display_name=display_name) + if llm_model is None: + msg = "No language model selected" + raise ValueError(msg) + self.chat_history = self.get_memory_data() + + if self.add_current_date_tool: + if not isinstance(self.tools, list): # type: ignore[has-type] + self.tools = [] + # Convert CurrentDateComponent to a StructuredTool + current_date_tool = CurrentDateComponent().to_toolkit()[0] + if isinstance(current_date_tool, StructuredTool): + self.tools.append(current_date_tool) + else: + msg = "CurrentDateComponent must be converted to a StructuredTool" + raise ValueError(msg) + + if not self.tools: + msg = "Tools are required to run the agent." 
+ raise ValueError(msg) + self.set( + llm=llm_model, + tools=self.tools, + chat_history=self.chat_history, + input_value=self.input_value, + system_prompt=self.system_prompt, + ) + agent = self.create_agent_runnable() + return await self.run_agent(agent) + + def get_memory_data(self): + memory_kwargs = { + component_input.name: getattr(self, f"{component_input.name}") for component_input in self.memory_inputs + } + + return MemoryComponent().set(**memory_kwargs).retrieve_messages() + + def get_llm(self): + if isinstance(self.agent_llm, str): + try: + provider_info = MODEL_PROVIDERS_DICT.get(self.agent_llm) + if provider_info: + component_class = provider_info.get("component_class") + display_name = component_class.display_name + inputs = provider_info.get("inputs") + prefix = provider_info.get("prefix", "") + return self._build_llm_model(component_class, inputs, prefix), display_name + except Exception as e: + msg = f"Error building {self.agent_llm} language model" + raise ValueError(msg) from e + return self.agent_llm, None + + def _build_llm_model(self, component, inputs, prefix=""): + model_kwargs = {input_.name: getattr(self, f"{prefix}{input_.name}") for input_ in inputs} + return component.set(**model_kwargs).build_model() + + def delete_fields(self, build_config: dotdict, fields: dict | list[str]) -> None: + """Delete specified fields from build_config.""" + for field in fields: + build_config.pop(field, None) + + def update_input_types(self, build_config: dotdict) -> dotdict: + """Update input types for all fields in build_config.""" + for key, value in build_config.items(): + if isinstance(value, dict): + if value.get("input_types") is None: + build_config[key]["input_types"] = [] + elif hasattr(value, "input_types") and value.input_types is None: + value.input_types = [] + return build_config + + def update_build_config(self, build_config: dotdict, field_value: str, field_name: str | None = None) -> dotdict: + # Iterate over all providers in the MODEL_PROVIDERS_DICT + # Existing logic for updating build_config + if field_name == "agent_llm": + provider_info = MODEL_PROVIDERS_DICT.get(field_value) + if provider_info: + component_class = provider_info.get("component_class") + if component_class and hasattr(component_class, "update_build_config"): + # Call the component class's update_build_config method + build_config = component_class.update_build_config(build_config, field_value, field_name) + + provider_configs: dict[str, tuple[dict, list[dict]]] = { + provider: ( + MODEL_PROVIDERS_DICT[provider]["fields"], + [ + MODEL_PROVIDERS_DICT[other_provider]["fields"] + for other_provider in MODEL_PROVIDERS_DICT + if other_provider != provider + ], + ) + for provider in MODEL_PROVIDERS_DICT + } + if field_value in provider_configs: + fields_to_add, fields_to_delete = provider_configs[field_value] + + # Delete fields from other providers + for fields in fields_to_delete: + self.delete_fields(build_config, fields) + + # Add provider-specific fields + if field_value == "OpenAI" and not any(field in build_config for field in fields_to_add): + build_config.update(fields_to_add) + else: + build_config.update(fields_to_add) + # Reset input types for agent_llm + build_config["agent_llm"]["input_types"] = [] + elif field_value == "Custom": + # Delete all provider fields + self.delete_fields(build_config, ALL_PROVIDER_FIELDS) + # Update with custom component + custom_component = DropdownInput( + name="agent_llm", + display_name="Language Model", + options=[*sorted(MODEL_PROVIDERS_DICT.keys()), 
"Custom"], + value="Custom", + real_time_refresh=True, + input_types=["LanguageModel"], + ) + build_config.update({"agent_llm": custom_component.to_dict()}) + # Update input types for all fields + build_config = self.update_input_types(build_config) + + # Validate required keys + default_keys = [ + "code", + "_type", + "agent_llm", + "tools", + "input_value", + "add_current_date_tool", + "system_prompt", + "agent_description", + "max_iterations", + "handle_parsing_errors", + "verbose", + ] + missing_keys = [key for key in default_keys if key not in build_config] + if missing_keys: + msg = f"Missing required keys in build_config: {missing_keys}" + raise ValueError(msg) + if isinstance(self.agent_llm, str) and self.agent_llm in MODEL_PROVIDERS_DICT: + provider_info = MODEL_PROVIDERS_DICT.get(self.agent_llm) + if provider_info: + component_class = provider_info.get("component_class") + prefix = provider_info.get("prefix") + if component_class and hasattr(component_class, "update_build_config"): + # Call each component class's update_build_config method + # remove the prefix from the field_name + if isinstance(field_name, str) and isinstance(prefix, str): + field_name = field_name.replace(prefix, "") + build_config = component_class.update_build_config(build_config, field_value, field_name) + + return build_config diff --git a/src/backend/base/langflow/components/assemblyai/__init__.py b/src/backend/base/langflow/components/assemblyai/__init__.py new file mode 100644 index 000000000000..f192f02dcad1 --- /dev/null +++ b/src/backend/base/langflow/components/assemblyai/__init__.py @@ -0,0 +1,13 @@ +from .assemblyai_get_subtitles import AssemblyAIGetSubtitles +from .assemblyai_lemur import AssemblyAILeMUR +from .assemblyai_list_transcripts import AssemblyAIListTranscripts +from .assemblyai_poll_transcript import AssemblyAITranscriptionJobPoller +from .assemblyai_start_transcript import AssemblyAITranscriptionJobCreator + +__all__ = [ + "AssemblyAIGetSubtitles", + "AssemblyAILeMUR", + "AssemblyAIListTranscripts", + "AssemblyAITranscriptionJobPoller", + "AssemblyAITranscriptionJobCreator", +] diff --git a/src/backend/base/langflow/components/assemblyai/assemblyai_get_subtitles.py b/src/backend/base/langflow/components/assemblyai/assemblyai_get_subtitles.py new file mode 100644 index 000000000000..687461442cdf --- /dev/null +++ b/src/backend/base/langflow/components/assemblyai/assemblyai_get_subtitles.py @@ -0,0 +1,81 @@ +import assemblyai as aai +from loguru import logger + +from langflow.custom import Component +from langflow.io import DataInput, DropdownInput, IntInput, Output, SecretStrInput +from langflow.schema import Data + + +class AssemblyAIGetSubtitles(Component): + display_name = "AssemblyAI Get Subtitles" + description = "Export your transcript in SRT or VTT format for subtitles and closed captions" + documentation = "https://www.assemblyai.com/docs" + icon = "AssemblyAI" + + inputs = [ + SecretStrInput( + name="api_key", + display_name="Assembly API Key", + info="Your AssemblyAI API key. 
You can get one from https://www.assemblyai.com/", + ), + DataInput( + name="transcription_result", + display_name="Transcription Result", + info="The transcription result from AssemblyAI", + ), + DropdownInput( + name="subtitle_format", + display_name="Subtitle Format", + options=["srt", "vtt"], + value="srt", + info="The format of the captions (SRT or VTT)", + ), + IntInput( + name="chars_per_caption", + display_name="Characters per Caption", + info="The maximum number of characters per caption (0 for no limit)", + value=0, + advanced=True, + ), + ] + + outputs = [ + Output(display_name="Subtitles", name="subtitles", method="get_subtitles"), + ] + + def get_subtitles(self) -> Data: + aai.settings.api_key = self.api_key + + # check if it's an error message from the previous step + if self.transcription_result.data.get("error"): + self.status = self.transcription_result.data["error"] + return self.transcription_result + + try: + transcript_id = self.transcription_result.data["id"] + transcript = aai.Transcript.get_by_id(transcript_id) + except Exception as e: # noqa: BLE001 + error = f"Getting transcription failed: {e}" + logger.opt(exception=True).debug(error) + self.status = error + return Data(data={"error": error}) + + if transcript.status == aai.TranscriptStatus.completed: + subtitles = None + chars_per_caption = self.chars_per_caption if self.chars_per_caption > 0 else None + if self.subtitle_format == "srt": + subtitles = transcript.export_subtitles_srt(chars_per_caption) + else: + subtitles = transcript.export_subtitles_vtt(chars_per_caption) + + result = Data( + subtitles=subtitles, + format=self.subtitle_format, + transcript_id=transcript_id, + chars_per_caption=chars_per_caption, + ) + + self.status = result + return result + self.status = transcript.error + return Data(data={"error": transcript.error}) diff --git a/src/backend/base/langflow/components/assemblyai/assemblyai_lemur.py b/src/backend/base/langflow/components/assemblyai/assemblyai_lemur.py new file mode 100644 index 000000000000..a0d357bcb260 --- /dev/null +++ b/src/backend/base/langflow/components/assemblyai/assemblyai_lemur.py @@ -0,0 +1,185 @@ +import assemblyai as aai +from loguru import logger + +from langflow.custom import Component +from langflow.io import DataInput, DropdownInput, FloatInput, IntInput, MultilineInput, Output, SecretStrInput +from langflow.schema import Data + + +class AssemblyAILeMUR(Component): + display_name = "AssemblyAI LeMUR" + description = "Apply Large Language Models to spoken data using the AssemblyAI LeMUR framework" + documentation = "https://www.assemblyai.com/docs/lemur" + icon = "AssemblyAI" + + inputs = [ + SecretStrInput( + name="api_key", + display_name="Assembly API Key", + info="Your AssemblyAI API key. 
You can get one from https://www.assemblyai.com/", + advanced=False, + ), + DataInput( + name="transcription_result", + display_name="Transcription Result", + info="The transcription result from AssemblyAI", + ), + MultilineInput( + name="prompt", + display_name="Input Prompt", + info="The text to prompt the model", + ), + DropdownInput( + name="final_model", + display_name="Final Model", + options=["claude3_5_sonnet", "claude3_opus", "claude3_haiku", "claude3_sonnet"], + value="claude3_5_sonnet", + info="The model that is used for the final prompt after compression is performed", + advanced=True, + ), + FloatInput( + name="temperature", + display_name="Temperature", + advanced=True, + value=0.0, + info="The temperature to use for the model", + ), + IntInput( + name="max_output_size", + display_name=" Max Output Size", + advanced=True, + value=2000, + info="Max output size in tokens, up to 4000", + ), + DropdownInput( + name="endpoint", + display_name="Endpoint", + options=["task", "summary", "question-answer"], + value="task", + info=( + "The LeMUR endpoint to use. For 'summary' and 'question-answer'," + " no prompt input is needed. See https://www.assemblyai.com/docs/api-reference/lemur/ for more info." + ), + advanced=True, + ), + MultilineInput( + name="questions", + display_name="Questions", + info="Comma-separated list of your questions. Only used if Endpoint is 'question-answer'", + advanced=True, + ), + MultilineInput( + name="transcript_ids", + display_name="Transcript IDs", + info=( + "Comma-separated list of transcript IDs. LeMUR can perform actions over multiple transcripts." + " If provided, the Transcription Result is ignored." + ), + advanced=True, + ), + ] + + outputs = [ + Output(display_name="LeMUR Response", name="lemur_response", method="run_lemur"), + ] + + def run_lemur(self) -> Data: + """Use the LeMUR task endpoint to input the LLM prompt.""" + aai.settings.api_key = self.api_key + + if not self.transcription_result and not self.transcript_ids: + error = "Either a Transcription Result or Transcript IDs must be provided" + self.status = error + return Data(data={"error": error}) + if self.transcription_result and self.transcription_result.data.get("error"): + # error message from the previous step + self.status = self.transcription_result.data["error"] + return self.transcription_result + if self.endpoint == "task" and not self.prompt: + self.status = "No prompt specified for the task endpoint" + return Data(data={"error": "No prompt specified"}) + if self.endpoint == "question-answer" and not self.questions: + error = "No Questions were provided for the question-answer endpoint" + self.status = error + return Data(data={"error": error}) + + # Check for valid transcripts + transcript_ids = None + if self.transcription_result and "id" in self.transcription_result.data: + transcript_ids = [self.transcription_result.data["id"]] + elif self.transcript_ids: + transcript_ids = self.transcript_ids.split(",") or [] + transcript_ids = [t.strip() for t in transcript_ids] + + if not transcript_ids: + error = "Either a valid Transcription Result or valid Transcript IDs must be provided" + self.status = error + return Data(data={"error": error}) + + # Get TranscriptGroup and check if there is any error + transcript_group = aai.TranscriptGroup(transcript_ids=transcript_ids) + transcript_group, failures = transcript_group.wait_for_completion(return_failures=True) + if failures: + error = f"Getting transcriptions failed: {failures[0]}" + self.status = error + return 
Data(data={"error": error}) + + for t in transcript_group.transcripts: + if t.status == aai.TranscriptStatus.error: + self.status = t.error + return Data(data={"error": t.error}) + + # Perform LeMUR action + try: + response = self.perform_lemur_action(transcript_group, self.endpoint) + except Exception as e: # noqa: BLE001 + logger.opt(exception=True).debug("Error running LeMUR") + error = f"An Error happened: {e}" + self.status = error + return Data(data={"error": error}) + + result = Data(data=response) + self.status = result + return result + + def perform_lemur_action(self, transcript_group: aai.TranscriptGroup, endpoint: str) -> dict: + logger.info("Endpoint:", endpoint, type(endpoint)) + if endpoint == "task": + result = transcript_group.lemur.task( + prompt=self.prompt, + final_model=self.get_final_model(self.final_model), + temperature=self.temperature, + max_output_size=self.max_output_size, + ) + elif endpoint == "summary": + result = transcript_group.lemur.summarize( + final_model=self.get_final_model(self.final_model), + temperature=self.temperature, + max_output_size=self.max_output_size, + ) + elif endpoint == "question-answer": + questions = self.questions.split(",") + questions = [aai.LemurQuestion(question=q) for q in questions] + result = transcript_group.lemur.question( + questions=questions, + final_model=self.get_final_model(self.final_model), + temperature=self.temperature, + max_output_size=self.max_output_size, + ) + else: + msg = f"Endpoint not supported: {endpoint}" + raise ValueError(msg) + + return result.dict() + + def get_final_model(self, model_name: str) -> aai.LemurModel: + if model_name == "claude3_5_sonnet": + return aai.LemurModel.claude3_5_sonnet + if model_name == "claude3_opus": + return aai.LemurModel.claude3_opus + if model_name == "claude3_haiku": + return aai.LemurModel.claude3_haiku + if model_name == "claude3_sonnet": + return aai.LemurModel.claude3_sonnet + msg = f"Model name not supported: {model_name}" + raise ValueError(msg) diff --git a/src/backend/base/langflow/components/assemblyai/assemblyai_list_transcripts.py b/src/backend/base/langflow/components/assemblyai/assemblyai_list_transcripts.py new file mode 100644 index 000000000000..de96112bb0f1 --- /dev/null +++ b/src/backend/base/langflow/components/assemblyai/assemblyai_list_transcripts.py @@ -0,0 +1,94 @@ +import assemblyai as aai +from loguru import logger + +from langflow.custom import Component +from langflow.io import BoolInput, DropdownInput, IntInput, MessageTextInput, Output, SecretStrInput +from langflow.schema import Data + + +class AssemblyAIListTranscripts(Component): + display_name = "AssemblyAI List Transcripts" + description = "Retrieve a list of transcripts from AssemblyAI with filtering options" + documentation = "https://www.assemblyai.com/docs" + icon = "AssemblyAI" + + inputs = [ + SecretStrInput( + name="api_key", + display_name="Assembly API Key", + info="Your AssemblyAI API key. 
You can get one from https://www.assemblyai.com/", + ), + IntInput( + name="limit", + display_name="Limit", + info="Maximum number of transcripts to retrieve (default: 20, use 0 for all)", + value=20, + ), + DropdownInput( + name="status_filter", + display_name="Status Filter", + options=["all", "queued", "processing", "completed", "error"], + value="all", + info="Filter by transcript status", + advanced=True, + ), + MessageTextInput( + name="created_on", + display_name="Created On", + info="Only get transcripts created on this date (YYYY-MM-DD)", + advanced=True, + ), + BoolInput( + name="throttled_only", + display_name="Throttled Only", + info="Only get throttled transcripts, overrides the status filter", + advanced=True, + ), + ] + + outputs = [ + Output(display_name="Transcript List", name="transcript_list", method="list_transcripts"), + ] + + def list_transcripts(self) -> list[Data]: + aai.settings.api_key = self.api_key + + params = aai.ListTranscriptParameters() + if self.limit: + params.limit = self.limit + if self.status_filter != "all": + params.status = self.status_filter + if self.created_on and self.created_on.text: + params.created_on = self.created_on.text + if self.throttled_only: + params.throttled_only = True + + try: + transcriber = aai.Transcriber() + + def convert_page_to_data_list(page): + return [Data(**t.dict()) for t in page.transcripts] + + if self.limit == 0: + # paginate over all pages + params.limit = 100 + page = transcriber.list_transcripts(params) + transcripts = convert_page_to_data_list(page) + + while page.page_details.before_id_of_prev_url is not None: + params.before_id = page.page_details.before_id_of_prev_url + page = transcriber.list_transcripts(params) + transcripts.extend(convert_page_to_data_list(page)) + else: + # just one page + page = transcriber.list_transcripts(params) + transcripts = convert_page_to_data_list(page) + + except Exception as e: # noqa: BLE001 + logger.opt(exception=True).debug("Error listing transcripts") + error_data = Data(data={"error": f"An error occurred: {e}"}) + self.status = [error_data] + return [error_data] + + self.status = transcripts + return transcripts diff --git a/src/backend/base/langflow/components/assemblyai/assemblyai_poll_transcript.py b/src/backend/base/langflow/components/assemblyai/assemblyai_poll_transcript.py new file mode 100644 index 000000000000..13d01e5daa06 --- /dev/null +++ b/src/backend/base/langflow/components/assemblyai/assemblyai_poll_transcript.py @@ -0,0 +1,70 @@ +import assemblyai as aai +from loguru import logger + +from langflow.custom import Component +from langflow.field_typing.range_spec import RangeSpec +from langflow.io import DataInput, FloatInput, Output, SecretStrInput +from langflow.schema import Data + + +class AssemblyAITranscriptionJobPoller(Component): + display_name = "AssemblyAI Poll Transcript" + description = "Poll for the status of a transcription job using AssemblyAI" + documentation = "https://www.assemblyai.com/docs" + icon = "AssemblyAI" + + inputs = [ + SecretStrInput( + name="api_key", + display_name="Assembly API Key", + info="Your AssemblyAI API key. 
You can get one from https://www.assemblyai.com/", + ), + DataInput( + name="transcript_id", + display_name="Transcript ID", + info="The ID of the transcription job to poll", + ), + FloatInput( + name="polling_interval", + display_name="Polling Interval", + value=3.0, + info="The polling interval in seconds", + advanced=True, + range_spec=RangeSpec(min=3, max=30), + ), + ] + + outputs = [ + Output(display_name="Transcription Result", name="transcription_result", method="poll_transcription_job"), + ] + + def poll_transcription_job(self) -> Data: + """Polls the transcription status until completion and returns the Data.""" + aai.settings.api_key = self.api_key + aai.settings.polling_interval = self.polling_interval + + # check if it's an error message from the previous step + if self.transcript_id.data.get("error"): + self.status = self.transcript_id.data["error"] + return self.transcript_id + + try: + transcript = aai.Transcript.get_by_id(self.transcript_id.data["transcript_id"]) + except Exception as e: # noqa: BLE001 + error = f"Getting transcription failed: {e}" + logger.opt(exception=True).debug(error) + self.status = error + return Data(data={"error": error}) + + if transcript.status == aai.TranscriptStatus.completed: + json_response = transcript.json_response + text = json_response.pop("text", None) + utterances = json_response.pop("utterances", None) + transcript_id = json_response.pop("id", None) + sorted_data = {"text": text, "utterances": utterances, "id": transcript_id} + sorted_data.update(json_response) + data = Data(data=sorted_data) + self.status = data + return data + self.status = transcript.error + return Data(data={"error": transcript.error}) diff --git a/src/backend/base/langflow/components/assemblyai/assemblyai_start_transcript.py b/src/backend/base/langflow/components/assemblyai/assemblyai_start_transcript.py new file mode 100644 index 000000000000..de83a59e3d76 --- /dev/null +++ b/src/backend/base/langflow/components/assemblyai/assemblyai_start_transcript.py @@ -0,0 +1,186 @@ +from pathlib import Path + +import assemblyai as aai +from loguru import logger + +from langflow.custom import Component +from langflow.io import BoolInput, DropdownInput, FileInput, MessageTextInput, Output, SecretStrInput +from langflow.schema import Data + + +class AssemblyAITranscriptionJobCreator(Component): + display_name = "AssemblyAI Start Transcript" + description = "Create a transcription job for an audio file using AssemblyAI with advanced options" + documentation = "https://www.assemblyai.com/docs" + icon = "AssemblyAI" + + inputs = [ + SecretStrInput( + name="api_key", + display_name="Assembly API Key", + info="Your AssemblyAI API key. 
You can get one from https://www.assemblyai.com/", + ), + FileInput( + name="audio_file", + display_name="Audio File", + file_types=[ + "3ga", + "8svx", + "aac", + "ac3", + "aif", + "aiff", + "alac", + "amr", + "ape", + "au", + "dss", + "flac", + "flv", + "m4a", + "m4b", + "m4p", + "m4r", + "mp3", + "mpga", + "ogg", + "oga", + "mogg", + "opus", + "qcp", + "tta", + "voc", + "wav", + "wma", + "wv", + "webm", + "mts", + "m2ts", + "ts", + "mov", + "mp2", + "mp4", + "m4v", + "mxf", + ], + info="The audio file to transcribe", + ), + MessageTextInput( + name="audio_file_url", + display_name="Audio File URL", + info="The URL of the audio file to transcribe (Can be used instead of a File)", + advanced=True, + ), + DropdownInput( + name="speech_model", + display_name="Speech Model", + options=[ + "best", + "nano", + ], + value="best", + info="The speech model to use for the transcription", + advanced=True, + ), + BoolInput( + name="language_detection", + display_name="Automatic Language Detection", + info="Enable automatic language detection", + advanced=True, + ), + MessageTextInput( + name="language_code", + display_name="Language", + info=( + """ + The language of the audio file. Can be set manually if automatic language detection is disabled. + See https://www.assemblyai.com/docs/getting-started/supported-languages """ + "for a list of supported language codes." + ), + advanced=True, + ), + BoolInput( + name="speaker_labels", + display_name="Enable Speaker Labels", + info="Enable speaker diarization", + ), + MessageTextInput( + name="speakers_expected", + display_name="Expected Number of Speakers", + info="Set the expected number of speakers (optional, enter a number)", + advanced=True, + ), + BoolInput( + name="punctuate", + display_name="Punctuate", + info="Enable automatic punctuation", + advanced=True, + value=True, + ), + BoolInput( + name="format_text", + display_name="Format Text", + info="Enable text formatting", + advanced=True, + value=True, + ), + ] + + outputs = [ + Output(display_name="Transcript ID", name="transcript_id", method="create_transcription_job"), + ] + + def create_transcription_job(self) -> Data: + aai.settings.api_key = self.api_key + + # Convert speakers_expected to int if it's not empty + speakers_expected = None + if self.speakers_expected and self.speakers_expected.strip(): + try: + speakers_expected = int(self.speakers_expected) + except ValueError: + self.status = "Error: Expected Number of Speakers must be a valid integer" + return Data(data={"error": "Error: Expected Number of Speakers must be a valid integer"}) + + language_code = self.language_code or None + + config = aai.TranscriptionConfig( + speech_model=self.speech_model, + language_detection=self.language_detection, + language_code=language_code, + speaker_labels=self.speaker_labels, + speakers_expected=speakers_expected, + punctuate=self.punctuate, + format_text=self.format_text, + ) + + audio = None + if self.audio_file: + if self.audio_file_url: + logger.warning("Both an audio file and an audio URL were specified.
The audio URL was ignored.") + + # Check if the file exists + if not Path(self.audio_file).exists(): + self.status = "Error: Audio file not found" + return Data(data={"error": "Error: Audio file not found"}) + audio = self.audio_file + elif self.audio_file_url: + audio = self.audio_file_url + else: + self.status = "Error: Either an audio file or an audio URL must be specified" + return Data(data={"error": "Error: Either an audio file or an audio URL must be specified"}) + + try: + transcript = aai.Transcriber().submit(audio, config=config) + except Exception as e: # noqa: BLE001 + logger.opt(exception=True).debug("Error submitting transcription job") + self.status = f"An error occurred: {e}" + return Data(data={"error": f"An error occurred: {e}"}) + + if transcript.error: + self.status = transcript.error + return Data(data={"error": transcript.error}) + result = Data(data={"transcript_id": transcript.id}) + self.status = result + return result diff --git a/src/backend/base/langflow/components/astra_assistants/__init__.py b/src/backend/base/langflow/components/astra_assistants/__init__.py index 515aff38abff..b24554af88d3 100644 --- a/src/backend/base/langflow/components/astra_assistants/__init__.py +++ b/src/backend/base/langflow/components/astra_assistants/__init__.py @@ -1,13 +1,19 @@ +from .astra_assistant_manager import AstraAssistantManager from .create_assistant import AssistantsCreateAssistant +from .create_thread import AssistantsCreateThread +from .dotenv import Dotenv from .get_assistant import AssistantsGetAssistantName +from .getenvvar import GetEnvVar from .list_assistants import AssistantsListAssistants from .run import AssistantsRun -from .getenvvar import GetEnvVar __all__ = [ "AssistantsCreateAssistant", + "AssistantsCreateThread", "AssistantsGetAssistantName", "AssistantsListAssistants", "AssistantsRun", + "AstraAssistantManager", + "Dotenv", "GetEnvVar", ] diff --git a/src/backend/base/langflow/components/astra_assistants/astra_assistant_manager.py b/src/backend/base/langflow/components/astra_assistants/astra_assistant_manager.py new file mode 100644 index 000000000000..989d0162ecfa --- /dev/null +++ b/src/backend/base/langflow/components/astra_assistants/astra_assistant_manager.py @@ -0,0 +1,135 @@ +import asyncio + +from astra_assistants.astra_assistants_manager import AssistantManager +from loguru import logger + +from langflow.base.astra_assistants.util import ( + get_patched_openai_client, + litellm_model_names, + tool_names, + tools_and_names, +) +from langflow.custom.custom_component.component_with_cache import ComponentWithCache +from langflow.inputs import DropdownInput, MultilineInput, StrInput +from langflow.schema.message import Message +from langflow.template import Output + + +class AstraAssistantManager(ComponentWithCache): + display_name = "Astra Assistant Manager" + description = "Manages Assistant Interactions" + icon = "AstraDB" + + inputs = [ + StrInput( + name="instructions", + display_name="Instructions", + info="Instructions for the assistant, think of these as the system prompt.", + ), + DropdownInput( + name="model_name", + display_name="Model Name", + advanced=False, + options=litellm_model_names, + value="gpt-4o-mini", + ), + DropdownInput( + display_name="Tool", + name="tool", + options=tool_names, + ), + MultilineInput( + name="user_message", + display_name="User Message", + info="User message to pass to the run.", + ), + MultilineInput( + name="input_thread_id", + display_name="Thread ID (optional)", + info="ID of the thread", + ), + 
MultilineInput( + name="input_assistant_id", + display_name="Assistant ID (optional)", + info="ID of the assistant", + ), + MultilineInput( + name="env_set", + display_name="Environment Set", + info="Dummy input to allow chaining with Dotenv Component.", + ), + ] + + outputs = [ + Output(display_name="Assistant Response", name="assistant_response", method="get_assistant_response"), + Output(display_name="Tool output", name="tool_output", method="get_tool_output"), + Output(display_name="Thread Id", name="output_thread_id", method="get_thread_id"), + Output(display_name="Assistant Id", name="output_assistant_id", method="get_assistant_id"), + ] + + def __init__(self, **kwargs) -> None: + super().__init__(**kwargs) + self.lock = asyncio.Lock() + self.initialized: bool = False + self._assistant_response: Message = None # type: ignore[assignment] + self._tool_output: Message = None # type: ignore[assignment] + self._thread_id: Message = None # type: ignore[assignment] + self._assistant_id: Message = None # type: ignore[assignment] + self.client = get_patched_openai_client(self._shared_component_cache) + + async def get_assistant_response(self) -> Message: + await self.initialize() + return self._assistant_response + + async def get_tool_output(self) -> Message: + await self.initialize() + return self._tool_output + + async def get_thread_id(self) -> Message: + await self.initialize() + return self._thread_id + + async def get_assistant_id(self) -> Message: + await self.initialize() + return self._assistant_id + + async def initialize(self) -> None: + async with self.lock: + if not self.initialized: + await self.process_inputs() + self.initialized = True + + async def process_inputs(self) -> None: + logger.info(f"env_set is {self.env_set}") + logger.info(self.tool) + tools = [] + tool_obj = None + if self.tool: + tool_cls = tools_and_names[self.tool] + tool_obj = tool_cls() + tools.append(tool_obj) + assistant_id = None + thread_id = None + if self.input_assistant_id: + assistant_id = self.input_assistant_id + if self.input_thread_id: + thread_id = self.input_thread_id + assistant_manager = AssistantManager( + instructions=self.instructions, + model=self.model_name, + name="managed_assistant", + tools=tools, + client=self.client, + thread_id=thread_id, + assistant_id=assistant_id, + ) + + content = self.user_message + result = await assistant_manager.run_thread(content=content, tool=tool_obj) + self._assistant_response = Message(text=result["text"]) + if "decision" in result: + self._tool_output = Message(text=str(result["decision"].is_complete)) + else: + self._tool_output = Message(text=result["text"]) + self._thread_id = Message(text=assistant_manager.thread.id) + self._assistant_id = Message(text=assistant_manager.assistant.id) diff --git a/src/backend/base/langflow/components/astra_assistants/create_assistant.py b/src/backend/base/langflow/components/astra_assistants/create_assistant.py index 03b87743704a..1798892f5b4e 100644 --- a/src/backend/base/langflow/components/astra_assistants/create_assistant.py +++ b/src/backend/base/langflow/components/astra_assistants/create_assistant.py @@ -1,13 +1,14 @@ -from astra_assistants import patch # type: ignore -from openai import OpenAI -from langflow.custom import Component -from langflow.inputs import StrInput, MultilineInput -from langflow.template import Output +from loguru import logger + +from langflow.base.astra_assistants.util import get_patched_openai_client +from langflow.custom.custom_component.component_with_cache import 
ComponentWithCache +from langflow.inputs import MultilineInput, StrInput from langflow.schema.message import Message +from langflow.template import Output -class AssistantsCreateAssistant(Component): - icon = "bot" +class AssistantsCreateAssistant(ComponentWithCache): + icon = "AstraDB" display_name = "Create Assistant" description = "Creates an Assistant and returns it's id" @@ -28,7 +29,8 @@ class AssistantsCreateAssistant(Component): info=( "Model for the assistant.\n\n" "Environment variables for provider credentials can be set with the Dotenv Component.\n\n" - "Models are supported via LiteLLM, see (https://docs.litellm.ai/docs/providers) for supported model names and env vars." + "Models are supported via LiteLLM, " + "see (https://docs.litellm.ai/docs/providers) for supported model names and env vars." ), # refresh_model=True ), @@ -43,13 +45,15 @@ class AssistantsCreateAssistant(Component): Output(display_name="Assistant ID", name="assistant_id", method="process_inputs"), ] + def __init__(self, **kwargs) -> None: + super().__init__(**kwargs) + self.client = get_patched_openai_client(self._shared_component_cache) + def process_inputs(self) -> Message: - print(f"env_set is {self.env_set}") - client = patch(OpenAI()) - assistant = client.beta.assistants.create( + logger.info(f"env_set is {self.env_set}") + assistant = self.client.beta.assistants.create( name=self.assistant_name, instructions=self.instructions, model=self.model, ) - message = Message(text=assistant.id) - return message + return Message(text=assistant.id) diff --git a/src/backend/base/langflow/components/astra_assistants/create_thread.py b/src/backend/base/langflow/components/astra_assistants/create_thread.py index d225a1c2852f..c41bee0fe0d0 100644 --- a/src/backend/base/langflow/components/astra_assistants/create_thread.py +++ b/src/backend/base/langflow/components/astra_assistants/create_thread.py @@ -1,15 +1,14 @@ -from astra_assistants import patch # type: ignore -from langflow.custom import Component -from openai import OpenAI +from langflow.base.astra_assistants.util import get_patched_openai_client +from langflow.custom.custom_component.component_with_cache import ComponentWithCache from langflow.inputs import MultilineInput from langflow.schema.message import Message from langflow.template import Output -class AssistantsCreateThread(Component): +class AssistantsCreateThread(ComponentWithCache): display_name = "Create Assistant Thread" description = "Creates a thread and returns the thread id" - + icon = "AstraDB" inputs = [ MultilineInput( name="env_set", @@ -22,11 +21,12 @@ class AssistantsCreateThread(Component): Output(display_name="Thread ID", name="thread_id", method="process_inputs"), ] - def process_inputs(self) -> Message: - client = patch(OpenAI()) + def __init__(self, **kwargs) -> None: + super().__init__(**kwargs) + self.client = get_patched_openai_client(self._shared_component_cache) - thread = client.beta.threads.create() + def process_inputs(self) -> Message: + thread = self.client.beta.threads.create() thread_id = thread.id - message = Message(text=thread_id) - return message + return Message(text=thread_id) diff --git a/src/backend/base/langflow/components/astra_assistants/dotenv.py b/src/backend/base/langflow/components/astra_assistants/dotenv.py index 7df4c2915139..83ba991ec1aa 100644 --- a/src/backend/base/langflow/components/astra_assistants/dotenv.py +++ b/src/backend/base/langflow/components/astra_assistants/dotenv.py @@ -1,5 +1,7 @@ import io + from dotenv import load_dotenv + from 
langflow.custom import Component from langflow.inputs import MultilineSecretInput from langflow.schema.message import Message @@ -9,12 +11,13 @@ class Dotenv(Component): display_name = "Dotenv" description = "Load .env file into env vars" - + icon = "AstraDB" inputs = [ MultilineSecretInput( name="dotenv_file_content", display_name="Dotenv file content", - info="Paste the content of your .env file directly, since contents are sensitive, using a Global variable set as 'password' is recommended", + info="Paste the content of your .env file directly, since contents are sensitive, " + "using a Global variable set as 'password' is recommended", ) ] diff --git a/src/backend/base/langflow/components/astra_assistants/get_assistant.py b/src/backend/base/langflow/components/astra_assistants/get_assistant.py index cae81515de56..8d6f05ed5d27 100644 --- a/src/backend/base/langflow/components/astra_assistants/get_assistant.py +++ b/src/backend/base/langflow/components/astra_assistants/get_assistant.py @@ -1,16 +1,14 @@ -from astra_assistants import patch # type: ignore -from langflow.custom import Component -from openai import OpenAI - -from langflow.inputs import StrInput, MultilineInput +from langflow.base.astra_assistants.util import get_patched_openai_client +from langflow.custom.custom_component.component_with_cache import ComponentWithCache +from langflow.inputs import MultilineInput, StrInput from langflow.schema.message import Message from langflow.template import Output -class AssistantsGetAssistantName(Component): +class AssistantsGetAssistantName(ComponentWithCache): display_name = "Get Assistant name" description = "Assistant by id" - + icon = "AstraDB" inputs = [ StrInput( name="assistant_id", @@ -28,10 +26,12 @@ class AssistantsGetAssistantName(Component): Output(display_name="Assistant Name", name="assistant_name", method="process_inputs"), ] + def __init__(self, **kwargs) -> None: + super().__init__(**kwargs) + self.client = get_patched_openai_client(self._shared_component_cache) + def process_inputs(self) -> Message: - patch(OpenAI()) assistant = self.client.beta.assistants.retrieve( assistant_id=self.assistant_id, ) - message = Message(text=assistant.name) - return message + return Message(text=assistant.name) diff --git a/src/backend/base/langflow/components/astra_assistants/getenvvar.py b/src/backend/base/langflow/components/astra_assistants/getenvvar.py index c46085936025..0076cd65193f 100644 --- a/src/backend/base/langflow/components/astra_assistants/getenvvar.py +++ b/src/backend/base/langflow/components/astra_assistants/getenvvar.py @@ -1,4 +1,5 @@ import os + from langflow.custom import Component from langflow.inputs import StrInput from langflow.schema.message import Message @@ -8,7 +9,7 @@ class GetEnvVar(Component): display_name = "Get env var" description = "Get env var" - icon = "custom_components" + icon = "AstraDB" inputs = [ StrInput( @@ -24,7 +25,6 @@ class GetEnvVar(Component): def process_inputs(self) -> Message: if self.env_var_name not in os.environ: - raise Exception(f"Environment variable {self.env_var_name} not set") - else: - message = Message(text=os.environ[self.env_var_name]) - return message + msg = f"Environment variable {self.env_var_name} not set" + raise ValueError(msg) + return Message(text=os.environ[self.env_var_name]) diff --git a/src/backend/base/langflow/components/astra_assistants/list_assistants.py b/src/backend/base/langflow/components/astra_assistants/list_assistants.py index e70417c5b7c9..40db4db8046d 100644 --- 
a/src/backend/base/langflow/components/astra_assistants/list_assistants.py +++ b/src/backend/base/langflow/components/astra_assistants/list_assistants.py @@ -1,25 +1,25 @@ -from astra_assistants import patch # type: ignore -from openai import OpenAI - -from langflow.custom import Component +from langflow.base.astra_assistants.util import get_patched_openai_client +from langflow.custom.custom_component.component_with_cache import ComponentWithCache from langflow.schema.message import Message from langflow.template.field.base import Output -class AssistantsListAssistants(Component): +class AssistantsListAssistants(ComponentWithCache): display_name = "List Assistants" description = "Returns a list of assistant id's" - + icon = "AstraDB" outputs = [ Output(display_name="Assistants", name="assistants", method="process_inputs"), ] + def __init__(self, **kwargs) -> None: + super().__init__(**kwargs) + self.client = get_patched_openai_client(self._shared_component_cache) + def process_inputs(self) -> Message: - patch(OpenAI()) - assistants = self.client.beta.assistants.list() + assistants = self.client.beta.assistants.list().data id_list = [assistant.id for assistant in assistants] - message = Message( + return Message( # get text from list text="\n".join(id_list) ) - return message diff --git a/src/backend/base/langflow/components/astra_assistants/run.py b/src/backend/base/langflow/components/astra_assistants/run.py index dfde8464e36c..e065563cc6a4 100644 --- a/src/backend/base/langflow/components/astra_assistants/run.py +++ b/src/backend/base/langflow/components/astra_assistants/run.py @@ -1,30 +1,35 @@ -from astra_assistants import patch # type: ignore -from typing import Any, Optional +from typing import Any -from langflow.custom import Component -from openai import OpenAI from openai.lib.streaming import AssistantEventHandler + +from langflow.base.astra_assistants.util import get_patched_openai_client +from langflow.custom.custom_component.component_with_cache import ComponentWithCache from langflow.inputs import MultilineInput from langflow.schema import dotdict from langflow.schema.message import Message from langflow.template import Output -class AssistantsRun(Component): +class AssistantsRun(ComponentWithCache): display_name = "Run Assistant" description = "Executes an Assistant Run against a thread" + icon = "AstraDB" + + def __init__(self, **kwargs) -> None: + super().__init__(**kwargs) + self.client = get_patched_openai_client(self._shared_component_cache) + self.thread_id = None def update_build_config( self, build_config: dotdict, field_value: Any, - field_name: Optional[str] = None, - ): + field_name: str | None = None, + ) -> None: if field_name == "thread_id": if field_value is None: thread = self.client.beta.threads.create() self.thread_id = thread.id - field_value build_config["thread_id"] = field_value inputs = [ @@ -57,37 +62,28 @@ def update_build_config( outputs = [Output(display_name="Assistant Response", name="assistant_response", method="process_inputs")] def process_inputs(self) -> Message: - patch(OpenAI()) - try: - text = "" + text = "" - if self.thread_id is None: - thread = self.client.beta.threads.create() - self.thread_id = thread.id + if self.thread_id is None: + thread = self.client.beta.threads.create() + self.thread_id = thread.id - # add the user message - self.client.beta.threads.messages.create(thread_id=self.thread_id, role="user", content=self.user_message) + # add the user message + self.client.beta.threads.messages.create(thread_id=self.thread_id, 
role="user", content=self.user_message) - class EventHandler(AssistantEventHandler): - def __init__(self): - super().__init__() + class EventHandler(AssistantEventHandler): + def __init__(self) -> None: + super().__init__() - def on_exception(self, exception: Exception) -> None: - print(f"Exception: {exception}") - raise exception + def on_exception(self, exception: Exception) -> None: + raise exception - event_handler = EventHandler() - with self.client.beta.threads.runs.create_and_stream( - thread_id=self.thread_id, - assistant_id=self.assistant_id, - event_handler=event_handler, - ) as stream: - # return stream.text_deltas - for part in stream.text_deltas: - text += part - print(part) - message = Message(text=text) - return message - except Exception as e: - print(e) - raise Exception(f"Error running assistant: {e}") + event_handler = EventHandler() + with self.client.beta.threads.runs.create_and_stream( + thread_id=self.thread_id, + assistant_id=self.assistant_id, + event_handler=event_handler, + ) as stream: + for part in stream.text_deltas: + text += part + return Message(text=text) diff --git a/src/backend/base/langflow/components/chains/ConversationChain.py b/src/backend/base/langflow/components/chains/ConversationChain.py deleted file mode 100644 index 712c9760dc7e..000000000000 --- a/src/backend/base/langflow/components/chains/ConversationChain.py +++ /dev/null @@ -1,41 +0,0 @@ -from langchain.chains import ConversationChain - -from langflow.base.chains.model import LCChainComponent -from langflow.field_typing import Message -from langflow.inputs import MultilineInput, HandleInput - - -class ConversationChainComponent(LCChainComponent): - display_name = "ConversationChain" - description = "Chain to have a conversation and load context from memory." - name = "ConversationChain" - - inputs = [ - MultilineInput( - name="input_value", display_name="Input", info="The input value to pass to the chain.", required=True - ), - HandleInput(name="llm", display_name="Language Model", input_types=["LanguageModel"], required=True), - HandleInput( - name="memory", - display_name="Memory", - input_types=["BaseChatMemory"], - ), - ] - - def invoke_chain(self) -> Message: - if not self.memory: - chain = ConversationChain(llm=self.llm) - else: - chain = ConversationChain(llm=self.llm, memory=self.memory) - - result = chain.invoke({"input": self.input_value}, config={"callbacks": self.get_langchain_callbacks()}) - if isinstance(result, dict): - result = result.get(chain.output_key, "") # type: ignore - - elif isinstance(result, str): - result = result - else: - result = result.get("response") - result = str(result) - self.status = result - return Message(text=result) diff --git a/src/backend/base/langflow/components/chains/LLMCheckerChain.py b/src/backend/base/langflow/components/chains/LLMCheckerChain.py deleted file mode 100644 index ede139209595..000000000000 --- a/src/backend/base/langflow/components/chains/LLMCheckerChain.py +++ /dev/null @@ -1,29 +0,0 @@ -from langchain.chains import LLMCheckerChain - -from langflow.base.chains.model import LCChainComponent -from langflow.field_typing import Message -from langflow.inputs import MultilineInput, HandleInput - - -class LLMCheckerChainComponent(LCChainComponent): - display_name = "LLMCheckerChain" - description = "Chain for question-answering with self-verification." 
- documentation = "https://python.langchain.com/docs/modules/chains/additional/llm_checker" - name = "LLMCheckerChain" - - inputs = [ - MultilineInput( - name="input_value", display_name="Input", info="The input value to pass to the chain.", required=True - ), - HandleInput(name="llm", display_name="Language Model", input_types=["LanguageModel"], required=True), - ] - - def invoke_chain(self) -> Message: - chain = LLMCheckerChain.from_llm(llm=self.llm) - response = chain.invoke( - {chain.input_key: self.input_value}, config={"callbacks": self.get_langchain_callbacks()} - ) - result = response.get(chain.output_key, "") - result = str(result) - self.status = result - return Message(text=result) diff --git a/src/backend/base/langflow/components/chains/LLMMathChain.py b/src/backend/base/langflow/components/chains/LLMMathChain.py deleted file mode 100644 index 56705ccf5fe0..000000000000 --- a/src/backend/base/langflow/components/chains/LLMMathChain.py +++ /dev/null @@ -1,32 +0,0 @@ -from langchain.chains import LLMMathChain - -from langflow.base.chains.model import LCChainComponent -from langflow.field_typing import Message -from langflow.inputs import MultilineInput, HandleInput -from langflow.template import Output - - -class LLMMathChainComponent(LCChainComponent): - display_name = "LLMMathChain" - description = "Chain that interprets a prompt and executes python code to do math." - documentation = "https://python.langchain.com/docs/modules/chains/additional/llm_math" - name = "LLMMathChain" - - inputs = [ - MultilineInput( - name="input_value", display_name="Input", info="The input value to pass to the chain.", required=True - ), - HandleInput(name="llm", display_name="Language Model", input_types=["LanguageModel"], required=True), - ] - - outputs = [Output(display_name="Text", name="text", method="invoke_chain")] - - def invoke_chain(self) -> Message: - chain = LLMMathChain.from_llm(llm=self.llm) - response = chain.invoke( - {chain.input_key: self.input_value}, config={"callbacks": self.get_langchain_callbacks()} - ) - result = response.get(chain.output_key, "") - result = str(result) - self.status = result - return Message(text=result) diff --git a/src/backend/base/langflow/components/chains/RetrievalQA.py b/src/backend/base/langflow/components/chains/RetrievalQA.py deleted file mode 100644 index 2e9c0ef08015..000000000000 --- a/src/backend/base/langflow/components/chains/RetrievalQA.py +++ /dev/null @@ -1,64 +0,0 @@ -from langchain.chains import RetrievalQA - -from langflow.base.chains.model import LCChainComponent -from langflow.field_typing import Message -from langflow.inputs import HandleInput, MultilineInput, BoolInput, DropdownInput - - -class RetrievalQAComponent(LCChainComponent): - display_name = "Retrieval QA" - description = "Chain for question-answering querying sources from a retriever." 
- name = "RetrievalQA" - - inputs = [ - MultilineInput( - name="input_value", display_name="Input", info="The input value to pass to the chain.", required=True - ), - DropdownInput( - name="chain_type", - display_name="Chain Type", - info="Chain type to use.", - options=["Stuff", "Map Reduce", "Refine", "Map Rerank"], - value="Stuff", - advanced=True, - ), - HandleInput(name="llm", display_name="Language Model", input_types=["LanguageModel"], required=True), - HandleInput(name="retriever", display_name="Retriever", input_types=["Retriever"], required=True), - HandleInput( - name="memory", - display_name="Memory", - input_types=["BaseChatMemory"], - ), - BoolInput( - name="return_source_documents", - display_name="Return Source Documents", - value=False, - ), - ] - - def invoke_chain(self) -> Message: - chain_type = self.chain_type.lower().replace(" ", "_") - if self.memory: - self.memory.input_key = "query" - self.memory.output_key = "result" - - runnable = RetrievalQA.from_chain_type( - llm=self.llm, - chain_type=chain_type, - retriever=self.retriever, - memory=self.memory, - # always include to help debugging - # - return_source_documents=True, - ) - - result = runnable.invoke({"query": self.input_value}, config={"callbacks": self.get_langchain_callbacks()}) - - source_docs = self.to_data(result.get("source_documents", [])) - result_str = str(result.get("result", "")) - if self.return_source_documents and len(source_docs): - references_str = self.create_references_from_data(source_docs) - result_str = "\n".join([result_str, references_str]) - # put the entire result to debug history, query and content - self.status = {**result, "source_documents": source_docs, "output": result_str} - return result_str diff --git a/src/backend/base/langflow/components/chains/SQLGenerator.py b/src/backend/base/langflow/components/chains/SQLGenerator.py deleted file mode 100644 index 0b57e92b3c92..000000000000 --- a/src/backend/base/langflow/components/chains/SQLGenerator.py +++ /dev/null @@ -1,51 +0,0 @@ -from langchain.chains import create_sql_query_chain -from langchain_core.prompts import PromptTemplate -from langchain_core.runnables import Runnable -from langflow.base.chains.model import LCChainComponent -from langflow.field_typing import Message -from langflow.inputs import MultilineInput, HandleInput, IntInput -from langflow.template import Output - - -class SQLGeneratorComponent(LCChainComponent): - display_name = "Natural Language to SQL" - description = "Generate SQL from natural language." 
- name = "SQLGenerator" - - inputs = [ - MultilineInput( - name="input_value", display_name="Input", info="The input value to pass to the chain.", required=True - ), - HandleInput(name="llm", display_name="Language Model", input_types=["LanguageModel"], required=True), - HandleInput(name="db", display_name="SQLDatabase", input_types=["SQLDatabase"], required=True), - IntInput( - name="top_k", display_name="Top K", info="The number of results per select statement to return.", value=5 - ), - MultilineInput(name="prompt", display_name="Prompt", info="The prompt must contain `{question}`."), - ] - - outputs = [Output(display_name="Text", name="text", method="invoke_chain")] - - def invoke_chain(self) -> Message: - if self.prompt: - prompt_template = PromptTemplate.from_template(template=self.prompt) - else: - prompt_template = None - - if self.top_k < 1: - raise ValueError("Top K must be greater than 0.") - - if not prompt_template: - sql_query_chain = create_sql_query_chain(llm=self.llm, db=self.db, k=self.top_k) - else: - # Check if {question} is in the prompt - if "{question}" not in prompt_template.template or "question" not in prompt_template.input_variables: - raise ValueError("Prompt must contain `{question}` to be used with Natural Language to SQL.") - sql_query_chain = create_sql_query_chain(llm=self.llm, db=self.db, prompt=prompt_template, k=self.top_k) - query_writer: Runnable = sql_query_chain | {"query": lambda x: x.replace("SQLQuery:", "").strip()} - response = query_writer.invoke( - {"question": self.input_value}, config={"callbacks": self.get_langchain_callbacks()} - ) - query = response.get("query") - self.status = query - return query diff --git a/src/backend/base/langflow/components/chains/__init__.py b/src/backend/base/langflow/components/chains/__init__.py index 39faca5e9f79..e69de29bb2d1 100644 --- a/src/backend/base/langflow/components/chains/__init__.py +++ b/src/backend/base/langflow/components/chains/__init__.py @@ -1,13 +0,0 @@ -from .ConversationChain import ConversationChainComponent -from .LLMCheckerChain import LLMCheckerChainComponent -from .LLMMathChain import LLMMathChainComponent -from .RetrievalQA import RetrievalQAComponent -from .SQLGenerator import SQLGeneratorComponent - -__all__ = [ - "ConversationChainComponent", - "LLMCheckerChainComponent", - "LLMMathChainComponent", - "RetrievalQAComponent", - "SQLGeneratorComponent", -] diff --git a/src/backend/base/langflow/components/cohere/__init__.py b/src/backend/base/langflow/components/cohere/__init__.py new file mode 100644 index 000000000000..91945ee27f91 --- /dev/null +++ b/src/backend/base/langflow/components/cohere/__init__.py @@ -0,0 +1,3 @@ +from .cohere_rerank import CohereRerankComponent + +__all__ = ["CohereRerankComponent"] diff --git a/src/backend/base/langflow/components/cohere/cohere_rerank.py b/src/backend/base/langflow/components/cohere/cohere_rerank.py new file mode 100644 index 000000000000..177899954859 --- /dev/null +++ b/src/backend/base/langflow/components/cohere/cohere_rerank.py @@ -0,0 +1,90 @@ +from typing import cast + +from langchain.retrievers import ContextualCompressionRetriever +from langchain_cohere import CohereRerank + +from langflow.base.vectorstores.model import ( + LCVectorStoreComponent, + check_cached_vector_store, +) +from langflow.field_typing import Retriever, VectorStore +from langflow.io import ( + DropdownInput, + HandleInput, + IntInput, + MessageTextInput, + MultilineInput, + SecretStrInput, +) +from langflow.schema import Data +from langflow.template.field.base 
import Output + + +class CohereRerankComponent(LCVectorStoreComponent): + display_name = "Cohere Rerank" + description = "Rerank documents using the Cohere API and a retriever." + name = "CohereRerank" + icon = "Cohere" + legacy: bool = True + + inputs = [ + MultilineInput( + name="search_query", + display_name="Search Query", + ), + DropdownInput( + name="model", + display_name="Model", + options=[ + "rerank-english-v3.0", + "rerank-multilingual-v3.0", + "rerank-english-v2.0", + "rerank-multilingual-v2.0", + ], + value="rerank-english-v3.0", + ), + SecretStrInput(name="api_key", display_name="API Key"), + IntInput(name="top_n", display_name="Top N", value=3), + MessageTextInput( + name="user_agent", + display_name="User Agent", + value="langflow", + advanced=True, + ), + HandleInput(name="retriever", display_name="Retriever", input_types=["Retriever"]), + ] + + outputs = [ + Output( + display_name="Retriever", + name="base_retriever", + method="build_base_retriever", + ), + Output( + display_name="Search Results", + name="search_results", + method="search_documents", + ), + ] + + def build_base_retriever(self) -> Retriever: # type: ignore[type-var] + cohere_reranker = CohereRerank( + cohere_api_key=self.api_key, + model=self.model, + top_n=self.top_n, + user_agent=self.user_agent, + ) + retriever = ContextualCompressionRetriever(base_compressor=cohere_reranker, base_retriever=self.retriever) + return cast(Retriever, retriever) + + async def search_documents(self) -> list[Data]: # type: ignore[override] + retriever = self.build_base_retriever() + documents = await retriever.ainvoke(self.search_query, config={"callbacks": self.get_langchain_callbacks()}) + data = self.to_data(documents) + self.status = data + return data + + @check_cached_vector_store + def build_vector_store(self) -> VectorStore: + msg = "Cohere Rerank does not support vector stores." 
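For reference, the retriever wiring that `build_base_retriever` and `search_documents` assemble above can be reproduced outside Langflow in a few lines. This is a minimal sketch, not the component itself; it assumes `langchain-cohere` and `rank_bm25` are installed and `COHERE_API_KEY` is set, and the sample texts and query are placeholders:

```python
# Standalone sketch of the pattern used by CohereRerankComponent above:
# a Cohere reranker wrapped around a base retriever via
# ContextualCompressionRetriever. Assumes langchain-cohere and rank_bm25
# are installed and COHERE_API_KEY is set; texts and query are placeholders.
from langchain.retrievers import ContextualCompressionRetriever
from langchain_cohere import CohereRerank
from langchain_community.retrievers import BM25Retriever

base_retriever = BM25Retriever.from_texts(
    ["Langflow is a visual flow builder.", "Cohere provides rerank models."]
)
compressor = CohereRerank(model="rerank-english-v3.0", top_n=3)
retriever = ContextualCompressionRetriever(
    base_compressor=compressor,
    base_retriever=base_retriever,
)
docs = retriever.invoke("What does Cohere offer?")  # reranked Documents
```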
+ raise NotImplementedError(msg) diff --git a/src/backend/base/langflow/components/composio/__init__.py b/src/backend/base/langflow/components/composio/__init__.py new file mode 100644 index 000000000000..24e438134a44 --- /dev/null +++ b/src/backend/base/langflow/components/composio/__init__.py @@ -0,0 +1,3 @@ +from .composio_api import ComposioAPIComponent + +__all__ = ["ComposioAPIComponent"] diff --git a/src/backend/base/langflow/components/composio/composio_api.py b/src/backend/base/langflow/components/composio/composio_api.py new file mode 100644 index 000000000000..d830b8c36ba2 --- /dev/null +++ b/src/backend/base/langflow/components/composio/composio_api.py @@ -0,0 +1,173 @@ +from collections.abc import Sequence +from typing import Any + +from composio_langchain import Action, App, ComposioToolSet +from langchain_core.tools import Tool +from loguru import logger +from typing_extensions import override + +from langflow.base.langchain_utilities.model import LCToolComponent +from langflow.inputs import DropdownInput, MessageTextInput, MultiselectInput, SecretStrInput, StrInput + + +class ComposioAPIComponent(LCToolComponent): + display_name: str = "Composio Tools" + description: str = "Use Composio toolset to run actions with your agent" + name = "ComposioAPI" + icon = "Composio" + documentation: str = "https://docs.composio.dev" + + inputs = [ + MessageTextInput(name="entity_id", display_name="Entity ID", value="default", advanced=True), + SecretStrInput( + name="api_key", + display_name="Composio API Key", + required=True, + refresh_button=True, + info="Refer to https://docs.composio.dev/introduction/foundations/howtos/get_api_key", + ), + DropdownInput( + name="app_names", + display_name="App Name", + options=list(App.__annotations__), + value="", + info="The app name to use. Please refresh after selecting app name", + refresh_button=True, + ), + MultiselectInput( + name="action_names", + display_name="Actions to use", + required=False, + options=[], + value=[], + info="The actions to pass to agent to execute", + ), + StrInput( + name="auth_status_config", + display_name="Auth status", + value="", + refresh_button=True, + info="Open link or enter api key. Then refresh button", + ), + ] + + def _check_for_authorization(self, app: str) -> str: + """Checks if the app is authorized. + + Args: + app (str): The app name to check authorization for. + + Returns: + str: The authorization status. + """ + toolset = self._build_wrapper() + entity = toolset.client.get_entity(id=self.entity_id) + try: + entity.get_connection(app=app) + except Exception: # noqa: BLE001 + logger.opt(exception=True).debug("Authorization error") + return self._handle_authorization_failure(toolset, entity, app) + + return f"{app} CONNECTED" + + def _handle_authorization_failure(self, toolset: ComposioToolSet, entity: Any, app: str) -> str: + """Handles the authorization failure by attempting to process API key auth or initiate default connection. + + Args: + toolset (ComposioToolSet): The toolset instance. + entity (Any): The entity instance. + app (str): The app name. + + Returns: + str: The result of the authorization failure message. 
+ """ + try: + auth_schemes = toolset.client.apps.get(app).auth_schemes + if auth_schemes[0].auth_mode == "API_KEY": + return self._process_api_key_auth(entity, app) + return self._initiate_default_connection(entity, app) + except Exception: # noqa: BLE001 + logger.exception("Authorization error") + return "Error" + + def _process_api_key_auth(self, entity: Any, app: str) -> str: + """Processes the API key authentication. + + Args: + entity (Any): The entity instance. + app (str): The app name. + + Returns: + str: The status of the API key authentication. + """ + auth_status_config = self.auth_status_config + is_url = "http" in auth_status_config or "https" in auth_status_config + is_different_app = "CONNECTED" in auth_status_config and app not in auth_status_config + is_default_api_key_message = "API Key" in auth_status_config + + if is_different_app or is_url or is_default_api_key_message: + return "Enter API Key" + if not is_default_api_key_message: + entity.initiate_connection( + app_name=app, + auth_mode="API_KEY", + auth_config={"api_key": self.auth_status_config}, + use_composio_auth=False, + force_new_integration=True, + ) + return f"{app} CONNECTED" + return "Enter API Key" + + def _initiate_default_connection(self, entity: Any, app: str) -> str: + connection = entity.initiate_connection(app_name=app, use_composio_auth=True, force_new_integration=True) + return connection.redirectUrl + + def _get_connected_app_names_for_entity(self) -> list[str]: + toolset = self._build_wrapper() + connections = toolset.client.get_entity(id=self.entity_id).get_connections() + return list({connection.appUniqueId for connection in connections}) + + def _update_app_names_with_connected_status(self, build_config: dict) -> dict: + connected_app_names = self._get_connected_app_names_for_entity() + + app_names = [ + f"{app_name}_CONNECTED" for app_name in App.__annotations__ if app_name.lower() in connected_app_names + ] + non_connected_app_names = [ + app_name for app_name in App.__annotations__ if app_name.lower() not in connected_app_names + ] + build_config["app_names"]["options"] = app_names + non_connected_app_names + build_config["app_names"]["value"] = app_names[0] if app_names else "" + return build_config + + def _get_normalized_app_name(self) -> str: + return self.app_names.replace("_CONNECTED", "").replace("_connected", "") + + @override + def update_build_config(self, build_config: dict, field_value: Any, field_name: str | None = None) -> dict: + if field_name == "api_key": + if hasattr(self, "api_key") and self.api_key != "": + build_config = self._update_app_names_with_connected_status(build_config) + return build_config + + if field_name in {"app_names", "auth_status_config"}: + if hasattr(self, "api_key") and self.api_key != "": + build_config["auth_status_config"]["value"] = self._check_for_authorization( + self._get_normalized_app_name() + ) + all_action_names = list(Action.__annotations__) + app_action_names = [ + action_name + for action_name in all_action_names + if action_name.lower().startswith(self._get_normalized_app_name().lower() + "_") + ] + build_config["action_names"]["options"] = app_action_names + build_config["action_names"]["value"] = [app_action_names[0]] if app_action_names else [""] + return build_config + + def build_tool(self) -> Sequence[Tool]: + composio_toolset = self._build_wrapper() + return composio_toolset.get_tools(actions=self.action_names) + + def _build_wrapper(self) -> ComposioToolSet: + return ComposioToolSet(api_key=self.api_key) diff --git 
a/src/backend/base/langflow/components/confluence/__init__.py b/src/backend/base/langflow/components/confluence/__init__.py new file mode 100644 index 000000000000..63ccc66ac21d --- /dev/null +++ b/src/backend/base/langflow/components/confluence/__init__.py @@ -0,0 +1,3 @@ +from .confluence import ConfluenceComponent + +__all__ = ["ConfluenceComponent"] diff --git a/src/backend/base/langflow/components/confluence/confluence.py b/src/backend/base/langflow/components/confluence/confluence.py new file mode 100644 index 000000000000..418bfe0ad927 --- /dev/null +++ b/src/backend/base/langflow/components/confluence/confluence.py @@ -0,0 +1,84 @@ +from langchain_community.document_loaders import ConfluenceLoader +from langchain_community.document_loaders.confluence import ContentFormat + +from langflow.custom import Component +from langflow.io import BoolInput, DropdownInput, IntInput, Output, SecretStrInput, StrInput +from langflow.schema import Data + + +class ConfluenceComponent(Component): + display_name = "Confluence" + description = "Confluence wiki collaboration platform" + documentation = "https://python.langchain.com/v0.2/docs/integrations/document_loaders/confluence/" + trace_type = "tool" + icon = "Confluence" + name = "Confluence" + + inputs = [ + StrInput( + name="url", + display_name="Site URL", + required=True, + info="The base URL of the Confluence Space. Example: https://<company>.atlassian.net/wiki.", + ), + StrInput( + name="username", + display_name="Username", + required=True, + info="Atlassian User E-mail. Example: email@example.com", + ), + SecretStrInput( + name="api_key", + display_name="API Key", + required=True, + info="Atlassian Key. Create at: https://id.atlassian.com/manage-profile/security/api-tokens", + ), + StrInput(name="space_key", display_name="Space Key", required=True), + BoolInput(name="cloud", display_name="Use Cloud?", required=True, value=True, advanced=True), + DropdownInput( + name="content_format", + display_name="Content Format", + options=[ + ContentFormat.EDITOR.value, + ContentFormat.EXPORT_VIEW.value, + ContentFormat.ANONYMOUS_EXPORT_VIEW.value, + ContentFormat.STORAGE.value, + ContentFormat.VIEW.value, + ], + value=ContentFormat.STORAGE.value, + required=True, + advanced=True, + info="Specify content format, defaults to ContentFormat.STORAGE", + ), + IntInput( + name="max_pages", + display_name="Max Pages", + required=False, + value=1000, + advanced=True, + info="Maximum number of pages to retrieve in total, defaults to 1000", + ), + ] + + outputs = [ + Output(name="data", display_name="Data", method="load_documents"), + ] + + def build_confluence(self) -> ConfluenceLoader: + content_format = ContentFormat(self.content_format) + return ConfluenceLoader( + url=self.url, + username=self.username, + api_key=self.api_key, + cloud=self.cloud, + space_key=self.space_key, + content_format=content_format, + max_pages=self.max_pages, + ) + + def load_documents(self) -> list[Data]: + confluence = self.build_confluence() + documents = confluence.load() + data = [Data.from_document(doc) for doc in documents] # Using the from_document method of Data + self.status = data + return data diff --git a/src/backend/base/langflow/components/crewai/__init__.py b/src/backend/base/langflow/components/crewai/__init__.py new file mode 100644 index 000000000000..d601c7b1cced --- /dev/null +++ b/src/backend/base/langflow/components/crewai/__init__.py @@ -0,0 +1,15 @@ +from .crewai import CrewAIAgentComponent +from .hierarchical_crew import HierarchicalCrewComponent +from
.hierarchical_task import HierarchicalTaskComponent +from .sequential_crew import SequentialCrewComponent +from .sequential_task import SequentialTaskComponent +from .sequential_task_agent import SequentialTaskAgentComponent + +__all__ = [ + "CrewAIAgentComponent", + "HierarchicalCrewComponent", + "HierarchicalTaskComponent", + "SequentialCrewComponent", + "SequentialTaskComponent", + "SequentialTaskAgentComponent", +] diff --git a/src/backend/base/langflow/components/crewai/crewai.py b/src/backend/base/langflow/components/crewai/crewai.py new file mode 100644 index 000000000000..879977ec7154 --- /dev/null +++ b/src/backend/base/langflow/components/crewai/crewai.py @@ -0,0 +1,85 @@ +from crewai import Agent + +from langflow.custom import Component +from langflow.io import BoolInput, DictInput, HandleInput, MultilineInput, Output + + +class CrewAIAgentComponent(Component): + display_name = "CrewAI Agent" + description = "Represents an agent of CrewAI." + documentation: str = "https://docs.crewai.com/how-to/LLM-Connections/" + icon = "CrewAI" + + inputs = [ + MultilineInput(name="role", display_name="Role", info="The role of the agent."), + MultilineInput(name="goal", display_name="Goal", info="The objective of the agent."), + MultilineInput(name="backstory", display_name="Backstory", info="The backstory of the agent."), + HandleInput( + name="tools", + display_name="Tools", + input_types=["Tool"], + is_list=True, + info="Tools at agents disposal", + value=[], + ), + HandleInput( + name="llm", + display_name="Language Model", + info="Language model that will run the agent.", + input_types=["LanguageModel"], + ), + BoolInput( + name="memory", + display_name="Memory", + info="Whether the agent should have memory or not", + advanced=True, + value=True, + ), + BoolInput( + name="verbose", + display_name="Verbose", + advanced=True, + value=False, + ), + BoolInput( + name="allow_delegation", + display_name="Allow Delegation", + info="Whether the agent is allowed to delegate tasks to other agents.", + value=True, + ), + BoolInput( + name="allow_code_execution", + display_name="Allow Code Execution", + info="Whether the agent is allowed to execute code.", + value=False, + advanced=True, + ), + DictInput( + name="kwargs", + display_name="kwargs", + info="kwargs of agent.", + is_list=True, + advanced=True, + ), + ] + + outputs = [ + Output(display_name="Agent", name="output", method="build_output"), + ] + + def build_output(self) -> Agent: + kwargs = self.kwargs or {} + agent = Agent( + role=self.role, + goal=self.goal, + backstory=self.backstory, + llm=self.llm, + verbose=self.verbose, + memory=self.memory, + tools=self.tools or [], + allow_delegation=self.allow_delegation, + allow_code_execution=self.allow_code_execution, + **kwargs, + ) + self.status = repr(agent) + return agent diff --git a/src/backend/base/langflow/components/crewai/hierarchical_crew.py b/src/backend/base/langflow/components/crewai/hierarchical_crew.py new file mode 100644 index 000000000000..8295554013e3 --- /dev/null +++ b/src/backend/base/langflow/components/crewai/hierarchical_crew.py @@ -0,0 +1,52 @@ +import os + +from crewai import Crew, Process + +from langflow.base.agents.crewai.crew import BaseCrewComponent +from langflow.io import HandleInput, SecretStrInput + + +class HierarchicalCrewComponent(BaseCrewComponent): + display_name: str = "Hierarchical Crew" + description: str = ( + "Represents a group of agents, defining how they should collaborate and the tasks they should perform." 
+ ) + documentation: str = "https://docs.crewai.com/how-to/Hierarchical/" + icon = "CrewAI" + + inputs = [ + *BaseCrewComponent._base_inputs, + HandleInput(name="agents", display_name="Agents", input_types=["Agent"], is_list=True), + HandleInput(name="tasks", display_name="Tasks", input_types=["HierarchicalTask"], is_list=True), + HandleInput(name="manager_llm", display_name="Manager LLM", input_types=["LanguageModel"], required=False), + HandleInput(name="manager_agent", display_name="Manager Agent", input_types=["Agent"], required=False), + SecretStrInput( + name="openai_api_key", + display_name="OpenAI API Key", + info="The OpenAI API Key to use for the OpenAI model.", + value="OPENAI_API_KEY", + ), + ] + + def build_crew(self) -> Crew: + tasks, agents = self.get_tasks_and_agents() + + # Set the OpenAI API Key + if self.openai_api_key: + os.environ["OPENAI_API_KEY"] = self.openai_api_key + + return Crew( + agents=agents, + tasks=tasks, + process=Process.hierarchical, + verbose=self.verbose, + memory=self.memory, + cache=self.use_cache, + max_rpm=self.max_rpm, + share_crew=self.share_crew, + function_calling_llm=self.function_calling_llm, + manager_agent=self.manager_agent, + manager_llm=self.manager_llm, + step_callback=self.get_step_callback(), + task_callback=self.get_task_callback(), + ) diff --git a/src/backend/base/langflow/components/helpers/HierarchicalTask.py b/src/backend/base/langflow/components/crewai/hierarchical_task.py similarity index 100% rename from src/backend/base/langflow/components/helpers/HierarchicalTask.py rename to src/backend/base/langflow/components/crewai/hierarchical_task.py diff --git a/src/backend/base/langflow/components/crewai/sequential_crew.py b/src/backend/base/langflow/components/crewai/sequential_crew.py new file mode 100644 index 000000000000..df17e75253c4 --- /dev/null +++ b/src/backend/base/langflow/components/crewai/sequential_crew.py @@ -0,0 +1,36 @@ +from crewai import Agent, Crew, Process, Task + +from langflow.base.agents.crewai.crew import BaseCrewComponent +from langflow.io import HandleInput +from langflow.schema.message import Message + + +class SequentialCrewComponent(BaseCrewComponent): + display_name: str = "Sequential Crew" + description: str = "Represents a group of agents with tasks that are executed sequentially." 
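The hierarchical `build_crew` above maps the component inputs straight onto `crewai.Crew`. A rough standalone sketch of that assembly follows; it assumes `crewai` is installed, `OPENAI_API_KEY` is exported, and a crewai release recent enough to accept a model-name string for `manager_llm` (the agents, task, and model name are illustrative):

```python
# Rough standalone sketch of the hierarchical Crew assembled by
# build_crew above. Assumes crewai is installed, OPENAI_API_KEY is set,
# and this crewai version accepts a model-name string as manager_llm.
from crewai import Agent, Crew, Process, Task

researcher = Agent(role="Researcher", goal="Collect facts", backstory="A diligent analyst.")
writer = Agent(role="Writer", goal="Turn facts into prose", backstory="A concise technical writer.")

brief = Task(
    description="Write a short brief on vector databases.",
    expected_output="Three bullet points.",
)

crew = Crew(
    agents=[researcher, writer],
    tasks=[brief],
    process=Process.hierarchical,  # a manager delegates work to the agents
    manager_llm="gpt-4o-mini",     # illustrative; a manager_agent can be passed instead
)
print(crew.kickoff())
```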
+ documentation: str = "https://docs.crewai.com/how-to/Sequential/" + icon = "CrewAI" + + inputs = [ + *BaseCrewComponent._base_inputs, + HandleInput(name="tasks", display_name="Tasks", input_types=["SequentialTask"], is_list=True), + ] + + def get_tasks_and_agents(self) -> tuple[list[Task], list[Agent]]: + return self.tasks, [task.agent for task in self.tasks] + + def build_crew(self) -> Message: + tasks, agents = self.get_tasks_and_agents() + return Crew( + agents=agents, + tasks=tasks, + process=Process.sequential, + verbose=self.verbose, + memory=self.memory, + cache=self.use_cache, + max_rpm=self.max_rpm, + share_crew=self.share_crew, + function_calling_llm=self.function_calling_llm, + step_callback=self.get_step_callback(), + task_callback=self.get_task_callback(), + ) diff --git a/src/backend/base/langflow/components/crewai/sequential_task.py b/src/backend/base/langflow/components/crewai/sequential_task.py new file mode 100644 index 000000000000..81add12c689b --- /dev/null +++ b/src/backend/base/langflow/components/crewai/sequential_task.py @@ -0,0 +1,72 @@ +from langflow.base.agents.crewai.tasks import SequentialTask +from langflow.custom import Component +from langflow.io import BoolInput, HandleInput, MultilineInput, Output + + +class SequentialTaskComponent(Component): + display_name: str = "Sequential Task" + description: str = "Each task must have a description, an expected output and an agent responsible for execution." + icon = "CrewAI" + inputs = [ + MultilineInput( + name="task_description", + display_name="Description", + info="Descriptive text detailing task's purpose and execution.", + ), + MultilineInput( + name="expected_output", + display_name="Expected Output", + info="Clear definition of expected task outcome.", + ), + HandleInput( + name="tools", + display_name="Tools", + input_types=["Tool"], + is_list=True, + info="List of tools/resources limited for task execution. 
Uses the Agent tools by default.", + required=False, + advanced=True, + ), + HandleInput( + name="agent", + display_name="Agent", + input_types=["Agent"], + info="CrewAI Agent that will perform the task", + required=True, + ), + HandleInput( + name="task", + display_name="Task", + input_types=["SequentialTask"], + info="The previous task in the sequence (for chaining).", + ), + BoolInput( + name="async_execution", + display_name="Async Execution", + value=True, + advanced=True, + info="Boolean flag indicating asynchronous task execution.", + ), + ] + + outputs = [ + Output(display_name="Task", name="task_output", method="build_task"), + ] + + def build_task(self) -> list[SequentialTask]: + tasks: list[SequentialTask] = [] + task = SequentialTask( + description=self.task_description, + expected_output=self.expected_output, + tools=self.agent.tools, + async_execution=False, + agent=self.agent, + ) + tasks.append(task) + self.status = task + if self.task: + if isinstance(self.task, list) and all(isinstance(task, SequentialTask) for task in self.task): + tasks = self.task + tasks + elif isinstance(self.task, SequentialTask): + tasks = [self.task, *tasks] + return tasks diff --git a/src/backend/base/langflow/components/crewai/sequential_task_agent.py b/src/backend/base/langflow/components/crewai/sequential_task_agent.py new file mode 100644 index 000000000000..bb3e5c59cbce --- /dev/null +++ b/src/backend/base/langflow/components/crewai/sequential_task_agent.py @@ -0,0 +1,138 @@ +from crewai import Agent, Task + +from langflow.base.agents.crewai.tasks import SequentialTask +from langflow.custom import Component +from langflow.io import BoolInput, DictInput, HandleInput, MultilineInput, Output + + +class SequentialTaskAgentComponent(Component): + display_name = "Sequential Task Agent" + description = "Creates a CrewAI Task and its associated Agent."
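The `build_task` method above grows a flat list as Sequential Task components are chained: the incoming `task` value (a single task or a list of tasks) is prepended to the newly built task. A toy sketch of just that accumulation logic, with `SequentialTask` stubbed as a dataclass since the real class lives in `langflow.base.agents.crewai.tasks`:

```python
# Toy sketch of the list accumulation in SequentialTaskComponent.build_task;
# SequentialTask is stubbed, and chain() mirrors only the prepend logic.
from dataclasses import dataclass


@dataclass
class SequentialTask:
    description: str


def chain(previous: SequentialTask | list[SequentialTask] | None,
          new: SequentialTask) -> list[SequentialTask]:
    tasks = [new]
    if isinstance(previous, list):
        return previous + tasks
    if isinstance(previous, SequentialTask):
        return [previous, *tasks]
    return tasks


step1 = chain(None, SequentialTask("research"))
step2 = chain(step1, SequentialTask("write"))
assert [t.description for t in step2] == ["research", "write"]
```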
+ documentation = "https://docs.crewai.com/how-to/LLM-Connections/" + icon = "CrewAI" + + inputs = [ + # Agent inputs + MultilineInput(name="role", display_name="Role", info="The role of the agent."), + MultilineInput(name="goal", display_name="Goal", info="The objective of the agent."), + MultilineInput( + name="backstory", + display_name="Backstory", + info="The backstory of the agent.", + ), + HandleInput( + name="tools", + display_name="Tools", + input_types=["Tool"], + is_list=True, + info="Tools at agent's disposal", + value=[], + ), + HandleInput( + name="llm", + display_name="Language Model", + info="Language model that will run the agent.", + input_types=["LanguageModel"], + ), + BoolInput( + name="memory", + display_name="Memory", + info="Whether the agent should have memory or not", + advanced=True, + value=True, + ), + BoolInput( + name="verbose", + display_name="Verbose", + advanced=True, + value=True, + ), + BoolInput( + name="allow_delegation", + display_name="Allow Delegation", + info="Whether the agent is allowed to delegate tasks to other agents.", + value=False, + advanced=True, + ), + BoolInput( + name="allow_code_execution", + display_name="Allow Code Execution", + info="Whether the agent is allowed to execute code.", + value=False, + advanced=True, + ), + DictInput( + name="agent_kwargs", + display_name="Agent kwargs", + info="Additional kwargs for the agent.", + is_list=True, + advanced=True, + ), + # Task inputs + MultilineInput( + name="task_description", + display_name="Task Description", + info="Descriptive text detailing task's purpose and execution.", + ), + MultilineInput( + name="expected_output", + display_name="Expected Task Output", + info="Clear definition of expected task outcome.", + ), + BoolInput( + name="async_execution", + display_name="Async Execution", + value=False, + advanced=True, + info="Boolean flag indicating asynchronous task execution.", + ), + # Chaining input + HandleInput( + name="previous_task", + display_name="Previous Task", + input_types=["SequentialTask"], + info="The previous task in the sequence (for chaining).", + required=False, + ), + ] + + outputs = [ + Output( + display_name="Sequential Task", + name="task_output", + method="build_agent_and_task", + ), + ] + + def build_agent_and_task(self) -> list[SequentialTask]: + # Build the agent + agent_kwargs = self.agent_kwargs or {} + agent = Agent( + role=self.role, + goal=self.goal, + backstory=self.backstory, + llm=self.llm, + verbose=self.verbose, + memory=self.memory, + tools=self.tools or [], + allow_delegation=self.allow_delegation, + allow_code_execution=self.allow_code_execution, + **agent_kwargs, + ) + + # Build the task + task = Task( + description=self.task_description, + expected_output=self.expected_output, + agent=agent, + async_execution=self.async_execution, + ) + + # If there's a previous task, create a list of tasks + if self.previous_task: + tasks = [*self.previous_task, task] if isinstance(self.previous_task, list) else [self.previous_task, task] + else: + tasks = [task] + + self.status = f"Agent: {agent!r}\nTask: {task!r}" + return tasks diff --git a/src/backend/base/langflow/components/custom_component/__init__.py b/src/backend/base/langflow/components/custom_component/__init__.py new file mode 100644 index 000000000000..fa37797fe807 --- /dev/null +++ b/src/backend/base/langflow/components/custom_component/__init__.py @@ -0,0 +1,5 @@ +from .custom_component import CustomComponent + +__all__ = [ + "CustomComponent", +] diff --git 
a/src/backend/base/langflow/components/custom_component/custom_component.py b/src/backend/base/langflow/components/custom_component/custom_component.py new file mode 100644 index 000000000000..686ef0128f82 --- /dev/null +++ b/src/backend/base/langflow/components/custom_component/custom_component.py @@ -0,0 +1,31 @@ +# from langflow.field_typing import Data +from langflow.custom import Component +from langflow.io import MessageTextInput, Output +from langflow.schema import Data + + +class CustomComponent(Component): + display_name = "Custom Component" + description = "Use as a template to create your own component." + documentation: str = "http://docs.langflow.org/components/custom" + icon = "code" + name = "CustomComponent" + + inputs = [ + MessageTextInput( + name="input_value", + display_name="Input Value", + info="This is a custom component Input", + value="Hello, World!", + tool_mode=True, + ), + ] + + outputs = [ + Output(display_name="Output", name="output", method="build_output"), + ] + + def build_output(self) -> Data: + data = Data(value=self.input_value) + self.status = data + return data diff --git a/src/backend/base/langflow/components/data/APIRequest.py b/src/backend/base/langflow/components/data/APIRequest.py deleted file mode 100644 index 8a1656a587f6..000000000000 --- a/src/backend/base/langflow/components/data/APIRequest.py +++ /dev/null @@ -1,192 +0,0 @@ -import asyncio -import json -from typing import Any, List, Optional -from urllib.parse import parse_qsl, urlencode, urlparse, urlunparse - -import httpx -from loguru import logger - -from langflow.base.curl.parse import parse_context -from langflow.custom import Component -from langflow.io import DataInput, DropdownInput, IntInput, MessageTextInput, NestedDictInput, Output -from langflow.schema import Data -from langflow.schema.dotdict import dotdict - - -class APIRequestComponent(Component): - display_name = "API Request" - description = ( - "This component allows you to make HTTP requests to one or more URLs. " - "You can provide headers and body as either dictionaries or Data objects. " - "Additionally, you can append query parameters to the URLs.\n\n" - "**Note:** Check advanced options for more settings." - ) - icon = "Globe" - name = "APIRequest" - - inputs = [ - MessageTextInput( - name="urls", - display_name="URLs", - is_list=True, - info="Enter one or more URLs, separated by commas.", - ), - MessageTextInput( - name="curl", - display_name="Curl", - info="Paste a curl command to populate the fields. This will fill in the dictionary fields for headers and body.", - advanced=False, - refresh_button=True, - ), - DropdownInput( - name="method", - display_name="Method", - options=["GET", "POST", "PATCH", "PUT"], - value="GET", - info="The HTTP method to use (GET, POST, PATCH, PUT).", - ), - NestedDictInput( - name="headers", - display_name="Headers", - info="The headers to send with the request as a dictionary. This is populated when using the CURL field.", - input_types=["Data"], - ), - NestedDictInput( - name="body", - display_name="Body", - info="The body to send with the request as a dictionary (for POST, PATCH, PUT). 
This is populated when using the CURL field.", - input_types=["Data"], - ), - DataInput( - name="query_params", - display_name="Query Parameters", - info="The query parameters to append to the URL.", - ), - IntInput( - name="timeout", - display_name="Timeout", - value=5, - info="The timeout to use for the request.", - ), - ] - - outputs = [ - Output(display_name="Data", name="data", method="make_requests"), - ] - - def parse_curl(self, curl: str, build_config: dotdict) -> dotdict: - try: - parsed = parse_context(curl) - build_config["urls"]["value"] = [parsed.url] - build_config["method"]["value"] = parsed.method.upper() - build_config["headers"]["value"] = dict(parsed.headers) - - if parsed.data: - try: - json_data = json.loads(parsed.data) - build_config["body"]["value"] = json_data - except json.JSONDecodeError as e: - logger.error(f"Error decoding JSON data: {e}") - else: - build_config["body"]["value"] = {} - except Exception as exc: - logger.error(f"Error parsing curl: {exc}") - raise ValueError(f"Error parsing curl: {exc}") - return build_config - - def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None): - if field_name == "curl" and field_value: - build_config = self.parse_curl(field_value, build_config) - return build_config - - async def make_request( - self, - client: httpx.AsyncClient, - method: str, - url: str, - headers: Optional[dict] = None, - body: Optional[dict] = None, - timeout: int = 5, - ) -> Data: - method = method.upper() - if method not in ["GET", "POST", "PATCH", "PUT", "DELETE"]: - raise ValueError(f"Unsupported method: {method}") - - if isinstance(body, str) and body: - try: - body = json.loads(body) - except Exception as e: - logger.error(f"Error decoding JSON data: {e}") - body = None - raise ValueError(f"Error decoding JSON data: {e}") - - data = body if body else None - - try: - response = await client.request(method, url, headers=headers, json=data, timeout=timeout) - try: - result = response.json() - except Exception: - result = response.text - return Data( - data={ - "source": url, - "headers": headers, - "status_code": response.status_code, - "result": result, - }, - ) - except httpx.TimeoutException: - return Data( - data={ - "source": url, - "headers": headers, - "status_code": 408, - "error": "Request timed out", - }, - ) - except Exception as exc: - return Data( - data={ - "source": url, - "headers": headers, - "status_code": 500, - "error": str(exc), - }, - ) - - def add_query_params(self, url: str, params: dict) -> str: - url_parts = list(urlparse(url)) - query = dict(parse_qsl(url_parts[4])) - query.update(params) - url_parts[4] = urlencode(query) - return urlunparse(url_parts) - - async def make_requests(self) -> List[Data]: - method = self.method - urls = [url.strip() for url in self.urls if url.strip()] - curl = self.curl - headers = self.headers or {} - body = self.body or {} - timeout = self.timeout - query_params = self.query_params.data if self.query_params else {} - - if curl: - self._build_config = self.parse_curl(curl, dotdict()) - - if isinstance(headers, Data): - headers = headers.data - - if isinstance(body, Data): - body = body.data - - bodies = [body] * len(urls) - - urls = [self.add_query_params(url, query_params) for url in urls] - - async with httpx.AsyncClient() as client: - results = await asyncio.gather( - *[self.make_request(client, method, u, headers, rec, timeout) for u, rec in zip(urls, bodies)] - ) - self.status = results - return results diff --git 
a/src/backend/base/langflow/components/data/Directory.py b/src/backend/base/langflow/components/data/Directory.py deleted file mode 100644 index 55a515f51f0a..000000000000 --- a/src/backend/base/langflow/components/data/Directory.py +++ /dev/null @@ -1,95 +0,0 @@ -from typing import List - -from langflow.base.data.utils import parallel_load_data, parse_text_file_to_data, retrieve_file_paths -from langflow.custom import Component -from langflow.io import BoolInput, IntInput, MessageTextInput -from langflow.schema import Data -from langflow.template import Output - - -class DirectoryComponent(Component): - display_name = "Directory" - description = "Recursively load files from a directory." - icon = "folder" - name = "Directory" - - inputs = [ - MessageTextInput( - name="path", - display_name="Path", - info="Path to the directory to load files from.", - ), - MessageTextInput( - name="types", - display_name="Types", - info="File types to load. Leave empty to load all types.", - is_list=True, - ), - IntInput( - name="depth", - display_name="Depth", - info="Depth to search for files.", - value=0, - ), - IntInput( - name="max_concurrency", - display_name="Max Concurrency", - advanced=True, - info="Maximum concurrency for loading files.", - value=2, - ), - BoolInput( - name="load_hidden", - display_name="Load Hidden", - advanced=True, - info="If true, hidden files will be loaded.", - ), - BoolInput( - name="recursive", - display_name="Recursive", - advanced=True, - info="If true, the search will be recursive.", - ), - BoolInput( - name="silent_errors", - display_name="Silent Errors", - advanced=True, - info="If true, errors will not raise an exception.", - ), - BoolInput( - name="use_multithreading", - display_name="Use Multithreading", - advanced=True, - info="If true, multithreading will be used.", - ), - ] - - outputs = [ - Output(display_name="Data", name="data", method="load_directory"), - ] - - def load_directory(self) -> List[Data]: - path = self.path - types = self.types or [] # self.types is already a list due to is_list=True - depth = self.depth - max_concurrency = self.max_concurrency - load_hidden = self.load_hidden - recursive = self.recursive - silent_errors = self.silent_errors - use_multithreading = self.use_multithreading - - resolved_path = self.resolve_path(path) - file_paths = retrieve_file_paths(resolved_path, load_hidden, recursive, depth) - - if types: - file_paths = [fp for fp in file_paths if any(fp.endswith(ext) for ext in types)] - - loaded_data = [] - - if use_multithreading: - loaded_data = parallel_load_data(file_paths, silent_errors, max_concurrency) - else: - loaded_data = [parse_text_file_to_data(file_path, silent_errors) for file_path in file_paths] - loaded_data = list(filter(None, loaded_data)) - self.status = loaded_data - return loaded_data # type: ignore diff --git a/src/backend/base/langflow/components/data/File.py b/src/backend/base/langflow/components/data/File.py deleted file mode 100644 index f36c1846e566..000000000000 --- a/src/backend/base/langflow/components/data/File.py +++ /dev/null @@ -1,49 +0,0 @@ -from pathlib import Path - -from langflow.base.data.utils import TEXT_FILE_TYPES, parse_text_file_to_data -from langflow.custom import Component -from langflow.io import BoolInput, FileInput, Output -from langflow.schema import Data - - -class FileComponent(Component): - display_name = "File" - description = "A generic file loader." 
- icon = "file-text" - name = "File" - - inputs = [ - FileInput( - name="path", - display_name="Path", - file_types=TEXT_FILE_TYPES, - info=f"Supported file types: {', '.join(TEXT_FILE_TYPES)}", - ), - BoolInput( - name="silent_errors", - display_name="Silent Errors", - advanced=True, - info="If true, errors will not raise an exception.", - ), - ] - - outputs = [ - Output(display_name="Data", name="data", method="load_file"), - ] - - def load_file(self) -> Data: - if not self.path: - raise ValueError("Please, upload a file to use this component.") - resolved_path = self.resolve_path(self.path) - silent_errors = self.silent_errors - - extension = Path(resolved_path).suffix[1:].lower() - - if extension == "doc": - raise ValueError("doc files are not supported. Please save as .docx") - if extension not in TEXT_FILE_TYPES: - raise ValueError(f"Unsupported file type: {extension}") - - data = parse_text_file_to_data(resolved_path, silent_errors) - self.status = data if data else "No data" - return data or Data() diff --git a/src/backend/base/langflow/components/data/Gmail.py b/src/backend/base/langflow/components/data/Gmail.py deleted file mode 100644 index 6832bc9eec6b..000000000000 --- a/src/backend/base/langflow/components/data/Gmail.py +++ /dev/null @@ -1,190 +0,0 @@ -import base64 -import re -import json -from typing import Any, Iterator, List, Optional -from google.oauth2.credentials import Credentials -from googleapiclient.discovery import build -from langflow.custom import Component -from langflow.inputs import MessageTextInput -from langflow.io import SecretStrInput -from langflow.template import Output -from langflow.schema import Data -from langchain_google_community.gmail.loader import GMailLoader -from langchain_core.chat_sessions import ChatSession -from langchain_core.messages import HumanMessage -from json.decoder import JSONDecodeError -from google.auth.exceptions import RefreshError - - -class GmailLoaderComponent(Component): - display_name = "Gmail Loader" - description = "Loads emails from Gmail using provided credentials." 
- icon = "Google" - - inputs = [ - SecretStrInput( - name="json_string", - display_name="JSON String of the Service Account Token", - info="JSON string containing OAuth 2.0 access token information for service account access", - required=True, - value=str("""{ - "account": "", - "client_id": "", - "client_secret": "", - "expiry": "", - "refresh_token": "", - "scopes": [ - "https://www.googleapis.com/auth/gmail.readonly", - ], - "token": "", - "token_uri": "https://oauth2.googleapis.com/token", - "universe_domain": "googleapis.com" - }"""), - ), - MessageTextInput( - name="label_ids", - display_name="Label IDs", - info="Comma-separated list of label IDs to filter emails.", - required=True, - value="INBOX,SENT,UNREAD,IMPORTANT", - ), - MessageTextInput( - name="max_results", - display_name="Max Results", - info="Maximum number of emails to load.", - required=True, - value="10", - ), - ] - - outputs = [ - Output(display_name="Data", name="data", method="load_emails"), - ] - - def load_emails(self) -> Data: - class CustomGMailLoader(GMailLoader): - def __init__( - self, creds: Any, n: int = 100, label_ids: Optional[List[str]] = None, raise_error: bool = False - ) -> None: - super().__init__(creds, n, raise_error) - self.label_ids = label_ids if label_ids is not None else ["SENT"] - - def clean_message_content(self, message): - # Remove URLs - message = re.sub(r"http\S+|www\S+|https\S+", "", message, flags=re.MULTILINE) - - # Remove email addresses - message = re.sub(r"\S+@\S+", "", message) - - # Remove special characters and excessive whitespace - message = re.sub(r"[^A-Za-z0-9\s]+", " ", message) - message = re.sub(r"\s{2,}", " ", message) - - # Trim leading and trailing whitespace - message = message.strip() - - return message - - def _extract_email_content(self, msg: Any) -> HumanMessage: - from_email = None - for values in msg["payload"]["headers"]: - name = values["name"] - if name == "From": - from_email = values["value"] - if from_email is None: - raise ValueError("From email not found.") - - if "parts" in msg["payload"]: - parts = msg["payload"]["parts"] - else: - parts = [msg["payload"]] - - for part in parts: - if part["mimeType"] == "text/plain": - data = part["body"]["data"] - data = base64.urlsafe_b64decode(data).decode("utf-8") - pattern = re.compile(r"\r\nOn .+(\r\n)*wrote:\r\n") - newest_response = re.split(pattern, data)[0] - message = HumanMessage( - content=self.clean_message_content(newest_response), - additional_kwargs={"sender": from_email}, - ) - return message - raise ValueError("No plain text part found in the email.") - - def _get_message_data(self, service: Any, message: Any) -> ChatSession: - msg = service.users().messages().get(userId="me", id=message["id"]).execute() - message_content = self._extract_email_content(msg) - - in_reply_to = None - email_data = msg["payload"]["headers"] - for values in email_data: - name = values["name"] - if name == "In-Reply-To": - in_reply_to = values["value"] - - thread_id = msg["threadId"] - - if in_reply_to: - thread = service.users().threads().get(userId="me", id=thread_id).execute() - messages = thread["messages"] - - response_email = None - for message in messages: - email_data = message["payload"]["headers"] - for values in email_data: - if values["name"] == "Message-ID": - message_id = values["value"] - if message_id == in_reply_to: - response_email = message - if response_email is None: - raise ValueError("Response email not found in the thread.") - starter_content = self._extract_email_content(response_email) - return 
ChatSession(messages=[starter_content, message_content]) - else: - return ChatSession(messages=[message_content]) - - def lazy_load(self) -> Iterator[ChatSession]: - service = build("gmail", "v1", credentials=self.creds) - results = ( - service.users().messages().list(userId="me", labelIds=self.label_ids, maxResults=self.n).execute() - ) - messages = results.get("messages", []) - if not messages: - print("No messages found with the specified labels.") - for message in messages: - try: - yield self._get_message_data(service, message) - except Exception as e: - if self.raise_error: - raise e - else: - print(f"Error processing message {message['id']}: {e}") - - json_string = self.json_string - label_ids = self.label_ids.split(",") if self.label_ids else ["INBOX"] - max_results = int(self.max_results) if self.max_results else 100 - - # Load the token information from the JSON string - try: - token_info = json.loads(json_string) - except JSONDecodeError as e: - raise ValueError("Invalid JSON string") from e - - creds = Credentials.from_authorized_user_info(token_info) - - # Initialize the custom loader with the provided credentials - loader = CustomGMailLoader(creds=creds, n=max_results, label_ids=label_ids) - - try: - docs = loader.load() - except RefreshError as e: - raise ValueError( - "Authentication error: Unable to refresh authentication token. Please try to reauthenticate." - ) from e - except Exception as e: - raise ValueError(f"Error loading documents: {e}") from e - - # Return the loaded documents - self.status = docs - return Data(data={"text": docs}) diff --git a/src/backend/base/langflow/components/data/GoogleDrive.py b/src/backend/base/langflow/components/data/GoogleDrive.py deleted file mode 100644 index 4cc7450ebd20..000000000000 --- a/src/backend/base/langflow/components/data/GoogleDrive.py +++ /dev/null @@ -1,87 +0,0 @@ -import json -from typing import Optional -from google.oauth2.credentials import Credentials -from google.auth.exceptions import RefreshError -from langflow.custom import Component -from langflow.inputs import MessageTextInput -from langflow.io import SecretStrInput -from langflow.template import Output -from langflow.schema import Data -from langchain_google_community import GoogleDriveLoader -from langflow.helpers.data import docs_to_data - -from json.decoder import JSONDecodeError - - -class GoogleDriveComponent(Component): - display_name = "Google Drive Loader" - description = "Loads documents from Google Drive using provided credentials." 
- icon = "Google" - - inputs = [ - SecretStrInput( - name="json_string", - display_name="JSON String of the Service Account Token", - info="JSON string containing OAuth 2.0 access token information for service account access", - required=True, - ), - MessageTextInput( - name="document_id", display_name="Document ID", info="Single Google Drive document ID", required=True - ), - ] - - outputs = [ - Output(display_name="Loaded Documents", name="docs", method="load_documents"), - ] - - def load_documents(self) -> Data: - class CustomGoogleDriveLoader(GoogleDriveLoader): - creds: Optional[Credentials] = None - """Credentials object to be passed directly.""" - - def _load_credentials(self): - """Load credentials from the provided creds attribute or fallback to the original method.""" - if self.creds: - return self.creds - else: - raise ValueError("No credentials provided.") - - class Config: - arbitrary_types_allowed = True - - json_string = self.json_string - - document_ids = [self.document_id] - if len(document_ids) != 1: - raise ValueError("Expected a single document ID") - - # TODO: Add validation to check if the document ID is valid - - # Load the token information from the JSON string - try: - token_info = json.loads(json_string) - except JSONDecodeError as e: - raise ValueError("Invalid JSON string") from e - - # Initialize the custom loader with the provided credentials and document IDs - loader = CustomGoogleDriveLoader( - creds=Credentials.from_authorized_user_info(token_info), document_ids=document_ids - ) - - # Load the documents - try: - docs = loader.load() - # catch google.auth.exceptions.RefreshError - except RefreshError as e: - raise ValueError( - "Authentication error: Unable to refresh authentication token. Please try to reauthenticate." - ) from e - except Exception as e: - raise ValueError(f"Error loading documents: {e}") from e - - assert len(docs) == 1, "Expected a single document to be loaded." - - data = docs_to_data(docs) - # Return the loaded documents - self.status = data - return Data(data={"text": data}) diff --git a/src/backend/base/langflow/components/data/GoogleDriveSearch.py b/src/backend/base/langflow/components/data/GoogleDriveSearch.py deleted file mode 100644 index 05353f25645c..000000000000 --- a/src/backend/base/langflow/components/data/GoogleDriveSearch.py +++ /dev/null @@ -1,157 +0,0 @@ -import json -from typing import List -from google.oauth2.credentials import Credentials -from googleapiclient.discovery import build -from langflow.custom import Component -from langflow.inputs import MessageTextInput, DropdownInput -from langflow.io import SecretStrInput -from langflow.template import Output -from langflow.schema import Data - - -class GoogleDriveSearchComponent(Component): - display_name = "Google Drive Search" - description = "Searches Google Drive files using provided credentials and query parameters." 
- icon = "Google" - - inputs = [ - SecretStrInput( - name="token_string", - display_name="Token String", - info="JSON string containing OAuth 2.0 access token information for service account access", - required=True, - ), - DropdownInput( - name="query_item", - display_name="Query Item", - options=[ - "name", - "fullText", - "mimeType", - "modifiedTime", - "viewedByMeTime", - "trashed", - "starred", - "parents", - "owners", - "writers", - "readers", - "sharedWithMe", - "createdTime", - "properties", - "appProperties", - "visibility", - "shortcutDetails.targetId", - ], - info="The field to query.", - required=True, - ), - DropdownInput( - name="valid_operator", - display_name="Valid Operator", - options=["contains", "=", "!=", "<=", "<", ">", ">=", "in", "has"], - info="Operator to use in the query.", - required=True, - ), - MessageTextInput( - name="search_term", - display_name="Search Term", - info="The value to search for in the specified query item.", - required=True, - ), - MessageTextInput( - name="query_string", - display_name="Query String", - info="The query string used for searching. You can edit this manually.", - value="", # This will be updated with the generated query string - ), - ] - - outputs = [ - Output(display_name="Document URLs", name="doc_urls", method="search_doc_urls"), - Output(display_name="Document IDs", name="doc_ids", method="search_doc_ids"), - Output(display_name="Document Titles", name="doc_titles", method="search_doc_titles"), - Output(display_name="Data", name="Data", method="search_data"), - ] - - def generate_query_string(self) -> str: - query_item = self.query_item - valid_operator = self.valid_operator - search_term = self.search_term - - # Construct the query string - query = f"{query_item} {valid_operator} '{search_term}'" - - # Update the editable query string input with the generated query - self.query_string = query - - return query - - def on_inputs_changed(self): - # Automatically regenerate the query string when inputs change - self.generate_query_string() - - def generate_file_url(self, file_id: str, mime_type: str) -> str: - """ - Generates the appropriate Google Drive URL for a file based on its MIME type. 
- """ - if mime_type == "application/vnd.google-apps.document": - return f"https://docs.google.com/document/d/{file_id}/edit" - elif mime_type == "application/vnd.google-apps.spreadsheet": - return f"https://docs.google.com/spreadsheets/d/{file_id}/edit" - elif mime_type == "application/vnd.google-apps.presentation": - return f"https://docs.google.com/presentation/d/{file_id}/edit" - elif mime_type == "application/vnd.google-apps.drawing": - return f"https://docs.google.com/drawings/d/{file_id}/edit" - elif mime_type == "application/pdf": - return f"https://drive.google.com/file/d/{file_id}/view?usp=drivesdk" - else: - return f"https://drive.google.com/file/d/{file_id}/view?usp=drivesdk" - - def search_files(self) -> dict: - # Load the token information from the JSON string - token_info = json.loads(self.token_string) - creds = Credentials.from_authorized_user_info(token_info) - - # Use the query string from the input (which might have been edited by the user) - query = self.query_string if self.query_string else self.generate_query_string() - - # Initialize the Google Drive API service - service = build("drive", "v3", credentials=creds) - - # Perform the search - results = service.files().list(q=query, pageSize=5, fields="nextPageToken, files(id, name, mimeType)").execute() - items = results.get("files", []) - - doc_urls = [] - doc_ids = [] - doc_titles_urls = [] - doc_titles = [] - - if items: - for item in items: - # Directly use the file ID, title, and MIME type to generate the URL - file_id = item["id"] - file_title = item["name"] - mime_type = item["mimeType"] - file_url = self.generate_file_url(file_id, mime_type) - - # Store the URL, ID, and title+URL in their respective lists - doc_urls.append(file_url) - doc_ids.append(file_id) - doc_titles.append(file_title) - doc_titles_urls.append({"title": file_title, "url": file_url}) - - return {"doc_urls": doc_urls, "doc_ids": doc_ids, "doc_titles_urls": doc_titles_urls, "doc_titles": doc_titles} - - def search_doc_ids(self) -> List[str]: - return self.search_files()["doc_ids"] - - def search_doc_urls(self) -> List[str]: - return self.search_files()["doc_urls"] - - def search_doc_titles(self) -> List[str]: - return self.search_files()["doc_titles"] - - def search_data(self) -> Data: - return Data(data={"text": self.search_files()["doc_titles_urls"]}) diff --git a/src/backend/base/langflow/components/data/URL.py b/src/backend/base/langflow/components/data/URL.py deleted file mode 100644 index 967ab20cfcea..000000000000 --- a/src/backend/base/langflow/components/data/URL.py +++ /dev/null @@ -1,68 +0,0 @@ -import re - -from langchain_community.document_loaders.web_base import WebBaseLoader - -from langflow.custom import Component -from langflow.io import MessageTextInput, Output -from langflow.schema import Data - - -class URLComponent(Component): - display_name = "URL" - description = "Fetch content from one or more URLs." - icon = "layout-template" - name = "URL" - - inputs = [ - MessageTextInput( - name="urls", - display_name="URLs", - info="Enter one or more URLs, by clicking the '+' button.", - is_list=True, - ), - ] - - outputs = [ - Output(display_name="Data", name="data", method="fetch_content"), - ] - - def ensure_url(self, string: str) -> str: - """ - Ensures the given string is a URL by adding 'http://' if it doesn't start with 'http://' or 'https://'. - Raises an error if the string is not a valid URL. - - Parameters: - string (str): The string to be checked and possibly modified. 
- - Returns: - str: The modified string that is ensured to be a URL. - - Raises: - ValueError: If the string is not a valid URL. - """ - if not string.startswith(("http://", "https://")): - string = "http://" + string - - # Basic URL validation regex - url_regex = re.compile( - r"^(https?:\/\/)?" # optional protocol - r"(www\.)?" # optional www - r"([a-zA-Z0-9.-]+)" # domain - r"(\.[a-zA-Z]{2,})?" # top-level domain - r"(:\d+)?" # optional port - r"(\/[^\s]*)?$", # optional path - re.IGNORECASE, - ) - - if not url_regex.match(string): - raise ValueError(f"Invalid URL: {string}") - - return string - - def fetch_content(self) -> list[Data]: - urls = [self.ensure_url(url.strip()) for url in self.urls if url.strip()] - loader = WebBaseLoader(web_paths=urls, encoding="utf-8") - docs = loader.load() - data = [Data(text=doc.page_content, **doc.metadata) for doc in docs] - self.status = data - return data diff --git a/src/backend/base/langflow/components/data/Webhook.py b/src/backend/base/langflow/components/data/Webhook.py deleted file mode 100644 index e02367332fe7..000000000000 --- a/src/backend/base/langflow/components/data/Webhook.py +++ /dev/null @@ -1,38 +0,0 @@ -import json - -from langflow.custom import Component -from langflow.io import MultilineInput, Output -from langflow.schema import Data - - -class WebhookComponent(Component): - display_name = "Webhook Input" - description = "Defines a webhook input for the flow." - name = "Webhook" - - inputs = [ - MultilineInput( - name="data", - display_name="Data", - info="Use this field to quickly test the webhook component by providing a JSON payload.", - ) - ] - outputs = [ - Output(display_name="Data", name="output_data", method="build_data"), - ] - - def build_data(self) -> Data: - message: str | Data = "" - if not self.data: - self.status = "No data provided." - return Data(data={}) - try: - body = json.loads(self.data or "{}") - except json.JSONDecodeError: - body = {"payload": self.data} - message = f"Invalid JSON payload. 
Please check the format.\n\n{self.data}" - data = Data(data=body) - if not message: - message = data - self.status = message - return data diff --git a/src/backend/base/langflow/components/data/__init__.py b/src/backend/base/langflow/components/data/__init__.py index ba037a740f5e..820f683b8103 100644 --- a/src/backend/base/langflow/components/data/__init__.py +++ b/src/backend/base/langflow/components/data/__init__.py @@ -1,7 +1,19 @@ -from .APIRequest import APIRequestComponent -from .Directory import DirectoryComponent -from .File import FileComponent -from .URL import URLComponent -from .Webhook import WebhookComponent +from .api_request import APIRequestComponent +from .csv_to_data import CSVToDataComponent +from .directory import DirectoryComponent +from .file import FileComponent +from .json_to_data import JSONToDataComponent +from .sql_executor import SQLExecutorComponent +from .url import URLComponent +from .webhook import WebhookComponent -__all__ = ["APIRequestComponent", "DirectoryComponent", "FileComponent", "URLComponent", "WebhookComponent"] +__all__ = [ + "APIRequestComponent", + "CSVToDataComponent", + "DirectoryComponent", + "FileComponent", + "SQLExecutorComponent", + "URLComponent", + "WebhookComponent", + "JSONToDataComponent", +] diff --git a/src/backend/base/langflow/components/data/api_request.py b/src/backend/base/langflow/components/data/api_request.py new file mode 100644 index 000000000000..f5f1aa52138a --- /dev/null +++ b/src/backend/base/langflow/components/data/api_request.py @@ -0,0 +1,209 @@ +import asyncio +import json +from typing import Any +from urllib.parse import parse_qsl, urlencode, urlparse, urlunparse + +import httpx +from loguru import logger + +from langflow.base.curl.parse import parse_context +from langflow.custom import Component +from langflow.io import DataInput, DropdownInput, IntInput, MessageTextInput, NestedDictInput, Output +from langflow.schema import Data +from langflow.schema.dotdict import dotdict + + +class APIRequestComponent(Component): + display_name = "API Request" + description = ( + "This component allows you to make HTTP requests to one or more URLs. " + "You can provide headers and body as either dictionaries or Data objects. " + "Additionally, you can append query parameters to the URLs.\n\n" + "**Note:** Check advanced options for more settings." + ) + icon = "Globe" + name = "APIRequest" + + inputs = [ + MessageTextInput( + name="urls", + display_name="URLs", + is_list=True, + info="Enter one or more URLs, separated by commas.", + ), + MessageTextInput( + name="curl", + display_name="cURL", + info="Paste a curl command to populate the fields. " + "This will fill in the dictionary fields for headers and body.", + advanced=False, + refresh_button=True, + real_time_refresh=True, + tool_mode=True, + ), + DropdownInput( + name="method", + display_name="Method", + options=["GET", "POST", "PATCH", "PUT"], + value="GET", + info="The HTTP method to use (GET, POST, PATCH, PUT).", + ), + NestedDictInput( + name="headers", + display_name="Headers", + info="The headers to send with the request as a dictionary. This is populated when using the CURL field.", + input_types=["Data"], + ), + NestedDictInput( + name="body", + display_name="Body", + info="The body to send with the request as a dictionary (for POST, PATCH, PUT). 
" + "This is populated when using the CURL field.", + input_types=["Data"], + ), + DataInput( + name="query_params", + display_name="Query Parameters", + info="The query parameters to append to the URL.", + tool_mode=True, + ), + IntInput( + name="timeout", + display_name="Timeout", + value=5, + info="The timeout to use for the request.", + ), + ] + + outputs = [ + Output(display_name="Data", name="data", method="make_requests"), + ] + + def parse_curl(self, curl: str, build_config: dotdict) -> dotdict: + try: + parsed = parse_context(curl) + build_config["urls"]["value"] = [parsed.url] + build_config["method"]["value"] = parsed.method.upper() + build_config["headers"]["value"] = dict(parsed.headers) + + if parsed.data: + try: + json_data = json.loads(parsed.data) + build_config["body"]["value"] = json_data + except json.JSONDecodeError: + logger.exception("Error decoding JSON data") + else: + build_config["body"]["value"] = {} + except Exception as exc: + msg = f"Error parsing curl: {exc}" + logger.exception(msg) + raise ValueError(msg) from exc + return build_config + + def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None): + if field_name == "curl" and field_value: + build_config = self.parse_curl(field_value, build_config) + return build_config + + async def make_request( + self, + client: httpx.AsyncClient, + method: str, + url: str, + headers: dict | None = None, + body: dict | None = None, + timeout: int = 5, + ) -> Data: + method = method.upper() + if method not in {"GET", "POST", "PATCH", "PUT", "DELETE"}: + msg = f"Unsupported method: {method}" + raise ValueError(msg) + + if isinstance(body, str) and body: + try: + body = json.loads(body) + except Exception as e: + msg = f"Error decoding JSON data: {e}" + logger.exception(msg) + body = None + raise ValueError(msg) from e + + data = body or None + + try: + response = await client.request(method, url, headers=headers, json=data, timeout=timeout) + try: + result = response.json() + except Exception: # noqa: BLE001 + logger.opt(exception=True).debug("Error decoding JSON response") + result = response.text + return Data( + data={ + "source": url, + "headers": headers, + "status_code": response.status_code, + "result": result, + }, + ) + except httpx.TimeoutException: + return Data( + data={ + "source": url, + "headers": headers, + "status_code": 408, + "error": "Request timed out", + }, + ) + except Exception as exc: # noqa: BLE001 + logger.opt(exception=True).debug(f"Error making request to {url}") + return Data( + data={ + "source": url, + "headers": headers, + "status_code": 500, + "error": str(exc), + }, + ) + + def add_query_params(self, url: str, params: dict) -> str: + url_parts = list(urlparse(url)) + query = dict(parse_qsl(url_parts[4])) + query.update(params) + url_parts[4] = urlencode(query) + return urlunparse(url_parts) + + async def make_requests(self) -> list[Data]: + method = self.method + urls = [url.strip() for url in self.urls if url.strip()] + curl = self.curl + headers = self.headers or {} + body = self.body or {} + timeout = self.timeout + + if isinstance(self.query_params, str): + query_params = dict(parse_qsl(self.query_params)) + else: + query_params = self.query_params.data if self.query_params else {} + + if curl: + self._build_config = self.parse_curl(curl, dotdict()) + + if isinstance(headers, Data): + headers = headers.data + + if isinstance(body, Data): + body = body.data + + bodies = [body] * len(urls) + + urls = [self.add_query_params(url, query_params) 
for url in urls] + + async with httpx.AsyncClient() as client: + results = await asyncio.gather( + *[ + self.make_request(client, method, u, headers, rec, timeout) + for u, rec in zip(urls, bodies, strict=True) + ] + ) + self.status = results + return results diff --git a/src/backend/base/langflow/components/data/csv_to_data.py b/src/backend/base/langflow/components/data/csv_to_data.py new file mode 100644 index 000000000000..e4fb7e6f0a16 --- /dev/null +++ b/src/backend/base/langflow/components/data/csv_to_data.py @@ -0,0 +1,89 @@ +import csv +import io +from pathlib import Path + +from langflow.custom import Component +from langflow.io import FileInput, MessageTextInput, MultilineInput, Output +from langflow.schema import Data + + +class CSVToDataComponent(Component): + display_name = "Load CSV" + description = "Load a CSV file, CSV from a file path, or a valid CSV string and convert it to a list of Data" + icon = "file-spreadsheet" + name = "CSVtoData" + legacy = True + + inputs = [ + FileInput( + name="csv_file", + display_name="CSV File", + file_types=["csv"], + info="Upload a CSV file to convert to a list of Data objects", + ), + MessageTextInput( + name="csv_path", + display_name="CSV File Path", + info="Provide the path to the CSV file as pure text", + ), + MultilineInput( + name="csv_string", + display_name="CSV String", + info="Paste a CSV string directly to convert to a list of Data objects", + ), + ] + + outputs = [ + Output(name="data_list", display_name="Data List", method="load_csv_to_data"), + ] + + def load_csv_to_data(self) -> list[Data]: + if sum(bool(field) for field in [self.csv_file, self.csv_path, self.csv_string]) != 1: + msg = "Please provide exactly one of: CSV file, file path, or CSV string." + raise ValueError(msg) + + csv_data = None + try: + if self.csv_file: + resolved_path = self.resolve_path(self.csv_file) + file_path = Path(resolved_path) + if file_path.suffix.lower() != ".csv": + self.status = "The provided file must be a CSV file." + else: + with file_path.open(newline="", encoding="utf-8") as csvfile: + csv_data = csvfile.read() + + elif self.csv_path: + file_path = Path(self.csv_path) + if file_path.suffix.lower() != ".csv": + self.status = "The provided file must be a CSV file." + else: + with file_path.open(newline="", encoding="utf-8") as csvfile: + csv_data = csvfile.read() + + else: + csv_data = self.csv_string + + if csv_data: + csv_reader = csv.DictReader(io.StringIO(csv_data)) + result = [Data(data=row) for row in csv_reader] + + if not result: + self.status = "The CSV data is empty." 
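+                    # A parseable but empty CSV is reported via self.status and
+                    # yields an empty list rather than raising an exception.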
+ return [] + + self.status = result + return result + + except csv.Error as e: + error_message = f"CSV parsing error: {e}" + self.status = error_message + raise ValueError(error_message) from e + + except Exception as e: + error_message = f"An error occurred: {e}" + self.status = error_message + raise ValueError(error_message) from e + + # An error occurred + raise ValueError(self.status) diff --git a/src/backend/base/langflow/components/data/directory.py b/src/backend/base/langflow/components/data/directory.py new file mode 100644 index 000000000000..c41b3e587f63 --- /dev/null +++ b/src/backend/base/langflow/components/data/directory.py @@ -0,0 +1,97 @@ +from langflow.base.data.utils import TEXT_FILE_TYPES, parallel_load_data, parse_text_file_to_data, retrieve_file_paths +from langflow.custom import Component +from langflow.io import BoolInput, IntInput, MessageTextInput +from langflow.schema import Data +from langflow.template import Output + + +class DirectoryComponent(Component): + display_name = "Directory" + description = "Recursively load files from a directory." + icon = "folder" + name = "Directory" + + inputs = [ + MessageTextInput( + name="path", + display_name="Path", + info="Path to the directory to load files from.", + ), + MessageTextInput( + name="types", + display_name="Types", + info="File types to load. Leave empty to load all default supported types.", + is_list=True, + ), + IntInput( + name="depth", + display_name="Depth", + info="Depth to search for files.", + value=0, + ), + IntInput( + name="max_concurrency", + display_name="Max Concurrency", + advanced=True, + info="Maximum concurrency for loading files.", + value=2, + ), + BoolInput( + name="load_hidden", + display_name="Load Hidden", + advanced=True, + info="If true, hidden files will be loaded.", + ), + BoolInput( + name="recursive", + display_name="Recursive", + advanced=True, + info="If true, the search will be recursive.", + ), + BoolInput( + name="silent_errors", + display_name="Silent Errors", + advanced=True, + info="If true, errors will not raise an exception.", + ), + BoolInput( + name="use_multithreading", + display_name="Use Multithreading", + advanced=True, + info="If true, multithreading will be used.", + ), + ] + + outputs = [ + Output(display_name="Data", name="data", method="load_directory"), + ] + + def load_directory(self) -> list[Data]: + path = self.path + types = ( + self.types if self.types and self.types != [""] else TEXT_FILE_TYPES + ) # self.types is already a list due to is_list=True + depth = self.depth + max_concurrency = self.max_concurrency + load_hidden = self.load_hidden + recursive = self.recursive + silent_errors = self.silent_errors + use_multithreading = self.use_multithreading + + resolved_path = self.resolve_path(path) + file_paths = retrieve_file_paths( + resolved_path, load_hidden=load_hidden, recursive=recursive, depth=depth, types=types + ) + + if types: + file_paths = [fp for fp in file_paths if any(fp.endswith(ext) for ext in types)] + + loaded_data = [] + + if use_multithreading: + loaded_data = parallel_load_data(file_paths, silent_errors=silent_errors, max_concurrency=max_concurrency) + else: + loaded_data = [parse_text_file_to_data(file_path, silent_errors=silent_errors) for file_path in file_paths] + loaded_data = list(filter(None, loaded_data)) + self.status = loaded_data + return loaded_data # type: ignore[return-value] diff --git a/src/backend/base/langflow/components/data/file.py b/src/backend/base/langflow/components/data/file.py new file mode 100644 index 
000000000000..8a5088aaea01 --- /dev/null +++ b/src/backend/base/langflow/components/data/file.py @@ -0,0 +1,211 @@ +from pathlib import Path +from tempfile import NamedTemporaryFile +from zipfile import ZipFile, is_zipfile + +from langflow.base.data.utils import TEXT_FILE_TYPES, parallel_load_data, parse_text_file_to_data +from langflow.custom import Component +from langflow.io import BoolInput, FileInput, IntInput, Output +from langflow.schema import Data + + +class FileComponent(Component): + """Handles loading of individual or zipped text files. + + Processes multiple valid files within a zip archive if provided. + + Attributes: + display_name: Display name of the component. + description: Brief component description. + icon: Icon to represent the component. + name: Identifier for the component. + inputs: Inputs required by the component. + outputs: Output of the component after processing files. + """ + + display_name = "File" + description = "Load a file to be used in your project." + icon = "file-text" + name = "File" + + inputs = [ + FileInput( + name="path", + display_name="Path", + file_types=[*TEXT_FILE_TYPES, "zip"], + info=f"Supported file types: {', '.join([*TEXT_FILE_TYPES, 'zip'])}", + ), + BoolInput( + name="silent_errors", + display_name="Silent Errors", + advanced=True, + info="If true, errors will not raise an exception.", + ), + BoolInput( + name="use_multithreading", + display_name="Use Multithreading", + advanced=True, + info="If true, parallel processing will be enabled for zip files.", + ), + IntInput( + name="concurrency_multithreading", + display_name="Multithreading Concurrency", + advanced=True, + info="The maximum number of workers to use, if concurrency is enabled", + value=4, + ), + ] + + outputs = [Output(display_name="Data", name="data", method="load_file")] + + def load_file(self) -> Data: + """Load and parse file(s) from a zip archive. + + Raises: + ValueError: If no file is uploaded or file path is invalid. + + Returns: + Data: Parsed data from file(s). + """ + # Check if the file path is provided + if not self.path: + self.log("File path is missing.") + msg = "Please upload a file for processing." + + raise ValueError(msg) + + resolved_path = Path(self.resolve_path(self.path)) + try: + # Check if the file is a zip archive + if is_zipfile(resolved_path): + self.log(f"Processing zip file: {resolved_path.name}.") + + return self._process_zip_file( + resolved_path, + silent_errors=self.silent_errors, + parallel=self.use_multithreading, + ) + + self.log(f"Processing single file: {resolved_path.name}.") + + return self._process_single_file(resolved_path, silent_errors=self.silent_errors) + except FileNotFoundError: + self.log(f"File not found: {resolved_path.name}.") + + raise + + def _process_zip_file(self, zip_path: Path, *, silent_errors: bool = False, parallel: bool = False) -> Data: + """Process text files within a zip archive. + + Args: + zip_path: Path to the zip file. + silent_errors: Suppresses errors if True. + parallel: Enables parallel processing if True. + + Returns: + list[Data]: Combined data from all valid files. + + Raises: + ValueError: If no valid files found in the archive. 
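+
+        Note:
+            Each archive member is copied to a temporary file before parsing and
+            removed afterwards, so nothing is extracted into the working directory.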
+ """ + data: list[Data] = [] + with ZipFile(zip_path, "r") as zip_file: + # Filter file names based on extensions in TEXT_FILE_TYPES and ignore hidden files + valid_files = [ + name + for name in zip_file.namelist() + if ( + any(name.endswith(ext) for ext in TEXT_FILE_TYPES) + and not name.startswith("__MACOSX") + and not name.startswith(".") + ) + ] + + # Raise an error if no valid files found + if not valid_files: + self.log("No valid files in the zip archive.") + + # Return empty data if silent_errors is True + if silent_errors: + return data # type: ignore[return-value] + + # Raise an error if no valid files found + msg = "No valid files in the zip archive." + raise ValueError(msg) + + # Define a function to process each file + def process_file(file_name, silent_errors=silent_errors): + with NamedTemporaryFile(delete=False) as temp_file: + temp_path = Path(temp_file.name).with_name(file_name) + with zip_file.open(file_name) as file_content: + temp_path.write_bytes(file_content.read()) + try: + return self._process_single_file(temp_path, silent_errors=silent_errors) + finally: + temp_path.unlink() + + # Process files in parallel if specified + if parallel: + self.log( + f"Initializing parallel Thread Pool Executor with max workers: " + f"{self.concurrency_multithreading}." + ) + + # Process files in parallel + initial_data = parallel_load_data( + valid_files, + silent_errors=silent_errors, + load_function=process_file, + max_concurrency=self.concurrency_multithreading, + ) + + # Filter out empty data + data = list(filter(None, initial_data)) + else: + # Sequential processing + data = [process_file(file_name) for file_name in valid_files] + + self.log(f"Successfully processed zip file: {zip_path.name}.") + + return data # type: ignore[return-value] + + def _process_single_file(self, file_path: Path, *, silent_errors: bool = False) -> Data: + """Process a single file. + + Args: + file_path: Path to the file. + silent_errors: Suppresses errors if True. + + Returns: + Data: Parsed data from the file. + + Raises: + ValueError: For unsupported file formats. + """ + # Check if the file type is supported + if not any(file_path.suffix == ext for ext in ["." 
+ f for f in TEXT_FILE_TYPES]): + self.log(f"Unsupported file type: {file_path.suffix}") + + # Return empty data if silent_errors is True + if silent_errors: + return Data() + + msg = f"Unsupported file type: {file_path.suffix}" + raise ValueError(msg) + + try: + # Parse the text file as appropriate + data = parse_text_file_to_data(str(file_path), silent_errors=silent_errors) # type: ignore[assignment] + if not data: + data = Data() + + self.log(f"Successfully processed file: {file_path.name}.") + except Exception as e: + self.log(f"Error processing file {file_path.name}: {e}") + + # Return empty data if silent_errors is True + if not silent_errors: + raise + + data = Data() + + return data diff --git a/src/backend/base/langflow/components/data/json_to_data.py b/src/backend/base/langflow/components/data/json_to_data.py new file mode 100644 index 000000000000..9374cddaa655 --- /dev/null +++ b/src/backend/base/langflow/components/data/json_to_data.py @@ -0,0 +1,98 @@ +import json +from pathlib import Path + +from json_repair import repair_json + +from langflow.custom import Component +from langflow.io import FileInput, MessageTextInput, MultilineInput, Output +from langflow.schema import Data + + +class JSONToDataComponent(Component): + display_name = "Load JSON" + description = ( + "Convert a JSON file, JSON from a file path, or a JSON string to a Data object or a list of Data objects" + ) + icon = "braces" + name = "JSONtoData" + legacy = True + + inputs = [ + FileInput( + name="json_file", + display_name="JSON File", + file_types=["json"], + info="Upload a JSON file to convert to a Data object or list of Data objects", + ), + MessageTextInput( + name="json_path", + display_name="JSON File Path", + info="Provide the path to the JSON file as pure text", + ), + MultilineInput( + name="json_string", + display_name="JSON String", + info="Enter a valid JSON string (object or array) to convert to a Data object or list of Data objects", + ), + ] + + outputs = [ + Output(name="data", display_name="Data", method="convert_json_to_data"), + ] + + def convert_json_to_data(self) -> Data | list[Data]: + if sum(bool(field) for field in [self.json_file, self.json_path, self.json_string]) != 1: + msg = "Please provide exactly one of: JSON file, file path, or JSON string." + self.status = msg + raise ValueError(msg) + + json_data = None + + try: + if self.json_file: + resolved_path = self.resolve_path(self.json_file) + file_path = Path(resolved_path) + if file_path.suffix.lower() != ".json": + self.status = "The provided file must be a JSON file." + else: + json_data = file_path.read_text(encoding="utf-8") + + elif self.json_path: + file_path = Path(self.json_path) + if file_path.suffix.lower() != ".json": + self.status = "The provided file must be a JSON file." 
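+                    # json_data remains None here, so control falls through to the
+                    # final `raise ValueError(self.status)` at the end of this method.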
+ else: + json_data = file_path.read_text(encoding="utf-8") + + else: + json_data = self.json_string + + if json_data: + # Try to parse the JSON string + try: + parsed_data = json.loads(json_data) + except json.JSONDecodeError: + # If JSON parsing fails, try to repair the JSON string + repaired_json_string = repair_json(json_data) + parsed_data = json.loads(repaired_json_string) + + # Check if the parsed data is a list + if isinstance(parsed_data, list): + result = [Data(data=item) for item in parsed_data] + else: + result = Data(data=parsed_data) + self.status = result + return result + + except (json.JSONDecodeError, SyntaxError, ValueError) as e: + error_message = f"Invalid JSON or Python literal: {e}" + self.status = error_message + raise ValueError(error_message) from e + + except Exception as e: + error_message = f"An error occurred: {e}" + self.status = error_message + raise ValueError(error_message) from e + + # An error occurred + raise ValueError(self.status) diff --git a/src/backend/base/langflow/components/data/sql_executor.py b/src/backend/base/langflow/components/data/sql_executor.py new file mode 100644 index 000000000000..add27a8b9b5c --- /dev/null +++ b/src/backend/base/langflow/components/data/sql_executor.py @@ -0,0 +1,74 @@ +from langchain_community.tools.sql_database.tool import QuerySQLDataBaseTool +from langchain_community.utilities import SQLDatabase + +from langflow.custom import CustomComponent +from langflow.field_typing import Text + + +class SQLExecutorComponent(CustomComponent): + display_name = "SQL Query" + description = "Execute SQL query." + name = "SQLExecutor" + beta: bool = True + + def build_config(self): + return { + "database_url": { + "display_name": "Database URL", + "info": "The URL of the database.", + }, + "include_columns": { + "display_name": "Include Columns", + "info": "Include columns in the result.", + }, + "passthrough": { + "display_name": "Passthrough", + "info": "If an error occurs, return the query instead of raising an exception.", + }, + "add_error": { + "display_name": "Add Error", + "info": "Add the error to the result.", + }, + } + + def clean_up_uri(self, uri: str) -> str: + if uri.startswith("postgresql://"): + uri = uri.replace("postgresql://", "postgres://") + return uri.strip() + + def build( + self, + query: str, + database_url: str, + *, + include_columns: bool = False, + passthrough: bool = False, + add_error: bool = False, + **kwargs, + ) -> Text: + _ = kwargs + error = None + try: + database = SQLDatabase.from_uri(database_url) + except Exception as e: + msg = f"An error occurred while connecting to the database: {e}" + raise ValueError(msg) from e + try: + tool = QuerySQLDataBaseTool(db=database) + result = tool.run(query, include_columns=include_columns) + self.status = result + except Exception as e: + result = str(e) + self.status = result + if not passthrough: + raise + error = repr(e) + + if add_error and error is not None: + result = f"{result}\n\nError: {error}\n\nQuery: {query}" + elif error is not None: + # Then we won't add the error to the result + # but since we are in passthrough mode, we will return the query + result = query + + return result diff --git a/src/backend/base/langflow/components/data/url.py b/src/backend/base/langflow/components/data/url.py new file mode 100644 index 000000000000..08dc08f48de4 --- /dev/null +++ b/src/backend/base/langflow/components/data/url.py @@ -0,0 +1,90 @@ +import re + +from langchain_community.document_loaders import AsyncHtmlLoader, WebBaseLoader + +from 
langflow.custom import Component +from langflow.helpers.data import data_to_text +from langflow.io import DropdownInput, MessageTextInput, Output +from langflow.schema import Data +from langflow.schema.message import Message + + +class URLComponent(Component): + display_name = "URL" + description = "Fetch content from one or more URLs." + icon = "layout-template" + name = "URL" + + inputs = [ + MessageTextInput( + name="urls", + display_name="URLs", + info="Enter one or more URLs, by clicking the '+' button.", + is_list=True, + tool_mode=True, + ), + DropdownInput( + name="format", + display_name="Output format", + info="Output format. Use 'Text' to extract the text from the HTML or 'Raw HTML' for the raw HTML content.", + options=["Text", "Raw HTML"], + value="Text", + ), + ] + + outputs = [ + Output(display_name="Data", name="data", method="fetch_content"), + Output(display_name="Text", name="text", method="fetch_content_text"), + ] + + def ensure_url(self, string: str) -> str: + """Ensures the given string is a URL by adding 'http://' if it doesn't start with 'http://' or 'https://'. + + Raises an error if the string is not a valid URL. + + Parameters: + string (str): The string to be checked and possibly modified. + + Returns: + str: The modified string that is ensured to be a URL. + + Raises: + ValueError: If the string is not a valid URL. + """ + if not string.startswith(("http://", "https://")): + string = "http://" + string + + # Basic URL validation regex + url_regex = re.compile( + r"^(https?:\/\/)?" # optional protocol + r"(www\.)?" # optional www + r"([a-zA-Z0-9.-]+)" # domain + r"(\.[a-zA-Z]{2,})?" # top-level domain + r"(:\d+)?" # optional port + r"(\/[^\s]*)?$", # optional path + re.IGNORECASE, + ) + + if not url_regex.match(string): + msg = f"Invalid URL: {string}" + raise ValueError(msg) + + return string + + def fetch_content(self) -> list[Data]: + urls = [self.ensure_url(url.strip()) for url in self.urls if url.strip()] + if self.format == "Raw HTML": + loader = AsyncHtmlLoader(web_path=urls, encoding="utf-8") + else: + loader = WebBaseLoader(web_paths=urls, encoding="utf-8") + docs = loader.load() + data = [Data(text=doc.page_content, **doc.metadata) for doc in docs] + self.status = data + return data + + def fetch_content_text(self) -> Message: + data = self.fetch_content() + + result_string = data_to_text("{text}", data) + self.status = result_string + return Message(text=result_string) diff --git a/src/backend/base/langflow/components/data/webhook.py b/src/backend/base/langflow/components/data/webhook.py new file mode 100644 index 000000000000..2f58855d856c --- /dev/null +++ b/src/backend/base/langflow/components/data/webhook.py @@ -0,0 +1,38 @@ +import json + +from langflow.custom import Component +from langflow.io import MultilineInput, Output +from langflow.schema import Data + + +class WebhookComponent(Component): + display_name = "Webhook" + description = "Defines a webhook input for the flow." + name = "Webhook" + + inputs = [ + MultilineInput( + name="data", + display_name="Payload", + info="Receives a payload from external systems via HTTP POST.", + ) + ] + outputs = [ + Output(display_name="Data", name="output_data", method="build_data"), + ] + + def build_data(self) -> Data: + message: str | Data = "" + if not self.data: + self.status = "No data provided." + return Data(data={}) + try: + body = json.loads(self.data or "{}") + except json.JSONDecodeError: + body = {"payload": self.data} + message = f"Invalid JSON payload. 
Please check the format.\n\n{self.data}" + data = Data(data=body) + if not message: + message = data + self.status = message + return data diff --git a/src/backend/base/langflow/components/deactivated/ChatLiteLLMModel.py b/src/backend/base/langflow/components/deactivated/ChatLiteLLMModel.py deleted file mode 100644 index a1cff2aed5fd..000000000000 --- a/src/backend/base/langflow/components/deactivated/ChatLiteLLMModel.py +++ /dev/null @@ -1,157 +0,0 @@ -from langchain_community.chat_models.litellm import ChatLiteLLM, ChatLiteLLMException - -from langflow.base.constants import STREAM_INFO_TEXT -from langflow.base.models.model import LCModelComponent -from langflow.field_typing import LanguageModel -from langflow.io import ( - BoolInput, - DictInput, - DropdownInput, - FloatInput, - IntInput, - MessageInput, - SecretStrInput, - StrInput, -) - - -class ChatLiteLLMModelComponent(LCModelComponent): - display_name = "LiteLLM" - description = "`LiteLLM` collection of large language models." - documentation = "https://python.langchain.com/docs/integrations/chat/litellm" - icon = "🚄" - - inputs = [ - MessageInput(name="input_value", display_name="Input"), - StrInput( - name="model", - display_name="Model name", - advanced=False, - required=True, - info="The name of the model to use. For example, `gpt-3.5-turbo`.", - ), - SecretStrInput( - name="api_key", - display_name="API Key", - advanced=False, - required=False, - ), - DropdownInput( - name="provider", - display_name="Provider", - info="The provider of the API key.", - options=[ - "OpenAI", - "Azure", - "Anthropic", - "Replicate", - "Cohere", - "OpenRouter", - ], - ), - FloatInput( - name="temperature", - display_name="Temperature", - advanced=False, - required=False, - value=0.7, - ), - DictInput( - name="kwargs", - display_name="Kwargs", - advanced=True, - required=False, - is_list=True, - value={}, - ), - DictInput( - name="model_kwargs", - display_name="Model kwargs", - advanced=True, - required=False, - is_list=True, - value={}, - ), - FloatInput(name="top_p", display_name="Top p", advanced=True, required=False, value=0.5), - IntInput(name="top_k", display_name="Top k", advanced=True, required=False, value=35), - IntInput( - name="n", - display_name="N", - advanced=True, - required=False, - info="Number of chat completions to generate for each prompt. " - "Note that the API may not return the full n completions if duplicates are generated.", - value=1, - ), - IntInput( - name="max_tokens", - display_name="Max tokens", - advanced=False, - value=256, - info="The maximum number of tokens to generate for each chat completion.", - ), - IntInput( - name="max_retries", - display_name="Max retries", - advanced=True, - required=False, - value=6, - ), - BoolInput( - name="verbose", - display_name="Verbose", - advanced=True, - required=False, - value=False, - ), - BoolInput( - name="stream", - display_name="Stream", - info=STREAM_INFO_TEXT, - advanced=True, - ), - StrInput( - name="system_message", - display_name="System Message", - info="System message to pass to the model.", - advanced=True, - ), - ] - - def build_model(self) -> LanguageModel: # type: ignore[type-var] - try: - import litellm # type: ignore - - litellm.drop_params = True - litellm.set_verbose = self.verbose - except ImportError: - raise ChatLiteLLMException( - "Could not import litellm python package. 
" "Please install it with `pip install litellm`" - ) - # Remove empty keys - if "" in self.kwargs: - del self.kwargs[""] - if "" in self.model_kwargs: - del self.model_kwargs[""] - # Report missing fields for Azure provider - if self.provider == "Azure": - if "api_base" not in self.kwargs: - raise Exception("Missing api_base on kwargs") - if "api_version" not in self.model_kwargs: - raise Exception("Missing api_version on model_kwargs") - output = ChatLiteLLM( - model=f"{self.provider.lower()}/{self.model}", - client=None, - streaming=self.stream, - temperature=self.temperature, - model_kwargs=self.model_kwargs if self.model_kwargs is not None else {}, - top_p=self.top_p, - top_k=self.top_k, - n=self.n, - max_tokens=self.max_tokens, - max_retries=self.max_retries, - **self.kwargs, - ) - output.client.api_key = self.api_key - - return output # type: ignore diff --git a/src/backend/base/langflow/components/deactivated/DocumentsToData.py b/src/backend/base/langflow/components/deactivated/DocumentsToData.py deleted file mode 100644 index 3d0f65970274..000000000000 --- a/src/backend/base/langflow/components/deactivated/DocumentsToData.py +++ /dev/null @@ -1,24 +0,0 @@ -from typing import List - -from langchain_core.documents import Document - -from langflow.custom import CustomComponent -from langflow.schema import Data - - -class DocumentsToDataComponent(CustomComponent): - display_name = "Documents ⇢ Data" - description = "Convert LangChain Documents into Data." - icon = "LangChain" - name = "DocumentsToData" - - field_config = { - "documents": {"display_name": "Documents"}, - } - - def build(self, documents: List[Document]) -> List[Data]: - if isinstance(documents, Document): - documents = [documents] - data = [Data.from_document(document) for document in documents] - self.status = data - return data diff --git a/src/backend/base/langflow/components/deactivated/ExtractKeyFromData.py b/src/backend/base/langflow/components/deactivated/ExtractKeyFromData.py deleted file mode 100644 index 01f7abd008c5..000000000000 --- a/src/backend/base/langflow/components/deactivated/ExtractKeyFromData.py +++ /dev/null @@ -1,46 +0,0 @@ -from langflow.custom import CustomComponent -from langflow.schema import Data - - -class ExtractKeyFromDataComponent(CustomComponent): - display_name = "Extract Key From Data" - description = "Extracts a key from a data." - beta: bool = True - name = "ExtractKeyFromData" - - field_config = { - "data": {"display_name": "Data"}, - "keys": { - "display_name": "Keys", - "info": "The keys to extract from the data.", - "input_types": [], - }, - "silent_error": { - "display_name": "Silent Errors", - "info": "If True, errors will not be raised.", - "advanced": True, - }, - } - - def build(self, data: Data, keys: list[str], silent_error: bool = True) -> Data: - """ - Extracts the keys from a data. - - Args: - data (Data): The data from which to extract the keys. - keys (list[str]): The keys to extract from the data. - silent_error (bool): If True, errors will not be raised. - - Returns: - dict: The extracted keys. 
- """ - extracted_keys = {} - for key in keys: - try: - extracted_keys[key] = getattr(data, key) - except AttributeError: - if not silent_error: - raise KeyError(f"The key '{key}' does not exist in the data.") - return_data = Data(data=extracted_keys) - self.status = return_data - return return_data diff --git a/src/backend/base/langflow/components/deactivated/ListFlows.py b/src/backend/base/langflow/components/deactivated/ListFlows.py deleted file mode 100644 index fa46839a2633..000000000000 --- a/src/backend/base/langflow/components/deactivated/ListFlows.py +++ /dev/null @@ -1,22 +0,0 @@ -from typing import List - -from langflow.custom import CustomComponent -from langflow.schema import Data - - -class ListFlowsComponent(CustomComponent): - display_name = "List Flows" - description = "A component to list all available flows." - icon = "ListFlows" - beta: bool = True - name = "ListFlows" - - def build_config(self): - return {} - - def build( - self, - ) -> List[Data]: - flows = self.list_flows() - self.status = flows - return flows diff --git a/src/backend/base/langflow/components/deactivated/MergeData.py b/src/backend/base/langflow/components/deactivated/MergeData.py deleted file mode 100644 index 3d2ac582c81d..000000000000 --- a/src/backend/base/langflow/components/deactivated/MergeData.py +++ /dev/null @@ -1,27 +0,0 @@ -from langflow.custom import CustomComponent -from langflow.schema import Data - - -class MergeDataComponent(CustomComponent): - display_name = "Merge Data" - description = "Merges data." - beta: bool = True - name = "MergeData" - - field_config = { - "data": {"display_name": "Data"}, - } - - def build(self, data: list[Data]) -> Data: - if not data: - return Data() - if len(data) == 1: - return data[0] - merged_data = Data() - for value in data: - if merged_data is None: - merged_data = value - else: - merged_data += value - self.status = merged_data - return merged_data diff --git a/src/backend/base/langflow/components/deactivated/Message.py b/src/backend/base/langflow/components/deactivated/Message.py deleted file mode 100644 index ae9313475ec2..000000000000 --- a/src/backend/base/langflow/components/deactivated/Message.py +++ /dev/null @@ -1,40 +0,0 @@ -from typing import Optional - -from langflow.custom import CustomComponent -from langflow.schema.message import Message -from langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_USER - - -class MessageComponent(CustomComponent): - display_name = "Message" - description = "Creates a Message object given a Session ID." 
- name = "Message" - - def build_config(self): - return { - "sender": { - "options": [MESSAGE_SENDER_AI, MESSAGE_SENDER_USER], - "display_name": "Sender Type", - }, - "sender_name": {"display_name": "Sender Name"}, - "text": {"display_name": "Text"}, - "session_id": { - "display_name": "Session ID", - "info": "Session ID of the chat history.", - "input_types": ["Text"], - }, - } - - def build( - self, - sender: str = MESSAGE_SENDER_USER, - sender_name: Optional[str] = None, - session_id: Optional[str] = None, - text: str = "", - ) -> Message: - message = Message( - text=text, sender=sender, sender_name=sender_name, flow_id=self.graph.flow_id, session_id=session_id - ) - - self.status = message - return message diff --git a/src/backend/base/langflow/components/deactivated/SelectivePassThrough.py b/src/backend/base/langflow/components/deactivated/SelectivePassThrough.py deleted file mode 100644 index 4a33418953d1..000000000000 --- a/src/backend/base/langflow/components/deactivated/SelectivePassThrough.py +++ /dev/null @@ -1,76 +0,0 @@ -from langflow.custom import Component -from langflow.field_typing import Text -from langflow.io import BoolInput, DropdownInput, MessageTextInput, Output - - -class SelectivePassThroughComponent(Component): - display_name = "Selective Pass Through" - description = "Passes the specified value if a specified condition is met." - icon = "filter" - name = "SelectivePassThrough" - - inputs = [ - MessageTextInput( - name="input_value", - display_name="Input Value", - info="The primary input value to evaluate.", - ), - MessageTextInput( - name="comparison_value", - display_name="Comparison Value", - info="The value to compare against the input value.", - ), - DropdownInput( - name="operator", - display_name="Operator", - options=["equals", "not equals", "contains", "starts with", "ends with"], - info="Condition to evaluate the input value.", - ), - MessageTextInput( - name="value_to_pass", - display_name="Value to Pass", - info="The value to pass if the condition is met.", - ), - BoolInput( - name="case_sensitive", - display_name="Case Sensitive", - info="If true, the comparison will be case sensitive.", - value=False, - advanced=True, - ), - ] - - outputs = [ - Output(display_name="Passed Output", name="passed_output", method="pass_through"), - ] - - def evaluate_condition(self, input_value: str, comparison_value: str, operator: str, case_sensitive: bool) -> bool: - if not case_sensitive: - input_value = input_value.lower() - comparison_value = comparison_value.lower() - - if operator == "equals": - return input_value == comparison_value - elif operator == "not equals": - return input_value != comparison_value - elif operator == "contains": - return comparison_value in input_value - elif operator == "starts with": - return input_value.startswith(comparison_value) - elif operator == "ends with": - return input_value.endswith(comparison_value) - return False - - def pass_through(self) -> Text: - input_value = self.input_value - comparison_value = self.comparison_value - operator = self.operator - value_to_pass = self.value_to_pass - case_sensitive = self.case_sensitive - - if self.evaluate_condition(input_value, comparison_value, operator, case_sensitive): - self.status = value_to_pass - return value_to_pass - else: - self.status = "" - return "" diff --git a/src/backend/base/langflow/components/deactivated/ShouldRunNext.py b/src/backend/base/langflow/components/deactivated/ShouldRunNext.py deleted file mode 100644 index 33c8ce587289..000000000000 --- 
a/src/backend/base/langflow/components/deactivated/ShouldRunNext.py +++ /dev/null @@ -1,34 +0,0 @@ -from langchain_core.messages import BaseMessage -from langchain_core.prompts import PromptTemplate - -from langflow.custom import CustomComponent -from langflow.field_typing import LanguageModel, Text - - -class ShouldRunNextComponent(CustomComponent): - display_name = "Should Run Next" - description = "Determines if a vertex is runnable." - name = "ShouldRunNext" - - def build(self, llm: LanguageModel, question: str, context: str, retries: int = 3) -> Text: - template = "Given the following question and the context below, answer with a yes or no.\n\n{error_message}\n\nQuestion: {question}\n\nContext: {context}\n\nAnswer:" - - prompt = PromptTemplate.from_template(template) - chain = prompt | llm - error_message = "" - for i in range(retries): - result = chain.invoke( - dict(question=question, context=context, error_message=error_message), - config={"callbacks": self.get_langchain_callbacks()}, - ) - if isinstance(result, BaseMessage): - content = result.content - elif isinstance(result, str): - content = result - if isinstance(content, str) and content.lower().strip() in ["yes", "no"]: - break - condition = str(content).lower().strip() == "yes" - self.status = f"Should Run Next: {condition}" - if condition is False: - self.stop() - return context diff --git a/src/backend/base/langflow/components/deactivated/SplitText.py b/src/backend/base/langflow/components/deactivated/SplitText.py deleted file mode 100644 index 88f911057e91..000000000000 --- a/src/backend/base/langflow/components/deactivated/SplitText.py +++ /dev/null @@ -1,71 +0,0 @@ -from typing import List - -from langchain_text_splitters import CharacterTextSplitter - -from langflow.custom import Component -from langflow.io import HandleInput, IntInput, MessageTextInput, Output -from langflow.schema import Data -from langflow.utils.util import unescape_string - - -class SplitTextComponent(Component): - display_name: str = "Split Text" - description: str = "Split text into chunks based on specified criteria." - icon = "scissors-line-dashed" - name = "SplitText" - - inputs = [ - HandleInput( - name="data_inputs", - display_name="Data Inputs", - info="The data to split.", - input_types=["Data"], - is_list=True, - ), - IntInput( - name="chunk_overlap", - display_name="Chunk Overlap", - info="Number of characters to overlap between chunks.", - value=200, - ), - IntInput( - name="chunk_size", - display_name="Chunk Size", - info="The maximum number of characters in each chunk.", - value=1000, - ), - MessageTextInput( - name="separator", - display_name="Separator", - info="The character to split on. 
Defaults to newline.", - value="\n", - ), - ] - - outputs = [ - Output(display_name="Chunks", name="chunks", method="split_text"), - ] - - def _docs_to_data(self, docs): - data = [] - for doc in docs: - data.append(Data(text=doc.page_content, data=doc.metadata)) - return data - - def split_text(self) -> List[Data]: - separator = unescape_string(self.separator) - - documents = [] - for _input in self.data_inputs: - if isinstance(_input, Data): - documents.append(_input.to_lc_document()) - - splitter = CharacterTextSplitter( - chunk_overlap=self.chunk_overlap, - chunk_size=self.chunk_size, - separator=separator, - ) - docs = splitter.split_documents(documents) - data = self._docs_to_data(docs) - self.status = data - return data diff --git a/src/backend/base/langflow/components/deactivated/StoreMessage.py b/src/backend/base/langflow/components/deactivated/StoreMessage.py deleted file mode 100644 index 64a770aeadfa..000000000000 --- a/src/backend/base/langflow/components/deactivated/StoreMessage.py +++ /dev/null @@ -1,23 +0,0 @@ -from langflow.custom import CustomComponent -from langflow.memory import get_messages, store_message -from langflow.schema.message import Message - - -class StoreMessageComponent(CustomComponent): - display_name = "Store Message" - description = "Stores a chat message." - name = "StoreMessage" - - def build_config(self): - return { - "message": {"display_name": "Message"}, - } - - def build( - self, - message: Message, - ) -> Message: - store_message(message, flow_id=self.graph.flow_id) - self.status = get_messages() - - return message diff --git a/src/backend/base/langflow/components/deactivated/SubFlow.py b/src/backend/base/langflow/components/deactivated/SubFlow.py deleted file mode 100644 index 6207e85f069b..000000000000 --- a/src/backend/base/langflow/components/deactivated/SubFlow.py +++ /dev/null @@ -1,116 +0,0 @@ -from typing import Any, List, Optional - -from langflow.base.flow_processing.utils import build_data_from_result_data -from langflow.custom import CustomComponent -from langflow.graph.graph.base import Graph -from langflow.graph.schema import RunOutputs -from langflow.graph.vertex.base import Vertex -from langflow.helpers.flow import get_flow_inputs -from langflow.schema import Data -from langflow.schema.dotdict import dotdict -from langflow.template.field.base import Input -from loguru import logger - - -class SubFlowComponent(CustomComponent): - display_name = "Sub Flow" - description = ( - "Dynamically Generates a Component from a Flow. The output is a list of data with keys 'result' and 'message'." 
- ) - beta: bool = True - field_order = ["flow_name"] - name = "SubFlow" - - def get_flow_names(self) -> List[str]: - flow_datas = self.list_flows() - return [flow_data.data["name"] for flow_data in flow_datas] - - def get_flow(self, flow_name: str) -> Optional[Data]: - flow_datas = self.list_flows() - for flow_data in flow_datas: - if flow_data.data["name"] == flow_name: - return flow_data - return None - - def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None): - logger.debug(f"Updating build config with field value {field_value} and field name {field_name}") - if field_name == "flow_name": - build_config["flow_name"]["options"] = self.get_flow_names() - # Clean up the build config - for key in list(build_config.keys()): - if key not in self.field_order + ["code", "_type", "get_final_results_only"]: - del build_config[key] - if field_value is not None and field_name == "flow_name": - try: - flow_data = self.get_flow(field_value) - if not flow_data: - raise ValueError(f"Flow {field_value} not found.") - graph = Graph.from_payload(flow_data.data["data"]) - # Get all inputs from the graph - inputs = get_flow_inputs(graph) - # Add inputs to the build config - build_config = self.add_inputs_to_build_config(inputs, build_config) - except Exception as e: - logger.error(f"Error getting flow {field_value}: {str(e)}") - - return build_config - - def add_inputs_to_build_config(self, inputs: List[Vertex], build_config: dotdict): - new_fields: list[Input] = [] - for vertex in inputs: - field = Input( - display_name=vertex.display_name, - name=vertex.id, - info=vertex.description, - field_type="str", - value=None, - ) - new_fields.append(field) - logger.debug(new_fields) - for field in new_fields: - build_config[field.name] = field.to_dict() - return build_config - - def build_config(self): - return { - "input_value": { - "display_name": "Input Value", - "multiline": True, - }, - "flow_name": { - "display_name": "Flow Name", - "info": "The name of the flow to run.", - "options": [], - "real_time_refresh": True, - "refresh_button": True, - }, - "tweaks": { - "display_name": "Tweaks", - "info": "Tweaks to apply to the flow.", - }, - "get_final_results_only": { - "display_name": "Get Final Results Only", - "info": "If False, the output will contain all outputs from the flow.", - "advanced": True, - }, - } - - async def build(self, flow_name: str, get_final_results_only: bool = True, **kwargs) -> List[Data]: - tweaks = {key: {"input_value": value} for key, value in kwargs.items()} - run_outputs: List[Optional[RunOutputs]] = await self.run_flow( - tweaks=tweaks, - flow_name=flow_name, - ) - if not run_outputs: - return [] - run_output = run_outputs[0] - - data = [] - if run_output is not None: - for output in run_output.outputs: - if output: - data.extend(build_data_from_result_data(output, get_final_results_only)) - - self.status = data - logger.debug(data) - return data diff --git a/src/backend/base/langflow/components/deactivated/__init__.py b/src/backend/base/langflow/components/deactivated/__init__.py index 8be439d874b2..9cf7ac2dcf28 100644 --- a/src/backend/base/langflow/components/deactivated/__init__.py +++ b/src/backend/base/langflow/components/deactivated/__init__.py @@ -1,21 +1,21 @@ -from .ExtractKeyFromData import ExtractKeyFromDataComponent -from .ListFlows import ListFlowsComponent -from .MergeData import MergeDataComponent -from .SelectivePassThrough import SelectivePassThroughComponent -from .SubFlow import SubFlowComponent +from 
.extract_key_from_data import ExtractKeyFromDataComponent +from .list_flows import ListFlowsComponent +from .merge_data import MergeDataComponent +from .selective_passthrough import SelectivePassThroughComponent +from .split_text import SplitTextComponent +from .sub_flow import SubFlowComponent __all__ = [ - "ConditionalRouterComponent", "ExtractKeyFromDataComponent", "FlowToolComponent", - "ListenComponent", "ListFlowsComponent", + "ListenComponent", "MergeDataComponent", "NotifyComponent", "PythonFunctionComponent", "RunFlowComponent", - "SplitTextComponent", "SQLExecutorComponent", - "SubFlowComponent", "SelectivePassThroughComponent", + "SplitTextComponent", + "SubFlowComponent", ] diff --git a/src/backend/base/langflow/components/deactivated/chat_litellm_model.py b/src/backend/base/langflow/components/deactivated/chat_litellm_model.py new file mode 100644 index 000000000000..10ac5b189739 --- /dev/null +++ b/src/backend/base/langflow/components/deactivated/chat_litellm_model.py @@ -0,0 +1,158 @@ +from langchain_community.chat_models.litellm import ChatLiteLLM, ChatLiteLLMException + +from langflow.base.constants import STREAM_INFO_TEXT +from langflow.base.models.model import LCModelComponent +from langflow.field_typing import LanguageModel +from langflow.io import ( + BoolInput, + DictInput, + DropdownInput, + FloatInput, + IntInput, + MessageInput, + SecretStrInput, + StrInput, +) + + +class ChatLiteLLMModelComponent(LCModelComponent): + display_name = "LiteLLM" + description = "`LiteLLM` collection of large language models." + documentation = "https://python.langchain.com/docs/integrations/chat/litellm" + icon = "🚄" + + inputs = [ + MessageInput(name="input_value", display_name="Input"), + StrInput( + name="model", + display_name="Model name", + advanced=False, + required=True, + info="The name of the model to use. For example, `gpt-3.5-turbo`.", + ), + SecretStrInput( + name="api_key", + display_name="API Key", + advanced=False, + required=False, + ), + DropdownInput( + name="provider", + display_name="Provider", + info="The provider of the API key.", + options=[ + "OpenAI", + "Azure", + "Anthropic", + "Replicate", + "Cohere", + "OpenRouter", + ], + ), + FloatInput( + name="temperature", + display_name="Temperature", + advanced=False, + required=False, + value=0.7, + ), + DictInput( + name="kwargs", + display_name="Kwargs", + advanced=True, + required=False, + is_list=True, + value={}, + ), + DictInput( + name="model_kwargs", + display_name="Model kwargs", + advanced=True, + required=False, + is_list=True, + value={}, + ), + FloatInput(name="top_p", display_name="Top p", advanced=True, required=False, value=0.5), + IntInput(name="top_k", display_name="Top k", advanced=True, required=False, value=35), + IntInput( + name="n", + display_name="N", + advanced=True, + required=False, + info="Number of chat completions to generate for each prompt. 
" + "Note that the API may not return the full n completions if duplicates are generated.", + value=1, + ), + IntInput( + name="max_tokens", + display_name="Max tokens", + advanced=False, + value=256, + info="The maximum number of tokens to generate for each chat completion.", + ), + IntInput( + name="max_retries", + display_name="Max retries", + advanced=True, + required=False, + value=6, + ), + BoolInput( + name="verbose", + display_name="Verbose", + advanced=True, + required=False, + value=False, + ), + BoolInput( + name="stream", + display_name="Stream", + info=STREAM_INFO_TEXT, + advanced=True, + ), + StrInput( + name="system_message", + display_name="System Message", + info="System message to pass to the model.", + advanced=True, + ), + ] + + def build_model(self) -> LanguageModel: # type: ignore[type-var] + try: + import litellm + + litellm.drop_params = True + litellm.set_verbose = self.verbose + except ImportError as e: + msg = "Could not import litellm python package. Please install it with `pip install litellm`" + raise ChatLiteLLMException(msg) from e + # Remove empty keys + if "" in self.kwargs: + del self.kwargs[""] + if "" in self.model_kwargs: + del self.model_kwargs[""] + # Report missing fields for Azure provider + if self.provider == "Azure": + if "api_base" not in self.kwargs: + msg = "Missing api_base on kwargs" + raise ValueError(msg) + if "api_version" not in self.model_kwargs: + msg = "Missing api_version on model_kwargs" + raise ValueError(msg) + output = ChatLiteLLM( + model=f"{self.provider.lower()}/{self.model}", + client=None, + streaming=self.stream, + temperature=self.temperature, + model_kwargs=self.model_kwargs if self.model_kwargs is not None else {}, + top_p=self.top_p, + top_k=self.top_k, + n=self.n, + max_tokens=self.max_tokens, + max_retries=self.max_retries, + **self.kwargs, + ) + output.client.api_key = self.api_key + + return output diff --git a/src/backend/base/langflow/components/deactivated/CodeBlockExtractor.py b/src/backend/base/langflow/components/deactivated/code_block_extractor.py similarity index 100% rename from src/backend/base/langflow/components/deactivated/CodeBlockExtractor.py rename to src/backend/base/langflow/components/deactivated/code_block_extractor.py diff --git a/src/backend/base/langflow/components/deactivated/documents_to_data.py b/src/backend/base/langflow/components/deactivated/documents_to_data.py new file mode 100644 index 000000000000..5eaf6b60e06d --- /dev/null +++ b/src/backend/base/langflow/components/deactivated/documents_to_data.py @@ -0,0 +1,22 @@ +from langchain_core.documents import Document + +from langflow.custom import CustomComponent +from langflow.schema import Data + + +class DocumentsToDataComponent(CustomComponent): + display_name = "Documents ⇢ Data" + description = "Convert LangChain Documents into Data." 
+ icon = "LangChain" + name = "DocumentsToData" + + field_config = { + "documents": {"display_name": "Documents"}, + } + + def build(self, documents: list[Document]) -> list[Data]: + if isinstance(documents, Document): + documents = [documents] + data = [Data.from_document(document) for document in documents] + self.status = data + return data diff --git a/src/backend/base/langflow/components/deactivated/Embed.py b/src/backend/base/langflow/components/deactivated/embed.py similarity index 100% rename from src/backend/base/langflow/components/deactivated/Embed.py rename to src/backend/base/langflow/components/deactivated/embed.py diff --git a/src/backend/base/langflow/components/deactivated/extract_key_from_data.py b/src/backend/base/langflow/components/deactivated/extract_key_from_data.py new file mode 100644 index 000000000000..3882d0d987b8 --- /dev/null +++ b/src/backend/base/langflow/components/deactivated/extract_key_from_data.py @@ -0,0 +1,46 @@ +from langflow.custom import CustomComponent +from langflow.schema import Data + + +class ExtractKeyFromDataComponent(CustomComponent): + display_name = "Extract Key From Data" + description = "Extracts a key from a data." + beta: bool = True + name = "ExtractKeyFromData" + + field_config = { + "data": {"display_name": "Data"}, + "keys": { + "display_name": "Keys", + "info": "The keys to extract from the data.", + "input_types": [], + }, + "silent_error": { + "display_name": "Silent Errors", + "info": "If True, errors will not be raised.", + "advanced": True, + }, + } + + def build(self, data: Data, keys: list[str], *, silent_error: bool = True) -> Data: + """Extracts the keys from a data. + + Args: + data (Data): The data from which to extract the keys. + keys (list[str]): The keys to extract from the data. + silent_error (bool): If True, errors will not be raised. + + Returns: + dict: The extracted keys. + """ + extracted_keys = {} + for key in keys: + try: + extracted_keys[key] = getattr(data, key) + except AttributeError as e: + if not silent_error: + msg = f"The key '{key}' does not exist in the data." + raise KeyError(msg) from e + return_data = Data(data=extracted_keys) + self.status = return_data + return return_data diff --git a/src/backend/base/langflow/components/deactivated/list_flows.py b/src/backend/base/langflow/components/deactivated/list_flows.py new file mode 100644 index 000000000000..5e18e9cfa5e3 --- /dev/null +++ b/src/backend/base/langflow/components/deactivated/list_flows.py @@ -0,0 +1,20 @@ +from langflow.custom import CustomComponent +from langflow.schema import Data + + +class ListFlowsComponent(CustomComponent): + display_name = "List Flows" + description = "A component to list all available flows." + icon = "ListFlows" + beta: bool = True + name = "ListFlows" + + def build_config(self): + return {} + + def build( + self, + ) -> list[Data]: + flows = self.list_flows() + self.status = flows + return flows diff --git a/src/backend/base/langflow/components/deactivated/merge_data.py b/src/backend/base/langflow/components/deactivated/merge_data.py new file mode 100644 index 000000000000..853801317946 --- /dev/null +++ b/src/backend/base/langflow/components/deactivated/merge_data.py @@ -0,0 +1,94 @@ +from loguru import logger + +from langflow.custom import Component +from langflow.io import DataInput, Output +from langflow.schema import Data + + +class MergeDataComponent(Component): + """MergeDataComponent is responsible for combining multiple Data objects into a unified list of Data objects. 
+ + It ensures that all keys across the input Data objects are present in each merged Data object. + Missing keys are filled with empty strings to maintain consistency. + """ + + display_name = "Merge Data" + description = ( + "Combines multiple Data objects into a unified list, ensuring all keys are present in each Data object." + ) + icon = "merge" + + inputs = [ + DataInput( + name="data_inputs", + display_name="Data Inputs", + is_list=True, + info="A list of Data inputs objects to be merged.", + ), + ] + + outputs = [ + Output( + display_name="Merged Data", + name="merged_data", + method="merge_data", + ), + ] + + def merge_data(self) -> list[Data]: + """Merges multiple Data objects into a single list of Data objects. + + Ensures that all keys from the input Data objects are present in each merged Data object. + Missing keys are filled with empty strings. + + Returns: + List[Data]: A list of merged Data objects with consistent keys. + """ + logger.info("Initiating the data merging process.") + + data_inputs: list[Data] = self.data_inputs + logger.debug(f"Received {len(data_inputs)} data input(s) for merging.") + + if not data_inputs: + logger.warning("No data inputs provided. Returning an empty list.") + return [] + + # Collect all unique keys from all Data objects + all_keys: set[str] = set() + for idx, data_input in enumerate(data_inputs): + if not isinstance(data_input, Data): + error_message = f"Data input at index {idx} is not of type Data." + logger.error(error_message) + type_error_message = ( + f"All items in data_inputs must be of type Data. Item at index {idx} is {type(data_input)}" + ) + raise TypeError(type_error_message) + all_keys.update(data_input.data.keys()) + logger.debug(f"Collected {len(all_keys)} unique key(s) from input data.") + + try: + # Create new list of Data objects with missing keys filled with empty strings + merged_data_list = [] + for idx, data_input in enumerate(data_inputs): + merged_data_dict = {} + + for key in all_keys: + # Use the existing value if the key exists, otherwise use an empty string + value = data_input.data.get(key, "") + if key not in data_input.data: + log_message = f"Key '{key}' missing in data input at index {idx}. " "Assigning empty string." + logger.debug(log_message) + merged_data_dict[key] = value + + merged_data = Data( + text_key=data_input.text_key, data=merged_data_dict, default_value=data_input.default_value + ) + merged_data_list.append(merged_data) + logger.debug(f"Merged Data object created for input at index {idx}.") + + except Exception: + logger.exception("An error occurred during the data merging process.") + raise + + logger.info("Data merging process completed successfully.") + return merged_data_list diff --git a/src/backend/base/langflow/components/deactivated/message.py b/src/backend/base/langflow/components/deactivated/message.py new file mode 100644 index 000000000000..530ced889f3e --- /dev/null +++ b/src/backend/base/langflow/components/deactivated/message.py @@ -0,0 +1,37 @@ +from langflow.custom import CustomComponent +from langflow.schema.message import Message +from langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_USER + + +class MessageComponent(CustomComponent): + display_name = "Message" + description = "Creates a Message object given a Session ID." 
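
The new MergeDataComponent above unions the keys of every input and backfills the gaps with empty strings, so each merged record has an identical shape. The core algorithm as a pure function over plain dicts (a sketch with a hypothetical name; the component wraps the same idea around Data objects, type checks, and logging):

def merge_records(records: list[dict]) -> list[dict]:
    # Union of every key seen across all records.
    all_keys: set[str] = set()
    for record in records:
        all_keys.update(record.keys())
    # Backfill missing keys with empty strings so every record is uniform.
    return [{key: record.get(key, "") for key in all_keys} for record in records]

For example, merge_records([{"a": 1}, {"b": 2}]) yields [{"a": 1, "b": ""}, {"a": "", "b": 2}], with key order unspecified because the union is held in a set.
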
+ name = "Message" + + def build_config(self): + return { + "sender": { + "options": [MESSAGE_SENDER_AI, MESSAGE_SENDER_USER], + "display_name": "Sender Type", + }, + "sender_name": {"display_name": "Sender Name"}, + "text": {"display_name": "Text"}, + "session_id": { + "display_name": "Session ID", + "info": "Session ID of the chat history.", + "input_types": ["Text"], + }, + } + + def build( + self, + sender: str = MESSAGE_SENDER_USER, + sender_name: str | None = None, + session_id: str | None = None, + text: str = "", + ) -> Message: + flow_id = self.graph.flow_id if hasattr(self, "graph") else None + message = Message(text=text, sender=sender, sender_name=sender_name, flow_id=flow_id, session_id=session_id) + + self.status = message + return message diff --git a/src/backend/base/langflow/components/deactivated/selective_passthrough.py b/src/backend/base/langflow/components/deactivated/selective_passthrough.py new file mode 100644 index 000000000000..f82842993659 --- /dev/null +++ b/src/backend/base/langflow/components/deactivated/selective_passthrough.py @@ -0,0 +1,77 @@ +from langflow.custom import Component +from langflow.field_typing import Text +from langflow.io import BoolInput, DropdownInput, MessageTextInput, Output + + +class SelectivePassThroughComponent(Component): + display_name = "Selective Pass Through" + description = "Passes the specified value if a specified condition is met." + icon = "filter" + name = "SelectivePassThrough" + + inputs = [ + MessageTextInput( + name="input_value", + display_name="Input Value", + info="The primary input value to evaluate.", + ), + MessageTextInput( + name="comparison_value", + display_name="Comparison Value", + info="The value to compare against the input value.", + ), + DropdownInput( + name="operator", + display_name="Operator", + options=["equals", "not equals", "contains", "starts with", "ends with"], + info="Condition to evaluate the input value.", + ), + MessageTextInput( + name="value_to_pass", + display_name="Value to Pass", + info="The value to pass if the condition is met.", + ), + BoolInput( + name="case_sensitive", + display_name="Case Sensitive", + info="If true, the comparison will be case sensitive.", + value=False, + advanced=True, + ), + ] + + outputs = [ + Output(display_name="Passed Output", name="passed_output", method="pass_through"), + ] + + def evaluate_condition( + self, input_value: str, comparison_value: str, operator: str, *, case_sensitive: bool + ) -> bool: + if not case_sensitive: + input_value = input_value.lower() + comparison_value = comparison_value.lower() + + if operator == "equals": + return input_value == comparison_value + if operator == "not equals": + return input_value != comparison_value + if operator == "contains": + return comparison_value in input_value + if operator == "starts with": + return input_value.startswith(comparison_value) + if operator == "ends with": + return input_value.endswith(comparison_value) + return False + + def pass_through(self) -> Text: + input_value = self.input_value + comparison_value = self.comparison_value + operator = self.operator + value_to_pass = self.value_to_pass + case_sensitive = self.case_sensitive + + if self.evaluate_condition(input_value, comparison_value, operator, case_sensitive=case_sensitive): + self.status = value_to_pass + return value_to_pass + self.status = "" + return "" diff --git a/src/backend/base/langflow/components/deactivated/should_run_next.py b/src/backend/base/langflow/components/deactivated/should_run_next.py new file mode 100644 
index 000000000000..a65687e8fc09 --- /dev/null +++ b/src/backend/base/langflow/components/deactivated/should_run_next.py @@ -0,0 +1,40 @@ +from langchain_core.messages import BaseMessage +from langchain_core.prompts import PromptTemplate + +from langflow.custom import CustomComponent +from langflow.field_typing import LanguageModel, Text + + +class ShouldRunNextComponent(CustomComponent): + display_name = "Should Run Next" + description = "Determines if a vertex is runnable." + name = "ShouldRunNext" + + def build(self, llm: LanguageModel, question: str, context: str, retries: int = 3) -> Text: + template = ( + "Given the following question and the context below, answer with a yes or no.\n\n" + "{error_message}\n\n" + "Question: {question}\n\n" # noqa: RUF100, RUF027 + "Context: {context}\n\n" # noqa: RUF100, RUF027 + "Answer:" + ) + + prompt = PromptTemplate.from_template(template) + chain = prompt | llm + error_message = "" + for _i in range(retries): + result = chain.invoke( + {"question": question, "context": context, "error_message": error_message}, + config={"callbacks": self.get_langchain_callbacks()}, + ) + if isinstance(result, BaseMessage): + content = result.content + elif isinstance(result, str): + content = result + if isinstance(content, str) and content.lower().strip() in {"yes", "no"}: + break + condition = str(content).lower().strip() == "yes" + self.status = f"Should Run Next: {condition}" + if condition is False: + self.stop() + return context diff --git a/src/backend/base/langflow/components/deactivated/split_text.py b/src/backend/base/langflow/components/deactivated/split_text.py new file mode 100644 index 000000000000..36157538e0e8 --- /dev/null +++ b/src/backend/base/langflow/components/deactivated/split_text.py @@ -0,0 +1,63 @@ +from langchain_text_splitters import CharacterTextSplitter + +from langflow.custom import Component +from langflow.io import HandleInput, IntInput, MessageTextInput, Output +from langflow.schema import Data +from langflow.utils.util import unescape_string + + +class SplitTextComponent(Component): + display_name: str = "Split Text" + description: str = "Split text into chunks based on specified criteria." + icon = "scissors-line-dashed" + name = "SplitText" + + inputs = [ + HandleInput( + name="data_inputs", + display_name="Data Inputs", + info="The data to split.", + input_types=["Data"], + is_list=True, + ), + IntInput( + name="chunk_overlap", + display_name="Chunk Overlap", + info="Number of characters to overlap between chunks.", + value=200, + ), + IntInput( + name="chunk_size", + display_name="Chunk Size", + info="The maximum number of characters in each chunk.", + value=1000, + ), + MessageTextInput( + name="separator", + display_name="Separator", + info="The character to split on. 
Defaults to newline.", + value="\n", + ), + ] + + outputs = [ + Output(display_name="Chunks", name="chunks", method="split_text"), + ] + + def _docs_to_data(self, docs): + return [Data(text=doc.page_content, data=doc.metadata) for doc in docs] + + def split_text(self) -> list[Data]: + separator = unescape_string(self.separator) + + documents = [_input.to_lc_document() for _input in self.data_inputs if isinstance(_input, Data)] + + splitter = CharacterTextSplitter( + chunk_overlap=self.chunk_overlap, + chunk_size=self.chunk_size, + separator=separator, + ) + docs = splitter.split_documents(documents) + data = self._docs_to_data(docs) + self.status = data + return data diff --git a/src/backend/base/langflow/components/deactivated/store_message.py b/src/backend/base/langflow/components/deactivated/store_message.py new file mode 100644 index 000000000000..e983f4e0693e --- /dev/null +++ b/src/backend/base/langflow/components/deactivated/store_message.py @@ -0,0 +1,24 @@ +from langflow.custom import CustomComponent +from langflow.memory import get_messages, store_message +from langflow.schema.message import Message + + +class StoreMessageComponent(CustomComponent): + display_name = "Store Message" + description = "Stores a chat message." + name = "StoreMessage" + + def build_config(self): + return { + "message": {"display_name": "Message"}, + } + + def build( + self, + message: Message, + ) -> Message: + flow_id = self.graph.flow_id if hasattr(self, "graph") else None + store_message(message, flow_id=flow_id) + self.status = get_messages() + + return message diff --git a/src/backend/base/langflow/components/deactivated/sub_flow.py b/src/backend/base/langflow/components/deactivated/sub_flow.py new file mode 100644 index 000000000000..d23abcfd2c69 --- /dev/null +++ b/src/backend/base/langflow/components/deactivated/sub_flow.py @@ -0,0 +1,125 @@ +from typing import TYPE_CHECKING, Any + +from loguru import logger + +from langflow.base.flow_processing.utils import build_data_from_result_data +from langflow.custom import CustomComponent +from langflow.graph.graph.base import Graph +from langflow.graph.vertex.base import Vertex +from langflow.helpers.flow import get_flow_inputs +from langflow.schema import Data +from langflow.schema.dotdict import dotdict +from langflow.template.field.base import Input + +if TYPE_CHECKING: + from langflow.graph.schema import RunOutputs + + +class SubFlowComponent(CustomComponent): + display_name = "Sub Flow" + description = ( + "Dynamically Generates a Component from a Flow. The output is a list of data with keys 'result' and 'message'." 
+ ) + beta: bool = True + field_order = ["flow_name"] + name = "SubFlow" + + def get_flow_names(self) -> list[str]: + flow_datas = self.list_flows() + return [flow_data.data["name"] for flow_data in flow_datas] + + def get_flow(self, flow_name: str) -> Data | None: + flow_datas = self.list_flows() + for flow_data in flow_datas: + if flow_data.data["name"] == flow_name: + return flow_data + return None + + def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None): + logger.debug(f"Updating build config with field value {field_value} and field name {field_name}") + if field_name == "flow_name": + build_config["flow_name"]["options"] = self.get_flow_names() + # Clean up the build config + for key in list(build_config.keys()): + if key not in {*self.field_order, "code", "_type", "get_final_results_only"}: + del build_config[key] + if field_value is not None and field_name == "flow_name": + try: + flow_data = self.get_flow(field_value) + except Exception: # noqa: BLE001 + logger.exception(f"Error getting flow {field_value}") + else: + if not flow_data: + msg = f"Flow {field_value} not found." + logger.error(msg) + else: + try: + graph = Graph.from_payload(flow_data.data["data"]) + # Get all inputs from the graph + inputs = get_flow_inputs(graph) + # Add inputs to the build config + build_config = self.add_inputs_to_build_config(inputs, build_config) + except Exception: # noqa: BLE001 + logger.exception(f"Error building graph for flow {field_value}") + + return build_config + + def add_inputs_to_build_config(self, inputs: list[Vertex], build_config: dotdict): + new_fields: list[Input] = [] + for vertex in inputs: + field = Input( + display_name=vertex.display_name, + name=vertex.id, + info=vertex.description, + field_type="str", + value=None, + ) + new_fields.append(field) + logger.debug(new_fields) + for field in new_fields: + build_config[field.name] = field.to_dict() + return build_config + + def build_config(self): + return { + "input_value": { + "display_name": "Input Value", + "multiline": True, + }, + "flow_name": { + "display_name": "Flow Name", + "info": "The name of the flow to run.", + "options": [], + "real_time_refresh": True, + "refresh_button": True, + }, + "tweaks": { + "display_name": "Tweaks", + "info": "Tweaks to apply to the flow.", + }, + "get_final_results_only": { + "display_name": "Get Final Results Only", + "info": "If False, the output will contain all outputs from the flow.", + "advanced": True, + }, + } + + async def build(self, flow_name: str, **kwargs) -> list[Data]: + tweaks = {key: {"input_value": value} for key, value in kwargs.items()} + run_outputs: list[RunOutputs | None] = await self.run_flow( + tweaks=tweaks, + flow_name=flow_name, + ) + if not run_outputs: + return [] + run_output = run_outputs[0] + + data = [] + if run_output is not None: + for output in run_output.outputs: + if output: + data.extend(build_data_from_result_data(output)) + + self.status = data + logger.debug(data) + return data diff --git a/src/backend/base/langflow/components/documentloaders/Confluence.py b/src/backend/base/langflow/components/documentloaders/Confluence.py deleted file mode 100644 index 66ff5f7fef28..000000000000 --- a/src/backend/base/langflow/components/documentloaders/Confluence.py +++ /dev/null @@ -1,85 +0,0 @@ -from typing import List -from langflow.custom import Component -from langflow.io import StrInput, SecretStrInput, BoolInput, DropdownInput, Output, IntInput -from langflow.schema import Data -from 
langchain_community.document_loaders import ConfluenceLoader -from langchain_community.document_loaders.confluence import ContentFormat - - -class ConfluenceComponent(Component): - display_name = "Confluence" - description = "Confluence wiki collaboration platform" - documentation = "https://python.langchain.com/v0.2/docs/integrations/document_loaders/confluence/" - trace_type = "tool" - icon = "Confluence" - name = "Confluence" - - inputs = [ - StrInput( - name="url", - display_name="Site URL", - required=True, - info="The base URL of the Confluence Space. Example: https://<company>.atlassian.net/wiki.", - ), - StrInput( - name="username", - display_name="Username", - required=True, - info="Atlassian User E-mail. Example: email@example.com", - ), - SecretStrInput( - name="api_key", - display_name="API Key", - required=True, - info="Atlassian Key. Create at: https://id.atlassian.com/manage-profile/security/api-tokens", - ), - StrInput(name="space_key", display_name="Space Key", required=True), - BoolInput(name="cloud", display_name="Use Cloud?", required=True, value=True, advanced=True), - DropdownInput( - name="content_format", - display_name="Content Format", - options=[ - ContentFormat.EDITOR.value, - ContentFormat.EXPORT_VIEW.value, - ContentFormat.ANONYMOUS_EXPORT_VIEW.value, - ContentFormat.STORAGE.value, - ContentFormat.VIEW.value, - ], - value=ContentFormat.STORAGE.value, - required=True, - advanced=True, - info="Specify content format, defaults to ContentFormat.STORAGE", - ), - IntInput( - name="max_pages", - display_name="Max Pages", - required=False, - value=1000, - advanced=True, - info="Maximum number of pages to retrieve in total, defaults 1000", - ), - ] - - outputs = [ - Output(name="data", display_name="Data", method="load_documents"), - ] - - def build_confluence(self) -> ConfluenceLoader: - content_format = ContentFormat(self.content_format) - loader = ConfluenceLoader( - url=self.url, - username=self.username, - api_key=self.api_key, - cloud=self.cloud, - space_key=self.space_key, - content_format=content_format, - max_pages=self.max_pages, - ) - return loader - - def load_documents(self) -> List[Data]: - confluence = self.build_confluence() - documents = confluence.load() - data = [Data.from_document(doc) for doc in documents] # Using the from_document method of Data - self.status = data - return data diff --git a/src/backend/base/langflow/components/documentloaders/GitLoader.py b/src/backend/base/langflow/components/documentloaders/GitLoader.py deleted file mode 100644 index ea39d76bd0e9..000000000000 --- a/src/backend/base/langflow/components/documentloaders/GitLoader.py +++ /dev/null @@ -1,116 +0,0 @@ -from pathlib import Path -from typing import List -import re - -from langchain_community.document_loaders.git import GitLoader -from langflow.custom import Component -from langflow.io import MessageTextInput, Output -from langflow.schema import Data - - -class GitLoaderComponent(Component): - display_name = "GitLoader" - description = "Load files from a Git repository" - documentation = "https://python.langchain.com/v0.2/docs/integrations/document_loaders/git/" - trace_type = "tool" - icon = "GitLoader" - name = "GitLoader" - - inputs = [ - MessageTextInput( - name="repo_path", - display_name="Repository Path", - required=True, - info="The local path to the Git repository.", - ), - MessageTextInput( - name="clone_url", - display_name="Clone URL", - required=False, - info="The URL to clone the Git repository from.", - ), - MessageTextInput( - name="branch", -
display_name="Branch", - required=False, - value="main", - info="The branch to load files from. Defaults to 'main'.", - ), - MessageTextInput( - name="file_filter", - display_name="File Filter", - required=False, - advanced=True, - info="A list of patterns to filter files. Example to include only .py files: '*.py'. " - "Example to exclude .py files: '!*.py'. Multiple patterns can be separated by commas.", - ), - MessageTextInput( - name="content_filter", - display_name="Content Filter", - required=False, - advanced=True, - info="A regex pattern to filter files based on their content.", - ), - ] - - outputs = [ - Output(name="data", display_name="Data", method="load_documents"), - ] - - @staticmethod - def is_binary(file_path: str) -> bool: - """ - Check if a file is binary by looking for null bytes. - This is necessary because when searches are performed using - the content_filter, binary files need to be ignored. - """ - with open(file_path, "rb") as file: - return b"\x00" in file.read(1024) - - def build_gitloader(self) -> GitLoader: - file_filter_patterns = getattr(self, "file_filter", None) - content_filter_pattern = getattr(self, "content_filter", None) - - file_filters = [] - if file_filter_patterns: - patterns = [pattern.strip() for pattern in file_filter_patterns.split(",")] - - def file_filter(file_path: Path) -> bool: - if len(patterns) == 1 and patterns[0].startswith("!"): - return not file_path.match(patterns[0][1:]) - included = any(file_path.match(pattern) for pattern in patterns if not pattern.startswith("!")) - excluded = any(file_path.match(pattern[1:]) for pattern in patterns if pattern.startswith("!")) - return included and not excluded - - file_filters.append(file_filter) - - if content_filter_pattern: - content_regex = re.compile(content_filter_pattern) - - def content_filter(file_path: Path) -> bool: - with file_path.open("r", encoding="utf-8", errors="ignore") as file: - content = file.read() - return bool(content_regex.search(content)) - - file_filters.append(content_filter) - - def combined_filter(file_path: str) -> bool: - path = Path(file_path) - if self.is_binary(file_path): - return False - return all(f(path) for f in file_filters) - - loader = GitLoader( - repo_path=self.repo_path, - clone_url=self.clone_url, - branch=self.branch, - file_filter=combined_filter, - ) - return loader - - def load_documents(self) -> List[Data]: - gitloader = self.build_gitloader() - documents = list(gitloader.lazy_load()) - data = [Data.from_document(doc) for doc in documents] - self.status = data - return data diff --git a/src/backend/base/langflow/components/documentloaders/Unstructured.py b/src/backend/base/langflow/components/documentloaders/Unstructured.py deleted file mode 100644 index 5fdf05c3caf6..000000000000 --- a/src/backend/base/langflow/components/documentloaders/Unstructured.py +++ /dev/null @@ -1,58 +0,0 @@ -import os - -from typing import List - -from langflow.custom import Component -from langflow.inputs import FileInput, SecretStrInput -from langflow.template import Output -from langflow.schema import Data - -from langchain_community.document_loaders.unstructured import UnstructuredFileLoader - - -class UnstructuredComponent(Component): - display_name = "Unstructured" - description = "Unstructured data loader" - documentation = "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/" - trace_type = "tool" - icon = "Unstructured" - name = "Unstructured" - - inputs = [ - FileInput( - name="file", - display_name="File", - required=True, - 
info="The path to the file with which you want to use Unstructured to parse", - file_types=["pdf", "docx", "txt"], # TODO: Support all unstructured file types - ), - SecretStrInput( - name="api_key", - display_name="API Key", - required=False, - info="Unstructured API Key. Create at: https://unstructured.io/ - If not provided, open source library will be used", - ), - ] - - outputs = [ - Output(name="data", display_name="Data", method="load_documents"), - ] - - def build_unstructured(self) -> UnstructuredFileLoader: - os.environ["UNSTRUCTURED_API_KEY"] = self.api_key - - file_paths = [self.file] - - loader = UnstructuredFileLoader(file_paths) - - return loader - - def load_documents(self) -> List[Data]: - unstructured = self.build_unstructured() - - documents = unstructured.load() - data = [Data.from_document(doc) for doc in documents] # Using the from_document method of Data - - self.status = data - - return data diff --git a/src/backend/base/langflow/components/documentloaders/__init__.py b/src/backend/base/langflow/components/documentloaders/__init__.py index 0bed3869d438..e69de29bb2d1 100644 --- a/src/backend/base/langflow/components/documentloaders/__init__.py +++ b/src/backend/base/langflow/components/documentloaders/__init__.py @@ -1,5 +0,0 @@ -from .Confluence import ConfluenceComponent -from .GitLoader import GitLoaderComponent -from .Unstructured import UnstructuredComponent - -__all__ = ["ConfluenceComponent", "GitLoaderComponent", "UnstructuredComponent"] diff --git a/src/backend/base/langflow/components/embeddings/AIMLEmbeddings.py b/src/backend/base/langflow/components/embeddings/AIMLEmbeddings.py deleted file mode 100644 index e52198ba43c3..000000000000 --- a/src/backend/base/langflow/components/embeddings/AIMLEmbeddings.py +++ /dev/null @@ -1,34 +0,0 @@ -from langflow.base.embeddings.model import LCEmbeddingsModel -from langflow.base.models.aiml_constants import AIML_EMBEDDING_MODELS -from langflow.components.embeddings.util.AIMLEmbeddingsImpl import AIMLEmbeddingsImpl -from langflow.field_typing import Embeddings -from langflow.inputs.inputs import DropdownInput -from langflow.io import SecretStrInput - - -class AIMLEmbeddingsComponent(LCEmbeddingsModel): - display_name = "AI/ML Embeddings" - description = "Generate embeddings using the AI/ML API." - icon = "AI/ML" - name = "AIMLEmbeddings" - - inputs = [ - DropdownInput( - name="model_name", - display_name="Model Name", - options=AIML_EMBEDDING_MODELS, - required=True, - ), - SecretStrInput( - name="aiml_api_key", - display_name="AI/ML API Key", - value="AIML_API_KEY", - required=True, - ), - ] - - def build_embeddings(self) -> Embeddings: - return AIMLEmbeddingsImpl( - api_key=self.aiml_api_key, - model=self.model_name, - ) diff --git a/src/backend/base/langflow/components/embeddings/AmazonBedrockEmbeddings.py b/src/backend/base/langflow/components/embeddings/AmazonBedrockEmbeddings.py deleted file mode 100644 index 299073510a0a..000000000000 --- a/src/backend/base/langflow/components/embeddings/AmazonBedrockEmbeddings.py +++ /dev/null @@ -1,69 +0,0 @@ -from langchain_community.embeddings import BedrockEmbeddings - -from langflow.base.models.model import LCModelComponent -from langflow.field_typing import Embeddings -from langflow.inputs import SecretStrInput -from langflow.io import DropdownInput, MessageTextInput, Output - - -class AmazonBedrockEmbeddingsComponent(LCModelComponent): - display_name: str = "Amazon Bedrock Embeddings" - description: str = "Generate embeddings using Amazon Bedrock models." 
- documentation = "https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/bedrock" - icon = "Amazon" - name = "AmazonBedrockEmbeddings" - - inputs = [ - DropdownInput( - name="model_id", - display_name="Model Id", - options=["amazon.titan-embed-text-v1"], - value="amazon.titan-embed-text-v1", - ), - SecretStrInput(name="aws_access_key", display_name="Access Key"), - SecretStrInput(name="aws_secret_key", display_name="Secret Key"), - MessageTextInput( - name="credentials_profile_name", - display_name="Credentials Profile Name", - advanced=True, - ), - MessageTextInput(name="region_name", display_name="Region Name", value="us-east-1"), - MessageTextInput(name="endpoint_url", display_name=" Endpoint URL", advanced=True), - ] - - outputs = [ - Output(display_name="Embeddings", name="embeddings", method="build_embeddings"), - ] - - def build_embeddings(self) -> Embeddings: - if self.aws_access_key: - import boto3 # type: ignore - - session = boto3.Session( - aws_access_key_id=self.aws_access_key, - aws_secret_access_key=self.aws_secret_key, - ) - elif self.credentials_profile_name: - import boto3 - - session = boto3.Session(profile_name=self.credentials_profile_name) - else: - import boto3 - - session = boto3.Session() - - client_params = {} - if self.endpoint_url: - client_params["endpoint_url"] = self.endpoint_url - if self.region_name: - client_params["region_name"] = self.region_name - - boto3_client = session.client("bedrock-runtime", **client_params) - output = BedrockEmbeddings( - credentials_profile_name=self.credentials_profile_name, - client=boto3_client, - model_id=self.model_id, - endpoint_url=self.endpoint_url, - region_name=self.region_name, - ) # type: ignore - return output diff --git a/src/backend/base/langflow/components/embeddings/AstraVectorize.py b/src/backend/base/langflow/components/embeddings/AstraVectorize.py deleted file mode 100644 index 4de49eb758c9..000000000000 --- a/src/backend/base/langflow/components/embeddings/AstraVectorize.py +++ /dev/null @@ -1,116 +0,0 @@ -from typing import Any - -from langflow.custom import Component -from langflow.inputs.inputs import DictInput, DropdownInput, MessageTextInput, SecretStrInput -from langflow.template.field.base import Output - - -class AstraVectorizeComponent(Component): - display_name: str = "Astra Vectorize" - description: str = "Configuration options for Astra Vectorize server-side embeddings." 
- documentation: str = "https://docs.datastax.com/en/astra-db-serverless/databases/embedding-generation.html" - icon = "AstraDB" - name = "AstraVectorize" - - VECTORIZE_PROVIDERS_MAPPING = { - "Azure OpenAI": ["azureOpenAI", ["text-embedding-3-small", "text-embedding-3-large", "text-embedding-ada-002"]], - "Hugging Face - Dedicated": ["huggingfaceDedicated", ["endpoint-defined-model"]], - "Hugging Face - Serverless": [ - "huggingface", - [ - "sentence-transformers/all-MiniLM-L6-v2", - "intfloat/multilingual-e5-large", - "intfloat/multilingual-e5-large-instruct", - "BAAI/bge-small-en-v1.5", - "BAAI/bge-base-en-v1.5", - "BAAI/bge-large-en-v1.5", - ], - ], - "Jina AI": [ - "jinaAI", - [ - "jina-embeddings-v2-base-en", - "jina-embeddings-v2-base-de", - "jina-embeddings-v2-base-es", - "jina-embeddings-v2-base-code", - "jina-embeddings-v2-base-zh", - ], - ], - "Mistral AI": ["mistral", ["mistral-embed"]], - "NVIDIA": ["nvidia", ["NV-Embed-QA"]], - "OpenAI": ["openai", ["text-embedding-3-small", "text-embedding-3-large", "text-embedding-ada-002"]], - "Upstage": ["upstageAI", ["solar-embedding-1-large"]], - "Voyage AI": [ - "voyageAI", - ["voyage-large-2-instruct", "voyage-law-2", "voyage-code-2", "voyage-large-2", "voyage-2"], - ], - } - VECTORIZE_MODELS_STR = "\n\n".join( - [provider + ": " + (", ".join(models[1])) for provider, models in VECTORIZE_PROVIDERS_MAPPING.items()] - ) - - inputs = [ - DropdownInput( - name="provider", - display_name="Provider", - options=VECTORIZE_PROVIDERS_MAPPING.keys(), - value="", - required=True, - ), - MessageTextInput( - name="model_name", - display_name="Model Name", - info=f"The embedding model to use for the selected provider. Each provider has a different set of models " - f"available (full list at https://docs.datastax.com/en/astra-db-serverless/databases/embedding-generation.html):\n\n{VECTORIZE_MODELS_STR}", - required=True, - ), - MessageTextInput( - name="api_key_name", - display_name="API Key name", - info="The name of the embeddings provider API key stored on Astra. If set, it will override the 'ProviderKey' in the authentication parameters.", - ), - DictInput( - name="authentication", - display_name="Authentication parameters", - is_list=True, - advanced=True, - ), - SecretStrInput( - name="provider_api_key", - display_name="Provider API Key", - info="An alternative to the Astra Authentication that passes an API key for the provider with each request to Astra DB. 
This may be used when Vectorize is configured for the collection, but no corresponding provider secret is stored within Astra's key management system.", - advanced=True, - ), - DictInput( - name="authentication", - display_name="Authentication Parameters", - is_list=True, - advanced=True, - ), - DictInput( - name="model_parameters", - display_name="Model Parameters", - advanced=True, - is_list=True, - ), - ] - outputs = [ - Output(display_name="Vectorize", name="config", method="build_options", types=["dict"]), - ] - - def build_options(self) -> dict[str, Any]: - provider_value = self.VECTORIZE_PROVIDERS_MAPPING[self.provider][0] - authentication = {**(self.authentication or {})} - api_key_name = self.api_key_name - if api_key_name: - authentication["providerKey"] = api_key_name - return { - # must match astrapy.info.CollectionVectorServiceOptions - "collection_vector_service_options": { - "provider": provider_value, - "modelName": self.model_name, - "authentication": authentication, - "parameters": self.model_parameters or {}, - }, - "collection_embedding_api_key": self.provider_api_key, - } diff --git a/src/backend/base/langflow/components/embeddings/AzureOpenAIEmbeddings.py b/src/backend/base/langflow/components/embeddings/AzureOpenAIEmbeddings.py deleted file mode 100644 index c988ec6715d7..000000000000 --- a/src/backend/base/langflow/components/embeddings/AzureOpenAIEmbeddings.py +++ /dev/null @@ -1,72 +0,0 @@ -from langchain_openai import AzureOpenAIEmbeddings - -from langflow.base.models.model import LCModelComponent -from langflow.field_typing import Embeddings -from langflow.io import DropdownInput, IntInput, MessageTextInput, Output, SecretStrInput - - -class AzureOpenAIEmbeddingsComponent(LCModelComponent): - display_name: str = "Azure OpenAI Embeddings" - description: str = "Generate embeddings using Azure OpenAI models." - documentation: str = "https://python.langchain.com/docs/integrations/text_embedding/azureopenai" - icon = "Azure" - name = "AzureOpenAIEmbeddings" - - API_VERSION_OPTIONS = [ - "2022-12-01", - "2023-03-15-preview", - "2023-05-15", - "2023-06-01-preview", - "2023-07-01-preview", - "2023-08-01-preview", - ] - - inputs = [ - MessageTextInput( - name="azure_endpoint", - display_name="Azure Endpoint", - required=True, - info="Your Azure endpoint, including the resource. Example: `https://example-resource.azure.openai.com/`", - ), - MessageTextInput( - name="azure_deployment", - display_name="Deployment Name", - required=True, - ), - DropdownInput( - name="api_version", - display_name="API Version", - options=API_VERSION_OPTIONS, - value=API_VERSION_OPTIONS[-1], - advanced=True, - ), - SecretStrInput( - name="api_key", - display_name="API Key", - required=True, - ), - IntInput( - name="dimensions", - display_name="Dimensions", - info="The number of dimensions the resulting output embeddings should have. 
Only supported by certain models.", - advanced=True, - ), - ] - - outputs = [ - Output(display_name="Embeddings", name="embeddings", method="build_embeddings"), - ] - - def build_embeddings(self) -> Embeddings: - try: - embeddings = AzureOpenAIEmbeddings( - azure_endpoint=self.azure_endpoint, - azure_deployment=self.azure_deployment, - api_version=self.api_version, - api_key=self.api_key, - dimensions=self.dimensions or None, - ) - except Exception as e: - raise ValueError(f"Could not connect to AzureOpenAIEmbeddings API: {str(e)}") from e - - return embeddings diff --git a/src/backend/base/langflow/components/embeddings/CohereEmbeddings.py b/src/backend/base/langflow/components/embeddings/CohereEmbeddings.py deleted file mode 100644 index 6c72563389b9..000000000000 --- a/src/backend/base/langflow/components/embeddings/CohereEmbeddings.py +++ /dev/null @@ -1,46 +0,0 @@ -from langchain_community.embeddings.cohere import CohereEmbeddings - -from langflow.base.models.model import LCModelComponent -from langflow.field_typing import Embeddings -from langflow.io import DropdownInput, FloatInput, IntInput, MessageTextInput, Output, SecretStrInput - - -class CohereEmbeddingsComponent(LCModelComponent): - display_name = "Cohere Embeddings" - description = "Generate embeddings using Cohere models." - icon = "Cohere" - name = "CohereEmbeddings" - - inputs = [ - SecretStrInput(name="cohere_api_key", display_name="Cohere API Key"), - DropdownInput( - name="model", - display_name="Model", - advanced=True, - options=[ - "embed-english-v2.0", - "embed-multilingual-v2.0", - "embed-english-light-v2.0", - "embed-multilingual-light-v2.0", - ], - value="embed-english-v2.0", - ), - MessageTextInput(name="truncate", display_name="Truncate", advanced=True), - IntInput(name="max_retries", display_name="Max Retries", value=3, advanced=True), - MessageTextInput(name="user_agent", display_name="User Agent", advanced=True, value="langchain"), - FloatInput(name="request_timeout", display_name="Request Timeout", advanced=True), - ] - - outputs = [ - Output(display_name="Embeddings", name="embeddings", method="build_embeddings"), - ] - - def build_embeddings(self) -> Embeddings: - return CohereEmbeddings( # type: ignore - cohere_api_key=self.cohere_api_key, - model=self.model, - truncate=self.truncate, - max_retries=self.max_retries, - user_agent=self.user_agent, - request_timeout=self.request_timeout or None, - ) diff --git a/src/backend/base/langflow/components/embeddings/GoogleGenerativeAIEmbeddings.py b/src/backend/base/langflow/components/embeddings/GoogleGenerativeAIEmbeddings.py deleted file mode 100644 index 998764ad6e49..000000000000 --- a/src/backend/base/langflow/components/embeddings/GoogleGenerativeAIEmbeddings.py +++ /dev/null @@ -1,124 +0,0 @@ -# from langflow.field_typing import Data -from langflow.custom import Component -from langflow.io import MessageTextInput, Output, SecretStrInput -from langchain_google_genai import GoogleGenerativeAIEmbeddings - -from typing import List, Optional - -# TODO: remove ignore once the google package is published with types -from google.ai.generativelanguage_v1beta.types import ( - BatchEmbedContentsRequest, -) -from langchain_core.embeddings import Embeddings - -from langchain_google_genai._common import ( - GoogleGenerativeAIError, -) - -import numpy as np - - -class GoogleGenerativeAIEmbeddingsComponent(Component): - display_name = "Custom Component" - description = "Use as a template to create your own component." 
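The `HotaGoogleGenerativeAIEmbeddings` subclass below requests 1536-dimensional output but then extends every returned vector with 768 trailing zeros via `np.pad` (`models/text-embedding-004` natively emits 768-dimensional vectors). The padding step in isolation:

    import numpy as np

    vec = np.ones(768)                                 # e.g. a 768-dim embedding from text-embedding-004
    padded = list(np.pad(vec, (0, 768), "constant"))   # append 768 zeros, as the override below does
    assert len(padded) == 1536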
- documentation: str = "http://docs.langflow.org/components/custom" - icon = "custom_components" - name = "CustomComponent" - - inputs = [ - SecretStrInput(name="api_key", display_name="API Key"), - MessageTextInput(name="model_name", display_name="Model Name", value="models/text-embedding-004"), - ] - - outputs = [ - Output(display_name="Embeddings", name="embeddings", method="build_embeddings"), - ] - - def build_embeddings(self) -> Embeddings: - if not self.api_key: - raise ValueError("API Key is required") - - class HotaGoogleGenerativeAIEmbeddings(GoogleGenerativeAIEmbeddings): - def __init__(self, *args, **kwargs): - super(GoogleGenerativeAIEmbeddings, self).__init__(*args, **kwargs) - - def embed_documents( - self, - texts: List[str], - *, - batch_size: int = 100, - task_type: Optional[str] = None, - titles: Optional[List[str]] = None, - output_dimensionality: Optional[int] = 1536, - ) -> List[List[float]]: - """Embed a list of strings. Google Generative AI currently - sets a max batch size of 100 strings. - - Args: - texts: List[str] The list of strings to embed. - batch_size: [int] The batch size of embeddings to send to the model - task_type: task_type (https://ai.google.dev/api/rest/v1/TaskType) - titles: An optional list of titles for texts provided. - Only applicable when TaskType is RETRIEVAL_DOCUMENT. - output_dimensionality: Optional reduced dimension for the output embedding. - https://ai.google.dev/api/rest/v1/models/batchEmbedContents#EmbedContentRequest - Returns: - List of embeddings, one for each text. - """ - embeddings: List[List[float]] = [] - batch_start_index = 0 - for batch in GoogleGenerativeAIEmbeddings._prepare_batches(texts, batch_size): - if titles: - titles_batch = titles[batch_start_index : batch_start_index + len(batch)] - batch_start_index += len(batch) - else: - titles_batch = [None] * len(batch) # type: ignore[list-item] - - requests = [ - self._prepare_request( - text=text, - task_type=task_type, - title=title, - output_dimensionality=1536, - ) - for text, title in zip(batch, titles_batch) - ] - - try: - result = self.client.batch_embed_contents( - BatchEmbedContentsRequest(requests=requests, model=self.model) - ) - except Exception as e: - raise GoogleGenerativeAIError(f"Error embedding content: {e}") from e - embeddings.extend([list(np.pad(e.values, (0, 768), "constant")) for e in result.embeddings]) - return embeddings - - def embed_query( - self, - text: str, - task_type: Optional[str] = None, - title: Optional[str] = None, - output_dimensionality: Optional[int] = 1536, - ) -> List[float]: - """Embed a text. - - Args: - text: The text to embed. - task_type: task_type (https://ai.google.dev/api/rest/v1/TaskType) - title: An optional title for the text. - Only applicable when TaskType is RETRIEVAL_DOCUMENT. - output_dimensionality: Optional reduced dimension for the output embedding. - https://ai.google.dev/api/rest/v1/models/batchEmbedContents#EmbedContentRequest - - Returns: - Embedding for the text. 
- """ - task_type = self.task_type or "RETRIEVAL_QUERY" - return self.embed_documents( - [text], - task_type=task_type, - titles=[title] if title else None, - output_dimensionality=1536, - )[0] - - return HotaGoogleGenerativeAIEmbeddings(model=self.model_name, google_api_key=self.api_key) diff --git a/src/backend/base/langflow/components/embeddings/HuggingFaceInferenceAPIEmbeddings.py b/src/backend/base/langflow/components/embeddings/HuggingFaceInferenceAPIEmbeddings.py deleted file mode 100644 index b2b15c6ef37f..000000000000 --- a/src/backend/base/langflow/components/embeddings/HuggingFaceInferenceAPIEmbeddings.py +++ /dev/null @@ -1,32 +0,0 @@ -from langchain_community.embeddings.huggingface import HuggingFaceInferenceAPIEmbeddings -from pydantic.v1.types import SecretStr - -from langflow.base.models.model import LCModelComponent -from langflow.field_typing import Embeddings -from langflow.io import MessageTextInput, Output, SecretStrInput - - -class HuggingFaceInferenceAPIEmbeddingsComponent(LCModelComponent): - display_name = "HuggingFace Embeddings" - description = "Generate embeddings using Hugging Face Inference API models." - documentation = "https://github.com/huggingface/text-embeddings-inference" - icon = "HuggingFace" - name = "HuggingFaceInferenceAPIEmbeddings" - - inputs = [ - SecretStrInput(name="api_key", display_name="API Key"), - MessageTextInput(name="api_url", display_name="API URL", advanced=True, value="http://localhost:8080"), - MessageTextInput(name="model_name", display_name="Model Name", value="BAAI/bge-large-en-v1.5"), - ] - - outputs = [ - Output(display_name="Embeddings", name="embeddings", method="build_embeddings"), - ] - - def build_embeddings(self) -> Embeddings: - if not self.api_key: - raise ValueError("API Key is required") - - api_key = SecretStr(self.api_key) - - return HuggingFaceInferenceAPIEmbeddings(api_key=api_key, api_url=self.api_url, model_name=self.model_name) diff --git a/src/backend/base/langflow/components/embeddings/MistalAIEmbeddings.py b/src/backend/base/langflow/components/embeddings/MistalAIEmbeddings.py deleted file mode 100644 index f46a81f96ef9..000000000000 --- a/src/backend/base/langflow/components/embeddings/MistalAIEmbeddings.py +++ /dev/null @@ -1,57 +0,0 @@ -from langchain_mistralai.embeddings import MistralAIEmbeddings -from pydantic.v1 import SecretStr - -from langflow.base.models.model import LCModelComponent -from langflow.field_typing import Embeddings -from langflow.io import DropdownInput, IntInput, MessageTextInput, Output, SecretStrInput - - -class MistralAIEmbeddingsComponent(LCModelComponent): - display_name = "MistralAI Embeddings" - description = "Generate embeddings using MistralAI models." 
- icon = "MistralAI" - name = "MistalAIEmbeddings" - - inputs = [ - DropdownInput( - name="model", - display_name="Model", - advanced=False, - options=["mistral-embed"], - value="mistral-embed", - ), - SecretStrInput(name="mistral_api_key", display_name="Mistral API Key"), - IntInput( - name="max_concurrent_requests", - display_name="Max Concurrent Requests", - advanced=True, - value=64, - ), - IntInput(name="max_retries", display_name="Max Retries", advanced=True, value=5), - IntInput(name="timeout", display_name="Request Timeout", advanced=True, value=120), - MessageTextInput( - name="endpoint", - display_name="API Endpoint", - advanced=True, - value="https://api.mistral.ai/v1/", - ), - ] - - outputs = [ - Output(display_name="Embeddings", name="embeddings", method="build_embeddings"), - ] - - def build_embeddings(self) -> Embeddings: - if not self.mistral_api_key: - raise ValueError("Mistral API Key is required") - - api_key = SecretStr(self.mistral_api_key) - - return MistralAIEmbeddings( - api_key=api_key, - model=self.model, - endpoint=self.endpoint, - max_concurrent_requests=self.max_concurrent_requests, - max_retries=self.max_retries, - timeout=self.timeout, - ) diff --git a/src/backend/base/langflow/components/embeddings/NVIDIAEmbeddings.py b/src/backend/base/langflow/components/embeddings/NVIDIAEmbeddings.py deleted file mode 100644 index 31a3450d8aa4..000000000000 --- a/src/backend/base/langflow/components/embeddings/NVIDIAEmbeddings.py +++ /dev/null @@ -1,71 +0,0 @@ -from typing import Any - -from langflow.base.embeddings.model import LCEmbeddingsModel -from langflow.field_typing import Embeddings -from langflow.inputs.inputs import DropdownInput, SecretStrInput -from langflow.io import FloatInput, MessageTextInput -from langflow.schema.dotdict import dotdict - - -class NVIDIAEmbeddingsComponent(LCEmbeddingsModel): - display_name: str = "NVIDIA Embeddings" - description: str = "Generate embeddings using NVIDIA models." - icon = "NVIDIA" - - inputs = [ - DropdownInput( - name="model", - display_name="Model", - options=[ - "nvidia/nv-embed-v1", - "snowflake/arctic-embed-I", - ], - value="nvidia/nv-embed-v1", - ), - MessageTextInput( - name="base_url", - display_name="NVIDIA Base URL", - refresh_button=True, - value="https://integrate.api.nvidia.com/v1", - ), - SecretStrInput( - name="nvidia_api_key", - display_name="NVIDIA API Key", - info="The NVIDIA API Key.", - advanced=False, - value="NVIDIA_API_KEY", - ), - FloatInput( - name="temperature", - display_name="Model Temperature", - value=0.1, - advanced=True, - ), - ] - - def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None): - if field_name == "base_url" and field_value: - try: - build_model = self.build_embeddings() - ids = [model.id for model in build_model.available_models] # type: ignore - build_config["model"]["options"] = ids - build_config["model"]["value"] = ids[0] - except Exception as e: - raise ValueError(f"Error getting model names: {e}") - return build_config - - def build_embeddings(self) -> Embeddings: - try: - from langchain_nvidia_ai_endpoints import NVIDIAEmbeddings - except ImportError: - raise ImportError("Please install langchain-nvidia-ai-endpoints to use the Nvidia model.") - try: - output = NVIDIAEmbeddings( - model=self.model, - base_url=self.base_url, - temperature=self.temperature, - nvidia_api_key=self.nvidia_api_key, - ) # type: ignore - except Exception as e: - raise ValueError(f"Could not connect to NVIDIA API. 
Error: {e}") from e - return output diff --git a/src/backend/base/langflow/components/embeddings/OllamaEmbeddings.py b/src/backend/base/langflow/components/embeddings/OllamaEmbeddings.py deleted file mode 100644 index 07ae73a533f1..000000000000 --- a/src/backend/base/langflow/components/embeddings/OllamaEmbeddings.py +++ /dev/null @@ -1,47 +0,0 @@ -from langchain_community.embeddings import OllamaEmbeddings - -from langflow.base.models.model import LCModelComponent -from langflow.field_typing import Embeddings -from langflow.io import FloatInput, MessageTextInput, Output - - -class OllamaEmbeddingsComponent(LCModelComponent): - display_name: str = "Ollama Embeddings" - description: str = "Generate embeddings using Ollama models." - documentation = "https://python.langchain.com/docs/integrations/text_embedding/ollama" - icon = "Ollama" - name = "OllamaEmbeddings" - - inputs = [ - MessageTextInput( - name="model", - display_name="Ollama Model", - value="llama3.1", - ), - MessageTextInput( - name="base_url", - display_name="Ollama Base URL", - value="http://localhost:11434", - ), - FloatInput( - name="temperature", - display_name="Model Temperature", - value=0.1, - advanced=True, - ), - ] - - outputs = [ - Output(display_name="Embeddings", name="embeddings", method="build_embeddings"), - ] - - def build_embeddings(self) -> Embeddings: - try: - output = OllamaEmbeddings( - model=self.model, - base_url=self.base_url, - temperature=self.temperature, - ) # type: ignore - except Exception as e: - raise ValueError("Could not connect to Ollama API.") from e - return output diff --git a/src/backend/base/langflow/components/embeddings/OpenAIEmbeddings.py b/src/backend/base/langflow/components/embeddings/OpenAIEmbeddings.py deleted file mode 100644 index f37ee76413a7..000000000000 --- a/src/backend/base/langflow/components/embeddings/OpenAIEmbeddings.py +++ /dev/null @@ -1,98 +0,0 @@ -from langchain_openai.embeddings.base import OpenAIEmbeddings - -from langflow.base.embeddings.model import LCEmbeddingsModel -from langflow.base.models.openai_constants import OPENAI_EMBEDDING_MODEL_NAMES -from langflow.field_typing import Embeddings -from langflow.io import BoolInput, DictInput, DropdownInput, FloatInput, IntInput, MessageTextInput, SecretStrInput - - -class OpenAIEmbeddingsComponent(LCEmbeddingsModel): - display_name = "OpenAI Embeddings" - description = "Generate embeddings using OpenAI models." 
- icon = "OpenAI" - name = "OpenAIEmbeddings" - - inputs = [ - DictInput( - name="default_headers", - display_name="Default Headers", - advanced=True, - info="Default headers to use for the API request.", - ), - DictInput( - name="default_query", - display_name="Default Query", - advanced=True, - info="Default query parameters to use for the API request.", - ), - IntInput(name="chunk_size", display_name="Chunk Size", advanced=True, value=1000), - MessageTextInput(name="client", display_name="Client", advanced=True), - MessageTextInput(name="deployment", display_name="Deployment", advanced=True), - IntInput(name="embedding_ctx_length", display_name="Embedding Context Length", advanced=True, value=1536), - IntInput(name="max_retries", display_name="Max Retries", value=3, advanced=True), - DropdownInput( - name="model", - display_name="Model", - advanced=False, - options=OPENAI_EMBEDDING_MODEL_NAMES, - value="text-embedding-3-small", - ), - DictInput(name="model_kwargs", display_name="Model Kwargs", advanced=True), - SecretStrInput(name="openai_api_base", display_name="OpenAI API Base", advanced=True), - SecretStrInput(name="openai_api_key", display_name="OpenAI API Key", value="OPENAI_API_KEY"), - SecretStrInput(name="openai_api_type", display_name="OpenAI API Type", advanced=True), - MessageTextInput(name="openai_api_version", display_name="OpenAI API Version", advanced=True), - MessageTextInput( - name="openai_organization", - display_name="OpenAI Organization", - advanced=True, - ), - MessageTextInput(name="openai_proxy", display_name="OpenAI Proxy", advanced=True), - FloatInput(name="request_timeout", display_name="Request Timeout", advanced=True), - BoolInput(name="show_progress_bar", display_name="Show Progress Bar", advanced=True), - BoolInput(name="skip_empty", display_name="Skip Empty", advanced=True), - MessageTextInput( - name="tiktoken_model_name", - display_name="TikToken Model Name", - advanced=True, - ), - BoolInput( - name="tiktoken_enable", - display_name="TikToken Enable", - advanced=True, - value=True, - info="If False, you must have transformers installed.", - ), - IntInput( - name="dimensions", - display_name="Dimensions", - info="The number of dimensions the resulting output embeddings should have. 
Only supported by certain models.", - advanced=True, - ), - ] - - def build_embeddings(self) -> Embeddings: - return OpenAIEmbeddings( - tiktoken_enabled=self.tiktoken_enable, - default_headers=self.default_headers, - default_query=self.default_query, - allowed_special="all", - disallowed_special="all", - chunk_size=self.chunk_size, - deployment=self.deployment, - embedding_ctx_length=self.embedding_ctx_length, - max_retries=self.max_retries, - model=self.model, - model_kwargs=self.model_kwargs, - base_url=self.openai_api_base, - api_key=self.openai_api_key, - openai_api_type=self.openai_api_type, - api_version=self.openai_api_version, - organization=self.openai_organization, - openai_proxy=self.openai_proxy, - timeout=self.request_timeout or None, - show_progress_bar=self.show_progress_bar, - skip_empty=self.skip_empty, - tiktoken_model_name=self.tiktoken_model_name, - dimensions=self.dimensions or None, - ) diff --git a/src/backend/base/langflow/components/embeddings/VertexAIEmbeddings.py b/src/backend/base/langflow/components/embeddings/VertexAIEmbeddings.py deleted file mode 100644 index 5628203d8dbc..000000000000 --- a/src/backend/base/langflow/components/embeddings/VertexAIEmbeddings.py +++ /dev/null @@ -1,67 +0,0 @@ -from langflow.base.models.model import LCModelComponent -from langflow.field_typing import Embeddings -from langflow.io import BoolInput, FileInput, FloatInput, IntInput, MessageTextInput, Output - - -class VertexAIEmbeddingsComponent(LCModelComponent): - display_name = "VertexAI Embeddings" - description = "Generate embeddings using Google Cloud VertexAI models." - icon = "VertexAI" - name = "VertexAIEmbeddings" - - inputs = [ - FileInput( - name="credentials", - display_name="Credentials", - info="JSON credentials file. Leave empty to fallback to environment variables", - value="", - file_types=["json"], - ), - MessageTextInput(name="location", display_name="Location", value="us-central1", advanced=True), - MessageTextInput(name="project", display_name="Project", info="The project ID.", advanced=True), - IntInput(name="max_output_tokens", display_name="Max Output Tokens", advanced=True), - IntInput(name="max_retries", display_name="Max Retries", value=1, advanced=True), - MessageTextInput(name="model_name", display_name="Model Name", value="textembedding-gecko"), - IntInput(name="n", display_name="N", value=1, advanced=True), - IntInput(name="request_parallelism", value=5, display_name="Request Parallelism", advanced=True), - MessageTextInput(name="stop_sequences", display_name="Stop", advanced=True, is_list=True), - BoolInput(name="streaming", display_name="Streaming", value=False, advanced=True), - FloatInput(name="temperature", value=0.0, display_name="Temperature"), - IntInput(name="top_k", display_name="Top K", advanced=True), - FloatInput(name="top_p", display_name="Top P", value=0.95, advanced=True), - ] - - outputs = [ - Output(display_name="Embeddings", name="embeddings", method="build_embeddings"), - ] - - def build_embeddings(self) -> Embeddings: - try: - from langchain_google_vertexai import VertexAIEmbeddings - except ImportError: - raise ImportError( - "Please install the langchain-google-vertexai package to use the VertexAIEmbeddings component." 
- ) - - from google.oauth2 import service_account - - if self.credentials: - gcloud_credentials = service_account.Credentials.from_service_account_file(self.credentials) - else: - # will fallback to environment variable or inferred from gcloud CLI - gcloud_credentials = None - return VertexAIEmbeddings( - credentials=gcloud_credentials, - location=self.location, - max_output_tokens=self.max_output_tokens or None, - max_retries=self.max_retries, - model_name=self.model_name, - n=self.n, - project=self.project, - request_parallelism=self.request_parallelism, - stop=self.stop_sequences or None, - streaming=self.streaming, - temperature=self.temperature, - top_k=self.top_k or None, - top_p=self.top_p, - ) diff --git a/src/backend/base/langflow/components/embeddings/__init__.py b/src/backend/base/langflow/components/embeddings/__init__.py index 4aacee31b86b..100ff9ade10f 100644 --- a/src/backend/base/langflow/components/embeddings/__init__.py +++ b/src/backend/base/langflow/components/embeddings/__init__.py @@ -1,13 +1,18 @@ -from .AIMLEmbeddings import AIMLEmbeddingsComponent -from .AmazonBedrockEmbeddings import AmazonBedrockEmbeddingsComponent -from .AstraVectorize import AstraVectorizeComponent -from .AzureOpenAIEmbeddings import AzureOpenAIEmbeddingsComponent -from .CohereEmbeddings import CohereEmbeddingsComponent -from .HuggingFaceInferenceAPIEmbeddings import HuggingFaceInferenceAPIEmbeddingsComponent -from .OllamaEmbeddings import OllamaEmbeddingsComponent -from .OpenAIEmbeddings import OpenAIEmbeddingsComponent -from .VertexAIEmbeddings import VertexAIEmbeddingsComponent -from .GoogleGenerativeAIEmbeddings import GoogleGenerativeAIEmbeddingsComponent +from .aiml import AIMLEmbeddingsComponent +from .amazon_bedrock import AmazonBedrockEmbeddingsComponent +from .astra_vectorize import AstraVectorizeComponent +from .azure_openai import AzureOpenAIEmbeddingsComponent +from .cohere import CohereEmbeddingsComponent +from .google_generative_ai import GoogleGenerativeAIEmbeddingsComponent +from .huggingface_inference_api import HuggingFaceInferenceAPIEmbeddingsComponent +from .lmstudioembeddings import LMStudioEmbeddingsComponent +from .mistral import MistralAIEmbeddingsComponent +from .nvidia import NVIDIAEmbeddingsComponent +from .ollama import OllamaEmbeddingsComponent +from .openai import OpenAIEmbeddingsComponent +from .similarity import EmbeddingSimilarityComponent +from .text_embedder import TextEmbedderComponent +from .vertexai import VertexAIEmbeddingsComponent __all__ = [ "AIMLEmbeddingsComponent", @@ -15,9 +20,14 @@ "AstraVectorizeComponent", "AzureOpenAIEmbeddingsComponent", "CohereEmbeddingsComponent", + "GoogleGenerativeAIEmbeddingsComponent", "HuggingFaceInferenceAPIEmbeddingsComponent", + "LMStudioEmbeddingsComponent", + "MistralAIEmbeddingsComponent", + "NVIDIAEmbeddingsComponent", "OllamaEmbeddingsComponent", "OpenAIEmbeddingsComponent", + "EmbeddingSimilarityComponent", + "TextEmbedderComponent", "VertexAIEmbeddingsComponent", - "GoogleGenerativeAIEmbeddingsComponent", ] diff --git a/src/backend/base/langflow/components/embeddings/aiml.py b/src/backend/base/langflow/components/embeddings/aiml.py new file mode 100644 index 000000000000..fcdab9aaeeaf --- /dev/null +++ b/src/backend/base/langflow/components/embeddings/aiml.py @@ -0,0 +1,34 @@ +from langflow.base.embeddings.aiml_embeddings import AIMLEmbeddingsImpl +from langflow.base.embeddings.model import LCEmbeddingsModel +from langflow.base.models.aiml_constants import AIML_EMBEDDING_MODELS +from langflow.field_typing 
import Embeddings +from langflow.inputs.inputs import DropdownInput +from langflow.io import SecretStrInput + + +class AIMLEmbeddingsComponent(LCEmbeddingsModel): + display_name = "AI/ML Embeddings" + description = "Generate embeddings using the AI/ML API." + icon = "AI/ML" + name = "AIMLEmbeddings" + + inputs = [ + DropdownInput( + name="model_name", + display_name="Model Name", + options=AIML_EMBEDDING_MODELS, + required=True, + ), + SecretStrInput( + name="aiml_api_key", + display_name="AI/ML API Key", + value="AIML_API_KEY", + required=True, + ), + ] + + def build_embeddings(self) -> Embeddings: + return AIMLEmbeddingsImpl( + api_key=self.aiml_api_key, + model=self.model_name, + ) diff --git a/src/backend/base/langflow/components/embeddings/amazon_bedrock.py b/src/backend/base/langflow/components/embeddings/amazon_bedrock.py new file mode 100644 index 000000000000..caeafc91aa1a --- /dev/null +++ b/src/backend/base/langflow/components/embeddings/amazon_bedrock.py @@ -0,0 +1,107 @@ +from langflow.base.models.aws_constants import AWS_EMBEDDING_MODEL_IDS, AWS_REGIONS +from langflow.base.models.model import LCModelComponent +from langflow.field_typing import Embeddings +from langflow.inputs import SecretStrInput +from langflow.io import DropdownInput, MessageTextInput, Output + + +class AmazonBedrockEmbeddingsComponent(LCModelComponent): + display_name: str = "Amazon Bedrock Embeddings" + description: str = "Generate embeddings using Amazon Bedrock models." + icon = "Amazon" + name = "AmazonBedrockEmbeddings" + + inputs = [ + DropdownInput( + name="model_id", + display_name="Model Id", + options=AWS_EMBEDDING_MODEL_IDS, + value="amazon.titan-embed-text-v1", + ), + SecretStrInput( + name="aws_access_key_id", + display_name="AWS Access Key ID", + info="The access key for your AWS account." + "Usually set in Python code as the environment variable 'AWS_ACCESS_KEY_ID'.", + value="AWS_ACCESS_KEY_ID", + ), + SecretStrInput( + name="aws_secret_access_key", + display_name="AWS Secret Access Key", + info="The secret key for your AWS account. " + "Usually set in Python code as the environment variable 'AWS_SECRET_ACCESS_KEY'.", + value="AWS_SECRET_ACCESS_KEY", + ), + SecretStrInput( + name="aws_session_token", + display_name="AWS Session Token", + advanced=False, + info="The session key for your AWS account. " + "Only needed for temporary credentials. " + "Usually set in Python code as the environment variable 'AWS_SESSION_TOKEN'.", + value="AWS_SESSION_TOKEN", + ), + SecretStrInput( + name="credentials_profile_name", + display_name="Credentials Profile Name", + advanced=True, + info="The name of the profile to use from your " + "~/.aws/credentials file. " + "If not provided, the default profile will be used.", + value="AWS_CREDENTIALS_PROFILE_NAME", + ), + DropdownInput( + name="region_name", + display_name="Region Name", + value="us-east-1", + options=AWS_REGIONS, + info="The AWS region where your Bedrock resources are located.", + ), + MessageTextInput( + name="endpoint_url", + display_name="Endpoint URL", + advanced=True, + info="The URL of the AWS Bedrock endpoint to use.", + ), + ] + + outputs = [ + Output(display_name="Embeddings", name="embeddings", method="build_embeddings"), + ] + + def build_embeddings(self) -> Embeddings: + try: + from langchain_aws import BedrockEmbeddings + except ImportError as e: + msg = "langchain_aws is not installed. Please install it with `pip install langchain_aws`." 
+ raise ImportError(msg) from e + try: + import boto3 + except ImportError as e: + msg = "boto3 is not installed. Please install it with `pip install boto3`." + raise ImportError(msg) from e + if self.aws_access_key_id or self.aws_secret_access_key: + session = boto3.Session( + aws_access_key_id=self.aws_access_key_id, + aws_secret_access_key=self.aws_secret_access_key, + aws_session_token=self.aws_session_token, + ) + elif self.credentials_profile_name: + session = boto3.Session(profile_name=self.credentials_profile_name) + else: + session = boto3.Session() + + client_params = {} + if self.endpoint_url: + client_params["endpoint_url"] = self.endpoint_url + if self.region_name: + client_params["region_name"] = self.region_name + + boto3_client = session.client("bedrock-runtime", **client_params) + return BedrockEmbeddings( + credentials_profile_name=self.credentials_profile_name, + client=boto3_client, + model_id=self.model_id, + endpoint_url=self.endpoint_url, + region_name=self.region_name, + ) diff --git a/src/backend/base/langflow/components/embeddings/astra_vectorize.py b/src/backend/base/langflow/components/embeddings/astra_vectorize.py new file mode 100644 index 000000000000..1e2880cf3e0b --- /dev/null +++ b/src/backend/base/langflow/components/embeddings/astra_vectorize.py @@ -0,0 +1,123 @@ +from typing import Any + +from langflow.custom import Component +from langflow.inputs.inputs import DictInput, DropdownInput, MessageTextInput, SecretStrInput +from langflow.template.field.base import Output + + +class AstraVectorizeComponent(Component): + display_name: str = "Astra Vectorize [DEPRECATED]" + description: str = ( + "Configuration options for Astra Vectorize server-side embeddings. " + "This component is deprecated. Please use the Astra DB Component directly." + ) + documentation: str = "https://docs.datastax.com/en/astra-db-serverless/databases/embedding-generation.html" + icon = "AstraDB" + name = "AstraVectorize" + + VECTORIZE_PROVIDERS_MAPPING = { + "Azure OpenAI": ["azureOpenAI", ["text-embedding-3-small", "text-embedding-3-large", "text-embedding-ada-002"]], + "Hugging Face - Dedicated": ["huggingfaceDedicated", ["endpoint-defined-model"]], + "Hugging Face - Serverless": [ + "huggingface", + [ + "sentence-transformers/all-MiniLM-L6-v2", + "intfloat/multilingual-e5-large", + "intfloat/multilingual-e5-large-instruct", + "BAAI/bge-small-en-v1.5", + "BAAI/bge-base-en-v1.5", + "BAAI/bge-large-en-v1.5", + ], + ], + "Jina AI": [ + "jinaAI", + [ + "jina-embeddings-v2-base-en", + "jina-embeddings-v2-base-de", + "jina-embeddings-v2-base-es", + "jina-embeddings-v2-base-code", + "jina-embeddings-v2-base-zh", + ], + ], + "Mistral AI": ["mistral", ["mistral-embed"]], + "NVIDIA": ["nvidia", ["NV-Embed-QA"]], + "OpenAI": ["openai", ["text-embedding-3-small", "text-embedding-3-large", "text-embedding-ada-002"]], + "Upstage": ["upstageAI", ["solar-embedding-1-large"]], + "Voyage AI": [ + "voyageAI", + ["voyage-large-2-instruct", "voyage-law-2", "voyage-code-2", "voyage-large-2", "voyage-2"], + ], + } + VECTORIZE_MODELS_STR = "\n\n".join( + [provider + ": " + (", ".join(models[1])) for provider, models in VECTORIZE_PROVIDERS_MAPPING.items()] + ) + + inputs = [ + DropdownInput( + name="provider", + display_name="Provider", + options=VECTORIZE_PROVIDERS_MAPPING.keys(), + value="", + required=True, + ), + MessageTextInput( + name="model_name", + display_name="Model Name", + info="The embedding model to use for the selected provider. 
Each provider has a different set of models " + f"available (full list at https://docs.datastax.com/en/astra-db-serverless/databases/embedding-generation.html):\n\n{VECTORIZE_MODELS_STR}", + required=True, + ), + MessageTextInput( + name="api_key_name", + display_name="API Key name", + info="The name of the embeddings provider API key stored on Astra. " + "If set, it will override the 'ProviderKey' in the authentication parameters.", + ), + DictInput( + name="authentication", + display_name="Authentication parameters", + is_list=True, + advanced=True, + ), + SecretStrInput( + name="provider_api_key", + display_name="Provider API Key", + info="An alternative to the Astra Authentication that passes an API key for the provider with each request " + "to Astra DB. " + "This may be used when Vectorize is configured for the collection, " + "but no corresponding provider secret is stored within Astra's key management system.", + advanced=True, + ), + DictInput( + name="authentication", + display_name="Authentication Parameters", + is_list=True, + advanced=True, + ), + DictInput( + name="model_parameters", + display_name="Model Parameters", + advanced=True, + is_list=True, + ), + ] + outputs = [ + Output(display_name="Vectorize", name="config", method="build_options", types=["dict"]), + ] + + def build_options(self) -> dict[str, Any]: + provider_value = self.VECTORIZE_PROVIDERS_MAPPING[self.provider][0] + authentication = {**(self.authentication or {})} + api_key_name = self.api_key_name + if api_key_name: + authentication["providerKey"] = api_key_name + return { + # must match astrapy.info.CollectionVectorServiceOptions + "collection_vector_service_options": { + "provider": provider_value, + "modelName": self.model_name, + "authentication": authentication, + "parameters": self.model_parameters or {}, + }, + "collection_embedding_api_key": self.provider_api_key, + } diff --git a/src/backend/base/langflow/components/embeddings/azure_openai.py b/src/backend/base/langflow/components/embeddings/azure_openai.py new file mode 100644 index 000000000000..cf6fabd91e8b --- /dev/null +++ b/src/backend/base/langflow/components/embeddings/azure_openai.py @@ -0,0 +1,83 @@ +from langchain_openai import AzureOpenAIEmbeddings + +from langflow.base.models.model import LCModelComponent +from langflow.base.models.openai_constants import OPENAI_EMBEDDING_MODEL_NAMES +from langflow.field_typing import Embeddings +from langflow.io import DropdownInput, IntInput, MessageTextInput, Output, SecretStrInput + + +class AzureOpenAIEmbeddingsComponent(LCModelComponent): + display_name: str = "Azure OpenAI Embeddings" + description: str = "Generate embeddings using Azure OpenAI models." + documentation: str = "https://python.langchain.com/docs/integrations/text_embedding/azureopenai" + icon = "Azure" + name = "AzureOpenAIEmbeddings" + + API_VERSION_OPTIONS = [ + "2022-12-01", + "2023-03-15-preview", + "2023-05-15", + "2023-06-01-preview", + "2023-07-01-preview", + "2023-08-01-preview", + ] + + inputs = [ + DropdownInput( + name="model", + display_name="Model", + advanced=False, + options=OPENAI_EMBEDDING_MODEL_NAMES, + value=OPENAI_EMBEDDING_MODEL_NAMES[0], + ), + MessageTextInput( + name="azure_endpoint", + display_name="Azure Endpoint", + required=True, + info="Your Azure endpoint, including the resource. 
Example: `https://example-resource.azure.openai.com/`", + ), + MessageTextInput( + name="azure_deployment", + display_name="Deployment Name", + required=True, + ), + DropdownInput( + name="api_version", + display_name="API Version", + options=API_VERSION_OPTIONS, + value=API_VERSION_OPTIONS[-1], + advanced=True, + ), + SecretStrInput( + name="api_key", + display_name="API Key", + required=True, + ), + IntInput( + name="dimensions", + display_name="Dimensions", + info="The number of dimensions the resulting output embeddings should have. " + "Only supported by certain models.", + advanced=True, + ), + ] + + outputs = [ + Output(display_name="Embeddings", name="embeddings", method="build_embeddings"), + ] + + def build_embeddings(self) -> Embeddings: + try: + embeddings = AzureOpenAIEmbeddings( + model=self.model, + azure_endpoint=self.azure_endpoint, + azure_deployment=self.azure_deployment, + api_version=self.api_version, + api_key=self.api_key, + dimensions=self.dimensions or None, + ) + except Exception as e: + msg = f"Could not connect to AzureOpenAIEmbeddings API: {e}" + raise ValueError(msg) from e + + return embeddings diff --git a/src/backend/base/langflow/components/embeddings/cohere.py b/src/backend/base/langflow/components/embeddings/cohere.py new file mode 100644 index 000000000000..db71e3c8e2fc --- /dev/null +++ b/src/backend/base/langflow/components/embeddings/cohere.py @@ -0,0 +1,46 @@ +from langchain_community.embeddings.cohere import CohereEmbeddings + +from langflow.base.models.model import LCModelComponent +from langflow.field_typing import Embeddings +from langflow.io import DropdownInput, FloatInput, IntInput, MessageTextInput, Output, SecretStrInput + + +class CohereEmbeddingsComponent(LCModelComponent): + display_name = "Cohere Embeddings" + description = "Generate embeddings using Cohere models." 
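One stylistic shift running through the rewritten files (visible in azure_openai.py above and repeated below): exception messages are bound to a `msg` variable before raising instead of being inlined, which appears to satisfy ruff's EM101/EM102 rules. The shape, with `client_factory` as a hypothetical stand-in for any backend constructor:

    def build(client_factory):
        try:
            return client_factory()
        except Exception as e:
            msg = f"Could not connect to the embeddings API: {e}"  # bind first (ruff EM102)
            raise ValueError(msg) from e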
+ icon = "Cohere" + name = "CohereEmbeddings" + + inputs = [ + SecretStrInput(name="cohere_api_key", display_name="Cohere API Key"), + DropdownInput( + name="model", + display_name="Model", + advanced=True, + options=[ + "embed-english-v2.0", + "embed-multilingual-v2.0", + "embed-english-light-v2.0", + "embed-multilingual-light-v2.0", + ], + value="embed-english-v2.0", + ), + MessageTextInput(name="truncate", display_name="Truncate", advanced=True), + IntInput(name="max_retries", display_name="Max Retries", value=3, advanced=True), + MessageTextInput(name="user_agent", display_name="User Agent", advanced=True, value="langchain"), + FloatInput(name="request_timeout", display_name="Request Timeout", advanced=True), + ] + + outputs = [ + Output(display_name="Embeddings", name="embeddings", method="build_embeddings"), + ] + + def build_embeddings(self) -> Embeddings: + return CohereEmbeddings( + cohere_api_key=self.cohere_api_key, + model=self.model, + truncate=self.truncate, + max_retries=self.max_retries, + user_agent=self.user_agent, + request_timeout=self.request_timeout or None, + ) diff --git a/src/backend/base/langflow/components/embeddings/google_generative_ai.py b/src/backend/base/langflow/components/embeddings/google_generative_ai.py new file mode 100644 index 000000000000..edd2d63dd223 --- /dev/null +++ b/src/backend/base/langflow/components/embeddings/google_generative_ai.py @@ -0,0 +1,123 @@ +# from langflow.field_typing import Data +import numpy as np + +# TODO: remove ignore once the google package is published with types +from google.ai.generativelanguage_v1beta.types import BatchEmbedContentsRequest +from langchain_core.embeddings import Embeddings +from langchain_google_genai import GoogleGenerativeAIEmbeddings +from langchain_google_genai._common import GoogleGenerativeAIError + +from langflow.custom import Component +from langflow.io import MessageTextInput, Output, SecretStrInput + + +class GoogleGenerativeAIEmbeddingsComponent(Component): + display_name = "Google Generative AI Embeddings" + description = ( + "Connect to Google's generative AI embeddings service using the GoogleGenerativeAIEmbeddings class, " + "found in the langchain-google-genai package." + ) + documentation: str = "https://python.langchain.com/v0.2/docs/integrations/text_embedding/google_generative_ai/" + icon = "Google" + name = "Google Generative AI Embeddings" + + inputs = [ + SecretStrInput(name="api_key", display_name="API Key"), + MessageTextInput(name="model_name", display_name="Model Name", value="models/text-embedding-004"), + ] + + outputs = [ + Output(display_name="Embeddings", name="embeddings", method="build_embeddings"), + ] + + def build_embeddings(self) -> Embeddings: + if not self.api_key: + msg = "API Key is required" + raise ValueError(msg) + + class HotaGoogleGenerativeAIEmbeddings(GoogleGenerativeAIEmbeddings): + def __init__(self, *args, **kwargs) -> None: + super(GoogleGenerativeAIEmbeddings, self).__init__(*args, **kwargs) + + def embed_documents( + self, + texts: list[str], + *, + batch_size: int = 100, + task_type: str | None = None, + titles: list[str] | None = None, + output_dimensionality: int | None = 1536, + ) -> list[list[float]]: + """Embed a list of strings. + + Google Generative AI currently sets a max batch size of 100 strings. + + Args: + texts: List[str] The list of strings to embed. 
+ batch_size: [int] The batch size of embeddings to send to the model + task_type: task_type (https://ai.google.dev/api/rest/v1/TaskType) + titles: An optional list of titles for texts provided. + Only applicable when TaskType is RETRIEVAL_DOCUMENT. + output_dimensionality: Optional reduced dimension for the output embedding. + https://ai.google.dev/api/rest/v1/models/batchEmbedContents#EmbedContentRequest + Returns: + List of embeddings, one for each text. + """ + embeddings: list[list[float]] = [] + batch_start_index = 0 + for batch in GoogleGenerativeAIEmbeddings._prepare_batches(texts, batch_size): + if titles: + titles_batch = titles[batch_start_index : batch_start_index + len(batch)] + batch_start_index += len(batch) + else: + titles_batch = [None] * len(batch) # type: ignore[list-item] + + requests = [ + self._prepare_request( + text=text, + task_type=task_type, + title=title, + output_dimensionality=output_dimensionality, + ) + for text, title in zip(batch, titles_batch, strict=True) + ] + + try: + result = self.client.batch_embed_contents( + BatchEmbedContentsRequest(requests=requests, model=self.model) + ) + except Exception as e: + msg = f"Error embedding content: {e}" + raise GoogleGenerativeAIError(msg) from e + embeddings.extend([list(np.pad(e.values, (0, 768), "constant")) for e in result.embeddings]) + return embeddings + + def embed_query( + self, + text: str, + task_type: str | None = None, + title: str | None = None, + output_dimensionality: int | None = 1536, + ) -> list[float]: + """Embed a text. + + Args: + text: The text to embed. + task_type: task_type (https://ai.google.dev/api/rest/v1/TaskType) + title: An optional title for the text. + Only applicable when TaskType is RETRIEVAL_DOCUMENT. + output_dimensionality: Optional reduced dimension for the output embedding. + https://ai.google.dev/api/rest/v1/models/batchEmbedContents#EmbedContentRequest + + Returns: + Embedding for the text. + """ + task_type = task_type or "RETRIEVAL_QUERY" + return self.embed_documents( + [text], + task_type=task_type, + titles=[title] if title else None, + output_dimensionality=output_dimensionality, + )[0] + + return HotaGoogleGenerativeAIEmbeddings(model=self.model_name, google_api_key=self.api_key) diff --git a/src/backend/base/langflow/components/embeddings/huggingface_inference_api.py b/src/backend/base/langflow/components/embeddings/huggingface_inference_api.py new file mode 100644 index 000000000000..1338b125b734 --- /dev/null +++ b/src/backend/base/langflow/components/embeddings/huggingface_inference_api.py @@ -0,0 +1,100 @@ +from urllib.parse import urlparse + +import requests +from langchain_community.embeddings.huggingface import HuggingFaceInferenceAPIEmbeddings +from pydantic.v1.types import SecretStr +from tenacity import retry, stop_after_attempt, wait_fixed + +from langflow.base.embeddings.model import LCEmbeddingsModel +from langflow.field_typing import Embeddings +from langflow.io import MessageTextInput, Output, SecretStrInput + + +class HuggingFaceInferenceAPIEmbeddingsComponent(LCEmbeddingsModel): + display_name = "HuggingFace Embeddings Inference" + description = "Generate embeddings using HuggingFace Text Embeddings Inference (TEI)" + documentation = "https://huggingface.co/docs/text-embeddings-inference/index" + icon = "HuggingFace" + name = "HuggingFaceInferenceAPIEmbeddings" + + inputs = [ + SecretStrInput( + name="api_key", + display_name="API Key", + advanced=True, + info="Required for non-local inference endpoints. 
Local inference does not require an API Key.", + ), + MessageTextInput( + name="inference_endpoint", + display_name="Inference Endpoint", + required=True, + value="https://api-inference.huggingface.co/models/", + info="Custom inference endpoint URL.", + ), + MessageTextInput( + name="model_name", + display_name="Model Name", + value="BAAI/bge-large-en-v1.5", + info="The name of the model to use for text embeddings.", + ), + ] + + outputs = [ + Output(display_name="Embeddings", name="embeddings", method="build_embeddings"), + ] + + def validate_inference_endpoint(self, inference_endpoint: str) -> bool: + parsed_url = urlparse(inference_endpoint) + if not all([parsed_url.scheme, parsed_url.netloc]): + msg = ( + f"Invalid inference endpoint format: '{self.inference_endpoint}'. " + "Please ensure the URL includes both a scheme (e.g., 'http://' or 'https://') and a domain name. " + "Example: 'http://localhost:8080' or 'https://api.example.com'" + ) + raise ValueError(msg) + + try: + response = requests.get(f"{inference_endpoint}/health", timeout=5) + except requests.RequestException as e: + msg = ( + f"Inference endpoint '{inference_endpoint}' is not responding. " + "Please ensure the URL is correct and the service is running." + ) + raise ValueError(msg) from e + + if response.status_code != requests.codes.ok: + msg = f"HuggingFace health check failed: {response.status_code}" + raise ValueError(msg) + # returning True to solve linting error + return True + + def get_api_url(self) -> str: + if "huggingface" in self.inference_endpoint.lower(): + return f"{self.inference_endpoint}{self.model_name}" + return self.inference_endpoint + + @retry(stop=stop_after_attempt(3), wait=wait_fixed(2)) + def create_huggingface_embeddings( + self, api_key: SecretStr, api_url: str, model_name: str + ) -> HuggingFaceInferenceAPIEmbeddings: + return HuggingFaceInferenceAPIEmbeddings(api_key=api_key, api_url=api_url, model_name=model_name) + + def build_embeddings(self) -> Embeddings: + api_url = self.get_api_url() + + is_local_url = api_url.startswith(("http://localhost", "http://127.0.0.1")) + + if not self.api_key and is_local_url: + self.validate_inference_endpoint(api_url) + api_key = SecretStr("DummyAPIKeyForLocalDeployment") + elif not self.api_key: + msg = "API Key is required for non-local inference endpoints" + raise ValueError(msg) + else: + api_key = SecretStr(self.api_key).get_secret_value() + + try: + return self.create_huggingface_embeddings(api_key, api_url, self.model_name) + except Exception as e: + msg = "Could not connect to HuggingFace Inference API." + raise ValueError(msg) from e diff --git a/src/backend/base/langflow/components/embeddings/lmstudioembeddings.py b/src/backend/base/langflow/components/embeddings/lmstudioembeddings.py new file mode 100644 index 000000000000..40987197472a --- /dev/null +++ b/src/backend/base/langflow/components/embeddings/lmstudioembeddings.py @@ -0,0 +1,88 @@ +from typing import Any +from urllib.parse import urljoin + +import httpx +from typing_extensions import override + +from langflow.base.embeddings.model import LCEmbeddingsModel +from langflow.field_typing import Embeddings +from langflow.inputs.inputs import DropdownInput, SecretStrInput +from langflow.io import FloatInput, MessageTextInput + + +class LMStudioEmbeddingsComponent(LCEmbeddingsModel): + display_name: str = "LM Studio Embeddings" + description: str = "Generate embeddings using LM Studio." 
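The Hugging Face component above wraps client construction in tenacity's retry decorator, retrying up to three times with a fixed two-second pause between attempts. The decorator in isolation, around a hypothetical `connect` call:

    from tenacity import retry, stop_after_attempt, wait_fixed

    @retry(stop=stop_after_attempt(3), wait=wait_fixed(2))
    def connect():
        # tenacity re-invokes this on any exception and re-raises after the third failure
        ...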
+ icon = "LMStudio" + + @override + def update_build_config(self, build_config: dict, field_value: Any, field_name: str | None = None): + if field_name == "model": + base_url_dict = build_config.get("base_url", {}) + base_url_load_from_db = base_url_dict.get("load_from_db", False) + base_url_value = base_url_dict.get("value") + if base_url_load_from_db: + base_url_value = self.variables(base_url_value) + elif not base_url_value: + base_url_value = "http://localhost:1234/v1" + build_config["model"]["options"] = self.get_model(base_url_value) + + return build_config + + def get_model(self, base_url_value: str) -> list[str]: + try: + url = urljoin(base_url_value, "/v1/models") + with httpx.Client() as client: + response = client.get(url) + response.raise_for_status() + data = response.json() + + return [model["id"] for model in data.get("data", [])] + except Exception as e: + msg = "Could not retrieve models. Please, make sure the LM Studio server is running." + raise ValueError(msg) from e + + inputs = [ + DropdownInput( + name="model", + display_name="Model", + advanced=False, + refresh_button=True, + ), + MessageTextInput( + name="base_url", + display_name="LM Studio Base URL", + refresh_button=True, + value="http://localhost:1234/v1", + ), + SecretStrInput( + name="api_key", + display_name="LM Studio API Key", + advanced=True, + value="LMSTUDIO_API_KEY", + ), + FloatInput( + name="temperature", + display_name="Model Temperature", + value=0.1, + advanced=True, + ), + ] + + def build_embeddings(self) -> Embeddings: + try: + from langchain_nvidia_ai_endpoints import NVIDIAEmbeddings + except ImportError as e: + msg = "Please install langchain-nvidia-ai-endpoints to use LM Studio Embeddings." + raise ImportError(msg) from e + try: + output = NVIDIAEmbeddings( + model=self.model, + base_url=self.base_url, + temperature=self.temperature, + nvidia_api_key=self.api_key, + ) + except Exception as e: + msg = f"Could not connect to LM Studio API. Error: {e}" + raise ValueError(msg) from e + return output diff --git a/src/backend/base/langflow/components/embeddings/mistral.py b/src/backend/base/langflow/components/embeddings/mistral.py new file mode 100644 index 000000000000..7aaec00b3f9a --- /dev/null +++ b/src/backend/base/langflow/components/embeddings/mistral.py @@ -0,0 +1,58 @@ +from langchain_mistralai.embeddings import MistralAIEmbeddings +from pydantic.v1 import SecretStr + +from langflow.base.models.model import LCModelComponent +from langflow.field_typing import Embeddings +from langflow.io import DropdownInput, IntInput, MessageTextInput, Output, SecretStrInput + + +class MistralAIEmbeddingsComponent(LCModelComponent): + display_name = "MistralAI Embeddings" + description = "Generate embeddings using MistralAI models." 
+ icon = "MistralAI" + name = "MistalAIEmbeddings" + + inputs = [ + DropdownInput( + name="model", + display_name="Model", + advanced=False, + options=["mistral-embed"], + value="mistral-embed", + ), + SecretStrInput(name="mistral_api_key", display_name="Mistral API Key"), + IntInput( + name="max_concurrent_requests", + display_name="Max Concurrent Requests", + advanced=True, + value=64, + ), + IntInput(name="max_retries", display_name="Max Retries", advanced=True, value=5), + IntInput(name="timeout", display_name="Request Timeout", advanced=True, value=120), + MessageTextInput( + name="endpoint", + display_name="API Endpoint", + advanced=True, + value="https://api.mistral.ai/v1/", + ), + ] + + outputs = [ + Output(display_name="Embeddings", name="embeddings", method="build_embeddings"), + ] + + def build_embeddings(self) -> Embeddings: + if not self.mistral_api_key: + msg = "Mistral API Key is required" + raise ValueError(msg) + + api_key = SecretStr(self.mistral_api_key).get_secret_value() + + return MistralAIEmbeddings( + api_key=api_key, + model=self.model, + endpoint=self.endpoint, + max_concurrent_requests=self.max_concurrent_requests, + max_retries=self.max_retries, + timeout=self.timeout, + ) diff --git a/src/backend/base/langflow/components/embeddings/nvidia.py b/src/backend/base/langflow/components/embeddings/nvidia.py new file mode 100644 index 000000000000..1aca0a33df0f --- /dev/null +++ b/src/backend/base/langflow/components/embeddings/nvidia.py @@ -0,0 +1,74 @@ +from typing import Any + +from langflow.base.embeddings.model import LCEmbeddingsModel +from langflow.field_typing import Embeddings +from langflow.inputs.inputs import DropdownInput, SecretStrInput +from langflow.io import FloatInput, MessageTextInput +from langflow.schema.dotdict import dotdict + + +class NVIDIAEmbeddingsComponent(LCEmbeddingsModel): + display_name: str = "NVIDIA Embeddings" + description: str = "Generate embeddings using NVIDIA models." + icon = "NVIDIA" + + inputs = [ + DropdownInput( + name="model", + display_name="Model", + options=[ + "nvidia/nv-embed-v1", + "snowflake/arctic-embed-I", + ], + value="nvidia/nv-embed-v1", + ), + MessageTextInput( + name="base_url", + display_name="NVIDIA Base URL", + refresh_button=True, + value="https://integrate.api.nvidia.com/v1", + ), + SecretStrInput( + name="nvidia_api_key", + display_name="NVIDIA API Key", + info="The NVIDIA API Key.", + advanced=False, + value="NVIDIA_API_KEY", + ), + FloatInput( + name="temperature", + display_name="Model Temperature", + value=0.1, + advanced=True, + ), + ] + + def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None): + if field_name == "base_url" and field_value: + try: + build_model = self.build_embeddings() + ids = [model.id for model in build_model.available_models] + build_config["model"]["options"] = ids + build_config["model"]["value"] = ids[0] + except Exception as e: + msg = f"Error getting model names: {e}" + raise ValueError(msg) from e + return build_config + + def build_embeddings(self) -> Embeddings: + try: + from langchain_nvidia_ai_endpoints import NVIDIAEmbeddings + except ImportError as e: + msg = "Please install langchain-nvidia-ai-endpoints to use the Nvidia model." + raise ImportError(msg) from e + try: + output = NVIDIAEmbeddings( + model=self.model, + base_url=self.base_url, + temperature=self.temperature, + nvidia_api_key=self.nvidia_api_key, + ) + except Exception as e: + msg = f"Could not connect to NVIDIA API. 
Error: {e}" + raise ValueError(msg) from e + return output diff --git a/src/backend/base/langflow/components/embeddings/ollama.py b/src/backend/base/langflow/components/embeddings/ollama.py new file mode 100644 index 000000000000..50da1ea7b112 --- /dev/null +++ b/src/backend/base/langflow/components/embeddings/ollama.py @@ -0,0 +1,48 @@ +from langchain_ollama import OllamaEmbeddings + +from langflow.base.models.model import LCModelComponent +from langflow.field_typing import Embeddings +from langflow.io import FloatInput, MessageTextInput, Output + + +class OllamaEmbeddingsComponent(LCModelComponent): + display_name: str = "Ollama Embeddings" + description: str = "Generate embeddings using Ollama models." + documentation = "https://python.langchain.com/docs/integrations/text_embedding/ollama" + icon = "Ollama" + name = "OllamaEmbeddings" + + inputs = [ + MessageTextInput( + name="model", + display_name="Ollama Model", + value="llama3.1", + ), + MessageTextInput( + name="base_url", + display_name="Ollama Base URL", + value="http://localhost:11434", + ), + FloatInput( + name="temperature", + display_name="Model Temperature", + value=0.1, + advanced=True, + ), + ] + + outputs = [ + Output(display_name="Embeddings", name="embeddings", method="build_embeddings"), + ] + + def build_embeddings(self) -> Embeddings: + try: + output = OllamaEmbeddings( + model=self.model, + base_url=self.base_url, + temperature=self.temperature, + ) + except Exception as e: + msg = "Could not connect to Ollama API." + raise ValueError(msg) from e + return output diff --git a/src/backend/base/langflow/components/embeddings/openai.py b/src/backend/base/langflow/components/embeddings/openai.py new file mode 100644 index 000000000000..6c075b97d6e2 --- /dev/null +++ b/src/backend/base/langflow/components/embeddings/openai.py @@ -0,0 +1,100 @@ +from langchain_openai import OpenAIEmbeddings + +from langflow.base.embeddings.model import LCEmbeddingsModel +from langflow.base.models.openai_constants import OPENAI_EMBEDDING_MODEL_NAMES +from langflow.field_typing import Embeddings +from langflow.io import BoolInput, DictInput, DropdownInput, FloatInput, IntInput, MessageTextInput, SecretStrInput + + +class OpenAIEmbeddingsComponent(LCEmbeddingsModel): + display_name = "OpenAI Embeddings" + description = "Generate embeddings using OpenAI models." 
+ icon = "OpenAI" + name = "OpenAIEmbeddings" + + inputs = [ + DictInput( + name="default_headers", + display_name="Default Headers", + advanced=True, + info="Default headers to use for the API request.", + ), + DictInput( + name="default_query", + display_name="Default Query", + advanced=True, + info="Default query parameters to use for the API request.", + ), + IntInput(name="chunk_size", display_name="Chunk Size", advanced=True, value=1000), + MessageTextInput(name="client", display_name="Client", advanced=True), + MessageTextInput(name="deployment", display_name="Deployment", advanced=True), + IntInput(name="embedding_ctx_length", display_name="Embedding Context Length", advanced=True, value=1536), + IntInput(name="max_retries", display_name="Max Retries", value=3, advanced=True), + DropdownInput( + name="model", + display_name="Model", + advanced=False, + options=OPENAI_EMBEDDING_MODEL_NAMES, + value="text-embedding-3-small", + ), + DictInput(name="model_kwargs", display_name="Model Kwargs", advanced=True), + SecretStrInput(name="openai_api_key", display_name="OpenAI API Key", value="OPENAI_API_KEY"), + MessageTextInput(name="openai_api_base", display_name="OpenAI API Base", advanced=True), + MessageTextInput(name="openai_api_type", display_name="OpenAI API Type", advanced=True), + MessageTextInput(name="openai_api_version", display_name="OpenAI API Version", advanced=True), + MessageTextInput( + name="openai_organization", + display_name="OpenAI Organization", + advanced=True, + ), + MessageTextInput(name="openai_proxy", display_name="OpenAI Proxy", advanced=True), + FloatInput(name="request_timeout", display_name="Request Timeout", advanced=True), + BoolInput(name="show_progress_bar", display_name="Show Progress Bar", advanced=True), + BoolInput(name="skip_empty", display_name="Skip Empty", advanced=True), + MessageTextInput( + name="tiktoken_model_name", + display_name="TikToken Model Name", + advanced=True, + ), + BoolInput( + name="tiktoken_enable", + display_name="TikToken Enable", + advanced=True, + value=True, + info="If False, you must have transformers installed.", + ), + IntInput( + name="dimensions", + display_name="Dimensions", + info="The number of dimensions the resulting output embeddings should have. 
" + "Only supported by certain models.", + advanced=True, + ), + ] + + def build_embeddings(self) -> Embeddings: + return OpenAIEmbeddings( + client=self.client or None, + model=self.model, + dimensions=self.dimensions or None, + deployment=self.deployment or None, + api_version=self.openai_api_version or None, + base_url=self.openai_api_base or None, + openai_api_type=self.openai_api_type or None, + openai_proxy=self.openai_proxy or None, + embedding_ctx_length=self.embedding_ctx_length, + api_key=self.openai_api_key or None, + organization=self.openai_organization or None, + allowed_special="all", + disallowed_special="all", + chunk_size=self.chunk_size, + max_retries=self.max_retries, + timeout=self.request_timeout or None, + tiktoken_enabled=self.tiktoken_enable, + tiktoken_model_name=self.tiktoken_model_name or None, + show_progress_bar=self.show_progress_bar, + model_kwargs=self.model_kwargs, + skip_empty=self.skip_empty, + default_headers=self.default_headers or None, + default_query=self.default_query or None, + ) diff --git a/src/backend/base/langflow/components/embeddings/similarity.py b/src/backend/base/langflow/components/embeddings/similarity.py new file mode 100644 index 000000000000..914943edb5c7 --- /dev/null +++ b/src/backend/base/langflow/components/embeddings/similarity.py @@ -0,0 +1,72 @@ +import numpy as np + +from langflow.custom import Component +from langflow.io import DataInput, DropdownInput, Output +from langflow.schema import Data + + +class EmbeddingSimilarityComponent(Component): + display_name: str = "Embedding Similarity" + description: str = "Compute selected form of similarity between two embedding vectors." + icon = "equal" + + inputs = [ + DataInput( + name="embedding_vectors", + display_name="Embedding Vectors", + info="A list containing exactly two data objects with embedding vectors to compare.", + is_list=True, + ), + DropdownInput( + name="similarity_metric", + display_name="Similarity Metric", + info="Select the similarity metric to use.", + options=["Cosine Similarity", "Euclidean Distance", "Manhattan Distance"], + value="Cosine Similarity", + ), + ] + + outputs = [ + Output(display_name="Similarity Data", name="similarity_data", method="compute_similarity"), + ] + + def compute_similarity(self) -> Data: + embedding_vectors: list[Data] = self.embedding_vectors + + # Assert that the list contains exactly two Data objects + if len(embedding_vectors) != 2: # noqa: PLR2004 + msg = "Exactly two embedding vectors are required." 
+ raise ValueError(msg) + + embedding_1 = np.array(embedding_vectors[0].data["embeddings"]) + embedding_2 = np.array(embedding_vectors[1].data["embeddings"]) + + if embedding_1.shape != embedding_2.shape: + similarity_score = {"error": "Embeddings must have the same dimensions."} + else: + similarity_metric = self.similarity_metric + + if similarity_metric == "Cosine Similarity": + score = np.dot(embedding_1, embedding_2) / (np.linalg.norm(embedding_1) * np.linalg.norm(embedding_2)) + similarity_score = {"cosine_similarity": score} + + elif similarity_metric == "Euclidean Distance": + score = np.linalg.norm(embedding_1 - embedding_2) + similarity_score = {"euclidean_distance": score} + + elif similarity_metric == "Manhattan Distance": + score = np.sum(np.abs(embedding_1 - embedding_2)) + similarity_score = {"manhattan_distance": score} + + # Create a Data object to encapsulate the similarity score and additional information + similarity_data = Data( + data={ + "embedding_1": embedding_vectors[0].data["embeddings"], + "embedding_2": embedding_vectors[1].data["embeddings"], + "similarity_score": similarity_score, + }, + text_key="similarity_score", + ) + + self.status = similarity_data + return similarity_data diff --git a/src/backend/base/langflow/components/embeddings/text_embedder.py b/src/backend/base/langflow/components/embeddings/text_embedder.py new file mode 100644 index 000000000000..d9a40e2ec285 --- /dev/null +++ b/src/backend/base/langflow/components/embeddings/text_embedder.py @@ -0,0 +1,81 @@ +import logging +from typing import TYPE_CHECKING + +from langflow.custom import Component +from langflow.io import HandleInput, MessageInput, Output +from langflow.schema import Data + +if TYPE_CHECKING: + from langflow.field_typing import Embeddings + from langflow.schema.message import Message + + +class TextEmbedderComponent(Component): + display_name: str = "Text Embedder" + description: str = "Generate embeddings for a given message using the specified embedding model." 
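# [editor's sketch] Quick numeric check of the three metrics implemented above, on a
# pair of orthogonal unit vectors.
import numpy as np

a, b = np.array([1.0, 0.0]), np.array([0.0, 1.0])
cosine = np.dot(a, b) / (np.linalg.norm(a) * np.linalg.norm(b))  # 0.0
euclidean = np.linalg.norm(a - b)                                # sqrt(2) ~ 1.414
manhattan = np.sum(np.abs(a - b))                                # 2.0
# [end sketch]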
+ icon = "binary" + inputs = [ + HandleInput( + name="embedding_model", + display_name="Embedding Model", + info="The embedding model to use for generating embeddings.", + input_types=["Embeddings"], + ), + MessageInput( + name="message", + display_name="Message", + info="The message to generate embeddings for.", + ), + ] + outputs = [ + Output(display_name="Embedding Data", name="embeddings", method="generate_embeddings"), + ] + + def generate_embeddings(self) -> Data: + try: + embedding_model: Embeddings = self.embedding_model + message: Message = self.message + + # Validate embedding model + if not embedding_model: + msg = "Embedding model not provided" + raise ValueError(msg) + + # Extract the text content from the message + text_content = message.text if message and message.text else "" + if not text_content: + msg = "No text content found in message" + raise ValueError(msg) + + # Check if the embedding model has the required attributes + if not hasattr(embedding_model, "client") or not embedding_model.client: + msg = "Embedding model client not properly initialized" + raise ValueError(msg) + + # Ensure the base URL has proper protocol + if hasattr(embedding_model.client, "base_url"): + base_url = embedding_model.client.base_url + if not base_url.startswith(("http://", "https://")): + embedding_model.client.base_url = f"https://{base_url}" + + # Generate embeddings using the provided embedding model + embeddings = embedding_model.embed_documents([text_content]) + + # Validate embeddings output + if not embeddings or not isinstance(embeddings, list): + msg = "Invalid embeddings generated" + raise ValueError(msg) + + embedding_vector = embeddings[0] + + except Exception as e: + logging.exception("Error generating embeddings") + # Return empty data with error status + error_data = Data(data={"text": "", "embeddings": [], "error": str(e)}) + self.status = {"error": str(e)} + return error_data + + # Create a Data object to encapsulate the results + result_data = Data(data={"text": text_content, "embeddings": embedding_vector}) + self.status = {"text": text_content, "embeddings": embedding_vector} + return result_data diff --git a/src/backend/base/langflow/components/embeddings/util/AIMLEmbeddingsImpl.py b/src/backend/base/langflow/components/embeddings/util/AIMLEmbeddingsImpl.py deleted file mode 100644 index 34859dd81005..000000000000 --- a/src/backend/base/langflow/components/embeddings/util/AIMLEmbeddingsImpl.py +++ /dev/null @@ -1,62 +0,0 @@ -import concurrent.futures -import json -from typing import List - -import httpx -from langchain_core.pydantic_v1 import BaseModel, SecretStr -from loguru import logger - -from langflow.field_typing import Embeddings - - -class AIMLEmbeddingsImpl(BaseModel, Embeddings): - embeddings_completion_url: str = "https://api.aimlapi.com/v1/embeddings" - - api_key: SecretStr - model: str - - def embed_documents(self, texts: List[str]) -> List[List[float]]: - embeddings = [None] * len(texts) - headers = { - "Content-Type": "application/json", - "Authorization": f"Bearer {self.api_key.get_secret_value()}", - } - - with httpx.Client() as client: - with concurrent.futures.ThreadPoolExecutor() as executor: - futures = [] - for i, text in enumerate(texts): - futures.append((i, executor.submit(self._embed_text, client, headers, text))) - - for index, future in futures: - try: - result_data = future.result() - assert len(result_data["data"]) == 1, "Expected one embedding" - embeddings[index] = result_data["data"][0]["embedding"] - except ( - httpx.HTTPStatusError, 
- httpx.RequestError, - json.JSONDecodeError, - KeyError, - ) as e: - logger.error(f"Error occurred: {e}") - raise - - return embeddings # type: ignore - - def _embed_text(self, client: httpx.Client, headers: dict, text: str) -> dict: - payload = { - "model": self.model, - "input": text, - } - response = client.post( - self.embeddings_completion_url, - headers=headers, - json=payload, - ) - response.raise_for_status() - result_data = response.json() - return result_data - - def embed_query(self, text: str) -> List[float]: - return self.embed_documents([text])[0] diff --git a/src/backend/base/langflow/components/embeddings/vertexai.py b/src/backend/base/langflow/components/embeddings/vertexai.py new file mode 100644 index 000000000000..6c74f265134e --- /dev/null +++ b/src/backend/base/langflow/components/embeddings/vertexai.py @@ -0,0 +1,66 @@ +from langflow.base.models.model import LCModelComponent +from langflow.field_typing import Embeddings +from langflow.io import BoolInput, FileInput, FloatInput, IntInput, MessageTextInput, Output + + +class VertexAIEmbeddingsComponent(LCModelComponent): + display_name = "VertexAI Embeddings" + description = "Generate embeddings using Google Cloud VertexAI models." + icon = "VertexAI" + name = "VertexAIEmbeddings" + + inputs = [ + FileInput( + name="credentials", + display_name="Credentials", + info="JSON credentials file. Leave empty to fallback to environment variables", + value="", + file_types=["json"], + ), + MessageTextInput(name="location", display_name="Location", value="us-central1", advanced=True), + MessageTextInput(name="project", display_name="Project", info="The project ID.", advanced=True), + IntInput(name="max_output_tokens", display_name="Max Output Tokens", advanced=True), + IntInput(name="max_retries", display_name="Max Retries", value=1, advanced=True), + MessageTextInput(name="model_name", display_name="Model Name", value="textembedding-gecko"), + IntInput(name="n", display_name="N", value=1, advanced=True), + IntInput(name="request_parallelism", value=5, display_name="Request Parallelism", advanced=True), + MessageTextInput(name="stop_sequences", display_name="Stop", advanced=True, is_list=True), + BoolInput(name="streaming", display_name="Streaming", value=False, advanced=True), + FloatInput(name="temperature", value=0.0, display_name="Temperature"), + IntInput(name="top_k", display_name="Top K", advanced=True), + FloatInput(name="top_p", display_name="Top P", value=0.95, advanced=True), + ] + + outputs = [ + Output(display_name="Embeddings", name="embeddings", method="build_embeddings"), + ] + + def build_embeddings(self) -> Embeddings: + try: + from langchain_google_vertexai import VertexAIEmbeddings + except ImportError as e: + msg = "Please install the langchain-google-vertexai package to use the VertexAIEmbeddings component." 
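# [editor's sketch] The two credential paths build_embeddings chooses between just below:
# an explicit service-account JSON file, or credentials=None to fall back to Application
# Default Credentials (environment variable or gcloud CLI). The file path is hypothetical.
from google.oauth2 import service_account

explicit = service_account.Credentials.from_service_account_file("service-account.json")
fallback = None  # the Vertex AI client resolves ADC on its own when credentials is None
# [end sketch]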
+ raise ImportError(msg) from e + + from google.oauth2 import service_account + + if self.credentials: + gcloud_credentials = service_account.Credentials.from_service_account_file(self.credentials) + else: + # will fallback to environment variable or inferred from gcloud CLI + gcloud_credentials = None + return VertexAIEmbeddings( + credentials=gcloud_credentials, + location=self.location, + max_output_tokens=self.max_output_tokens or None, + max_retries=self.max_retries, + model_name=self.model_name, + n=self.n, + project=self.project, + request_parallelism=self.request_parallelism, + stop=self.stop_sequences or None, + streaming=self.streaming, + temperature=self.temperature, + top_k=self.top_k or None, + top_p=self.top_p, + ) diff --git a/src/backend/base/langflow/components/firecrawl/__init__.py b/src/backend/base/langflow/components/firecrawl/__init__.py new file mode 100644 index 000000000000..9c1a2cec2a00 --- /dev/null +++ b/src/backend/base/langflow/components/firecrawl/__init__.py @@ -0,0 +1,4 @@ +from .firecrawl_crawl_api import FirecrawlCrawlApi +from .firecrawl_scrape_api import FirecrawlScrapeApi + +__all__ = ["FirecrawlCrawlApi", "FirecrawlScrapeApi"] diff --git a/src/backend/base/langflow/components/firecrawl/firecrawl_crawl_api.py b/src/backend/base/langflow/components/firecrawl/firecrawl_crawl_api.py new file mode 100644 index 000000000000..07061a57ba10 --- /dev/null +++ b/src/backend/base/langflow/components/firecrawl/firecrawl_crawl_api.py @@ -0,0 +1,80 @@ +import uuid + +from langflow.custom import Component +from langflow.io import ( + DataInput, + IntInput, + Output, + SecretStrInput, + StrInput, +) +from langflow.schema import Data + + +class FirecrawlCrawlApi(Component): + display_name: str = "FirecrawlCrawlApi" + description: str = "Firecrawl Crawl API." + name = "FirecrawlCrawlApi" + + output_types: list[str] = ["Document"] + documentation: str = "https://docs.firecrawl.dev/api-reference/endpoint/crawl-post" + + inputs = [ + SecretStrInput( + name="api_key", + display_name="API Key", + required=True, + password=True, + info="The API key to use Firecrawl API.", + ), + StrInput( + name="url", + display_name="URL", + required=True, + info="The URL to scrape.", + ), + IntInput( + name="timeout", + display_name="Timeout", + info="Timeout in milliseconds for the request.", + ), + StrInput( + name="idempotency_key", + display_name="Idempotency Key", + info="Optional idempotency key to ensure unique requests.", + ), + DataInput( + name="crawlerOptions", + display_name="Crawler Options", + info="The crawler options to send with the request.", + ), + DataInput( + name="scrapeOptions", + display_name="Scrape Options", + info="The page options to send with the request.", + ), + ] + + outputs = [ + Output(display_name="Data", name="data", method="crawl"), + ] + idempotency_key: str | None = None + + def crawl(self) -> Data: + try: + from firecrawl.firecrawl import FirecrawlApp + except ImportError as e: + msg = "Could not import firecrawl integration package. Please install it with `pip install firecrawl-py`." 
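# [editor's sketch] Shape of the call this component's crawl() body (below) ends up
# making; the option values are placeholders, and only the crawl_url signature is taken
# from the code itself.
import uuid
from firecrawl.firecrawl import FirecrawlApp

app = FirecrawlApp(api_key="fc-...")  # hypothetical key
params = {"scrapeOptions": {"formats": ["markdown"]}}  # placeholder crawler/scrape options
result = app.crawl_url("https://example.com", params=params, idempotency_key=str(uuid.uuid4()))
# [end sketch]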
+ raise ImportError(msg) from e + + params = self.crawlerOptions.__dict__["data"] if self.crawlerOptions else {} + scrape_options_dict = self.scrapeOptions.__dict__["data"] if self.scrapeOptions else {} + if scrape_options_dict: + params["scrapeOptions"] = scrape_options_dict + + if not self.idempotency_key: + self.idempotency_key = str(uuid.uuid4()) + + app = FirecrawlApp(api_key=self.api_key) + crawl_result = app.crawl_url(self.url, params=params, idempotency_key=self.idempotency_key) + return Data(data={"results": crawl_result}) diff --git a/src/backend/base/langflow/components/firecrawl/firecrawl_scrape_api.py b/src/backend/base/langflow/components/firecrawl/firecrawl_scrape_api.py new file mode 100644 index 000000000000..717ae8e0c27c --- /dev/null +++ b/src/backend/base/langflow/components/firecrawl/firecrawl_scrape_api.py @@ -0,0 +1,69 @@ +from langflow.custom import Component +from langflow.io import ( + DataInput, + IntInput, + Output, + SecretStrInput, + StrInput, +) +from langflow.schema import Data + + +class FirecrawlScrapeApi(Component): + display_name: str = "FirecrawlScrapeApi" + description: str = "Firecrawl Scrape API." + name = "FirecrawlScrapeApi" + + output_types: list[str] = ["Document"] + documentation: str = "https://docs.firecrawl.dev/api-reference/endpoint/scrape" + + inputs = [ + SecretStrInput( + name="api_key", + display_name="API Key", + required=True, + password=True, + info="The API key to use Firecrawl API.", + ), + StrInput( + name="url", + display_name="URL", + required=True, + info="The URL to scrape.", + ), + IntInput( + name="timeout", + display_name="Timeout", + info="Timeout in milliseconds for the request.", + ), + DataInput( + name="scrapeOptions", + display_name="Scrape Options", + info="The page options to send with the request.", + ), + DataInput( # https://docs.firecrawl.dev/features/extract + name="extractorOptions", + display_name="Extractor Options", + info="The extractor options to send with the request.", + ), + ] + + outputs = [ + Output(display_name="Data", name="data", method="crawl"), + ] + + def crawl(self) -> list[Data]: + try: + from firecrawl.firecrawl import FirecrawlApp + except ImportError as e: + msg = "Could not import firecrawl integration package. Please install it with `pip install firecrawl-py`." 
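# [editor's sketch] Scrape-side counterpart, with a placeholder "extract" payload
# mirroring params["extract"] built below. Note that this crawl() is annotated
# -> list[Data] yet returns a single Data object (see the return statement below).
from firecrawl.firecrawl import FirecrawlApp

app = FirecrawlApp(api_key="fc-...")  # hypothetical key
page = app.scrape_url("https://example.com", params={"extract": {"schema": {}}})  # placeholder schema
# [end sketch]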
+ raise ImportError(msg) from e + + params = self.scrapeOptions.__dict__["data"] if self.scrapeOptions else {} + extractor_options_dict = self.extractorOptions.__dict__["data"] if self.extractorOptions else {} + if extractor_options_dict: + params["extract"] = extractor_options_dict + + app = FirecrawlApp(api_key=self.api_key) + results = app.scrape_url(self.url, params=params) + return Data(data=results) diff --git a/src/backend/base/langflow/components/git/__init__.py b/src/backend/base/langflow/components/git/__init__.py new file mode 100644 index 000000000000..20de11c02158 --- /dev/null +++ b/src/backend/base/langflow/components/git/__init__.py @@ -0,0 +1,3 @@ +from .git import GitLoaderComponent + +__all__ = ["GitLoaderComponent"] diff --git a/src/backend/base/langflow/components/git/git.py b/src/backend/base/langflow/components/git/git.py new file mode 100644 index 000000000000..399a8c174f4c --- /dev/null +++ b/src/backend/base/langflow/components/git/git.py @@ -0,0 +1,114 @@ +import re +from pathlib import Path + +from langchain_community.document_loaders.git import GitLoader + +from langflow.custom import Component +from langflow.io import MessageTextInput, Output +from langflow.schema import Data + + +class GitLoaderComponent(Component): + display_name = "GitLoader" + description = "Load files from a Git repository" + documentation = "https://python.langchain.com/v0.2/docs/integrations/document_loaders/git/" + trace_type = "tool" + icon = "GitLoader" + name = "GitLoader" + + inputs = [ + MessageTextInput( + name="repo_path", + display_name="Repository Path", + required=True, + info="The local path to the Git repository.", + ), + MessageTextInput( + name="clone_url", + display_name="Clone URL", + required=False, + info="The URL to clone the Git repository from.", + ), + MessageTextInput( + name="branch", + display_name="Branch", + required=False, + value="main", + info="The branch to load files from. Defaults to 'main'.", + ), + MessageTextInput( + name="file_filter", + display_name="File Filter", + required=False, + advanced=True, + info="A list of patterns to filter files. Example to include only .py files: '*.py'. " + "Example to exclude .py files: '!*.py'. Multiple patterns can be separated by commas.", + ), + MessageTextInput( + name="content_filter", + display_name="Content Filter", + required=False, + advanced=True, + info="A regex pattern to filter files based on their content.", + ), + ] + + outputs = [ + Output(name="data", display_name="Data", method="load_documents"), + ] + + @staticmethod + def is_binary(file_path: str) -> bool: + """Check if a file is binary by looking for null bytes. + + This is necessary because when searches are performed using + the content_filter, binary files need to be ignored. 
+ """ + with Path(file_path).open("rb") as file: + return b"\x00" in file.read(1024) + + def build_gitloader(self) -> GitLoader: + file_filter_patterns = getattr(self, "file_filter", None) + content_filter_pattern = getattr(self, "content_filter", None) + + file_filters = [] + if file_filter_patterns: + patterns = [pattern.strip() for pattern in file_filter_patterns.split(",")] + + def file_filter(file_path: Path) -> bool: + if len(patterns) == 1 and patterns[0].startswith("!"): + return not file_path.match(patterns[0][1:]) + included = any(file_path.match(pattern) for pattern in patterns if not pattern.startswith("!")) + excluded = any(file_path.match(pattern[1:]) for pattern in patterns if pattern.startswith("!")) + return included and not excluded + + file_filters.append(file_filter) + + if content_filter_pattern: + content_regex = re.compile(content_filter_pattern) + + def content_filter(file_path: Path) -> bool: + content = file_path.read_text(encoding="utf-8", errors="ignore") + return bool(content_regex.search(content)) + + file_filters.append(content_filter) + + def combined_filter(file_path: str) -> bool: + path = Path(file_path) + if self.is_binary(file_path): + return False + return all(f(path) for f in file_filters) + + return GitLoader( + repo_path=self.repo_path, + clone_url=self.clone_url, + branch=self.branch, + file_filter=combined_filter, + ) + + def load_documents(self) -> list[Data]: + gitloader = self.build_gitloader() + documents = list(gitloader.lazy_load()) + data = [Data.from_document(doc) for doc in documents] + self.status = data + return data diff --git a/src/backend/base/langflow/components/google/__init__.py b/src/backend/base/langflow/components/google/__init__.py new file mode 100644 index 000000000000..eda3a1e3b9db --- /dev/null +++ b/src/backend/base/langflow/components/google/__init__.py @@ -0,0 +1,11 @@ +from .gmail import GmailLoaderComponent +from .google_drive import GoogleDriveComponent +from .google_drive_search import GoogleDriveSearchComponent +from .google_oauth_token import GoogleOAuthToken + +__all__ = [ + "GmailLoaderComponent", + "GoogleDriveComponent", + "GoogleDriveSearchComponent", + "GoogleOAuthToken", +] diff --git a/src/backend/base/langflow/components/google/gmail.py b/src/backend/base/langflow/components/google/gmail.py new file mode 100644 index 000000000000..2f565f183241 --- /dev/null +++ b/src/backend/base/langflow/components/google/gmail.py @@ -0,0 +1,191 @@ +import base64 +import json +import re +from collections.abc import Iterator +from json.decoder import JSONDecodeError +from typing import Any + +from google.auth.exceptions import RefreshError +from google.oauth2.credentials import Credentials +from googleapiclient.discovery import build +from langchain_core.chat_sessions import ChatSession +from langchain_core.messages import HumanMessage +from langchain_google_community.gmail.loader import GMailLoader +from loguru import logger + +from langflow.custom import Component +from langflow.inputs import MessageTextInput +from langflow.io import SecretStrInput +from langflow.schema import Data +from langflow.template import Output + + +class GmailLoaderComponent(Component): + display_name = "Gmail Loader" + description = "Loads emails from Gmail using provided credentials." 
+ icon = "Google" + + inputs = [ + SecretStrInput( + name="json_string", + display_name="JSON String of the Service Account Token", + info="JSON string containing OAuth 2.0 access token information for service account access", + required=True, + value="""{ + "account": "", + "client_id": "", + "client_secret": "", + "expiry": "", + "refresh_token": "", + "scopes": [ + "https://www.googleapis.com/auth/gmail.readonly", + ], + "token": "", + "token_uri": "https://oauth2.googleapis.com/token", + "universe_domain": "googleapis.com" + }""", + ), + MessageTextInput( + name="label_ids", + display_name="Label IDs", + info="Comma-separated list of label IDs to filter emails.", + required=True, + value="INBOX,SENT,UNREAD,IMPORTANT", + ), + MessageTextInput( + name="max_results", + display_name="Max Results", + info="Maximum number of emails to load.", + required=True, + value="10", + ), + ] + + outputs = [ + Output(display_name="Data", name="data", method="load_emails"), + ] + + def load_emails(self) -> Data: + class CustomGMailLoader(GMailLoader): + def __init__( + self, creds: Any, *, n: int = 100, label_ids: list[str] | None = None, raise_error: bool = False + ) -> None: + super().__init__(creds, n, raise_error) + self.label_ids = label_ids if label_ids is not None else ["SENT"] + + def clean_message_content(self, message): + # Remove URLs + message = re.sub(r"http\S+|www\S+|https\S+", "", message, flags=re.MULTILINE) + + # Remove email addresses + message = re.sub(r"\S+@\S+", "", message) + + # Remove special characters and excessive whitespace + message = re.sub(r"[^A-Za-z0-9\s]+", " ", message) + message = re.sub(r"\s{2,}", " ", message) + + # Trim leading and trailing whitespace + return message.strip() + + def _extract_email_content(self, msg: Any) -> HumanMessage: + from_email = None + for values in msg["payload"]["headers"]: + name = values["name"] + if name == "From": + from_email = values["value"] + if from_email is None: + msg = "From email not found." + raise ValueError(msg) + + parts = msg["payload"]["parts"] if "parts" in msg["payload"] else [msg["payload"]] + + for part in parts: + if part["mimeType"] == "text/plain": + data = part["body"]["data"] + data = base64.urlsafe_b64decode(data).decode("utf-8") + pattern = re.compile(r"\r\nOn .+(\r\n)*wrote:\r\n") + newest_response = re.split(pattern, data)[0] + return HumanMessage( + content=self.clean_message_content(newest_response), + additional_kwargs={"sender": from_email}, + ) + msg = "No plain text part found in the email." + raise ValueError(msg) + + def _get_message_data(self, service: Any, message: Any) -> ChatSession: + msg = service.users().messages().get(userId="me", id=message["id"]).execute() + message_content = self._extract_email_content(msg) + + in_reply_to = None + email_data = msg["payload"]["headers"] + for values in email_data: + name = values["name"] + if name == "In-Reply-To": + in_reply_to = values["value"] + + thread_id = msg["threadId"] + + if in_reply_to: + thread = service.users().threads().get(userId="me", id=thread_id).execute() + messages = thread["messages"] + + response_email = None + for _message in messages: + email_data = _message["payload"]["headers"] + for values in email_data: + if values["name"] == "Message-ID": + message_id = values["value"] + if message_id == in_reply_to: + response_email = _message + if response_email is None: + msg = "Response email not found in the thread." 
+ raise ValueError(msg) + starter_content = self._extract_email_content(response_email) + return ChatSession(messages=[starter_content, message_content]) + return ChatSession(messages=[message_content]) + + def lazy_load(self) -> Iterator[ChatSession]: + service = build("gmail", "v1", credentials=self.creds) + results = ( + service.users().messages().list(userId="me", labelIds=self.label_ids, maxResults=self.n).execute() + ) + messages = results.get("messages", []) + if not messages: + logger.warning("No messages found with the specified labels.") + for message in messages: + try: + yield self._get_message_data(service, message) + except Exception: + if self.raise_error: + raise + else: + logger.exception(f"Error processing message {message['id']}") + + json_string = self.json_string + label_ids = self.label_ids.split(",") if self.label_ids else ["INBOX"] + max_results = int(self.max_results) if self.max_results else 100 + + # Load the token information from the JSON string + try: + token_info = json.loads(json_string) + except JSONDecodeError as e: + msg = "Invalid JSON string" + raise ValueError(msg) from e + + creds = Credentials.from_authorized_user_info(token_info) + + # Initialize the custom loader with the provided credentials + loader = CustomGMailLoader(creds=creds, n=max_results, label_ids=label_ids) + + try: + docs = loader.load() + except RefreshError as e: + msg = "Authentication error: Unable to refresh authentication token. Please try to reauthenticate." + raise ValueError(msg) from e + except Exception as e: + msg = f"Error loading documents: {e}" + raise ValueError(msg) from e + + # Return the loaded documents + self.status = docs + return Data(data={"text": docs}) diff --git a/src/backend/base/langflow/components/google/google_drive.py b/src/backend/base/langflow/components/google/google_drive.py new file mode 100644 index 000000000000..6ba03d715ee0 --- /dev/null +++ b/src/backend/base/langflow/components/google/google_drive.py @@ -0,0 +1,91 @@ +import json +from json.decoder import JSONDecodeError + +from google.auth.exceptions import RefreshError +from google.oauth2.credentials import Credentials +from langchain_google_community import GoogleDriveLoader + +from langflow.custom import Component +from langflow.helpers.data import docs_to_data +from langflow.inputs import MessageTextInput +from langflow.io import SecretStrInput +from langflow.schema import Data +from langflow.template import Output + + +class GoogleDriveComponent(Component): + display_name = "Google Drive Loader" + description = "Loads documents from Google Drive using provided credentials." + icon = "Google" + + inputs = [ + SecretStrInput( + name="json_string", + display_name="JSON String of the Service Account Token", + info="JSON string containing OAuth 2.0 access token information for service account access", + required=True, + ), + MessageTextInput( + name="document_id", display_name="Document ID", info="Single Google Drive document ID", required=True + ), + ] + + outputs = [ + Output(display_name="Loaded Documents", name="docs", method="load_documents"), + ] + + def load_documents(self) -> Data: + class CustomGoogleDriveLoader(GoogleDriveLoader): + creds: Credentials | None = None + """Credentials object to be passed directly.""" + + def _load_credentials(self): + """Load credentials from the provided creds attribute or fallback to the original method.""" + if self.creds: + return self.creds + msg = "No credentials provided." 
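# [editor's sketch] The token_info dict the Google loaders in this diff parse is the
# standard "authorized user" JSON; minimal field sketch with elided values.
from google.oauth2.credentials import Credentials

token_info = {
    "client_id": "...", "client_secret": "...", "refresh_token": "...",
    "token": "...", "token_uri": "https://oauth2.googleapis.com/token",
    "scopes": ["https://www.googleapis.com/auth/drive.readonly"],
}
creds = Credentials.from_authorized_user_info(token_info)
# [end sketch]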
+ raise ValueError(msg) + + class Config: + arbitrary_types_allowed = True + + json_string = self.json_string + + document_ids = [self.document_id] + if len(document_ids) != 1: + msg = "Expected a single document ID" + raise ValueError(msg) + + # TODO: Add validation to check if the document ID is valid + + # Load the token information from the JSON string + try: + token_info = json.loads(json_string) + except JSONDecodeError as e: + msg = "Invalid JSON string" + raise ValueError(msg) from e + + # Initialize the custom loader with the provided credentials and document IDs + loader = CustomGoogleDriveLoader( + creds=Credentials.from_authorized_user_info(token_info), document_ids=document_ids + ) + + # Load the documents + try: + docs = loader.load() + # catch google.auth.exceptions.RefreshError + except RefreshError as e: + msg = "Authentication error: Unable to refresh authentication token. Please try to reauthenticate." + raise ValueError(msg) from e + except Exception as e: + msg = f"Error loading documents: {e}" + raise ValueError(msg) from e + + if len(docs) != 1: + msg = "Expected a single document to be loaded." + raise ValueError(msg) + + data = docs_to_data(docs) + # Return the loaded documents + self.status = data + return Data(data={"text": data}) diff --git a/src/backend/base/langflow/components/google/google_drive_search.py b/src/backend/base/langflow/components/google/google_drive_search.py new file mode 100644 index 000000000000..824c68cdda35 --- /dev/null +++ b/src/backend/base/langflow/components/google/google_drive_search.py @@ -0,0 +1,151 @@ +import json + +from google.oauth2.credentials import Credentials +from googleapiclient.discovery import build + +from langflow.custom import Component +from langflow.inputs import DropdownInput, MessageTextInput +from langflow.io import SecretStrInput +from langflow.schema import Data +from langflow.template import Output + + +class GoogleDriveSearchComponent(Component): + display_name = "Google Drive Search" + description = "Searches Google Drive files using provided credentials and query parameters." + icon = "Google" + + inputs = [ + SecretStrInput( + name="token_string", + display_name="Token String", + info="JSON string containing OAuth 2.0 access token information for service account access", + required=True, + ), + DropdownInput( + name="query_item", + display_name="Query Item", + options=[ + "name", + "fullText", + "mimeType", + "modifiedTime", + "viewedByMeTime", + "trashed", + "starred", + "parents", + "owners", + "writers", + "readers", + "sharedWithMe", + "createdTime", + "properties", + "appProperties", + "visibility", + "shortcutDetails.targetId", + ], + info="The field to query.", + required=True, + ), + DropdownInput( + name="valid_operator", + display_name="Valid Operator", + options=["contains", "=", "!=", "<=", "<", ">", ">=", "in", "has"], + info="Operator to use in the query.", + required=True, + ), + MessageTextInput( + name="search_term", + display_name="Search Term", + info="The value to search for in the specified query item.", + required=True, + ), + MessageTextInput( + name="query_string", + display_name="Query String", + info="The query string used for searching. 
You can edit this manually.", + value="", # This will be updated with the generated query string + ), + ] + + outputs = [ + Output(display_name="Document URLs", name="doc_urls", method="search_doc_urls"), + Output(display_name="Document IDs", name="doc_ids", method="search_doc_ids"), + Output(display_name="Document Titles", name="doc_titles", method="search_doc_titles"), + Output(display_name="Data", name="Data", method="search_data"), + ] + + def generate_query_string(self) -> str: + query_item = self.query_item + valid_operator = self.valid_operator + search_term = self.search_term + + # Construct the query string + query = f"{query_item} {valid_operator} '{search_term}'" + + # Update the editable query string input with the generated query + self.query_string = query + + return query + + def on_inputs_changed(self) -> None: + # Automatically regenerate the query string when inputs change + self.generate_query_string() + + def generate_file_url(self, file_id: str, mime_type: str) -> str: + """Generates the appropriate Google Drive URL for a file based on its MIME type.""" + return { + "application/vnd.google-apps.document": f"https://docs.google.com/document/d/{file_id}/edit", + "application/vnd.google-apps.spreadsheet": f"https://docs.google.com/spreadsheets/d/{file_id}/edit", + "application/vnd.google-apps.presentation": f"https://docs.google.com/presentation/d/{file_id}/edit", + "application/vnd.google-apps.drawing": f"https://docs.google.com/drawings/d/{file_id}/edit", + "application/pdf": f"https://drive.google.com/file/d/{file_id}/view?usp=drivesdk", + }.get(mime_type, f"https://drive.google.com/file/d/{file_id}/view?usp=drivesdk") + + def search_files(self) -> dict: + # Load the token information from the JSON string + token_info = json.loads(self.token_string) + creds = Credentials.from_authorized_user_info(token_info) + + # Use the query string from the input (which might have been edited by the user) + query = self.query_string or self.generate_query_string() + + # Initialize the Google Drive API service + service = build("drive", "v3", credentials=creds) + + # Perform the search + results = service.files().list(q=query, pageSize=5, fields="nextPageToken, files(id, name, mimeType)").execute() + items = results.get("files", []) + + doc_urls = [] + doc_ids = [] + doc_titles_urls = [] + doc_titles = [] + + if items: + for item in items: + # Directly use the file ID, title, and MIME type to generate the URL + file_id = item["id"] + file_title = item["name"] + mime_type = item["mimeType"] + file_url = self.generate_file_url(file_id, mime_type) + + # Store the URL, ID, and title+URL in their respective lists + doc_urls.append(file_url) + doc_ids.append(file_id) + doc_titles.append(file_title) + doc_titles_urls.append({"title": file_title, "url": file_url}) + + return {"doc_urls": doc_urls, "doc_ids": doc_ids, "doc_titles_urls": doc_titles_urls, "doc_titles": doc_titles} + + def search_doc_ids(self) -> list[str]: + return self.search_files()["doc_ids"] + + def search_doc_urls(self) -> list[str]: + return self.search_files()["doc_urls"] + + def search_doc_titles(self) -> list[str]: + return self.search_files()["doc_titles"] + + def search_data(self) -> Data: + return Data(data={"text": self.search_files()["doc_titles_urls"]}) diff --git a/src/backend/base/langflow/components/google/google_oauth_token.py b/src/backend/base/langflow/components/google/google_oauth_token.py new file mode 100644 index 000000000000..b7d43a8a8dc7 --- /dev/null +++ 
b/src/backend/base/langflow/components/google/google_oauth_token.py @@ -0,0 +1,89 @@ +import json +import re +from pathlib import Path + +from google.auth.transport.requests import Request +from google.oauth2.credentials import Credentials +from google_auth_oauthlib.flow import InstalledAppFlow + +from langflow.custom import Component +from langflow.io import FileInput, MultilineInput, Output +from langflow.schema import Data + + +class GoogleOAuthToken(Component): + display_name = "Google OAuth Token" + description = "Generates a JSON string with your Google OAuth token." + documentation: str = "https://developers.google.com/identity/protocols/oauth2/web-server?hl=pt-br#python_1" + icon = "Google" + name = "GoogleOAuthToken" + + inputs = [ + MultilineInput( + name="scopes", + display_name="Scopes", + info="Input scopes for your application.", + required=True, + ), + FileInput( + name="oauth_credentials", + display_name="Credentials File", + info="Input OAuth Credentials file (e.g. credentials.json).", + file_types=["json"], + required=True, + ), + ] + + outputs = [ + Output(display_name="Output", name="output", method="build_output"), + ] + + def validate_scopes(self, scopes): + pattern = ( + r"^(https://www\.googleapis\.com/auth/[\w\.\-]+" + r"|mail\.google\.com/" + r"|www\.google\.com/calendar/feeds" + r"|www\.google\.com/m8/feeds)" + r"(,\s*https://www\.googleapis\.com/auth/[\w\.\-]+" + r"|mail\.google\.com/" + r"|www\.google\.com/calendar/feeds" + r"|www\.google\.com/m8/feeds)*$" + ) + if not re.match(pattern, scopes): + error_message = "Invalid scope format." + raise ValueError(error_message) + + def build_output(self) -> Data: + self.validate_scopes(self.scopes) + + user_scopes = [scope.strip() for scope in self.scopes.split(",")] + if self.scopes: + scopes = user_scopes + else: + error_message = "Incorrect scope, check the scopes field." + raise ValueError(error_message) + + creds = None + token_path = Path("token.json") + + if token_path.exists(): + creds = Credentials.from_authorized_user_file(str(token_path), scopes) + + if not creds or not creds.valid: + if creds and creds.expired and creds.refresh_token: + creds.refresh(Request()) + else: + if self.oauth_credentials: + client_secret_file = self.oauth_credentials + else: + error_message = "OAuth 2.0 Credentials file not provided." + raise ValueError(error_message) + + flow = InstalledAppFlow.from_client_secrets_file(client_secret_file, scopes) + creds = flow.run_local_server(port=0) + + token_path.write_text(creds.to_json(), encoding="utf-8") + + creds_json = json.loads(creds.to_json()) + + return Data(data=creds_json) diff --git a/src/backend/base/langflow/components/helpers/CreateList.py b/src/backend/base/langflow/components/helpers/CreateList.py deleted file mode 100644 index 678500b2381b..000000000000 --- a/src/backend/base/langflow/components/helpers/CreateList.py +++ /dev/null @@ -1,29 +0,0 @@ -from langflow.custom import Component -from langflow.inputs import StrInput -from langflow.schema import Data -from langflow.template import Output - - -class CreateListComponent(Component): - display_name = "Create List" - description = "Creates a list of texts." 
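# [editor's sketch] Example of a scopes value that passes validate_scopes in the
# GoogleOAuthToken component above: comma-separated Google OAuth scope URLs, later
# split and stripped by build_output.
scopes = ("https://www.googleapis.com/auth/gmail.readonly, "
          "https://www.googleapis.com/auth/drive.readonly")
user_scopes = [s.strip() for s in scopes.split(",")]
# [end sketch]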
- icon = "list" - name = "CreateList" - - inputs = [ - StrInput( - name="texts", - display_name="Texts", - info="Enter one or more texts.", - is_list=True, - ), - ] - - outputs = [ - Output(display_name="Data List", name="list", method="create_list"), - ] - - def create_list(self) -> list[Data]: - data = [Data(text=text) for text in self.texts] - self.status = data - return data diff --git a/src/backend/base/langflow/components/helpers/CustomComponent.py b/src/backend/base/langflow/components/helpers/CustomComponent.py deleted file mode 100644 index df5db7dc0137..000000000000 --- a/src/backend/base/langflow/components/helpers/CustomComponent.py +++ /dev/null @@ -1,25 +0,0 @@ -# from langflow.field_typing import Data -from langflow.custom import Component -from langflow.io import MessageTextInput, Output -from langflow.schema import Data - - -class CustomComponent(Component): - display_name = "Custom Component" - description = "Use as a template to create your own component." - documentation: str = "http://docs.langflow.org/components/custom" - icon = "custom_components" - name = "CustomComponent" - - inputs = [ - MessageTextInput(name="input_value", display_name="Input Value", value="Hello, World!"), - ] - - outputs = [ - Output(display_name="Output", name="output", method="build_output"), - ] - - def build_output(self) -> Data: - data = Data(value=self.input_value) - self.status = data - return data diff --git a/src/backend/base/langflow/components/helpers/FilterData.py b/src/backend/base/langflow/components/helpers/FilterData.py deleted file mode 100644 index 3832e4e84df4..000000000000 --- a/src/backend/base/langflow/components/helpers/FilterData.py +++ /dev/null @@ -1,43 +0,0 @@ -from typing import List - -from langflow.custom import Component -from langflow.io import DataInput, MessageTextInput, Output -from langflow.schema import Data - - -class FilterDataComponent(Component): - display_name = "Filter Data" - description = "Filters a Data object based on a list of keys." - icon = "filter" - beta = True - name = "FilterData" - - inputs = [ - DataInput( - name="data", - display_name="Data", - info="Data object to filter.", - ), - MessageTextInput( - name="filter_criteria", - display_name="Filter Criteria", - info="List of keys to filter by.", - is_list=True, - ), - ] - - outputs = [ - Output(display_name="Filtered Data", name="filtered_data", method="filter_data"), - ] - - def filter_data(self) -> Data: - filter_criteria: List[str] = self.filter_criteria - data = self.data.data if isinstance(self.data, Data) else {} - - # Filter the data - filtered = {key: value for key, value in data.items() if key in filter_criteria} - - # Create a new Data object with the filtered data - filtered_data = Data(data=filtered) - self.status = filtered_data - return filtered_data diff --git a/src/backend/base/langflow/components/helpers/IDGenerator.py b/src/backend/base/langflow/components/helpers/IDGenerator.py deleted file mode 100644 index 5dd5ebe90e59..000000000000 --- a/src/backend/base/langflow/components/helpers/IDGenerator.py +++ /dev/null @@ -1,29 +0,0 @@ -import uuid -from typing import Any, Optional - -from langflow.custom import CustomComponent -from langflow.schema.dotdict import dotdict - - -class IDGeneratorComponent(CustomComponent): - display_name = "ID Generator" - description = "Generates a unique ID." 
- name = "IDGenerator" - - def update_build_config( # type: ignore - self, build_config: dotdict, field_value: Any, field_name: Optional[str] = None - ): - if field_name == "unique_id": - build_config[field_name]["value"] = str(uuid.uuid4()) - return build_config - - def build_config(self): - return { - "unique_id": { - "display_name": "Value", - "refresh_button": True, - } - } - - def build(self, unique_id: str) -> str: - return unique_id diff --git a/src/backend/base/langflow/components/helpers/Memory.py b/src/backend/base/langflow/components/helpers/Memory.py deleted file mode 100644 index 0173f9cfb8e4..000000000000 --- a/src/backend/base/langflow/components/helpers/Memory.py +++ /dev/null @@ -1,122 +0,0 @@ -from langchain.memory import ConversationBufferMemory - -from langflow.custom import Component -from langflow.field_typing import BaseChatMemory -from langflow.helpers.data import data_to_text -from langflow.inputs import HandleInput -from langflow.io import DropdownInput, IntInput, MessageTextInput, MultilineInput, Output -from langflow.memory import LCBuiltinChatMemory, get_messages -from langflow.schema import Data -from langflow.schema.message import Message -from langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_USER - - -class MemoryComponent(Component): - display_name = "Chat Memory" - description = "Retrieves stored chat messages from Langflow tables or an external memory." - icon = "message-square-more" - name = "Memory" - - inputs = [ - HandleInput( - name="memory", - display_name="External Memory", - input_types=["BaseChatMessageHistory"], - info="Retrieve messages from an external memory. If empty, it will use the Langflow tables.", - ), - DropdownInput( - name="sender", - display_name="Sender Type", - options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER, "Machine and User"], - value="Machine and User", - info="Filter by sender type.", - advanced=True, - ), - MessageTextInput( - name="sender_name", - display_name="Sender Name", - info="Filter by sender name.", - advanced=True, - ), - IntInput( - name="n_messages", - display_name="Number of Messages", - value=100, - info="Number of messages to retrieve.", - advanced=True, - ), - MessageTextInput( - name="session_id", - display_name="Session ID", - info="The session ID of the chat. If empty, the current session ID parameter will be used.", - advanced=True, - ), - DropdownInput( - name="order", - display_name="Order", - options=["Ascending", "Descending"], - value="Ascending", - info="Order of the messages.", - advanced=True, - ), - MultilineInput( - name="template", - display_name="Template", - info="The template to use for formatting the data. 
It can contain the keys {text}, {sender} or any other key in the message data.", - value="{sender_name}: {text}", - advanced=True, - ), - ] - - outputs = [ - Output(display_name="Messages (Data)", name="messages", method="retrieve_messages"), - Output(display_name="Messages (Text)", name="messages_text", method="retrieve_messages_as_text"), - Output(display_name="Memory", name="lc_memory", method="build_lc_memory"), - ] - - def retrieve_messages(self) -> Data: - sender = self.sender - sender_name = self.sender_name - session_id = self.session_id - n_messages = self.n_messages - order = "DESC" if self.order == "Descending" else "ASC" - - if sender == "Machine and User": - sender = None - - if self.memory: - # override session_id - self.memory.session_id = session_id - - stored = self.memory.messages - # langchain memories are supposed to return messages in ascending order - if order == "DESC": - stored = stored[::-1] - if n_messages: - stored = stored[:n_messages] - stored = [Message.from_lc_message(m) for m in stored] - if sender: - expected_type = MESSAGE_SENDER_AI if sender == MESSAGE_SENDER_AI else MESSAGE_SENDER_USER - stored = [m for m in stored if m.type == expected_type] - else: - stored = get_messages( - sender=sender, - sender_name=sender_name, - session_id=session_id, - limit=n_messages, - order=order, - ) - self.status = stored - return stored - - def retrieve_messages_as_text(self) -> Message: - stored_text = data_to_text(self.template, self.retrieve_messages()) - self.status = stored_text - return Message(text=stored_text) - - def build_lc_memory(self) -> BaseChatMemory: - if self.memory: - chat_memory = self.memory - else: - chat_memory = LCBuiltinChatMemory(flow_id=self.flow_id, session_id=self.session_id) - return ConversationBufferMemory(chat_memory=chat_memory) diff --git a/src/backend/base/langflow/components/helpers/MergeData.py b/src/backend/base/langflow/components/helpers/MergeData.py deleted file mode 100644 index 9d4847f0868c..000000000000 --- a/src/backend/base/langflow/components/helpers/MergeData.py +++ /dev/null @@ -1,27 +0,0 @@ -from langflow.custom import CustomComponent -from langflow.schema import Data - - -class MergeDataComponent(CustomComponent): - display_name = "Merge Data" - description = "Combines multiple data sources into a single unified Data object." - beta: bool = True - name = "MergeData" - - field_config = { - "data": {"display_name": "Data"}, - } - - def build(self, data: list[Data]) -> Data: - if not data: - return Data() - if len(data) == 1: - return data[0] - merged_data = Data() - for value in data: - if merged_data is None: - merged_data = value - else: - merged_data += value - self.status = merged_data - return merged_data diff --git a/src/backend/base/langflow/components/helpers/ParseData.py b/src/backend/base/langflow/components/helpers/ParseData.py deleted file mode 100644 index 62f757d49cfd..000000000000 --- a/src/backend/base/langflow/components/helpers/ParseData.py +++ /dev/null @@ -1,34 +0,0 @@ -from langflow.custom import Component -from langflow.helpers.data import data_to_text -from langflow.io import DataInput, MultilineInput, Output, StrInput -from langflow.schema.message import Message - - -class ParseDataComponent(Component): - display_name = "Parse Data" - description = "Convert Data into plain text following a specified template." 
- icon = "braces" - name = "ParseData" - - inputs = [ - DataInput(name="data", display_name="Data", info="The data to convert to text."), - MultilineInput( - name="template", - display_name="Template", - info="The template to use for formatting the data. It can contain the keys {text}, {data} or any other key in the Data.", - value="{text}", - ), - StrInput(name="sep", display_name="Separator", advanced=True, value="\n"), - ] - - outputs = [ - Output(display_name="Text", name="text", method="parse_data"), - ] - - def parse_data(self) -> Message: - data = self.data if isinstance(self.data, list) else [self.data] - template = self.template - - result_string = data_to_text(template, data, sep=self.sep) - self.status = result_string - return Message(text=result_string) diff --git a/src/backend/base/langflow/components/helpers/SequentialTask.py b/src/backend/base/langflow/components/helpers/SequentialTask.py deleted file mode 100644 index 7048a68c778e..000000000000 --- a/src/backend/base/langflow/components/helpers/SequentialTask.py +++ /dev/null @@ -1,72 +0,0 @@ -from langflow.base.agents.crewai.tasks import SequentialTask -from langflow.custom import Component -from langflow.io import BoolInput, HandleInput, MultilineInput, Output - - -class SequentialTaskComponent(Component): - display_name: str = "Sequential Task" - description: str = "Each task must have a description, an expected output and an agent responsible for execution." - icon = "CrewAI" - inputs = [ - MultilineInput( - name="task_description", - display_name="Description", - info="Descriptive text detailing task's purpose and execution.", - ), - MultilineInput( - name="expected_output", - display_name="Expected Output", - info="Clear definition of expected task outcome.", - ), - HandleInput( - name="tools", - display_name="Tools", - input_types=["Tool"], - is_list=True, - info="List of tools/resources limited for task execution. 
Uses the Agent tools by default.", - required=False, - advanced=True, - ), - HandleInput( - name="agent", - display_name="Agent", - input_types=["Agent"], - info="CrewAI Agent that will perform the task", - required=True, - ), - HandleInput( - name="task", - display_name="Task", - input_types=["SequentialTask"], - info="CrewAI Task that will perform the task", - ), - BoolInput( - name="async_execution", - display_name="Async Execution", - value=True, - advanced=True, - info="Boolean flag indicating asynchronous task execution.", - ), - ] - - outputs = [ - Output(display_name="Task", name="task_output", method="build_task"), - ] - - def build_task(self) -> list[SequentialTask]: - tasks: list[SequentialTask] = [] - task = SequentialTask( - description=self.task_description, - expected_output=self.expected_output, - tools=self.agent.tools, - async_execution=False, - agent=self.agent, - ) - tasks.append(task) - self.status = task - if self.task: - if isinstance(self.task, list) and all(isinstance(task, SequentialTask) for task in self.task): - tasks = self.task + tasks - elif isinstance(self.task, SequentialTask): - tasks = [self.task] + tasks - return tasks diff --git a/src/backend/base/langflow/components/helpers/SplitText.py b/src/backend/base/langflow/components/helpers/SplitText.py deleted file mode 100644 index 88f911057e91..000000000000 --- a/src/backend/base/langflow/components/helpers/SplitText.py +++ /dev/null @@ -1,71 +0,0 @@ -from typing import List - -from langchain_text_splitters import CharacterTextSplitter - -from langflow.custom import Component -from langflow.io import HandleInput, IntInput, MessageTextInput, Output -from langflow.schema import Data -from langflow.utils.util import unescape_string - - -class SplitTextComponent(Component): - display_name: str = "Split Text" - description: str = "Split text into chunks based on specified criteria." - icon = "scissors-line-dashed" - name = "SplitText" - - inputs = [ - HandleInput( - name="data_inputs", - display_name="Data Inputs", - info="The data to split.", - input_types=["Data"], - is_list=True, - ), - IntInput( - name="chunk_overlap", - display_name="Chunk Overlap", - info="Number of characters to overlap between chunks.", - value=200, - ), - IntInput( - name="chunk_size", - display_name="Chunk Size", - info="The maximum number of characters in each chunk.", - value=1000, - ), - MessageTextInput( - name="separator", - display_name="Separator", - info="The character to split on. 
Defaults to newline.", - value="\n", - ), - ] - - outputs = [ - Output(display_name="Chunks", name="chunks", method="split_text"), - ] - - def _docs_to_data(self, docs): - data = [] - for doc in docs: - data.append(Data(text=doc.page_content, data=doc.metadata)) - return data - - def split_text(self) -> List[Data]: - separator = unescape_string(self.separator) - - documents = [] - for _input in self.data_inputs: - if isinstance(_input, Data): - documents.append(_input.to_lc_document()) - - splitter = CharacterTextSplitter( - chunk_overlap=self.chunk_overlap, - chunk_size=self.chunk_size, - separator=separator, - ) - docs = splitter.split_documents(documents) - data = self._docs_to_data(docs) - self.status = data - return data diff --git a/src/backend/base/langflow/components/helpers/StoreMessage.py b/src/backend/base/langflow/components/helpers/StoreMessage.py deleted file mode 100644 index c43e31570632..000000000000 --- a/src/backend/base/langflow/components/helpers/StoreMessage.py +++ /dev/null @@ -1,67 +0,0 @@ -from langflow.custom import Component -from langflow.inputs import MessageInput, StrInput, HandleInput -from langflow.schema.message import Message -from langflow.template import Output -from langflow.memory import get_messages, store_message -from langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI - - -class StoreMessageComponent(Component): - display_name = "Store Message" - description = "Stores a chat message or text into Langflow tables or an external memory." - icon = "save" - name = "StoreMessage" - - inputs = [ - MessageInput(name="message", display_name="Message", info="The chat message to be stored.", required=True), - HandleInput( - name="memory", - display_name="External Memory", - input_types=["BaseChatMessageHistory"], - info="The external memory to store the message. If empty, it will use the Langflow tables.", - ), - StrInput( - name="sender", - display_name="Sender", - info="The sender of the message. Might be Machine or User. If empty, the current sender parameter will be used.", - advanced=True, - ), - StrInput( - name="sender_name", - display_name="Sender Name", - info="The name of the sender. Might be AI or User. If empty, the current sender parameter will be used.", - advanced=True, - ), - StrInput( - name="session_id", - display_name="Session ID", - info="The session ID of the chat. 
If empty, the current session ID parameter will be used.", - value="", - ), - ] - - outputs = [ - Output(display_name="Stored Messages", name="stored_messages", method="store_message"), - ] - - def store_message(self) -> Message: - message = self.message - - message.session_id = self.session_id or message.session_id - message.sender = self.sender or message.sender or MESSAGE_SENDER_AI - message.sender_name = self.sender_name or message.sender_name or MESSAGE_SENDER_NAME_AI - - if self.memory: - # override session_id - self.memory.session_id = message.session_id - lc_message = message.to_lc_message() - self.memory.add_messages([lc_message]) - stored = self.memory.messages - stored = [Message.from_lc_message(m) for m in stored] - if message.sender: - stored = [m for m in stored if m.sender == message.sender] - else: - store_message(message, flow_id=self.graph.flow_id) - stored = get_messages(session_id=message.session_id, sender_name=message.sender_name, sender=message.sender) - self.status = stored - return stored diff --git a/src/backend/base/langflow/components/helpers/__init__.py b/src/backend/base/langflow/components/helpers/__init__.py index fcc9e83ee0f8..f545a46cc577 100644 --- a/src/backend/base/langflow/components/helpers/__init__.py +++ b/src/backend/base/langflow/components/helpers/__init__.py @@ -1,25 +1,17 @@ -from .CombineText import CombineTextComponent -from .CustomComponent import CustomComponent -from .FilterData import FilterDataComponent -from .IDGenerator import IDGeneratorComponent -from .Memory import MemoryComponent -from .MergeData import MergeDataComponent -from .ParseData import ParseDataComponent -from .SplitText import SplitTextComponent -from .StoreMessage import StoreMessageComponent -from .CreateList import CreateListComponent - +from .create_list import CreateListComponent +from .current_date import CurrentDateComponent +from .id_generator import IDGeneratorComponent +from .memory import MemoryComponent +from .output_parser import OutputParserComponent +from .store_message import StoreMessageComponent +from .structured_output import StructuredOutputComponent __all__ = [ "CreateListComponent", - "CombineTextComponent", - "CustomComponent", - "FilterDataComponent", + "CurrentDateComponent", "IDGeneratorComponent", - "MemoryComponent", - "MergeDataComponent", - "ParseDataComponent", - "SplitTextComponent", + "OutputParserComponent", + "StructuredOutputComponent", "StoreMessageComponent", - "ListComponent", + "MemoryComponent", ] diff --git a/src/backend/base/langflow/components/helpers/create_list.py b/src/backend/base/langflow/components/helpers/create_list.py new file mode 100644 index 000000000000..a978ffe1f955 --- /dev/null +++ b/src/backend/base/langflow/components/helpers/create_list.py @@ -0,0 +1,30 @@ +from langflow.custom import Component +from langflow.inputs import StrInput +from langflow.schema import Data +from langflow.template import Output + + +class CreateListComponent(Component): + display_name = "Create List" + description = "Creates a list of texts." 
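The external-memory branch of StoreMessageComponent above boils down to `add_messages` followed by reading `.messages` back. A minimal sketch, using langchain-core's `InMemoryChatMessageHistory` as a stand-in for whatever `BaseChatMessageHistory` implementation is connected:

```python
from langchain_core.chat_history import InMemoryChatMessageHistory
from langchain_core.messages import AIMessage

memory = InMemoryChatMessageHistory()  # stand-in for the wired-in external memory
memory.add_messages([AIMessage(content="Stored reply")])

# Reading the history back mirrors how the component rebuilds its output list.
for lc_message in memory.messages:
    print(type(lc_message).__name__, lc_message.content)
```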
+ icon = "list" + name = "CreateList" + legacy = True + + inputs = [ + StrInput( + name="texts", + display_name="Texts", + info="Enter one or more texts.", + is_list=True, + ), + ] + + outputs = [ + Output(display_name="Data List", name="list", method="create_list"), + ] + + def create_list(self) -> list[Data]: + data = [Data(text=text) for text in self.texts] + self.status = data + return data diff --git a/src/backend/base/langflow/components/helpers/current_date.py b/src/backend/base/langflow/components/helpers/current_date.py new file mode 100644 index 000000000000..0ec7b7cc115b --- /dev/null +++ b/src/backend/base/langflow/components/helpers/current_date.py @@ -0,0 +1,77 @@ +from datetime import datetime +from zoneinfo import ZoneInfo + +from loguru import logger + +from langflow.custom import Component +from langflow.io import DropdownInput, Output +from langflow.schema.message import Message + + +class CurrentDateComponent(Component): + display_name = "Current Date" + description = "Returns the current date and time in the selected timezone." + icon = "clock" + beta = True + name = "CurrentDate" + + inputs = [ + DropdownInput( + name="timezone", + display_name="Timezone", + options=[ + "UTC", + "US/Eastern", + "US/Central", + "US/Mountain", + "US/Pacific", + "Europe/London", + "Europe/Paris", + "Europe/Berlin", + "Europe/Moscow", + "Asia/Tokyo", + "Asia/Shanghai", + "Asia/Singapore", + "Asia/Dubai", + "Australia/Sydney", + "Australia/Melbourne", + "Pacific/Auckland", + "America/Sao_Paulo", + "America/Mexico_City", + "America/Toronto", + "America/Vancouver", + "Africa/Cairo", + "Africa/Johannesburg", + "Atlantic/Reykjavik", + "Indian/Maldives", + "America/Bogota", + "America/Lima", + "America/Santiago", + "America/Buenos_Aires", + "America/Caracas", + "America/La_Paz", + "America/Montevideo", + "America/Asuncion", + "America/Cuiaba", + ], + value="UTC", + info="Select the timezone for the current date and time.", + tool_mode=True, + ), + ] + outputs = [ + Output(display_name="Current Date", name="current_date", method="get_current_date"), + ] + + def get_current_date(self) -> Message: + try: + tz = ZoneInfo(self.timezone) + current_date = datetime.now(tz).strftime("%Y-%m-%d %H:%M:%S %Z") + result = f"Current date and time in {self.timezone}: {current_date}" + self.status = result + return Message(text=result) + except Exception as e: # noqa: BLE001 + logger.opt(exception=True).debug("Error getting current date") + error_message = f"Error: {e}" + self.status = error_message + return Message(text=error_message) diff --git a/src/backend/base/langflow/components/helpers/id_generator.py b/src/backend/base/langflow/components/helpers/id_generator.py new file mode 100644 index 000000000000..fd9393841914 --- /dev/null +++ b/src/backend/base/langflow/components/helpers/id_generator.py @@ -0,0 +1,40 @@ +import uuid +from typing import Any + +from typing_extensions import override + +from langflow.custom import Component +from langflow.io import MessageTextInput, Output +from langflow.schema import dotdict +from langflow.schema.message import Message + + +class IDGeneratorComponent(Component): + display_name = "ID Generator" + description = "Generates a unique ID." 
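CurrentDateComponent above is a thin wrapper over the standard library; the equivalent computation for one of its dropdown timezones looks like this:

```python
from datetime import datetime
from zoneinfo import ZoneInfo

timezone = "Europe/Paris"  # any IANA name from the component's options list
now = datetime.now(ZoneInfo(timezone))
print(f"Current date and time in {timezone}: {now:%Y-%m-%d %H:%M:%S %Z}")
```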
+ icon = "fingerprint" + name = "IDGenerator" + + inputs = [ + MessageTextInput( + name="unique_id", + display_name="Value", + info="The generated unique ID.", + refresh_button=True, + ), + ] + + outputs = [ + Output(display_name="ID", name="id", method="generate_id"), + ] + + @override + def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None): + if field_name == "unique_id": + build_config[field_name]["value"] = str(uuid.uuid4()) + return build_config + + def generate_id(self) -> Message: + unique_id = self.unique_id or str(uuid.uuid4()) + self.status = f"Generated ID: {unique_id}" + return Message(text=unique_id) diff --git a/src/backend/base/langflow/components/helpers/memory.py b/src/backend/base/langflow/components/helpers/memory.py new file mode 100644 index 000000000000..323c2c8c46e7 --- /dev/null +++ b/src/backend/base/langflow/components/helpers/memory.py @@ -0,0 +1,119 @@ +from langchain.memory import ConversationBufferMemory + +from langflow.custom import Component +from langflow.field_typing import BaseChatMemory +from langflow.helpers.data import data_to_text +from langflow.inputs import HandleInput +from langflow.io import DropdownInput, IntInput, MessageTextInput, MultilineInput, Output +from langflow.memory import LCBuiltinChatMemory, get_messages +from langflow.schema import Data +from langflow.schema.message import Message +from langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_USER + + +class MemoryComponent(Component): + display_name = "Message History" + description = "Retrieves stored chat messages from Langflow tables or an external memory." + icon = "message-square-more" + name = "Memory" + + inputs = [ + HandleInput( + name="memory", + display_name="External Memory", + input_types=["BaseChatMessageHistory"], + info="Retrieve messages from an external memory. If empty, it will use the Langflow tables.", + ), + DropdownInput( + name="sender", + display_name="Sender Type", + options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER, "Machine and User"], + value="Machine and User", + info="Filter by sender type.", + advanced=True, + ), + MessageTextInput( + name="sender_name", + display_name="Sender Name", + info="Filter by sender name.", + advanced=True, + ), + IntInput( + name="n_messages", + display_name="Number of Messages", + value=100, + info="Number of messages to retrieve.", + advanced=True, + ), + MessageTextInput( + name="session_id", + display_name="Session ID", + info="The session ID of the chat. If empty, the current session ID parameter will be used.", + advanced=True, + ), + DropdownInput( + name="order", + display_name="Order", + options=["Ascending", "Descending"], + value="Ascending", + info="Order of the messages.", + advanced=True, + ), + MultilineInput( + name="template", + display_name="Template", + info="The template to use for formatting the data. 
" + "It can contain the keys {text}, {sender} or any other key in the message data.", + value="{sender_name}: {text}", + advanced=True, + ), + ] + + outputs = [ + Output(display_name="Data", name="messages", method="retrieve_messages"), + Output(display_name="Text", name="messages_text", method="retrieve_messages_as_text"), + ] + + def retrieve_messages(self) -> Data: + sender = self.sender + sender_name = self.sender_name + session_id = self.session_id + n_messages = self.n_messages + order = "DESC" if self.order == "Descending" else "ASC" + + if sender == "Machine and User": + sender = None + + if self.memory: + # override session_id + self.memory.session_id = session_id + + stored = self.memory.messages + # langchain memories are supposed to return messages in ascending order + if order == "DESC": + stored = stored[::-1] + if n_messages: + stored = stored[:n_messages] + stored = [Message.from_lc_message(m) for m in stored] + if sender: + expected_type = MESSAGE_SENDER_AI if sender == MESSAGE_SENDER_AI else MESSAGE_SENDER_USER + stored = [m for m in stored if m.type == expected_type] + else: + stored = get_messages( + sender=sender, + sender_name=sender_name, + session_id=session_id, + limit=n_messages, + order=order, + ) + self.status = stored + return stored + + def retrieve_messages_as_text(self) -> Message: + stored_text = data_to_text(self.template, self.retrieve_messages()) + self.status = stored_text + return Message(text=stored_text) + + def build_lc_memory(self) -> BaseChatMemory: + chat_memory = self.memory or LCBuiltinChatMemory(flow_id=self.flow_id, session_id=self.session_id) + return ConversationBufferMemory(chat_memory=chat_memory) diff --git a/src/backend/base/langflow/components/helpers/output_parser.py b/src/backend/base/langflow/components/helpers/output_parser.py new file mode 100644 index 000000000000..7fa3f5495f85 --- /dev/null +++ b/src/backend/base/langflow/components/helpers/output_parser.py @@ -0,0 +1,45 @@ +from langchain_core.output_parsers import CommaSeparatedListOutputParser + +from langflow.custom.custom_component.component import Component +from langflow.field_typing.constants import OutputParser +from langflow.io import DropdownInput, Output +from langflow.schema.message import Message + + +class OutputParserComponent(Component): + display_name = "Output Parser" + description = "Transforms the output of an LLM into a specified format." 
+ icon = "type" + name = "OutputParser" + legacy = True + + inputs = [ + DropdownInput( + name="parser_type", + display_name="Parser", + options=["CSV"], + value="CSV", + ), + ] + + outputs = [ + Output( + display_name="Format Instructions", + name="format_instructions", + info="Pass to a prompt template to include formatting instructions for LLM responses.", + method="format_instructions", + ), + Output(display_name="Output Parser", name="output_parser", method="build_parser"), + ] + + def build_parser(self) -> OutputParser: + if self.parser_type == "CSV": + return CommaSeparatedListOutputParser() + msg = "Unsupported or missing parser" + raise ValueError(msg) + + def format_instructions(self) -> Message: + if self.parser_type == "CSV": + return Message(text=CommaSeparatedListOutputParser().get_format_instructions()) + msg = "Unsupported or missing parser" + raise ValueError(msg) diff --git a/src/backend/base/langflow/components/helpers/store_message.py b/src/backend/base/langflow/components/helpers/store_message.py new file mode 100644 index 000000000000..388d8a4769ee --- /dev/null +++ b/src/backend/base/langflow/components/helpers/store_message.py @@ -0,0 +1,70 @@ +from langflow.custom import Component +from langflow.inputs import HandleInput, MessageInput +from langflow.inputs.inputs import MessageTextInput +from langflow.memory import get_messages, store_message +from langflow.schema.message import Message +from langflow.template import Output +from langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI + + +class StoreMessageComponent(Component): + display_name = "Store Message" + description = "Stores a chat message or text into Langflow tables or an external memory." + icon = "save" + name = "StoreMessage" + + inputs = [ + MessageInput(name="message", display_name="Message", info="The chat message to be stored.", required=True), + HandleInput( + name="memory", + display_name="External Memory", + input_types=["BaseChatMessageHistory"], + info="The external memory to store the message. If empty, it will use the Langflow tables.", + ), + MessageTextInput( + name="sender", + display_name="Sender", + info="The sender of the message. Might be Machine or User. " + "If empty, the current sender parameter will be used.", + advanced=True, + ), + MessageTextInput( + name="sender_name", + display_name="Sender Name", + info="The name of the sender. Might be AI or User. If empty, the current sender parameter will be used.", + advanced=True, + ), + MessageTextInput( + name="session_id", + display_name="Session ID", + info="The session ID of the chat. 
If empty, the current session ID parameter will be used.", + value="", + advanced=True, + ), + ] + + outputs = [ + Output(display_name="Stored Messages", name="stored_messages", method="store_message"), + ] + + def store_message(self) -> Message: + message = self.message + + message.session_id = self.session_id or message.session_id + message.sender = self.sender or message.sender or MESSAGE_SENDER_AI + message.sender_name = self.sender_name or message.sender_name or MESSAGE_SENDER_NAME_AI + + if self.memory: + # override session_id + self.memory.session_id = message.session_id + lc_message = message.to_lc_message() + self.memory.add_messages([lc_message]) + stored = self.memory.messages + stored = [Message.from_lc_message(m) for m in stored] + if message.sender: + stored = [m for m in stored if m.sender == message.sender] + else: + store_message(message, flow_id=self.graph.flow_id) + stored = get_messages(session_id=message.session_id, sender_name=message.sender_name, sender=message.sender) + self.status = stored + return stored diff --git a/src/backend/base/langflow/components/helpers/structured_output.py b/src/backend/base/langflow/components/helpers/structured_output.py new file mode 100644 index 000000000000..2af859710ea1 --- /dev/null +++ b/src/backend/base/langflow/components/helpers/structured_output.py @@ -0,0 +1,113 @@ +from typing import cast + +from pydantic import BaseModel, Field, create_model + +from langflow.base.models.chat_result import get_chat_result +from langflow.custom import Component +from langflow.field_typing.constants import LanguageModel +from langflow.helpers.base_model import build_model_from_schema +from langflow.io import BoolInput, HandleInput, MessageTextInput, Output, StrInput, TableInput +from langflow.schema.data import Data + + +class StructuredOutputComponent(Component): + display_name = "Structured Output" + description = ( + "Transforms LLM responses into **structured data formats**. Ideal for extracting specific information " + "or creating consistent outputs." + ) + icon = "braces" + + inputs = [ + HandleInput( + name="llm", + display_name="Language Model", + info="The language model to use to generate the structured output.", + input_types=["LanguageModel"], + ), + MessageTextInput(name="input_value", display_name="Input message"), + StrInput( + name="schema_name", + display_name="Schema Name", + info="Provide a name for the output data schema.", + ), + TableInput( + name="output_schema", + display_name="Output Schema", + info="Define the structure and data types for the model's output.", + table_schema=[ + { + "name": "name", + "display_name": "Name", + "type": "str", + "description": "Specify the name of the output field.", + }, + { + "name": "description", + "display_name": "Description", + "type": "str", + "description": "Describe the purpose of the output field.", + }, + { + "name": "type", + "display_name": "Type", + "type": "str", + "description": ( + "Indicate the data type of the output field " "(e.g., str, int, float, bool, list, dict)." 
+ ), + "default": "text", + }, + { + "name": "multiple", + "display_name": "Multiple", + "type": "boolean", + "description": "Set to True if this output field should be a list of the specified type.", + "default": "False", + }, + ], + ), + BoolInput( + name="multiple", + display_name="Generate Multiple", + info="Set to True if the model should generate a list of outputs instead of a single output.", + ), + ] + + outputs = [ + Output(name="structured_output", display_name="Structured Output", method="build_structured_output"), + ] + + def build_structured_output(self) -> Data: + if not hasattr(self.llm, "with_structured_output"): + msg = "Language model does not support structured output." + raise TypeError(msg) + if not self.output_schema: + msg = "Output schema cannot be empty" + raise ValueError(msg) + + _output_model = build_model_from_schema(self.output_schema) + if self.multiple: + output_model = create_model( + self.schema_name, + objects=(list[_output_model], Field(description=f"A list of {self.schema_name}.")), # type: ignore[valid-type] + ) + else: + output_model = _output_model + try: + llm_with_structured_output = cast(LanguageModel, self.llm).with_structured_output(schema=output_model) # type: ignore[valid-type, attr-defined] + + except NotImplementedError as exc: + msg = f"{self.llm.__class__.__name__} does not support structured output." + raise TypeError(msg) from exc + config_dict = { + "run_name": self.display_name, + "project_name": self.get_project_name(), + "callbacks": self.get_langchain_callbacks(), + } + output = get_chat_result(runnable=llm_with_structured_output, input_value=self.input_value, config=config_dict) + if isinstance(output, BaseModel): + output_dict = output.model_dump() + else: + msg = f"Output should be a Pydantic BaseModel, got {type(output)} ({output})" + raise TypeError(msg) + return Data(data=output_dict) diff --git a/src/backend/base/langflow/components/inputs/ChatInput.py b/src/backend/base/langflow/components/inputs/ChatInput.py deleted file mode 100644 index 28aa220a0f2e..000000000000 --- a/src/backend/base/langflow/components/inputs/ChatInput.py +++ /dev/null @@ -1,86 +0,0 @@ -from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES -from langflow.base.io.chat import ChatComponent -from langflow.inputs import BoolInput -from langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output -from langflow.memory import store_message -from langflow.schema.message import Message -from langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_USER, MESSAGE_SENDER_NAME_USER - - -class ChatInput(ChatComponent): - display_name = "Chat Input" - description = "Get chat inputs from the Playground." - icon = "ChatInput" - name = "ChatInput" - - inputs = [ - MultilineInput( - name="input_value", - display_name="Text", - value="", - info="Message to be passed as input.", - ), - BoolInput( - name="should_store_message", - display_name="Store Messages", - info="Store the message in the history.", - value=True, - advanced=True, - ), - DropdownInput( - name="sender", - display_name="Sender Type", - options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER], - value=MESSAGE_SENDER_USER, - info="Type of sender.", - advanced=True, - ), - MessageTextInput( - name="sender_name", - display_name="Sender Name", - info="Name of the sender.", - value=MESSAGE_SENDER_NAME_USER, - advanced=True, - ), - MessageTextInput( - name="session_id", - display_name="Session ID", - info="The session ID of the chat. 
If empty, the current session ID parameter will be used.", - advanced=True, - ), - FileInput( - name="files", - display_name="Files", - file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES, - info="Files to be sent with the message.", - advanced=True, - is_list=True, - ), - ] - outputs = [ - Output(display_name="Message", name="message", method="message_response"), - ] - - def message_response(self) -> Message: - message = Message( - text=self.input_value, - sender=self.sender, - sender_name=self.sender_name, - session_id=self.session_id, - files=self.files, - ) - - if ( - self.session_id - and isinstance(message, Message) - and isinstance(message.text, str) - and self.should_store_message - ): - store_message( - message, - flow_id=self.graph.flow_id, - ) - self.message.value = message - - self.status = message - return message diff --git a/src/backend/base/langflow/components/inputs/TextInput.py b/src/backend/base/langflow/components/inputs/TextInput.py deleted file mode 100644 index 6495f9ab63f5..000000000000 --- a/src/backend/base/langflow/components/inputs/TextInput.py +++ /dev/null @@ -1,27 +0,0 @@ -from langflow.base.io.text import TextComponent -from langflow.io import MessageTextInput, Output -from langflow.schema.message import Message - - -class TextInputComponent(TextComponent): - display_name = "Text Input" - description = "Get text inputs from the Playground." - icon = "type" - name = "TextInput" - - inputs = [ - MessageTextInput( - name="input_value", - display_name="Text", - info="Text to be passed as input.", - ), - ] - outputs = [ - Output(display_name="Text", name="text", method="text_response"), - ] - - def text_response(self) -> Message: - message = Message( - text=self.input_value, - ) - return message diff --git a/src/backend/base/langflow/components/inputs/__init__.py b/src/backend/base/langflow/components/inputs/__init__.py index 48fc9a189913..311e23f759f1 100644 --- a/src/backend/base/langflow/components/inputs/__init__.py +++ b/src/backend/base/langflow/components/inputs/__init__.py @@ -1,4 +1,4 @@ -from .ChatInput import ChatInput -from .TextInput import TextInputComponent +from .chat import ChatInput +from .text import TextInputComponent __all__ = ["ChatInput", "TextInputComponent"] diff --git a/src/backend/base/langflow/components/inputs/chat.py b/src/backend/base/langflow/components/inputs/chat.py new file mode 100644 index 000000000000..be8e656cfb96 --- /dev/null +++ b/src/backend/base/langflow/components/inputs/chat.py @@ -0,0 +1,101 @@ +from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES +from langflow.base.io.chat import ChatComponent +from langflow.inputs import BoolInput +from langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output +from langflow.schema.message import Message +from langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_USER, MESSAGE_SENDER_USER + + +class ChatInput(ChatComponent): + display_name = "Chat Input" + description = "Get chat inputs from the Playground." 
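Stepping back to StructuredOutputComponent above: its core pattern is a Pydantic schema, an optional list wrapper built with `create_model`, and LangChain's `with_structured_output`. A sketch under those assumptions, where `Person` and the commented-out model are placeholders:

```python
from pydantic import BaseModel, Field, create_model


class Person(BaseModel):
    name: str = Field(description="The person's name.")
    age: int = Field(description="The person's age.")


# "Generate Multiple" wraps the schema in a list field, exactly as the component does.
PersonList = create_model("PersonList", objects=(list[Person], Field(description="A list of Person.")))

# With any chat model that supports structured output:
# structured_llm = llm.with_structured_output(schema=PersonList)
# print(structured_llm.invoke("List two people from the text ...").model_dump())
```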
+ icon = "MessagesSquare" + name = "ChatInput" + + inputs = [ + MultilineInput( + name="input_value", + display_name="Text", + value="", + info="Message to be passed as input.", + ), + BoolInput( + name="should_store_message", + display_name="Store Messages", + info="Store the message in the history.", + value=True, + advanced=True, + ), + DropdownInput( + name="sender", + display_name="Sender Type", + options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER], + value=MESSAGE_SENDER_USER, + info="Type of sender.", + advanced=True, + ), + MessageTextInput( + name="sender_name", + display_name="Sender Name", + info="Name of the sender.", + value=MESSAGE_SENDER_NAME_USER, + advanced=True, + ), + MessageTextInput( + name="session_id", + display_name="Session ID", + info="The session ID of the chat. If empty, the current session ID parameter will be used.", + advanced=True, + ), + FileInput( + name="files", + display_name="Files", + file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES, + info="Files to be sent with the message.", + advanced=True, + is_list=True, + ), + MessageTextInput( + name="background_color", + display_name="Background Color", + info="The background color of the icon.", + advanced=True, + ), + MessageTextInput( + name="chat_icon", + display_name="Icon", + info="The icon of the message.", + advanced=True, + ), + MessageTextInput( + name="text_color", + display_name="Text Color", + info="The text color of the name", + advanced=True, + ), + ] + outputs = [ + Output(display_name="Message", name="message", method="message_response"), + ] + + def message_response(self) -> Message: + _background_color = self.background_color + _text_color = self.text_color + _icon = self.chat_icon + message = Message( + text=self.input_value, + sender=self.sender, + sender_name=self.sender_name, + session_id=self.session_id, + files=self.files, + properties={"background_color": _background_color, "text_color": _text_color, "icon": _icon}, + ) + if self.session_id and isinstance(message, Message) and self.should_store_message: + stored_message = self.send_message( + message, + ) + self.message.value = stored_message + message = stored_message + + self.status = message + return message diff --git a/src/backend/base/langflow/components/inputs/text.py b/src/backend/base/langflow/components/inputs/text.py new file mode 100644 index 000000000000..6a2c5cb6ceb0 --- /dev/null +++ b/src/backend/base/langflow/components/inputs/text.py @@ -0,0 +1,26 @@ +from langflow.base.io.text import TextComponent +from langflow.io import MultilineInput, Output +from langflow.schema.message import Message + + +class TextInputComponent(TextComponent): + display_name = "Text Input" + description = "Get text inputs from the Playground." 
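The new styling inputs on ChatInput travel alongside the chat payload in a `properties` dict, as `message_response` above shows; a minimal sketch with placeholder values:

```python
from langflow.schema.message import Message

message = Message(
    text="Hello from the Playground",
    sender="User",
    sender_name="User",
    session_id="session-123",  # placeholder session
    properties={"background_color": "#ffffff", "text_color": "#000000", "icon": "MessagesSquare"},
)
```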
+ icon = "type" + name = "TextInput" + + inputs = [ + MultilineInput( + name="input_value", + display_name="Text", + info="Text to be passed as input.", + ), + ] + outputs = [ + Output(display_name="Text", name="text", method="text_response"), + ] + + def text_response(self) -> Message: + return Message( + text=self.input_value, + ) diff --git a/src/backend/base/langflow/components/langchain_utilities/FirecrawlCrawlApi.py b/src/backend/base/langflow/components/langchain_utilities/FirecrawlCrawlApi.py deleted file mode 100644 index e152b797edc6..000000000000 --- a/src/backend/base/langflow/components/langchain_utilities/FirecrawlCrawlApi.py +++ /dev/null @@ -1,90 +0,0 @@ -import uuid -from typing import Optional - -from langflow.custom import CustomComponent -from langflow.schema import Data - - -class FirecrawlCrawlApi(CustomComponent): - display_name: str = "FirecrawlCrawlApi" - description: str = "Firecrawl Crawl API." - name = "FirecrawlCrawlApi" - - output_types: list[str] = ["Document"] - documentation: str = "https://docs.firecrawl.dev/api-reference/endpoint/crawl" - field_config = { - "api_key": { - "display_name": "API Key", - "field_type": "str", - "required": True, - "password": True, - "info": "The API key to use Firecrawl API.", - }, - "url": { - "display_name": "URL", - "field_type": "str", - "required": True, - "info": "The base URL to start crawling from.", - }, - "timeout": { - "display_name": "Timeout", - "field_type": "int", - "info": "The timeout in milliseconds.", - }, - "crawlerOptions": { - "display_name": "Crawler Options", - "info": "Options for the crawler behavior.", - }, - "pageOptions": { - "display_name": "Page Options", - "info": "The page options to send with the request.", - }, - "idempotency_key": { - "display_name": "Idempotency Key", - "field_type": "str", - "info": "Optional idempotency key to ensure unique requests.", - }, - } - - def build( - self, - api_key: str, - url: str, - timeout: int = 30000, - crawlerOptions: Optional[Data] = None, - pageOptions: Optional[Data] = None, - idempotency_key: Optional[str] = None, - ) -> Data: - try: - from firecrawl.firecrawl import FirecrawlApp # type: ignore - except ImportError: - raise ImportError( - "Could not import firecrawl integration package. " "Please install it with `pip install firecrawl-py`." - ) - if crawlerOptions: - crawler_options_dict = crawlerOptions.__dict__["data"]["text"] - else: - crawler_options_dict = {} - - if pageOptions: - page_options_dict = pageOptions.__dict__["data"]["text"] - else: - page_options_dict = {} - - if not idempotency_key: - idempotency_key = str(uuid.uuid4()) - - app = FirecrawlApp(api_key=api_key) - crawl_result = app.crawl_url( - url, - { - "crawlerOptions": crawler_options_dict, - "pageOptions": page_options_dict, - }, - True, - int(timeout / 1000), - idempotency_key, - ) - - records = Data(data={"results": crawl_result}) - return records diff --git a/src/backend/base/langflow/components/langchain_utilities/FirecrawlScrapeApi.py b/src/backend/base/langflow/components/langchain_utilities/FirecrawlScrapeApi.py deleted file mode 100644 index 9ed720a6770c..000000000000 --- a/src/backend/base/langflow/components/langchain_utilities/FirecrawlScrapeApi.py +++ /dev/null @@ -1,79 +0,0 @@ -from typing import Optional - -from langflow.custom import CustomComponent -from langflow.schema import Data - - -class FirecrawlScrapeApi(CustomComponent): - display_name: str = "FirecrawlScrapeApi" - description: str = "Firecrawl Scrape API." 
- name = "FirecrawlScrapeApi" - - output_types: list[str] = ["Document"] - documentation: str = "https://docs.firecrawl.dev/api-reference/endpoint/scrape" - field_config = { - "api_key": { - "display_name": "API Key", - "field_type": "str", - "required": True, - "password": True, - "info": "The API key to use Firecrawl API.", - }, - "url": { - "display_name": "URL", - "field_type": "str", - "required": True, - "info": "The URL to scrape.", - }, - "timeout": { - "display_name": "Timeout", - "info": "Timeout in milliseconds for the request.", - "field_type": "int", - "default_value": 10000, - }, - "pageOptions": { - "display_name": "Page Options", - "info": "The page options to send with the request.", - }, - "extractorOptions": { - "display_name": "Extractor Options", - "info": "The extractor options to send with the request.", - }, - } - - def build( - self, - api_key: str, - url: str, - timeout: int = 10000, - pageOptions: Optional[Data] = None, - extractorOptions: Optional[Data] = None, - ) -> Data: - try: - from firecrawl.firecrawl import FirecrawlApp # type: ignore - except ImportError: - raise ImportError( - "Could not import firecrawl integration package. " "Please install it with `pip install firecrawl-py`." - ) - if extractorOptions: - extractor_options_dict = extractorOptions.__dict__["data"]["text"] - else: - extractor_options_dict = {} - - if pageOptions: - page_options_dict = pageOptions.__dict__["data"]["text"] - else: - page_options_dict = {} - - app = FirecrawlApp(api_key=api_key) - results = app.scrape_url( - url, - { - "timeout": str(timeout), - "extractorOptions": extractor_options_dict, - "pageOptions": page_options_dict, - }, - ) - - record = Data(data=results) - return record diff --git a/src/backend/base/langflow/components/langchain_utilities/JSONDocumentBuilder.py b/src/backend/base/langflow/components/langchain_utilities/JSONDocumentBuilder.py deleted file mode 100644 index e4d3300641d7..000000000000 --- a/src/backend/base/langflow/components/langchain_utilities/JSONDocumentBuilder.py +++ /dev/null @@ -1,48 +0,0 @@ -### JSON Document Builder - -# Build a Document containing a JSON object using a key and another Document page content. - -# **Params** - -# - **Key:** The key to use for the JSON object. -# - **Document:** The Document page to use for the JSON object. - -# **Output** - -# - **Document:** The Document containing the JSON object. - -from langchain_core.documents import Document - -from langflow.custom import CustomComponent -from langflow.services.database.models.base import orjson_dumps - - -class JSONDocumentBuilder(CustomComponent): - display_name: str = "JSON Document Builder" - description: str = "Build a Document containing a JSON object using a key and another Document page content." 
- name = "JSONDocumentBuilder" - - output_types: list[str] = ["Document"] - documentation: str = "https://docs.langflow.org/components/utilities#json-document-builder" - - field_config = { - "key": {"display_name": "Key"}, - "document": {"display_name": "Document"}, - } - - def build( - self, - key: str, - document: Document, - ) -> Document: - documents = None - if isinstance(document, list): - documents = [ - Document(page_content=orjson_dumps({key: doc.page_content}, indent_2=False)) for doc in document - ] - elif isinstance(document, Document): - documents = Document(page_content=orjson_dumps({key: document.page_content}, indent_2=False)) - else: - raise TypeError(f"Expected Document or list of Documents, got {type(document)}") - self.repr_value = documents - return documents diff --git a/src/backend/base/langflow/components/langchain_utilities/SQLDatabase.py b/src/backend/base/langflow/components/langchain_utilities/SQLDatabase.py deleted file mode 100644 index 4d5104aabd00..000000000000 --- a/src/backend/base/langflow/components/langchain_utilities/SQLDatabase.py +++ /dev/null @@ -1,26 +0,0 @@ -from langchain_community.utilities.sql_database import SQLDatabase -from langflow.custom import CustomComponent -from sqlalchemy import create_engine -from sqlalchemy.pool import StaticPool - - -class SQLDatabaseComponent(CustomComponent): - display_name = "SQLDatabase" - description = "SQL Database" - name = "SQLDatabase" - - def build_config(self): - return { - "uri": {"display_name": "URI", "info": "URI to the database."}, - } - - def clean_up_uri(self, uri: str) -> str: - if uri.startswith("postgres://"): - uri = uri.replace("postgres://", "postgresql://") - return uri.strip() - - def build(self, uri: str) -> SQLDatabase: - uri = self.clean_up_uri(uri) - # Create an engine using SQLAlchemy with StaticPool - engine = create_engine(uri, poolclass=StaticPool) - return SQLDatabase(engine) diff --git a/src/backend/base/langflow/components/langchain_utilities/SpiderTool.py b/src/backend/base/langflow/components/langchain_utilities/SpiderTool.py deleted file mode 100644 index 0cef340af328..000000000000 --- a/src/backend/base/langflow/components/langchain_utilities/SpiderTool.py +++ /dev/null @@ -1,126 +0,0 @@ -from spider.spider import Spider - -from langflow.base.langchain_utilities.spider_constants import MODES -from langflow.custom import Component -from langflow.io import BoolInput, DictInput, DropdownInput, IntInput, Output, SecretStrInput, StrInput -from langflow.schema import Data - - -class SpiderTool(Component): - display_name: str = "Spider Web Crawler & Scraper" - description: str = "Spider API for web crawling and scraping." - output_types: list[str] = ["Document"] - documentation: str = "https://spider.cloud/docs/api" - - inputs = [ - SecretStrInput( - name="spider_api_key", - display_name="Spider API Key", - required=True, - password=True, - info="The Spider API Key, get it from https://spider.cloud", - ), - StrInput( - name="url", - display_name="URL", - required=True, - info="The URL to scrape or crawl", - ), - DropdownInput( - name="mode", - display_name="Mode", - required=True, - options=MODES, - value=MODES[0], - info="The mode of operation: scrape or crawl", - ), - IntInput( - name="limit", - display_name="Limit", - info="The maximum amount of pages allowed to crawl per website. Set to 0 to crawl all pages.", - advanced=True, - ), - IntInput( - name="depth", - display_name="Depth", - info="The crawl limit for maximum depth. 
If 0, no limit will be applied.", - advanced=True, - ), - StrInput( - name="blacklist", - display_name="Blacklist", - info="Blacklist paths that you do not want to crawl. Use Regex patterns.", - advanced=True, - ), - StrInput( - name="whitelist", - display_name="Whitelist", - info="Whitelist paths that you want to crawl, ignoring all other routes. Use Regex patterns.", - advanced=True, - ), - BoolInput( - name="readability", - display_name="Use Readability", - info="Use readability to pre-process the content for reading.", - advanced=True, - ), - IntInput( - name="request_timeout", - display_name="Request Timeout", - info="Timeout for the request in seconds.", - advanced=True, - ), - BoolInput( - name="metadata", - display_name="Metadata", - info="Include metadata in the response.", - advanced=True, - ), - DictInput( - name="params", - display_name="Additional Parameters", - info="Additional parameters to pass to the API. If provided, other inputs will be ignored.", - ), - ] - - outputs = [ - Output(display_name="Markdown", name="content", method="crawl"), - ] - - def crawl(self) -> list[Data]: - if self.params: - parameters = self.params["data"] - else: - parameters = { - "limit": self.limit if self.limit else None, - "depth": self.depth if self.depth else None, - "blacklist": self.blacklist if self.blacklist else None, - "whitelist": self.whitelist if self.whitelist else None, - "readability": self.readability, - "request_timeout": self.request_timeout if self.request_timeout else None, - "metadata": self.metadata, - "return_format": "markdown", - } - - app = Spider(api_key=self.spider_api_key) - try: - if self.mode == "scrape": - parameters["limit"] = 1 - result = app.scrape_url(self.url, parameters) - elif self.mode == "crawl": - result = app.crawl_url(self.url, parameters) - else: - raise ValueError(f"Invalid mode: {self.mode}. 
Must be 'scrape' or 'crawl'.") - except Exception as e: - raise Exception(f"Error: {str(e)}") - - records = [] - - for record in result: - if self.metadata: - records.append( - Data(data={"content": record["content"], "url": record["url"], "metadata": record["metadata"]}) - ) - else: - records.append(Data(data={"content": record["content"], "url": record["url"]})) - return records diff --git a/src/backend/base/langflow/components/langchain_utilities/__init__.py b/src/backend/base/langflow/components/langchain_utilities/__init__.py index e69de29bb2d1..16961327e55c 100644 --- a/src/backend/base/langflow/components/langchain_utilities/__init__.py +++ b/src/backend/base/langflow/components/langchain_utilities/__init__.py @@ -0,0 +1,57 @@ +from .character import CharacterTextSplitterComponent +from .conversation import ConversationChainComponent +from .csv import CSVAgentComponent +from .html_link_extractor import HtmlLinkExtractorComponent +from .json import JsonAgentComponent +from .json_document_builder import JSONDocumentBuilder +from .langchain_hub import LangChainHubPromptComponent +from .language_recursive import LanguageRecursiveTextSplitterComponent +from .language_semantic import SemanticTextSplitterComponent +from .llm_checker import LLMCheckerChainComponent +from .llm_math import LLMMathChainComponent +from .natural_language import NaturalLanguageTextSplitterComponent +from .openai_tools import OpenAIToolsAgentComponent +from .openapi import OpenAPIAgentComponent +from .recursive_character import RecursiveCharacterTextSplitterComponent +from .retrieval_qa import RetrievalQAComponent +from .runnable_executor import RunnableExecComponent +from .self_query import SelfQueryRetrieverComponent +from .spider import SpiderTool +from .sql import SQLAgentComponent +from .sql_database import SQLDatabaseComponent +from .sql_generator import SQLGeneratorComponent +from .tool_calling import ToolCallingAgentComponent +from .vector_store import VectoStoreRetrieverComponent +from .vector_store_info import VectorStoreInfoComponent +from .vector_store_router import VectorStoreRouterAgentComponent +from .xml import XMLAgentComponent + +__all__ = [ + "CharacterTextSplitterComponent", + "ConversationChainComponent", + "CSVAgentComponent", + "HtmlLinkExtractorComponent", + "JSONDocumentBuilder", + "JsonAgentComponent", + "LangChainHubPromptComponent", + "LanguageRecursiveTextSplitterComponent", + "LLMCheckerChainComponent", + "LLMMathChainComponent", + "NaturalLanguageTextSplitterComponent", + "OpenAIToolsAgentComponent", + "OpenAPIAgentComponent", + "RecursiveCharacterTextSplitterComponent", + "RetrievalQAComponent", + "RunnableExecComponent", + "SelfQueryRetrieverComponent", + "SpiderTool", + "SQLAgentComponent", + "SQLDatabaseComponent", + "SQLGeneratorComponent", + "ToolCallingAgentComponent", + "VectoStoreRetrieverComponent", + "VectorStoreInfoComponent", + "VectorStoreRouterAgentComponent", + "XMLAgentComponent", + "SemanticTextSplitterComponent", +] diff --git a/src/backend/base/langflow/components/langchain_utilities/character.py b/src/backend/base/langflow/components/langchain_utilities/character.py new file mode 100644 index 000000000000..0dff4f13fe3b --- /dev/null +++ b/src/backend/base/langflow/components/langchain_utilities/character.py @@ -0,0 +1,52 @@ +from typing import Any + +from langchain_text_splitters import CharacterTextSplitter, TextSplitter + +from langflow.base.textsplitters.model import LCTextSplitterComponent +from langflow.inputs import DataInput, IntInput, MessageTextInput 
+from langflow.utils.util import unescape_string + + +class CharacterTextSplitterComponent(LCTextSplitterComponent): + display_name = "CharacterTextSplitter" + description = "Split text by number of characters." + documentation = "https://docs.langflow.org/components/text-splitters#charactertextsplitter" + name = "CharacterTextSplitter" + icon = "LangChain" + + inputs = [ + IntInput( + name="chunk_size", + display_name="Chunk Size", + info="The maximum length of each chunk.", + value=1000, + ), + IntInput( + name="chunk_overlap", + display_name="Chunk Overlap", + info="The amount of overlap between chunks.", + value=200, + ), + DataInput( + name="data_input", + display_name="Input", + info="The texts to split.", + input_types=["Document", "Data"], + ), + MessageTextInput( + name="separator", + display_name="Separator", + info='The characters to split on.\nIf left empty defaults to "\\n\\n".', + ), + ] + + def get_data_input(self) -> Any: + return self.data_input + + def build_text_splitter(self) -> TextSplitter: + separator = unescape_string(self.separator) if self.separator else "\n\n" + return CharacterTextSplitter( + chunk_overlap=self.chunk_overlap, + chunk_size=self.chunk_size, + separator=separator, + ) diff --git a/src/backend/base/langflow/components/langchain_utilities/conversation.py b/src/backend/base/langflow/components/langchain_utilities/conversation.py new file mode 100644 index 000000000000..6e9cbaa60096 --- /dev/null +++ b/src/backend/base/langflow/components/langchain_utilities/conversation.py @@ -0,0 +1,52 @@ +from langchain.chains import ConversationChain + +from langflow.base.chains.model import LCChainComponent +from langflow.field_typing import Message +from langflow.inputs import HandleInput, MultilineInput + + +class ConversationChainComponent(LCChainComponent): + display_name = "ConversationChain" + description = "Chain to have a conversation and load context from memory." 
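CharacterTextSplitterComponent above simply configures langchain-text-splitters; used directly (sample text and sizes arbitrary):

```python
from langchain_text_splitters import CharacterTextSplitter

splitter = CharacterTextSplitter(separator="\n\n", chunk_size=1000, chunk_overlap=200)
chunks = splitter.split_text("First paragraph.\n\nSecond paragraph.\n\nThird paragraph.")
print(len(chunks), chunks[0])
```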
+ name = "ConversationChain" + legacy: bool = True + icon = "LangChain" + + inputs = [ + MultilineInput( + name="input_value", + display_name="Input", + info="The input value to pass to the chain.", + required=True, + ), + HandleInput( + name="llm", + display_name="Language Model", + input_types=["LanguageModel"], + required=True, + ), + HandleInput( + name="memory", + display_name="Memory", + input_types=["BaseChatMemory"], + ), + ] + + def invoke_chain(self) -> Message: + if not self.memory: + chain = ConversationChain(llm=self.llm) + else: + chain = ConversationChain(llm=self.llm, memory=self.memory) + + result = chain.invoke( + {"input": self.input_value}, + config={"callbacks": self.get_langchain_callbacks()}, + ) + if isinstance(result, dict): + result = result.get(chain.output_key, "") + + elif not isinstance(result, str): + result = result.get("response") + result = str(result) + self.status = result + return Message(text=result) diff --git a/src/backend/base/langflow/components/langchain_utilities/csv.py b/src/backend/base/langflow/components/langchain_utilities/csv.py new file mode 100644 index 000000000000..8fcb49278438 --- /dev/null +++ b/src/backend/base/langflow/components/langchain_utilities/csv.py @@ -0,0 +1,87 @@ +from langchain_experimental.agents.agent_toolkits.csv.base import create_csv_agent + +from langflow.base.agents.agent import LCAgentComponent +from langflow.field_typing import AgentExecutor +from langflow.inputs import DropdownInput, FileInput, HandleInput +from langflow.inputs.inputs import MessageTextInput +from langflow.schema.message import Message +from langflow.template.field.base import Output + + +class CSVAgentComponent(LCAgentComponent): + display_name = "CSVAgent" + description = "Construct a CSV agent from a CSV and tools." 
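For context on the (now legacy) ConversationChainComponent above, the underlying chain can be exercised without an API key by swapping in a fake model; `FakeListLLM` here is only a stand-in:

```python
from langchain.chains import ConversationChain
from langchain.memory import ConversationBufferMemory
from langchain_community.llms.fake import FakeListLLM  # stand-in for a real model

chain = ConversationChain(llm=FakeListLLM(responses=["Hello!"]), memory=ConversationBufferMemory())
result = chain.invoke({"input": "Hi there"})
print(result[chain.output_key])  # ConversationChain's output key is "response"
```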
+ documentation = "https://python.langchain.com/docs/modules/agents/toolkits/csv" + name = "CSVAgent" + icon = "LangChain" + + inputs = [ + *LCAgentComponent._base_inputs, + HandleInput( + name="llm", + display_name="Language Model", + input_types=["LanguageModel"], + required=True, + info="An LLM Model Object (It can be found in any LLM Component).", + ), + FileInput( + name="path", + display_name="File Path", + file_types=["csv"], + input_types=["str", "Message"], + required=True, + info="A CSV File or File Path.", + ), + DropdownInput( + name="agent_type", + display_name="Agent Type", + advanced=True, + options=["zero-shot-react-description", "openai-functions", "openai-tools"], + value="openai-tools", + ), + MessageTextInput( + name="input_value", + display_name="Text", + info="Text to be passed as input and extract info from the CSV File.", + ), + ] + + outputs = [ + Output(display_name="Response", name="response", method="build_agent_response"), + Output(display_name="Agent", name="agent", method="build_agent"), + ] + + def build_agent_response(self) -> Message: + agent_kwargs = { + "verbose": self.verbose, + "allow_dangerous_code": True, + } + + agent_csv = create_csv_agent( + llm=self.llm, + path=self.path, + agent_type=self.agent_type, + handle_parsing_errors=self.handle_parsing_errors, + **agent_kwargs, + ) + + result = agent_csv.invoke({"input": self.input_value}) + return Message(text=str(result["output"])) + + def build_agent(self) -> AgentExecutor: + agent_kwargs = { + "verbose": self.verbose, + "allow_dangerous_code": True, + } + + agent_csv = create_csv_agent( + llm=self.llm, + path=self.path, + agent_type=self.agent_type, + handle_parsing_errors=self.handle_parsing_errors, + **agent_kwargs, + ) + + self.status = Message(text=str(agent_csv)) + + return agent_csv diff --git a/src/backend/base/langflow/components/langchain_utilities/html_link_extractor.py b/src/backend/base/langflow/components/langchain_utilities/html_link_extractor.py new file mode 100644 index 000000000000..824b04ea16fb --- /dev/null +++ b/src/backend/base/langflow/components/langchain_utilities/html_link_extractor.py @@ -0,0 +1,34 @@ +from typing import Any + +from langchain_community.graph_vectorstores.extractors import HtmlLinkExtractor, LinkExtractorTransformer +from langchain_core.documents import BaseDocumentTransformer + +from langflow.base.document_transformers.model import LCDocumentTransformerComponent +from langflow.inputs import BoolInput, DataInput, StrInput + + +class HtmlLinkExtractorComponent(LCDocumentTransformerComponent): + display_name = "HTML Link Extractor" + description = "Extract hyperlinks from HTML content." 
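The heart of CSVAgentComponent above is a single `create_csv_agent` call; in this sketch `data.csv` and the OpenAI model are placeholders, and running it requires an API key plus a local CSV file:

```python
from langchain_experimental.agents.agent_toolkits.csv.base import create_csv_agent
from langchain_openai import ChatOpenAI  # example provider

agent = create_csv_agent(
    llm=ChatOpenAI(model="gpt-4o-mini"),
    path="data.csv",  # placeholder path
    agent_type="openai-tools",
    allow_dangerous_code=True,  # mirrors the component's agent_kwargs
)
print(agent.invoke({"input": "How many rows are in the file?"})["output"])
```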
+ documentation = "https://python.langchain.com/v0.2/api_reference/community/graph_vectorstores/langchain_community.graph_vectorstores.extractors.html_link_extractor.HtmlLinkExtractor.html" + name = "HtmlLinkExtractor" + icon = "LangChain" + + inputs = [ + StrInput(name="kind", display_name="Kind of edge", value="hyperlink", required=False), + BoolInput(name="drop_fragments", display_name="Drop URL fragments", value=True, required=False), + DataInput( + name="data_input", + display_name="Input", + info="The texts from which to extract links.", + input_types=["Document", "Data"], + ), + ] + + def get_data_input(self) -> Any: + return self.data_input + + def build_document_transformer(self) -> BaseDocumentTransformer: + return LinkExtractorTransformer( + [HtmlLinkExtractor(kind=self.kind, drop_fragments=self.drop_fragments).as_document_extractor()] + ) diff --git a/src/backend/base/langflow/components/langchain_utilities/json.py b/src/backend/base/langflow/components/langchain_utilities/json.py new file mode 100644 index 000000000000..05732c830440 --- /dev/null +++ b/src/backend/base/langflow/components/langchain_utilities/json.py @@ -0,0 +1,45 @@ +from pathlib import Path + +import yaml +from langchain.agents import AgentExecutor +from langchain_community.agent_toolkits import create_json_agent +from langchain_community.agent_toolkits.json.toolkit import JsonToolkit +from langchain_community.tools.json.tool import JsonSpec + +from langflow.base.agents.agent import LCAgentComponent +from langflow.inputs import FileInput, HandleInput + + +class JsonAgentComponent(LCAgentComponent): + display_name = "JsonAgent" + description = "Construct a json agent from an LLM and tools." + name = "JsonAgent" + legacy: bool = True + + inputs = [ + *LCAgentComponent._base_inputs, + HandleInput( + name="llm", + display_name="Language Model", + input_types=["LanguageModel"], + required=True, + ), + FileInput( + name="path", + display_name="File Path", + file_types=["json", "yaml", "yml"], + required=True, + ), + ] + + def build_agent(self) -> AgentExecutor: + path = Path(self.path) + if path.suffix in ("yaml", "yml"): + with path.open(encoding="utf-8") as file: + yaml_dict = yaml.safe_load(file) + spec = JsonSpec(dict_=yaml_dict) + else: + spec = JsonSpec.from_file(path) + toolkit = JsonToolkit(spec=spec) + + return create_json_agent(llm=self.llm, toolkit=toolkit, **self.get_agent_kwargs()) diff --git a/src/backend/base/langflow/components/langchain_utilities/json_document_builder.py b/src/backend/base/langflow/components/langchain_utilities/json_document_builder.py new file mode 100644 index 000000000000..402a65182180 --- /dev/null +++ b/src/backend/base/langflow/components/langchain_utilities/json_document_builder.py @@ -0,0 +1,50 @@ +# JSON Document Builder + +# Build a Document containing a JSON object using a key and another Document page content. + +# **Params** + +# - **Key:** The key to use for the JSON object. +# - **Document:** The Document page to use for the JSON object. + +# **Output** + +# - **Document:** The Document containing the JSON object. + +from langchain_core.documents import Document + +from langflow.custom import CustomComponent +from langflow.services.database.models.base import orjson_dumps + + +class JSONDocumentBuilder(CustomComponent): + display_name: str = "JSON Document Builder" + description: str = "Build a Document containing a JSON object using a key and another Document page content." 
+ name = "JSONDocumentBuilder" + legacy: bool = True + + output_types: list[str] = ["Document"] + documentation: str = "https://docs.langflow.org/components/utilities#json-document-builder" + + field_config = { + "key": {"display_name": "Key"}, + "document": {"display_name": "Document"}, + } + + def build( + self, + key: str, + document: Document, + ) -> Document: + documents = None + if isinstance(document, list): + documents = [ + Document(page_content=orjson_dumps({key: doc.page_content}, indent_2=False)) for doc in document + ] + elif isinstance(document, Document): + documents = Document(page_content=orjson_dumps({key: document.page_content}, indent_2=False)) + else: + msg = f"Expected Document or list of Documents, got {type(document)}" + raise TypeError(msg) + self.repr_value = documents + return documents diff --git a/src/backend/base/langflow/components/langchain_utilities/langchain_hub.py b/src/backend/base/langflow/components/langchain_utilities/langchain_hub.py new file mode 100644 index 000000000000..593afd36909c --- /dev/null +++ b/src/backend/base/langflow/components/langchain_utilities/langchain_hub.py @@ -0,0 +1,126 @@ +import re + +from langchain_core.prompts import HumanMessagePromptTemplate + +from langflow.custom import Component +from langflow.inputs import DefaultPromptField, SecretStrInput, StrInput +from langflow.io import Output +from langflow.schema.message import Message + + +class LangChainHubPromptComponent(Component): + display_name: str = "Prompt Hub" + description: str = "Prompt Component that uses LangChain Hub prompts" + beta = True + icon = "LangChain" + trace_type = "prompt" + name = "LangChain Hub Prompt" + + inputs = [ + SecretStrInput( + name="langchain_api_key", + display_name="Your LangChain API Key", + info="The LangChain API Key to use.", + required=True, + ), + StrInput( + name="langchain_hub_prompt", + display_name="LangChain Hub Prompt", + info="The LangChain Hub prompt to use, i.e., 'efriis/my-first-prompt'", + refresh_button=True, + required=True, + ), + ] + + outputs = [ + Output(display_name="Build Prompt", name="prompt", method="build_prompt"), + ] + + def update_build_config(self, build_config: dict, field_value: str, field_name: str | None = None): + # If the field is not langchain_hub_prompt or the value is empty, return the build config as is + if field_name != "langchain_hub_prompt" or not field_value: + return build_config + + # Fetch the template + template = self._fetch_langchain_hub_template() + + # Get the template's messages + if hasattr(template, "messages"): + template_messages = template.messages + else: + template_messages = [HumanMessagePromptTemplate(prompt=template)] + + # Extract the messages from the prompt data + prompt_template = [message_data.prompt for message_data in template_messages] + + # Regular expression to find all instances of {} + pattern = r"\{(.*?)\}" + + # Get all the custom fields + custom_fields: list[str] = [] + full_template = "" + for message in prompt_template: + # Find all matches + matches = re.findall(pattern, message.template) + custom_fields += matches + + # Create a string version of the full template + full_template = full_template + "\n" + message.template + + # No need to reprocess if we have them already + if all("param_" + custom_field in build_config for custom_field in custom_fields): + return build_config + + # Easter egg: Show template in info popup + build_config["langchain_hub_prompt"]["info"] = full_template + + # Remove old parameter inputs if any + for key in 
build_config.copy(): + if key.startswith("param_"): + del build_config[key] + + # Now create inputs for each + for custom_field in custom_fields: + new_parameter = DefaultPromptField( + name=f"param_{custom_field}", + display_name=custom_field, + info="Fill in the value for {" + custom_field + "}", + ).to_dict() + + # Add the new parameter to the build config + build_config[f"param_{custom_field}"] = new_parameter + + return build_config + + async def build_prompt( + self, + ) -> Message: + # Fetch the template + template = self._fetch_langchain_hub_template() + + # Get the parameters from the attributes + params_dict = {param: getattr(self, "param_" + param, f"{{{param}}}") for param in template.input_variables} + original_params = {k: v.text if hasattr(v, "text") else v for k, v in params_dict.items() if v is not None} + prompt_value = template.invoke(original_params) + + # Update the template with the new value + original_params["template"] = prompt_value.to_string() + + # Now pass the filtered attributes to the function + prompt = Message.from_template(**original_params) + + self.status = prompt.text + + return prompt + + def _fetch_langchain_hub_template(self): + import langchain.hub + + # Check if the api key is provided + if not self.langchain_api_key: + msg = "Please provide a LangChain API Key" + + raise ValueError(msg) + + # Pull the prompt from LangChain Hub + return langchain.hub.pull(self.langchain_hub_prompt, api_key=self.langchain_api_key) diff --git a/src/backend/base/langflow/components/langchain_utilities/language_recursive.py b/src/backend/base/langflow/components/langchain_utilities/language_recursive.py new file mode 100644 index 000000000000..0a454cf43b81 --- /dev/null +++ b/src/backend/base/langflow/components/langchain_utilities/language_recursive.py @@ -0,0 +1,48 @@ +from typing import Any + +from langchain_text_splitters import Language, RecursiveCharacterTextSplitter, TextSplitter + +from langflow.base.textsplitters.model import LCTextSplitterComponent +from langflow.inputs import DataInput, DropdownInput, IntInput + + +class LanguageRecursiveTextSplitterComponent(LCTextSplitterComponent): + display_name: str = "Language Recursive Text Splitter" + description: str = "Split text into chunks of a specified length based on language." 
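+ # Delegates to RecursiveCharacterTextSplitter.from_language, which substitutes language-aware separators (e.g., class and function boundaries for Python) for the generic character-based ones.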
+ documentation: str = "https://docs.langflow.org/components/text-splitters#languagerecursivetextsplitter" + name = "LanguageRecursiveTextSplitter" + icon = "LangChain" + + inputs = [ + IntInput( + name="chunk_size", + display_name="Chunk Size", + info="The maximum length of each chunk.", + value=1000, + ), + IntInput( + name="chunk_overlap", + display_name="Chunk Overlap", + info="The amount of overlap between chunks.", + value=200, + ), + DataInput( + name="data_input", + display_name="Input", + info="The texts to split.", + input_types=["Document", "Data"], + ), + DropdownInput( + name="code_language", display_name="Code Language", options=[x.value for x in Language], value="python" + ), + ] + + def get_data_input(self) -> Any: + return self.data_input + + def build_text_splitter(self) -> TextSplitter: + return RecursiveCharacterTextSplitter.from_language( + language=Language(self.code_language), + chunk_size=self.chunk_size, + chunk_overlap=self.chunk_overlap, + ) diff --git a/src/backend/base/langflow/components/langchain_utilities/language_semantic.py b/src/backend/base/langflow/components/langchain_utilities/language_semantic.py new file mode 100644 index 000000000000..261e6e294b56 --- /dev/null +++ b/src/backend/base/langflow/components/langchain_utilities/language_semantic.py @@ -0,0 +1,136 @@ +from langchain.docstore.document import Document +from langchain_experimental.text_splitter import SemanticChunker + +from langflow.base.textsplitters.model import LCTextSplitterComponent +from langflow.io import ( + DropdownInput, + FloatInput, + HandleInput, + IntInput, + MessageTextInput, + Output, +) +from langflow.schema import Data + + +class SemanticTextSplitterComponent(LCTextSplitterComponent): + """Split text into semantically meaningful chunks using semantic similarity.""" + + display_name: str = "Semantic Text Splitter" + name: str = "SemanticTextSplitter" + description: str = "Split text into semantically meaningful chunks using semantic similarity." + documentation = "https://python.langchain.com/docs/how_to/semantic-chunker/" + beta = True # this component is beta because it is imported from langchain_experimental + icon = "LangChain" + + inputs = [ + HandleInput( + name="data_inputs", + display_name="Data Inputs", + info="List of Data objects containing text and metadata to split.", + input_types=["Data"], + is_list=True, + ), + HandleInput( + name="embeddings", + display_name="Embeddings", + info="Embeddings model to use for semantic similarity. Required.", + input_types=["Embeddings"], + is_list=False, + ), + DropdownInput( + name="breakpoint_threshold_type", + display_name="Breakpoint Threshold Type", + info=( + "Method to determine breakpoints. Options: 'percentile', " + "'standard_deviation', 'interquartile'. Defaults to 'percentile'." + ), + value="percentile", + options=["percentile", "standard_deviation", "interquartile"], + ), + FloatInput( + name="breakpoint_threshold_amount", + display_name="Breakpoint Threshold Amount", + info="Numerical amount for the breakpoint threshold.", + value=0.5, + ), + IntInput( + name="number_of_chunks", + display_name="Number of Chunks", + info="Number of chunks to split the text into.", + value=5, + ), + MessageTextInput( + name="sentence_split_regex", + display_name="Sentence Split Regex", + info="Regular expression to split sentences. 
Optional.", + value="", + advanced=True, + ), + IntInput( + name="buffer_size", + display_name="Buffer Size", + info="Size of the buffer.", + value=0, + advanced=True, + ), + ] + + outputs = [ + Output(display_name="Chunks", name="chunks", method="split_text"), + ] + + def _docs_to_data(self, docs: list[Document]) -> list[Data]: + """Convert a list of Document objects to Data objects.""" + return [Data(text=doc.page_content, data=doc.metadata) for doc in docs] + + def split_text(self) -> list[Data]: + """Split the input data into semantically meaningful chunks.""" + try: + embeddings = getattr(self, "embeddings", None) + if embeddings is None: + error_msg = "An embeddings model is required for SemanticTextSplitter." + raise ValueError(error_msg) + + if not self.data_inputs: + error_msg = "Data inputs cannot be empty." + raise ValueError(error_msg) + + documents = [] + for _input in self.data_inputs: + if isinstance(_input, Data): + documents.append(_input.to_lc_document()) + else: + error_msg = f"Invalid data input type: {_input}" + raise TypeError(error_msg) + + if not documents: + error_msg = "No valid Data objects found in data_inputs." + raise ValueError(error_msg) + + texts = [doc.page_content for doc in documents] + metadatas = [doc.metadata for doc in documents] + + splitter_params = { + "embeddings": embeddings, + "breakpoint_threshold_type": self.breakpoint_threshold_type or "percentile", + "breakpoint_threshold_amount": self.breakpoint_threshold_amount, + "number_of_chunks": self.number_of_chunks, + "buffer_size": self.buffer_size, + } + + if self.sentence_split_regex: + splitter_params["sentence_split_regex"] = self.sentence_split_regex + + splitter = SemanticChunker(**splitter_params) + docs = splitter.create_documents(texts, metadatas=metadatas) + + data = self._docs_to_data(docs) + self.status = data + + except Exception as e: + error_msg = f"An error occurred during semantic splitting: {e}" + raise RuntimeError(error_msg) from e + + else: + return data diff --git a/src/backend/base/langflow/components/langchain_utilities/llm_checker.py b/src/backend/base/langflow/components/langchain_utilities/llm_checker.py new file mode 100644 index 000000000000..b9ba6a93d613 --- /dev/null +++ b/src/backend/base/langflow/components/langchain_utilities/llm_checker.py @@ -0,0 +1,39 @@ +from langchain.chains import LLMCheckerChain + +from langflow.base.chains.model import LCChainComponent +from langflow.field_typing import Message +from langflow.inputs import HandleInput, MultilineInput + + +class LLMCheckerChainComponent(LCChainComponent): + display_name = "LLMCheckerChain" + description = "Chain for question-answering with self-verification." 
+ documentation = "https://python.langchain.com/docs/modules/chains/additional/llm_checker" + name = "LLMCheckerChain" + legacy: bool = True + icon = "LangChain" + inputs = [ + MultilineInput( + name="input_value", + display_name="Input", + info="The input value to pass to the chain.", + required=True, + ), + HandleInput( + name="llm", + display_name="Language Model", + input_types=["LanguageModel"], + required=True, + ), + ] + + def invoke_chain(self) -> Message: + chain = LLMCheckerChain.from_llm(llm=self.llm) + response = chain.invoke( + {chain.input_key: self.input_value}, + config={"callbacks": self.get_langchain_callbacks()}, + ) + result = response.get(chain.output_key, "") + result = str(result) + self.status = result + return Message(text=result) diff --git a/src/backend/base/langflow/components/langchain_utilities/llm_math.py b/src/backend/base/langflow/components/langchain_utilities/llm_math.py new file mode 100644 index 000000000000..3d592e4a3088 --- /dev/null +++ b/src/backend/base/langflow/components/langchain_utilities/llm_math.py @@ -0,0 +1,42 @@ +from langchain.chains import LLMMathChain + +from langflow.base.chains.model import LCChainComponent +from langflow.field_typing import Message +from langflow.inputs import HandleInput, MultilineInput +from langflow.template import Output + + +class LLMMathChainComponent(LCChainComponent): + display_name = "LLMMathChain" + description = "Chain that interprets a prompt and executes python code to do math." + documentation = "https://python.langchain.com/docs/modules/chains/additional/llm_math" + name = "LLMMathChain" + legacy: bool = True + icon = "LangChain" + inputs = [ + MultilineInput( + name="input_value", + display_name="Input", + info="The input value to pass to the chain.", + required=True, + ), + HandleInput( + name="llm", + display_name="Language Model", + input_types=["LanguageModel"], + required=True, + ), + ] + + outputs = [Output(display_name="Text", name="text", method="invoke_chain")] + + def invoke_chain(self) -> Message: + chain = LLMMathChain.from_llm(llm=self.llm) + response = chain.invoke( + {chain.input_key: self.input_value}, + config={"callbacks": self.get_langchain_callbacks()}, + ) + result = response.get(chain.output_key, "") + result = str(result) + self.status = result + return Message(text=result) diff --git a/src/backend/base/langflow/components/langchain_utilities/natural_language.py b/src/backend/base/langflow/components/langchain_utilities/natural_language.py new file mode 100644 index 000000000000..3a3b3a93874e --- /dev/null +++ b/src/backend/base/langflow/components/langchain_utilities/natural_language.py @@ -0,0 +1,60 @@ +from typing import Any + +from langchain_text_splitters import NLTKTextSplitter, TextSplitter + +from langflow.base.textsplitters.model import LCTextSplitterComponent +from langflow.inputs import DataInput, IntInput, MessageTextInput +from langflow.utils.util import unescape_string + + +class NaturalLanguageTextSplitterComponent(LCTextSplitterComponent): + display_name = "Natural Language Text Splitter" + description = "Split text based on natural language boundaries, optimized for a specified language." 
+ documentation = ( + "https://python.langchain.com/v0.1/docs/modules/data_connection/document_transformers/split_by_token/#nltk" + ) + name = "NaturalLanguageTextSplitter" + icon = "LangChain" + inputs = [ + IntInput( + name="chunk_size", + display_name="Chunk Size", + info="The maximum number of characters in each chunk after splitting.", + value=1000, + ), + IntInput( + name="chunk_overlap", + display_name="Chunk Overlap", + info="The number of characters that overlap between consecutive chunks.", + value=200, + ), + DataInput( + name="data_input", + display_name="Input", + info="The text data to be split.", + input_types=["Document", "Data"], + ), + MessageTextInput( + name="separator", + display_name="Separator", + info='The character(s) to use as a delimiter when splitting text.\nDefaults to "\\n\\n" if left empty.', + ), + MessageTextInput( + name="language", + display_name="Language", + info='The language of the text. Default is "English". ' + "Supports multiple languages for better text boundary recognition.", + ), + ] + + def get_data_input(self) -> Any: + return self.data_input + + def build_text_splitter(self) -> TextSplitter: + separator = unescape_string(self.separator) if self.separator else "\n\n" + return NLTKTextSplitter( + language=self.language.lower() if self.language else "english", + separator=separator, + chunk_size=self.chunk_size, + chunk_overlap=self.chunk_overlap, + ) diff --git a/src/backend/base/langflow/components/langchain_utilities/openai_tools.py b/src/backend/base/langflow/components/langchain_utilities/openai_tools.py new file mode 100644 index 000000000000..c529330c5202 --- /dev/null +++ b/src/backend/base/langflow/components/langchain_utilities/openai_tools.py @@ -0,0 +1,50 @@ +from langchain.agents import create_openai_tools_agent +from langchain_core.prompts import ChatPromptTemplate, HumanMessagePromptTemplate, PromptTemplate + +from langflow.base.agents.agent import LCToolsAgentComponent +from langflow.inputs import MultilineInput +from langflow.inputs.inputs import DataInput, HandleInput +from langflow.schema import Data + + +class OpenAIToolsAgentComponent(LCToolsAgentComponent): + display_name: str = "OpenAI Tools Agent" + description: str = "Agent that uses tools via openai-tools." + icon = "LangChain" + name = "OpenAIToolsAgent" + + inputs = [ + *LCToolsAgentComponent._base_inputs, + HandleInput( + name="llm", + display_name="Language Model", + input_types=["LanguageModel", "ToolEnabledLanguageModel"], + required=True, + ), + MultilineInput( + name="system_prompt", + display_name="System Prompt", + info="System prompt for the agent.", + value="You are a helpful assistant", + ), + MultilineInput( + name="user_prompt", display_name="Prompt", info="This prompt must contain 'input' key.", value="{input}" + ), + DataInput(name="chat_history", display_name="Chat History", is_list=True, advanced=True), + ] + + def get_chat_history_data(self) -> list[Data] | None: + return self.chat_history + + def create_agent_runnable(self): + if "input" not in self.user_prompt: + msg = "Prompt must contain 'input' key." 
+ raise ValueError(msg) + messages = [ + ("system", self.system_prompt), + ("placeholder", "{chat_history}"), + HumanMessagePromptTemplate(prompt=PromptTemplate(input_variables=["input"], template=self.user_prompt)), + ("placeholder", "{agent_scratchpad}"), + ] + prompt = ChatPromptTemplate.from_messages(messages) + return create_openai_tools_agent(self.llm, self.tools, prompt) diff --git a/src/backend/base/langflow/components/langchain_utilities/openapi.py b/src/backend/base/langflow/components/langchain_utilities/openapi.py new file mode 100644 index 000000000000..2b58295f3831 --- /dev/null +++ b/src/backend/base/langflow/components/langchain_utilities/openapi.py @@ -0,0 +1,48 @@ +from pathlib import Path + +import yaml +from langchain.agents import AgentExecutor +from langchain_community.agent_toolkits import create_openapi_agent +from langchain_community.agent_toolkits.openapi.toolkit import OpenAPIToolkit +from langchain_community.tools.json.tool import JsonSpec +from langchain_community.utilities.requests import TextRequestsWrapper + +from langflow.base.agents.agent import LCAgentComponent +from langflow.inputs import BoolInput, FileInput, HandleInput + + +class OpenAPIAgentComponent(LCAgentComponent): + display_name = "OpenAPI Agent" + description = "Agent to interact with an API described by an OpenAPI specification." + name = "OpenAPIAgent" + icon = "LangChain" + inputs = [ + *LCAgentComponent._base_inputs, + HandleInput(name="llm", display_name="Language Model", input_types=["LanguageModel"], required=True), + FileInput(name="path", display_name="File Path", file_types=["json", "yaml", "yml"], required=True), + BoolInput(name="allow_dangerous_requests", display_name="Allow Dangerous Requests", value=False, required=True), + ] + + def build_agent(self) -> AgentExecutor: + path = Path(self.path) + if path.suffix in (".yaml", ".yml"): + with path.open(encoding="utf-8") as file: + yaml_dict = yaml.safe_load(file) + spec = JsonSpec(dict_=yaml_dict) + else: + spec = JsonSpec.from_file(path) + requests_wrapper = TextRequestsWrapper() + toolkit = OpenAPIToolkit.from_llm( + llm=self.llm, + json_spec=spec, + requests_wrapper=requests_wrapper, + allow_dangerous_requests=self.allow_dangerous_requests, + ) + + agent_args = self.get_agent_kwargs() + + # This is a bit weird - generally other create_*_agent functions take max_iterations in + # `agent_executor_kwargs`, but create_openapi_agent takes this parameter directly. + agent_args["max_iterations"] = agent_args["agent_executor_kwargs"]["max_iterations"] + del agent_args["agent_executor_kwargs"]["max_iterations"] + return create_openapi_agent(llm=self.llm, toolkit=toolkit, **agent_args) diff --git a/src/backend/base/langflow/components/langchain_utilities/recursive_character.py b/src/backend/base/langflow/components/langchain_utilities/recursive_character.py new file mode 100644 index 000000000000..6dd9c6173bca --- /dev/null +++ b/src/backend/base/langflow/components/langchain_utilities/recursive_character.py @@ -0,0 +1,59 @@ +from typing import Any + +from langchain_text_splitters import RecursiveCharacterTextSplitter, TextSplitter + +from langflow.base.textsplitters.model import LCTextSplitterComponent +from langflow.inputs.inputs import DataInput, IntInput, MessageTextInput +from langflow.utils.util import unescape_string + + +class RecursiveCharacterTextSplitterComponent(LCTextSplitterComponent): + display_name: str = "Recursive Character Text Splitter" + description: str = "Split text trying to keep all related text together."
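+ # Tries each separator in order ("\n\n", "\n", " ", "" by default), recursing into oversized chunks with the next separator until every chunk fits within chunk_size.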
+ documentation: str = "https://docs.langflow.org/components/text-splitters#recursivecharactertextsplitter" + name = "RecursiveCharacterTextSplitter" + icon = "LangChain" + + inputs = [ + IntInput( + name="chunk_size", + display_name="Chunk Size", + info="The maximum length of each chunk.", + value=1000, + ), + IntInput( + name="chunk_overlap", + display_name="Chunk Overlap", + info="The amount of overlap between chunks.", + value=200, + ), + DataInput( + name="data_input", + display_name="Input", + info="The texts to split.", + input_types=["Document", "Data"], + ), + MessageTextInput( + name="separators", + display_name="Separators", + info='The characters to split on.\nIf left empty defaults to ["\\n\\n", "\\n", " ", ""].', + is_list=True, + ), + ] + + def get_data_input(self) -> Any: + return self.data_input + + def build_text_splitter(self) -> TextSplitter: + if not self.separators: + separators: list[str] | None = None + else: + # check if the separators list has escaped characters + # if there are escaped characters, unescape them + separators = [unescape_string(x) for x in self.separators] + + return RecursiveCharacterTextSplitter( + separators=separators, + chunk_size=self.chunk_size, + chunk_overlap=self.chunk_overlap, + ) diff --git a/src/backend/base/langflow/components/langchain_utilities/retrieval_qa.py b/src/backend/base/langflow/components/langchain_utilities/retrieval_qa.py new file mode 100644 index 000000000000..deebd7b54519 --- /dev/null +++ b/src/backend/base/langflow/components/langchain_utilities/retrieval_qa.py @@ -0,0 +1,81 @@ +from langchain.chains import RetrievalQA + +from langflow.base.chains.model import LCChainComponent +from langflow.field_typing import Message +from langflow.inputs import BoolInput, DropdownInput, HandleInput, MultilineInput + + +class RetrievalQAComponent(LCChainComponent): + display_name = "Retrieval QA" + description = "Chain for question-answering querying sources from a retriever." 
+ name = "RetrievalQA" + legacy: bool = True + icon = "LangChain" + inputs = [ + MultilineInput( + name="input_value", + display_name="Input", + info="The input value to pass to the chain.", + required=True, + ), + DropdownInput( + name="chain_type", + display_name="Chain Type", + info="Chain type to use.", + options=["Stuff", "Map Reduce", "Refine", "Map Rerank"], + value="Stuff", + advanced=True, + ), + HandleInput( + name="llm", + display_name="Language Model", + input_types=["LanguageModel"], + required=True, + ), + HandleInput( + name="retriever", + display_name="Retriever", + input_types=["Retriever"], + required=True, + ), + HandleInput( + name="memory", + display_name="Memory", + input_types=["BaseChatMemory"], + ), + BoolInput( + name="return_source_documents", + display_name="Return Source Documents", + value=False, + ), + ] + + def invoke_chain(self) -> Message: + chain_type = self.chain_type.lower().replace(" ", "_") + if self.memory: + self.memory.input_key = "query" + self.memory.output_key = "result" + + runnable = RetrievalQA.from_chain_type( + llm=self.llm, + chain_type=chain_type, + retriever=self.retriever, + memory=self.memory, + # always include to help debugging + # + return_source_documents=True, + ) + + result = runnable.invoke( + {"query": self.input_value}, + config={"callbacks": self.get_langchain_callbacks()}, + ) + + source_docs = self.to_data(result.get("source_documents", keys=[])) + result_str = str(result.get("result", "")) + if self.return_source_documents and len(source_docs): + references_str = self.create_references_from_data(source_docs) + result_str = f"{result_str}\n{references_str}" + # put the entire result to debug history, query and content + self.status = {**result, "source_documents": source_docs, "output": result_str} + return result_str diff --git a/src/backend/base/langflow/components/langchain_utilities/runnable_executor.py b/src/backend/base/langflow/components/langchain_utilities/runnable_executor.py new file mode 100644 index 000000000000..7bcd462799c1 --- /dev/null +++ b/src/backend/base/langflow/components/langchain_utilities/runnable_executor.py @@ -0,0 +1,137 @@ +from langchain.agents import AgentExecutor + +from langflow.custom import Component +from langflow.inputs import BoolInput, HandleInput, MessageTextInput +from langflow.schema.message import Message +from langflow.template import Output + + +class RunnableExecComponent(Component): + description = "Execute a runnable. It will try to guess the input and output keys." + display_name = "Runnable Executor" + name = "RunnableExecutor" + beta: bool = True + icon = "LangChain" + + inputs = [ + MessageTextInput(name="input_value", display_name="Input", required=True), + HandleInput( + name="runnable", + display_name="Agent Executor", + input_types=["Chain", "AgentExecutor", "Agent", "Runnable"], + required=True, + ), + MessageTextInput( + name="input_key", + display_name="Input Key", + value="input", + advanced=True, + ), + MessageTextInput( + name="output_key", + display_name="Output Key", + value="output", + advanced=True, + ), + BoolInput( + name="use_stream", + display_name="Stream", + value=False, + ), + ] + + outputs = [ + Output( + display_name="Text", + name="text", + method="build_executor", + ), + ] + + def get_output(self, result, input_key, output_key): + """Retrieves the output value from the given result dictionary based on the specified input and output keys. + + Args: + result (dict): The result dictionary containing the output value. 
+ input_key (str): The key used to retrieve the input value from the result dictionary. + output_key (str): The key used to retrieve the output value from the result dictionary. + + Returns: + tuple: A tuple containing the output value and the status message. + + """ + possible_output_keys = ["answer", "response", "output", "result", "text"] + status = "" + result_value = None + + if output_key in result: + result_value = result.get(output_key) + elif len(result) == 2 and input_key in result: # noqa: PLR2004 + # get the other key from the result dict + other_key = next(k for k in result if k != input_key) + if other_key == output_key: + result_value = result.get(output_key) + else: + status += f"Warning: The output key is not '{output_key}'. The output key is '{other_key}'." + result_value = result.get(other_key) + elif len(result) == 1: + result_value = next(iter(result.values())) + elif any(k in result for k in possible_output_keys): + for key in possible_output_keys: + if key in result: + result_value = result.get(key) + status += f"Output key: '{key}'." + break + if result_value is None: + result_value = result + status += f"Warning: The output key is not '{output_key}'." + else: + result_value = result + status += f"Warning: The output key is not '{output_key}'." + + return result_value, status + + def get_input_dict(self, runnable, input_key, input_value): + """Returns a dictionary containing the input key-value pair for the given runnable. + + Args: + runnable: The runnable object. + input_key: The key for the input value. + input_value: The value for the input key. + + Returns: + input_dict: A dictionary containing the input key-value pair. + status: A status message indicating if the input key is not in the runnable's input keys. + """ + input_dict = {} + status = "" + if hasattr(runnable, "input_keys"): + # Check if input_key is in the runnable's input_keys + if input_key in runnable.input_keys: + input_dict[input_key] = input_value + else: + input_dict = dict.fromkeys(runnable.input_keys, input_value) + status = f"Warning: The input key is not '{input_key}'. The input key is '{runnable.input_keys}'." 
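+ # At this point either the declared input key matched one of the runnable's input keys, or the same value was broadcast to all of them and a warning was recorded in status.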
+ return input_dict, status + + async def build_executor(self) -> Message: + input_dict, status = self.get_input_dict(self.runnable, self.input_key, self.input_value) + if not isinstance(self.runnable, AgentExecutor): + msg = "The runnable must be an AgentExecutor" + raise TypeError(msg) + + if self.use_stream: + return self.astream_events(input_dict) + result = await self.runnable.ainvoke(input_dict) + result_value, _status = self.get_output(result, self.input_key, self.output_key) + status += _status + status += f"\n\nOutput: {result_value}\n\nRaw Output: {result}" + self.status = status + return result_value + + async def astream_events(self, runnable_input): + async for event in self.runnable.astream_events(runnable_input, version="v1"): + if event.get("event") != "on_chat_model_stream": + continue + + yield event.get("data").get("chunk") diff --git a/src/backend/base/langflow/components/langchain_utilities/self_query.py b/src/backend/base/langflow/components/langchain_utilities/self_query.py new file mode 100644 index 000000000000..86aaeb71f784 --- /dev/null +++ b/src/backend/base/langflow/components/langchain_utilities/self_query.py @@ -0,0 +1,80 @@ +from langchain.chains.query_constructor.base import AttributeInfo +from langchain.retrievers.self_query.base import SelfQueryRetriever + +from langflow.custom import Component +from langflow.inputs import HandleInput, MessageTextInput +from langflow.io import Output +from langflow.schema import Data +from langflow.schema.message import Message + + +class SelfQueryRetrieverComponent(Component): + display_name = "Self Query Retriever" + description = "Retriever that uses a vector store and an LLM to generate the vector store queries." + name = "SelfQueryRetriever" + icon = "LangChain" + legacy: bool = True + + inputs = [ + HandleInput( + name="query", + display_name="Query", + info="Query to be passed as input.", + input_types=["Message", "Text"], + ), + HandleInput( + name="vectorstore", + display_name="Vector Store", + info="Vector Store to be passed as input.", + input_types=["VectorStore"], + ), + HandleInput( + name="attribute_infos", + display_name="Metadata Field Info", + info="Metadata Field Info to be passed as input.", + input_types=["Data"], + is_list=True, + ), + MessageTextInput( + name="document_content_description", + display_name="Document Content Description", + info="Document Content Description to be passed as input.", + ), + HandleInput( + name="llm", + display_name="LLM", + info="LLM to be passed as input.", + input_types=["LanguageModel"], + ), + ] + + outputs = [ + Output( + display_name="Retrieved Documents", + name="documents", + method="retrieve_documents", + ), + ] + + def retrieve_documents(self) -> list[Data]: + metadata_field_infos = [AttributeInfo(**value.data) for value in self.attribute_infos] + self_query_retriever = SelfQueryRetriever.from_llm( + llm=self.llm, + vectorstore=self.vectorstore, + document_contents=self.document_content_description, + metadata_field_info=metadata_field_infos, + enable_limit=True, + ) + + if isinstance(self.query, Message): + input_text = self.query.text + elif isinstance(self.query, str): + input_text = self.query + else: + msg = f"Query type {type(self.query)} not supported." 
+ raise TypeError(msg) + + documents = self_query_retriever.invoke(input=input_text, config={"callbacks": self.get_langchain_callbacks()}) + data = [Data.from_document(document) for document in documents] + self.status = data + return data diff --git a/src/backend/base/langflow/components/langchain_utilities/spider.py b/src/backend/base/langflow/components/langchain_utilities/spider.py new file mode 100644 index 000000000000..30c2aba325f9 --- /dev/null +++ b/src/backend/base/langflow/components/langchain_utilities/spider.py @@ -0,0 +1,142 @@ +from spider.spider import Spider + +from langflow.base.langchain_utilities.spider_constants import MODES +from langflow.custom import Component +from langflow.io import ( + BoolInput, + DictInput, + DropdownInput, + IntInput, + Output, + SecretStrInput, + StrInput, +) +from langflow.schema import Data + + +class SpiderTool(Component): + display_name: str = "Spider Web Crawler & Scraper" + description: str = "Spider API for web crawling and scraping." + output_types: list[str] = ["Document"] + documentation: str = "https://spider.cloud/docs/api" + + inputs = [ + SecretStrInput( + name="spider_api_key", + display_name="Spider API Key", + required=True, + password=True, + info="The Spider API Key, get it from https://spider.cloud", + ), + StrInput( + name="url", + display_name="URL", + required=True, + info="The URL to scrape or crawl", + ), + DropdownInput( + name="mode", + display_name="Mode", + required=True, + options=MODES, + value=MODES[0], + info="The mode of operation: scrape or crawl", + ), + IntInput( + name="limit", + display_name="Limit", + info="The maximum amount of pages allowed to crawl per website. Set to 0 to crawl all pages.", + advanced=True, + ), + IntInput( + name="depth", + display_name="Depth", + info="The crawl limit for maximum depth. If 0, no limit will be applied.", + advanced=True, + ), + StrInput( + name="blacklist", + display_name="Blacklist", + info="Blacklist paths that you do not want to crawl. Use Regex patterns.", + advanced=True, + ), + StrInput( + name="whitelist", + display_name="Whitelist", + info="Whitelist paths that you want to crawl, ignoring all other routes. Use Regex patterns.", + advanced=True, + ), + BoolInput( + name="readability", + display_name="Use Readability", + info="Use readability to pre-process the content for reading.", + advanced=True, + ), + IntInput( + name="request_timeout", + display_name="Request Timeout", + info="Timeout for the request in seconds.", + advanced=True, + ), + BoolInput( + name="metadata", + display_name="Metadata", + info="Include metadata in the response.", + advanced=True, + ), + DictInput( + name="params", + display_name="Additional Parameters", + info="Additional parameters to pass to the API. 
If provided, other inputs will be ignored.", + ), + ] + + outputs = [ + Output(display_name="Markdown", name="content", method="crawl"), + ] + + def crawl(self) -> list[Data]: + if self.params: + parameters = self.params["data"] + else: + parameters = { + "limit": self.limit or None, + "depth": self.depth or None, + "blacklist": self.blacklist or None, + "whitelist": self.whitelist or None, + "readability": self.readability, + "request_timeout": self.request_timeout or None, + "metadata": self.metadata, + "return_format": "markdown", + } + + app = Spider(api_key=self.spider_api_key) + if self.mode == "scrape": + parameters["limit"] = 1 + result = app.scrape_url(self.url, parameters) + elif self.mode == "crawl": + result = app.crawl_url(self.url, parameters) + else: + msg = f"Invalid mode: {self.mode}. Must be 'scrape' or 'crawl'." + raise ValueError(msg) + + records = [] + + for record in result: + if self.metadata: + records.append( + Data( + data={ + "content": record["content"], + "url": record["url"], + "metadata": record["metadata"], + } + ) + ) + else: + records.append(Data(data={"content": record["content"], "url": record["url"]})) + return records + + +class SpiderToolError(Exception): + """SpiderTool error.""" diff --git a/src/backend/base/langflow/components/langchain_utilities/sql.py b/src/backend/base/langflow/components/langchain_utilities/sql.py new file mode 100644 index 000000000000..e018dec628ce --- /dev/null +++ b/src/backend/base/langflow/components/langchain_utilities/sql.py @@ -0,0 +1,34 @@ +from langchain.agents import AgentExecutor +from langchain_community.agent_toolkits import SQLDatabaseToolkit +from langchain_community.agent_toolkits.sql.base import create_sql_agent +from langchain_community.utilities import SQLDatabase + +from langflow.base.agents.agent import LCAgentComponent +from langflow.inputs import HandleInput, MessageTextInput + + +class SQLAgentComponent(LCAgentComponent): + display_name = "SQLAgent" + description = "Construct an SQL agent from an LLM and tools." 
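+ # SQLDatabaseToolkit bundles the query, schema-inspection, and query-checker tools; the URI below is handed to SQLAlchemy via SQLDatabase.from_uri.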
+ name = "SQLAgent" + icon = "LangChain" + inputs = [ + *LCAgentComponent._base_inputs, + HandleInput(name="llm", display_name="Language Model", input_types=["LanguageModel"], required=True), + MessageTextInput(name="database_uri", display_name="Database URI", required=True), + HandleInput( + name="extra_tools", + display_name="Extra Tools", + input_types=["Tool", "BaseTool"], + is_list=True, + advanced=True, + ), + ] + + def build_agent(self) -> AgentExecutor: + db = SQLDatabase.from_uri(self.database_uri) + toolkit = SQLDatabaseToolkit(db=db, llm=self.llm) + agent_args = self.get_agent_kwargs() + agent_args["max_iterations"] = agent_args["agent_executor_kwargs"]["max_iterations"] + del agent_args["agent_executor_kwargs"]["max_iterations"] + return create_sql_agent(llm=self.llm, toolkit=toolkit, extra_tools=self.extra_tools or [], **agent_args) diff --git a/src/backend/base/langflow/components/langchain_utilities/sql_database.py b/src/backend/base/langflow/components/langchain_utilities/sql_database.py new file mode 100644 index 000000000000..fd86af2b7bf5 --- /dev/null +++ b/src/backend/base/langflow/components/langchain_utilities/sql_database.py @@ -0,0 +1,34 @@ +from langchain_community.utilities.sql_database import SQLDatabase +from sqlalchemy import create_engine +from sqlalchemy.pool import StaticPool + +from langflow.custom import Component +from langflow.io import ( + Output, + StrInput, +) + + +class SQLDatabaseComponent(Component): + display_name = "SQLDatabase" + description = "SQL Database" + name = "SQLDatabase" + + inputs = [ + StrInput(name="uri", display_name="URI", info="URI to the database.", required=True), + ] + + outputs = [ + Output(display_name="SQLDatabase", name="SQLDatabase", method="build_sqldatabase"), + ] + + def clean_up_uri(self, uri: str) -> str: + if uri.startswith("postgres://"): + uri = uri.replace("postgres://", "postgresql://") + return uri.strip() + + def build_sqldatabase(self) -> SQLDatabase: + uri = self.clean_up_uri(self.uri) + # Create an engine using SQLAlchemy with StaticPool + engine = create_engine(uri, poolclass=StaticPool) + return SQLDatabase(engine) diff --git a/src/backend/base/langflow/components/langchain_utilities/sql_generator.py b/src/backend/base/langflow/components/langchain_utilities/sql_generator.py new file mode 100644 index 000000000000..3743aeffe7f4 --- /dev/null +++ b/src/backend/base/langflow/components/langchain_utilities/sql_generator.py @@ -0,0 +1,78 @@ +from typing import TYPE_CHECKING + +from langchain.chains import create_sql_query_chain +from langchain_core.prompts import PromptTemplate + +from langflow.base.chains.model import LCChainComponent +from langflow.field_typing import Message +from langflow.inputs import HandleInput, IntInput, MultilineInput +from langflow.template import Output + +if TYPE_CHECKING: + from langchain_core.runnables import Runnable + + +class SQLGeneratorComponent(LCChainComponent): + display_name = "Natural Language to SQL" + description = "Generate SQL from natural language." 
+ name = "SQLGenerator" + legacy: bool = True + icon = "LangChain" + + inputs = [ + MultilineInput( + name="input_value", + display_name="Input", + info="The input value to pass to the chain.", + required=True, + ), + HandleInput( + name="llm", + display_name="Language Model", + input_types=["LanguageModel"], + required=True, + ), + HandleInput( + name="db", + display_name="SQLDatabase", + input_types=["SQLDatabase"], + required=True, + ), + IntInput( + name="top_k", + display_name="Top K", + info="The number of results per select statement to return.", + value=5, + ), + MultilineInput( + name="prompt", + display_name="Prompt", + info="The prompt must contain `{question}`.", + ), + ] + + outputs = [Output(display_name="Text", name="text", method="invoke_chain")] + + def invoke_chain(self) -> Message: + prompt_template = PromptTemplate.from_template(template=self.prompt) if self.prompt else None + + if self.top_k < 1: + msg = "Top K must be greater than 0." + raise ValueError(msg) + + if not prompt_template: + sql_query_chain = create_sql_query_chain(llm=self.llm, db=self.db, k=self.top_k) + else: + # Check if {question} is in the prompt + if "{question}" not in prompt_template.template or "question" not in prompt_template.input_variables: + msg = "Prompt must contain `{question}` to be used with Natural Language to SQL." + raise ValueError(msg) + sql_query_chain = create_sql_query_chain(llm=self.llm, db=self.db, prompt=prompt_template, k=self.top_k) + query_writer: Runnable = sql_query_chain | {"query": lambda x: x.replace("SQLQuery:", "").strip()} + response = query_writer.invoke( + {"question": self.input_value}, + config={"callbacks": self.get_langchain_callbacks()}, + ) + query = response.get("query") + self.status = query + return query diff --git a/src/backend/base/langflow/components/langchain_utilities/tool_calling.py b/src/backend/base/langflow/components/langchain_utilities/tool_calling.py new file mode 100644 index 000000000000..1f16bbaad83d --- /dev/null +++ b/src/backend/base/langflow/components/langchain_utilities/tool_calling.py @@ -0,0 +1,55 @@ +from langchain.agents import create_tool_calling_agent +from langchain_core.prompts import ChatPromptTemplate + +from langflow.base.agents.agent import LCToolsAgentComponent +from langflow.inputs import MessageTextInput +from langflow.inputs.inputs import DataInput, HandleInput +from langflow.schema import Data + + +class ToolCallingAgentComponent(LCToolsAgentComponent): + display_name: str = "Tool Calling Agent" + description: str = "An agent designed to utilize various tools seamlessly within workflows." 
+ icon = "LangChain" + name = "ToolCallingAgent" + + inputs = [ + *LCToolsAgentComponent._base_inputs, + HandleInput( + name="llm", + display_name="Language Model", + input_types=["LanguageModel"], + required=True, + info="Language model that the agent utilizes to perform tasks effectively.", + ), + MessageTextInput( + name="system_prompt", + display_name="System Prompt", + info="System prompt to guide the agent's behavior.", + value="You are a helpful assistant that can use tools to answer questions and perform tasks.", + ), + DataInput( + name="chat_history", + display_name="Chat Memory", + is_list=True, + advanced=True, + info="This input stores the chat history, allowing the agent to remember previous conversations.", + ), + ] + + def get_chat_history_data(self) -> list[Data] | None: + return self.chat_history + + def create_agent_runnable(self): + messages = [ + ("system", self.system_prompt), + ("placeholder", "{chat_history}"), + ("human", self.input_value), + ("placeholder", "{agent_scratchpad}"), + ] + prompt = ChatPromptTemplate.from_messages(messages) + try: + return create_tool_calling_agent(self.llm, self.tools or [], prompt) + except NotImplementedError as e: + message = f"{self.display_name} does not support tool calling. Please try using a compatible model." + raise NotImplementedError(message) from e diff --git a/src/backend/base/langflow/components/langchain_utilities/vector_store.py b/src/backend/base/langflow/components/langchain_utilities/vector_store.py new file mode 100644 index 000000000000..dfd56deb0ad2 --- /dev/null +++ b/src/backend/base/langflow/components/langchain_utilities/vector_store.py @@ -0,0 +1,20 @@ +from langchain_core.vectorstores import VectorStoreRetriever + +from langflow.custom import CustomComponent +from langflow.field_typing import VectorStore + + +class VectoStoreRetrieverComponent(CustomComponent): + display_name = "VectorStore Retriever" + description = "A vector store retriever" + name = "VectorStoreRetriever" + legacy: bool = True + icon = "LangChain" + + def build_config(self): + return { + "vectorstore": {"display_name": "Vector Store", "type": VectorStore}, + } + + def build(self, vectorstore: VectorStore) -> VectorStoreRetriever: + return vectorstore.as_retriever() diff --git a/src/backend/base/langflow/components/langchain_utilities/vector_store_info.py b/src/backend/base/langflow/components/langchain_utilities/vector_store_info.py new file mode 100644 index 000000000000..4658f05dcfb8 --- /dev/null +++ b/src/backend/base/langflow/components/langchain_utilities/vector_store_info.py @@ -0,0 +1,49 @@ +from langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreInfo + +from langflow.custom import Component +from langflow.inputs import HandleInput, MessageTextInput, MultilineInput +from langflow.template import Output + + +class VectorStoreInfoComponent(Component): + display_name = "VectorStoreInfo" + description = "Information about a VectorStore" + name = "VectorStoreInfo" + legacy: bool = True + icon = "LangChain" + + inputs = [ + MessageTextInput( + name="vectorstore_name", + display_name="Name", + info="Name of the VectorStore", + required=True, + ), + MultilineInput( + name="vectorstore_description", + display_name="Description", + info="Description of the VectorStore", + required=True, + ), + HandleInput( + name="input_vectorstore", + display_name="Vector Store", + input_types=["VectorStore"], + required=True, + ), + ] + + outputs = [ + Output(display_name="Vector Store Info", name="info", method="build_info"), + ] + 
+ def build_info(self) -> VectorStoreInfo: + self.status = { + "name": self.vectorstore_name, + "description": self.vectorstore_description, + } + return VectorStoreInfo( + vectorstore=self.input_vectorstore, + description=self.vectorstore_description, + name=self.vectorstore_name, + ) diff --git a/src/backend/base/langflow/components/langchain_utilities/vector_store_router.py b/src/backend/base/langflow/components/langchain_utilities/vector_store_router.py new file mode 100644 index 000000000000..6c44c2d5cc57 --- /dev/null +++ b/src/backend/base/langflow/components/langchain_utilities/vector_store_router.py @@ -0,0 +1,33 @@ +from langchain.agents import AgentExecutor, create_vectorstore_router_agent +from langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreRouterToolkit + +from langflow.base.agents.agent import LCAgentComponent +from langflow.inputs import HandleInput + + +class VectorStoreRouterAgentComponent(LCAgentComponent): + display_name = "VectorStoreRouterAgent" + description = "Construct an agent from a Vector Store Router." + name = "VectorStoreRouterAgent" + legacy: bool = True + + inputs = [ + *LCAgentComponent._base_inputs, + HandleInput( + name="llm", + display_name="Language Model", + input_types=["LanguageModel"], + required=True, + ), + HandleInput( + name="vectorstores", + display_name="Vector Stores", + input_types=["VectorStoreInfo"], + is_list=True, + required=True, + ), + ] + + def build_agent(self) -> AgentExecutor: + toolkit = VectorStoreRouterToolkit(vectorstores=self.vectorstores, llm=self.llm) + return create_vectorstore_router_agent(llm=self.llm, toolkit=toolkit, **self.get_agent_kwargs()) diff --git a/src/backend/base/langflow/components/langchain_utilities/xml.py b/src/backend/base/langflow/components/langchain_utilities/xml.py new file mode 100644 index 000000000000..5e31b4d13100 --- /dev/null +++ b/src/backend/base/langflow/components/langchain_utilities/xml.py @@ -0,0 +1,68 @@ +from langchain.agents import create_xml_agent +from langchain_core.prompts import ChatPromptTemplate, HumanMessagePromptTemplate, PromptTemplate + +from langflow.base.agents.agent import LCToolsAgentComponent +from langflow.inputs import MultilineInput +from langflow.inputs.inputs import DataInput, HandleInput +from langflow.schema import Data + + +class XMLAgentComponent(LCToolsAgentComponent): + display_name: str = "XML Agent" + description: str = "Agent that uses tools, formatting its instructions to the language model as XML." + icon = "LangChain" + beta = True + name = "XMLAgent" + inputs = [ + *LCToolsAgentComponent._base_inputs, + HandleInput(name="llm", display_name="Language Model", input_types=["LanguageModel"], required=True), + DataInput(name="chat_history", display_name="Chat History", is_list=True, advanced=True), + MultilineInput( + name="system_prompt", + display_name="System Prompt", + info="System prompt for the agent.", + value="""You are a helpful assistant. Help the user answer any questions. + +You have access to the following tools: + +{tools} + +In order to use a tool, you can use <tool></tool> and <tool_input></tool_input> tags. You will then get back a response in the form <observation></observation> + +For example, if you have a tool called 'search' that could run a google search, in order to search for the weather in SF you would respond: + +<tool>search</tool><tool_input>weather in SF</tool_input> + +<observation>64 degrees</observation> + +When you are done, respond with a final answer between <final_answer></final_answer>. For example: + +<final_answer>The weather in SF is 64 degrees</final_answer> + +Begin!
+ +Question: {input} + +{agent_scratchpad} + """, # noqa: E501 + ), + MultilineInput( + name="user_prompt", display_name="Prompt", info="This prompt must contain 'input' key.", value="{input}" + ), + ] + + def get_chat_history_data(self) -> list[Data] | None: + return self.chat_history + + def create_agent_runnable(self): + if "input" not in self.user_prompt: + msg = "Prompt must contain 'input' key." + raise ValueError(msg) + messages = [ + ("system", self.system_prompt), + ("placeholder", "{chat_history}"), + HumanMessagePromptTemplate(prompt=PromptTemplate(input_variables=["input"], template=self.user_prompt)), + ("ai", "{agent_scratchpad}"), + ] + prompt = ChatPromptTemplate.from_messages(messages) + return create_xml_agent(self.llm, self.tools, prompt) diff --git a/src/backend/base/langflow/components/link_extractors/__init__.py b/src/backend/base/langflow/components/link_extractors/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/src/backend/base/langflow/components/logic/__init__.py b/src/backend/base/langflow/components/logic/__init__.py new file mode 100644 index 000000000000..039d6c52d7e5 --- /dev/null +++ b/src/backend/base/langflow/components/logic/__init__.py @@ -0,0 +1,19 @@ +from .conditional_router import ConditionalRouterComponent +from .data_conditional_router import DataConditionalRouterComponent +from .flow_tool import FlowToolComponent +from .listen import ListenComponent +from .notify import NotifyComponent +from .pass_message import PassMessageComponent +from .run_flow import RunFlowComponent +from .sub_flow import SubFlowComponent + +__all__ = [ + "DataConditionalRouterComponent", + "FlowToolComponent", + "ListenComponent", + "NotifyComponent", + "RunFlowComponent", + "SubFlowComponent", + "ConditionalRouterComponent", + "PassMessageComponent", +] diff --git a/src/backend/base/langflow/components/logic/conditional_router.py b/src/backend/base/langflow/components/logic/conditional_router.py new file mode 100644 index 000000000000..77e166bf7c1a --- /dev/null +++ b/src/backend/base/langflow/components/logic/conditional_router.py @@ -0,0 +1,117 @@ +from langflow.custom import Component +from langflow.io import BoolInput, DropdownInput, IntInput, MessageInput, MessageTextInput, Output +from langflow.schema.message import Message + + +class ConditionalRouterComponent(Component): + display_name = "If-Else" + description = "Routes an input message to a corresponding output based on text comparison." 
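+ # Both outputs evaluate the same condition; the branch that does not apply is halted via self.stop(), and max_iterations forces the default route so looping flows cannot cycle forever.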
+ icon = "split" + name = "ConditionalRouter" + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.__iteration_updated = False + + inputs = [ + MessageTextInput( + name="input_text", + display_name="Text Input", + info="The primary text input for the operation.", + ), + MessageTextInput( + name="match_text", + display_name="Match Text", + info="The text input to compare against.", + ), + DropdownInput( + name="operator", + display_name="Operator", + options=["equals", "not equals", "contains", "starts with", "ends with"], + info="The operator to apply for comparing the texts.", + value="equals", + ), + BoolInput( + name="case_sensitive", + display_name="Case Sensitive", + info="If true, the comparison will be case sensitive.", + value=False, + advanced=True, + ), + MessageInput( + name="message", + display_name="Message", + info="The message to pass through either route.", + advanced=True, + ), + IntInput( + name="max_iterations", + display_name="Max Iterations", + info="The maximum number of iterations for the conditional router.", + value=10, + ), + DropdownInput( + name="default_route", + display_name="Default Route", + options=["true_result", "false_result"], + info="The default route to take when max iterations are reached.", + value="false_result", + advanced=True, + ), + ] + + outputs = [ + Output(display_name="True", name="true_result", method="true_response"), + Output(display_name="False", name="false_result", method="false_response"), + ] + + def _pre_run_setup(self): + self.__iteration_updated = False + + def evaluate_condition(self, input_text: str, match_text: str, operator: str, *, case_sensitive: bool) -> bool: + if not case_sensitive: + input_text = input_text.lower() + match_text = match_text.lower() + + if operator == "equals": + return input_text == match_text + if operator == "not equals": + return input_text != match_text + if operator == "contains": + return match_text in input_text + if operator == "starts with": + return input_text.startswith(match_text) + if operator == "ends with": + return input_text.endswith(match_text) + return False + + def iterate_and_stop_once(self, route_to_stop: str): + if not self.__iteration_updated: + self.update_ctx({f"{self._id}_iteration": self.ctx.get(f"{self._id}_iteration", 0) + 1}) + self.__iteration_updated = True + if self.ctx.get(f"{self._id}_iteration", 0) >= self.max_iterations and route_to_stop == self.default_route: + # We need to stop the other route + route_to_stop = "true_result" if route_to_stop == "false_result" else "false_result" + self.stop(route_to_stop) + + def true_response(self) -> Message: + result = self.evaluate_condition( + self.input_text, self.match_text, self.operator, case_sensitive=self.case_sensitive + ) + if result: + self.status = self.message + self.iterate_and_stop_once("false_result") + return self.message + self.iterate_and_stop_once("true_result") + return self.message + + def false_response(self) -> Message: + result = self.evaluate_condition( + self.input_text, self.match_text, self.operator, case_sensitive=self.case_sensitive + ) + if not result: + self.status = self.message + self.iterate_and_stop_once("true_result") + return self.message + self.iterate_and_stop_once("false_result") + return self.message diff --git a/src/backend/base/langflow/components/logic/data_conditional_router.py b/src/backend/base/langflow/components/logic/data_conditional_router.py new file mode 100644 index 000000000000..a107458bbe5e --- /dev/null +++ 
b/src/backend/base/langflow/components/logic/data_conditional_router.py @@ -0,0 +1,124 @@ +from typing import Any + +from langflow.custom import Component +from langflow.io import DataInput, DropdownInput, MessageTextInput, Output +from langflow.schema import Data, dotdict + + +class DataConditionalRouterComponent(Component): + display_name = "Condition" + description = "Route Data object(s) based on a condition applied to a specified key, including boolean validation." + icon = "split" + name = "DataConditionalRouter" + legacy = True + + inputs = [ + DataInput( + name="data_input", + display_name="Data Input", + info="The Data object or list of Data objects to process", + is_list=True, + ), + MessageTextInput( + name="key_name", + display_name="Key Name", + info="The name of the key in the Data object(s) to check", + ), + DropdownInput( + name="operator", + display_name="Operator", + options=["equals", "not equals", "contains", "starts with", "ends with", "boolean validator"], + info="The operator to apply for comparing the values. 'boolean validator' treats the value as a boolean.", + value="equals", + ), + MessageTextInput( + name="compare_value", + display_name="Match Text", + info="The value to compare against (not used for boolean validator)", + ), + ] + + outputs = [ + Output(display_name="True Output", name="true_output", method="process_data"), + Output(display_name="False Output", name="false_output", method="process_data"), + ] + + def compare_values(self, item_value: str, compare_value: str, operator: str) -> bool: + if operator == "equals": + return item_value == compare_value + if operator == "not equals": + return item_value != compare_value + if operator == "contains": + return compare_value in item_value + if operator == "starts with": + return item_value.startswith(compare_value) + if operator == "ends with": + return item_value.endswith(compare_value) + if operator == "boolean validator": + return self.parse_boolean(item_value) + return False + + def parse_boolean(self, value): + if isinstance(value, bool): + return value + if isinstance(value, str): + return value.lower() in {"true", "1", "yes", "y", "on"} + return bool(value) + + def validate_input(self, data_item: Data) -> bool: + if not isinstance(data_item, Data): + self.status = "Input is not a Data object" + return False + if self.key_name not in data_item.data: + self.status = f"Key '{self.key_name}' not found in Data" + return False + return True + + def process_data(self) -> Data | list[Data]: + if isinstance(self.data_input, list): + true_output = [] + false_output = [] + for item in self.data_input: + if self.validate_input(item): + result = self.process_single_data(item) + if result: + true_output.append(item) + else: + false_output.append(item) + self.stop("false_output" if true_output else "true_output") + return true_output or false_output + if not self.validate_input(self.data_input): + return Data(data={"error": self.status}) + result = self.process_single_data(self.data_input) + self.stop("false_output" if result else "true_output") + return self.data_input + + def process_single_data(self, data_item: Data) -> bool: + item_value = data_item.data[self.key_name] + operator = self.operator + + if operator == "boolean validator": + condition_met = self.parse_boolean(item_value) + condition_description = f"Boolean validation of '{self.key_name}'" + else: + compare_value = self.compare_value + condition_met = self.compare_values(str(item_value), compare_value, operator) + condition_description = 
f"{self.key_name} {operator} {compare_value}" + + if condition_met: + self.status = f"Condition met: {condition_description}" + return True + self.status = f"Condition not met: {condition_description}" + return False + + def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None): + if field_name == "operator": + if field_value == "boolean validator": + build_config["compare_value"]["show"] = False + build_config["compare_value"]["advanced"] = True + build_config["compare_value"]["value"] = None + else: + build_config["compare_value"]["show"] = True + build_config["compare_value"]["advanced"] = False + + return build_config diff --git a/src/backend/base/langflow/components/logic/flow_tool.py b/src/backend/base/langflow/components/logic/flow_tool.py new file mode 100644 index 000000000000..19b4ccc9cc34 --- /dev/null +++ b/src/backend/base/langflow/components/logic/flow_tool.py @@ -0,0 +1,109 @@ +from typing import Any + +from loguru import logger +from typing_extensions import override + +from langflow.base.langchain_utilities.model import LCToolComponent +from langflow.base.tools.flow_tool import FlowTool +from langflow.field_typing import Tool +from langflow.graph.graph.base import Graph +from langflow.helpers.flow import get_flow_inputs +from langflow.io import BoolInput, DropdownInput, Output, StrInput +from langflow.schema import Data +from langflow.schema.dotdict import dotdict + + +class FlowToolComponent(LCToolComponent): + display_name = "Flow as Tool" + description = "Construct a Tool from a function that runs the loaded Flow." + field_order = ["flow_name", "name", "description", "return_direct"] + trace_type = "tool" + name = "FlowTool" + beta = True + icon = "hammer" + + def get_flow_names(self) -> list[str]: + flow_datas = self.list_flows() + return [flow_data.data["name"] for flow_data in flow_datas] + + def get_flow(self, flow_name: str) -> Data | None: + """Retrieves a flow by its name. + + Args: + flow_name (str): The name of the flow to retrieve. + + Returns: + Optional[Text]: The flow record if found, None otherwise. + """ + flow_datas = self.list_flows() + for flow_data in flow_datas: + if flow_data.data["name"] == flow_name: + return flow_data + return None + + @override + def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None): + if field_name == "flow_name": + build_config["flow_name"]["options"] = self.get_flow_names() + + return build_config + + inputs = [ + DropdownInput( + name="flow_name", display_name="Flow Name", info="The name of the flow to run.", refresh_button=True + ), + StrInput( + name="tool_name", + display_name="Name", + info="The name of the tool.", + ), + StrInput( + name="tool_description", + display_name="Description", + info="The description of the tool.", + ), + BoolInput( + name="return_direct", + display_name="Return Direct", + info="Return the result directly from the Tool.", + advanced=True, + ), + ] + + outputs = [ + Output(name="api_build_tool", display_name="Tool", method="build_tool"), + ] + + def build_tool(self) -> Tool: + FlowTool.model_rebuild() + if "flow_name" not in self._attributes or not self._attributes["flow_name"]: + msg = "Flow name is required" + raise ValueError(msg) + flow_name = self._attributes["flow_name"] + flow_data = self.get_flow(flow_name) + if not flow_data: + msg = "Flow not found." 
+ raise ValueError(msg) + graph = Graph.from_payload( + flow_data.data["data"], + user_id=str(self.user_id), + ) + try: + graph.set_run_id(self.graph.run_id) + except Exception: # noqa: BLE001 + logger.opt(exception=True).warning("Failed to set run_id") + inputs = get_flow_inputs(graph) + tool = FlowTool( + name=self.tool_name, + description=self.tool_description, + graph=graph, + return_direct=self.return_direct, + inputs=inputs, + flow_id=str(flow_data.id), + user_id=str(self.user_id), + session_id=self.graph.session_id if hasattr(self, "graph") else None, + ) + description_repr = repr(tool.description).strip("'") + args_str = "\n".join([f"- {arg_name}: {arg_data['description']}" for arg_name, arg_data in tool.args.items()]) + self.status = f"{description_repr}\nArguments:\n{args_str}" + return tool diff --git a/src/backend/base/langflow/components/logic/listen.py b/src/backend/base/langflow/components/logic/listen.py new file mode 100644 index 000000000000..e2e6afa48059 --- /dev/null +++ b/src/backend/base/langflow/components/logic/listen.py @@ -0,0 +1,29 @@ +from langflow.custom import CustomComponent +from langflow.schema import Data + + +class ListenComponent(CustomComponent): + display_name = "Listen" + description = "A component to listen for a notification." + name = "Listen" + beta: bool = True + icon = "Radio" + + def build_config(self): + return { + "name": { + "display_name": "Name", + "info": "The name of the notification to listen for.", + }, + } + + def build(self, name: str) -> Data: + state = self.get_state(name) + self._set_successors_ids() + self.status = state + return state + + def _set_successors_ids(self): + self._vertex.is_state = True + successors = self._vertex.graph.successor_map.get(self._vertex.id, []) + return successors + self._vertex.graph.activated_vertices diff --git a/src/backend/base/langflow/components/logic/notify.py b/src/backend/base/langflow/components/logic/notify.py new file mode 100644 index 000000000000..165b145fd42c --- /dev/null +++ b/src/backend/base/langflow/components/logic/notify.py @@ -0,0 +1,46 @@ +from langflow.custom import CustomComponent +from langflow.schema import Data + + +class NotifyComponent(CustomComponent): + display_name = "Notify" + description = "A component to generate a notification to Get Notified component." + icon = "Notify" + name = "Notify" + beta: bool = True + + def build_config(self): + return { + "name": {"display_name": "Name", "info": "The name of the notification."}, + "data": {"display_name": "Data", "info": "The data to store."}, + "append": { + "display_name": "Append", + "info": "If True, the record will be appended to the notification.", + }, + } + + def build(self, name: str, *, data: Data | None = None, append: bool = False) -> Data: + if data and not isinstance(data, Data): + if isinstance(data, str): + data = Data(text=data) + elif isinstance(data, dict): + data = Data(data=data) + else: + data = Data(text=str(data)) + elif not data: + data = Data(text="") + if data: + if append: + self.append_state(name, data) + else: + self.update_state(name, data) + else: + self.status = "No record provided." 
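Notify (being defined here) and Listen (just above it) are coupled only through named graph state, not through a direct edge: Notify writes under a name via update_state or append_state, and Listen reads the value back with get_state. A rough, self-contained mock of that contract (the StateBus class and its str values are invented for illustration; this is not Langflow's real API surface):

# Hypothetical mock of the Notify -> Listen state handshake; a plain dict
# stands in for Langflow's graph context.
class StateBus:
    def __init__(self) -> None:
        self._state: dict[str, list[str]] = {}

    def update_state(self, name: str, value: str) -> None:  # Notify with append=False
        self._state[name] = [value]

    def append_state(self, name: str, value: str) -> None:  # Notify with append=True
        self._state.setdefault(name, []).append(value)

    def get_state(self, name: str) -> list[str]:  # Listen
        return self._state.get(name, [])

bus = StateBus()
bus.update_state("task_done", "first result")
bus.append_state("task_done", "second result")
assert bus.get_state("task_done") == ["first result", "second result"]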
+        self.status = data
+        self._set_successors_ids()
+        return data
+
+    def _set_successors_ids(self):
+        self._vertex.is_state = True
+        successors = self._vertex.graph.successor_map.get(self._vertex.id, [])
+        return successors + self._vertex.graph.activated_vertices
diff --git a/src/backend/base/langflow/components/prototypes/Pass.py b/src/backend/base/langflow/components/logic/pass_message.py
similarity index 100%
rename from src/backend/base/langflow/components/prototypes/Pass.py
rename to src/backend/base/langflow/components/logic/pass_message.py
diff --git a/src/backend/base/langflow/components/logic/run_flow.py b/src/backend/base/langflow/components/logic/run_flow.py
new file mode 100644
index 000000000000..2fac486105fa
--- /dev/null
+++ b/src/backend/base/langflow/components/logic/run_flow.py
@@ -0,0 +1,74 @@
+from typing import TYPE_CHECKING, Any
+
+from typing_extensions import override
+
+from langflow.base.flow_processing.utils import build_data_from_run_outputs
+from langflow.custom import Component
+from langflow.io import DropdownInput, MessageTextInput, NestedDictInput, Output
+from langflow.schema import Data, dotdict
+
+if TYPE_CHECKING:
+    from langflow.graph.schema import RunOutputs
+
+
+class RunFlowComponent(Component):
+    display_name = "Run Flow"
+    description = "A component to run a flow."
+    name = "RunFlow"
+    legacy: bool = True
+    icon = "workflow"
+
+    def get_flow_names(self) -> list[str]:
+        flow_datas = self.list_flows()
+        return [flow_data.data["name"] for flow_data in flow_datas]
+
+    @override
+    def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None):
+        if field_name == "flow_name":
+            build_config["flow_name"]["options"] = self.get_flow_names()
+
+        return build_config
+
+    inputs = [
+        MessageTextInput(
+            name="input_value",
+            display_name="Input Value",
+            info="The input value to be processed by the flow.",
+        ),
+        DropdownInput(
+            name="flow_name",
+            display_name="Flow Name",
+            info="The name of the flow to run.",
+            options=[],
+            refresh_button=True,
+        ),
+        NestedDictInput(
+            name="tweaks",
+            display_name="Tweaks",
+            info="Tweaks to apply to the flow.",
+        ),
+    ]
+
+    outputs = [
+        Output(display_name="Run Outputs", name="run_outputs", method="generate_results"),
+    ]
+
+    async def generate_results(self) -> list[Data]:
+        if "flow_name" not in self._attributes or not self._attributes["flow_name"]:
+            msg = "Flow name is required"
+            raise ValueError(msg)
+        flow_name = self._attributes["flow_name"]
+
+        results: list[RunOutputs | None] = await self.run_flow(
+            inputs={"input_value": self.input_value}, flow_name=flow_name, tweaks=self.tweaks
+        )
+        if isinstance(results, list):
+            data = []
+            for result in results:
+                if result:
+                    data.extend(build_data_from_run_outputs(result))
+        else:
+            data = build_data_from_run_outputs(results)
+
+        self.status = data
+        return data
diff --git a/src/backend/base/langflow/components/logic/sub_flow.py b/src/backend/base/langflow/components/logic/sub_flow.py
new file mode 100644
index 000000000000..2fb0fe9586be
--- /dev/null
+++ b/src/backend/base/langflow/components/logic/sub_flow.py
@@ -0,0 +1,114 @@
+from typing import Any
+
+from loguru import logger
+
+from langflow.base.flow_processing.utils import build_data_from_result_data
+from langflow.custom import Component
+from langflow.graph.graph.base import Graph
+from langflow.graph.vertex.base import Vertex
+from langflow.helpers.flow import get_flow_inputs
+from langflow.io import DropdownInput, Output
+from langflow.schema import Data, dotdict
+
+
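RunFlowComponent above passes a tweaks mapping straight through to the executed flow, and the SubFlow component that follows builds the same structure automatically by encoding each dynamic field name as vertex_id|field_name. A sketch of the payload shape (the component IDs below are made-up examples):

# Hypothetical example of the tweaks structure consumed by run_flow:
# outer keys are vertex (component) IDs, inner keys are field names.
tweaks = {
    "TextInput-abc12": {"input_value": "Hello"},
    "OpenAIModel-def34": {"temperature": 0.2},
}

# SubFlowComponent (below) reverses its flat "vertex|field" attribute names
# back into the same nested shape:
flat_attributes = {"TextInput-abc12|input_value": "Hello", "flow_name": "My Flow"}
rebuilt: dict[str, dict] = {}
for field, value in flat_attributes.items():
    if field != "flow_name" and "|" in field:
        node, name = field.split("|")
        rebuilt.setdefault(node, {})[name] = value
assert rebuilt == {"TextInput-abc12": {"input_value": "Hello"}}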
+class SubFlowComponent(Component):
+    display_name = "Sub Flow"
+    description = "Generates a Component from a Flow, with all of its inputs and outputs."
+    name = "SubFlow"
+    beta: bool = True
+    icon = "Workflow"
+
+    def get_flow_names(self) -> list[str]:
+        flow_datas = self.list_flows()
+        return [flow_data.data["name"] for flow_data in flow_datas]
+
+    def get_flow(self, flow_name: str) -> Data | None:
+        flow_datas = self.list_flows()
+        for flow_data in flow_datas:
+            if flow_data.data["name"] == flow_name:
+                return flow_data
+        return None
+
+    def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None):
+        if field_name == "flow_name":
+            build_config["flow_name"]["options"] = self.get_flow_names()
+
+        for key in list(build_config.keys()):
+            if key not in [x.name for x in self.inputs] + ["code", "_type", "get_final_results_only"]:
+                del build_config[key]
+        if field_value is not None and field_name == "flow_name":
+            try:
+                flow_data = self.get_flow(field_value)
+            except Exception:  # noqa: BLE001
+                logger.exception(f"Error getting flow {field_value}")
+            else:
+                if not flow_data:
+                    msg = f"Flow {field_value} not found."
+                    logger.error(msg)
+                else:
+                    try:
+                        graph = Graph.from_payload(flow_data.data["data"])
+                        # Get all inputs from the graph
+                        inputs = get_flow_inputs(graph)
+                        # Add inputs to the build config
+                        build_config = self.add_inputs_to_build_config(inputs, build_config)
+                    except Exception:  # noqa: BLE001
+                        logger.exception(f"Error building graph for flow {field_value}")
+
+        return build_config
+
+    def add_inputs_to_build_config(self, inputs_vertex: list[Vertex], build_config: dotdict):
+        new_fields: list[dotdict] = []
+
+        for vertex in inputs_vertex:
+            new_vertex_inputs = []
+            field_template = vertex.data["node"]["template"]
+            for inp in field_template:
+                if inp not in {"code", "_type"}:
+                    field_template[inp]["display_name"] = (
+                        vertex.display_name + " - " + field_template[inp]["display_name"]
+                    )
+                    field_template[inp]["name"] = vertex.id + "|" + inp
+                    new_vertex_inputs.append(field_template[inp])
+            new_fields += new_vertex_inputs
+        for field in new_fields:
+            build_config[field["name"]] = field
+        return build_config
+
+    inputs = [
+        DropdownInput(
+            name="flow_name",
+            display_name="Flow Name",
+            info="The name of the flow to run.",
+            options=[],
+            refresh_button=True,
+            real_time_refresh=True,
+        ),
+    ]
+
+    outputs = [Output(name="flow_outputs", display_name="Flow Outputs", method="generate_results")]
+
+    async def generate_results(self) -> list[Data]:
+        tweaks: dict = {}
+        for field in self._attributes:
+            if field != "flow_name" and "|" in field:
+                node, name = field.split("|")
+                if node not in tweaks:
+                    tweaks[node] = {}
+                tweaks[node][name] = self._attributes[field]
+        flow_name = self._attributes.get("flow_name")
+        run_outputs = await self.run_flow(
+            tweaks=tweaks,
+            flow_name=flow_name,
+            output_type="all",
+        )
+        data: list[Data] = []
+        if not run_outputs:
+            return data
+        run_output = run_outputs[0]
+
+        if run_output is not None:
+            for output in run_output.outputs:
+                if output:
+                    data.extend(build_data_from_result_data(output))
+        return data
diff --git a/src/backend/base/langflow/components/memories/AstraDBChatMemory.py b/src/backend/base/langflow/components/memories/AstraDBChatMemory.py
deleted file mode 100644
index 29f751fe2f5f..000000000000
--- a/src/backend/base/langflow/components/memories/AstraDBChatMemory.py
+++ /dev/null
@@ -1,63 +0,0 @@
-from langflow.base.memory.model import LCChatMemoryComponent
-from langflow.inputs import
MessageTextInput, StrInput, SecretStrInput -from langflow.field_typing import BaseChatMessageHistory - - -class AstraDBChatMemory(LCChatMemoryComponent): - display_name = "Astra DB Chat Memory" - description = "Retrieves and store chat messages from Astra DB." - name = "AstraDBChatMemory" - icon: str = "AstraDB" - - inputs = [ - StrInput( - name="collection_name", - display_name="Collection Name", - info="The name of the collection within Astra DB where the vectors will be stored.", - required=True, - ), - SecretStrInput( - name="token", - display_name="Astra DB Application Token", - info="Authentication token for accessing Astra DB.", - value="ASTRA_DB_APPLICATION_TOKEN", - required=True, - ), - SecretStrInput( - name="api_endpoint", - display_name="API Endpoint", - info="API endpoint URL for the Astra DB service.", - value="ASTRA_DB_API_ENDPOINT", - required=True, - ), - StrInput( - name="namespace", - display_name="Namespace", - info="Optional namespace within Astra DB to use for the collection.", - advanced=True, - ), - MessageTextInput( - name="session_id", - display_name="Session ID", - info="The session ID of the chat. If empty, the current session ID parameter will be used.", - advanced=True, - ), - ] - - def build_message_history(self) -> BaseChatMessageHistory: - try: - from langchain_astradb.chat_message_histories import AstraDBChatMessageHistory - except ImportError: - raise ImportError( - "Could not import langchain Astra DB integration package. " - "Please install it with `pip install langchain-astradb`." - ) - - memory = AstraDBChatMessageHistory( - session_id=self.session_id, - collection_name=self.collection_name, - token=self.token, - api_endpoint=self.api_endpoint, - namespace=self.namespace or None, - ) - return memory diff --git a/src/backend/base/langflow/components/memories/CassandraChatMemory.py b/src/backend/base/langflow/components/memories/CassandraChatMemory.py deleted file mode 100644 index 4891122ab39e..000000000000 --- a/src/backend/base/langflow/components/memories/CassandraChatMemory.py +++ /dev/null @@ -1,93 +0,0 @@ -from langflow.base.memory.model import LCChatMemoryComponent -from langflow.inputs import MessageTextInput, SecretStrInput, DictInput -from langflow.field_typing import BaseChatMessageHistory - - -class CassandraChatMemory(LCChatMemoryComponent): - display_name = "Cassandra Chat Memory" - description = "Retrieves and store chat messages from Apache Cassandra." - name = "CassandraChatMemory" - icon = "Cassandra" - - inputs = [ - MessageTextInput( - name="database_ref", - display_name="Contact Points / Astra Database ID", - info="Contact points for the database (or AstraDB database ID)", - required=True, - ), - MessageTextInput( - name="username", display_name="Username", info="Username for the database (leave empty for AstraDB)." 
- ), - SecretStrInput( - name="token", - display_name="Password / AstraDB Token", - info="User password for the database (or AstraDB token).", - required=True, - ), - MessageTextInput( - name="keyspace", - display_name="Keyspace", - info="Table Keyspace (or AstraDB namespace).", - required=True, - ), - MessageTextInput( - name="table_name", - display_name="Table Name", - info="The name of the table (or AstraDB collection) where vectors will be stored.", - required=True, - ), - MessageTextInput( - name="session_id", display_name="Session ID", info="Session ID for the message.", advanced=True - ), - DictInput( - name="cluster_kwargs", - display_name="Cluster arguments", - info="Optional dictionary of additional keyword arguments for the Cassandra cluster.", - advanced=True, - is_list=True, - ), - ] - - def build_message_history(self) -> BaseChatMessageHistory: - from langchain_community.chat_message_histories import CassandraChatMessageHistory - - try: - import cassio - except ImportError: - raise ImportError( - "Could not import cassio integration package. " "Please install it with `pip install cassio`." - ) - - from uuid import UUID - - database_ref = self.database_ref - - try: - UUID(self.database_ref) - is_astra = True - except ValueError: - is_astra = False - if "," in self.database_ref: - # use a copy because we can't change the type of the parameter - database_ref = self.database_ref.split(",") - - if is_astra: - cassio.init( - database_id=database_ref, - token=self.token, - cluster_kwargs=self.cluster_kwargs, - ) - else: - cassio.init( - contact_points=database_ref, - username=self.username, - password=self.token, - cluster_kwargs=self.cluster_kwargs, - ) - - return CassandraChatMessageHistory( - session_id=self.session_id, - table_name=self.table_name, - keyspace=self.keyspace, - ) diff --git a/src/backend/base/langflow/components/memories/ZepChatMemory.py b/src/backend/base/langflow/components/memories/ZepChatMemory.py deleted file mode 100644 index 36d740a52ed8..000000000000 --- a/src/backend/base/langflow/components/memories/ZepChatMemory.py +++ /dev/null @@ -1,43 +0,0 @@ -from langflow.base.memory.model import LCChatMemoryComponent -from langflow.inputs import MessageTextInput, SecretStrInput, DropdownInput -from langflow.field_typing import BaseChatMessageHistory - - -class ZepChatMemory(LCChatMemoryComponent): - display_name = "Zep Chat Memory" - description = "Retrieves and store chat messages from Zep." - name = "ZepChatMemory" - - inputs = [ - MessageTextInput(name="url", display_name="Zep URL", info="URL of the Zep instance."), - SecretStrInput(name="api_key", display_name="API Key", info="API Key for the Zep instance."), - DropdownInput( - name="api_base_path", - display_name="API Base Path", - options=["api/v1", "api/v2"], - value="api/v1", - advanced=True, - ), - MessageTextInput( - name="session_id", display_name="Session ID", info="Session ID for the message.", advanced=True - ), - ] - - def build_message_history(self) -> BaseChatMessageHistory: - try: - # Monkeypatch API_BASE_PATH to - # avoid 404 - # This is a workaround for the local Zep instance - # cloud Zep works with v2 - import zep_python.zep_client - from zep_python import ZepClient - from zep_python.langchain import ZepChatMessageHistory - - zep_python.zep_client.API_BASE_PATH = self.api_base_path - except ImportError: - raise ImportError( - "Could not import zep-python package. " "Please install it with `pip install zep-python`." 
-            )
-
-        zep_client = ZepClient(api_url=self.url, api_key=self.api_key)
-        return ZepChatMessageHistory(session_id=self.session_id, zep_client=zep_client)
diff --git a/src/backend/base/langflow/components/memories/__init__.py b/src/backend/base/langflow/components/memories/__init__.py
index e69de29bb2d1..292950c42814 100644
--- a/src/backend/base/langflow/components/memories/__init__.py
+++ b/src/backend/base/langflow/components/memories/__init__.py
@@ -0,0 +1,13 @@
+from .astra_db import AstraDBChatMemory
+from .cassandra import CassandraChatMemory
+from .mem0_chat_memory import Mem0MemoryComponent
+from .redis import RedisIndexChatMemory
+from .zep import ZepChatMemory
+
+__all__ = [
+    "AstraDBChatMemory",
+    "CassandraChatMemory",
+    "Mem0MemoryComponent",
+    "RedisIndexChatMemory",
+    "ZepChatMemory",
+]
diff --git a/src/backend/base/langflow/components/memories/astra_db.py b/src/backend/base/langflow/components/memories/astra_db.py
new file mode 100644
index 000000000000..be53f03e91ae
--- /dev/null
+++ b/src/backend/base/langflow/components/memories/astra_db.py
@@ -0,0 +1,69 @@
+import os
+
+from astrapy.admin import parse_api_endpoint
+
+from langflow.base.memory.model import LCChatMemoryComponent
+from langflow.field_typing import BaseChatMessageHistory
+from langflow.inputs import MessageTextInput, SecretStrInput, StrInput
+
+
+class AstraDBChatMemory(LCChatMemoryComponent):
+    display_name = "Astra DB Chat Memory"
+    description = "Retrieves and stores chat messages from Astra DB."
+    name = "AstraDBChatMemory"
+    icon: str = "AstraDB"
+
+    inputs = [
+        SecretStrInput(
+            name="token",
+            display_name="Astra DB Application Token",
+            info="Authentication token for accessing Astra DB.",
+            value="ASTRA_DB_APPLICATION_TOKEN",
+            required=True,
+            advanced=os.getenv("ASTRA_ENHANCED", "false").lower() == "true",
+        ),
+        SecretStrInput(
+            name="api_endpoint",
+            display_name="API Endpoint",
+            info="API endpoint URL for the Astra DB service.",
+            value="ASTRA_DB_API_ENDPOINT",
+            required=True,
+        ),
+        StrInput(
+            name="collection_name",
+            display_name="Collection Name",
+            info="The name of the collection within Astra DB where the vectors will be stored.",
+            required=True,
+        ),
+        StrInput(
+            name="namespace",
+            display_name="Namespace",
+            info="Optional namespace within Astra DB to use for the collection.",
+            advanced=True,
+        ),
+        MessageTextInput(
+            name="session_id",
+            display_name="Session ID",
+            info="The session ID of the chat. If empty, the current session ID parameter will be used.",
+            advanced=True,
+        ),
+    ]
+
+    def build_message_history(self) -> BaseChatMessageHistory:
+        try:
+            from langchain_astradb.chat_message_histories import AstraDBChatMessageHistory
+        except ImportError as e:
+            msg = (
+                "Could not import langchain Astra DB integration package. "
+                "Please install it with `pip install langchain-astradb`."
+            )
+            raise ImportError(msg) from e
+
+        return AstraDBChatMessageHistory(
+            session_id=self.session_id,
+            collection_name=self.collection_name,
+            token=self.token,
+            api_endpoint=self.api_endpoint,
+            namespace=self.namespace or None,
+            environment=parse_api_endpoint(self.api_endpoint).environment,
+        )
diff --git a/src/backend/base/langflow/components/memories/cassandra.py b/src/backend/base/langflow/components/memories/cassandra.py
new file mode 100644
index 000000000000..8bb1b6bb40fd
--- /dev/null
+++ b/src/backend/base/langflow/components/memories/cassandra.py
@@ -0,0 +1,92 @@
+from langflow.base.memory.model import LCChatMemoryComponent
+from langflow.field_typing import BaseChatMessageHistory
+from langflow.inputs import DictInput, MessageTextInput, SecretStrInput
+
+
+class CassandraChatMemory(LCChatMemoryComponent):
+    display_name = "Cassandra Chat Memory"
+    description = "Retrieves and stores chat messages from Apache Cassandra."
+    name = "CassandraChatMemory"
+    icon = "Cassandra"
+
+    inputs = [
+        MessageTextInput(
+            name="database_ref",
+            display_name="Contact Points / Astra Database ID",
+            info="Contact points for the database (or AstraDB database ID)",
+            required=True,
+        ),
+        MessageTextInput(
+            name="username", display_name="Username", info="Username for the database (leave empty for AstraDB)."
+        ),
+        SecretStrInput(
+            name="token",
+            display_name="Password / AstraDB Token",
+            info="User password for the database (or AstraDB token).",
+            required=True,
+        ),
+        MessageTextInput(
+            name="keyspace",
+            display_name="Keyspace",
+            info="Table Keyspace (or AstraDB namespace).",
+            required=True,
+        ),
+        MessageTextInput(
+            name="table_name",
+            display_name="Table Name",
+            info="The name of the table (or AstraDB collection) where vectors will be stored.",
+            required=True,
+        ),
+        MessageTextInput(
+            name="session_id", display_name="Session ID", info="Session ID for the message.", advanced=True
+        ),
+        DictInput(
+            name="cluster_kwargs",
+            display_name="Cluster arguments",
+            info="Optional dictionary of additional keyword arguments for the Cassandra cluster.",
+            advanced=True,
+            is_list=True,
+        ),
+    ]
+
+    def build_message_history(self) -> BaseChatMessageHistory:
+        from langchain_community.chat_message_histories import CassandraChatMessageHistory
+
+        try:
+            import cassio
+        except ImportError as e:
+            msg = "Could not import cassio integration package. Please install it with `pip install cassio`."
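Right after this import guard, the Cassandra component (continued below) distinguishes Astra DB from a self-managed cluster purely by whether database_ref parses as a UUID, since Astra database IDs are UUIDs while contact points are hostnames or IP addresses. The check in isolation:

# Standalone illustration of the UUID-based Astra-vs-Cassandra detection used below;
# `is_astra_ref` is an extracted sketch, not a function in this PR.
from uuid import UUID

def is_astra_ref(database_ref: str) -> bool:
    try:
        UUID(database_ref)
    except ValueError:
        return False
    return True

assert is_astra_ref("123e4567-e89b-12d3-a456-426614174000")  # Astra database ID
assert not is_astra_ref("10.0.0.1,10.0.0.2")  # comma-separated Cassandra contact points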
+ raise ImportError(msg) from e + + from uuid import UUID + + database_ref = self.database_ref + + try: + UUID(self.database_ref) + is_astra = True + except ValueError: + is_astra = False + if "," in self.database_ref: + # use a copy because we can't change the type of the parameter + database_ref = self.database_ref.split(",") + + if is_astra: + cassio.init( + database_id=database_ref, + token=self.token, + cluster_kwargs=self.cluster_kwargs, + ) + else: + cassio.init( + contact_points=database_ref, + username=self.username, + password=self.token, + cluster_kwargs=self.cluster_kwargs, + ) + + return CassandraChatMessageHistory( + session_id=self.session_id, + table_name=self.table_name, + keyspace=self.keyspace, + ) diff --git a/src/backend/base/langflow/components/memories/mem0_chat_memory.py b/src/backend/base/langflow/components/memories/mem0_chat_memory.py new file mode 100644 index 000000000000..251a1c798172 --- /dev/null +++ b/src/backend/base/langflow/components/memories/mem0_chat_memory.py @@ -0,0 +1,144 @@ +import logging +import os + +from mem0 import Memory, MemoryClient + +from langflow.base.memory.model import LCChatMemoryComponent +from langflow.inputs import ( + DictInput, + HandleInput, + MessageTextInput, + NestedDictInput, + SecretStrInput, +) +from langflow.io import Output +from langflow.schema import Data + +logger = logging.getLogger(__name__) + + +class Mem0MemoryComponent(LCChatMemoryComponent): + display_name = "Mem0 Chat Memory" + description = "Retrieves and stores chat messages using Mem0 memory storage." + name = "mem0_chat_memory" + icon: str = "Mem0" + inputs = [ + NestedDictInput( + name="mem0_config", + display_name="Mem0 Configuration", + info="""Configuration dictionary for initializing Mem0 memory instance. + Example: + { + "graph_store": { + "provider": "neo4j", + "config": { + "url": "neo4j+s://your-neo4j-url", + "username": "neo4j", + "password": "your-password" + } + }, + "version": "v1.1" + }""", + input_types=["Data"], + ), + MessageTextInput( + name="ingest_message", + display_name="Message to Ingest", + info="The message content to be ingested into Mem0 memory.", + ), + HandleInput( + name="existing_memory", + display_name="Existing Memory Instance", + input_types=["Memory"], + info="Optional existing Mem0 memory instance. If not provided, a new instance will be created.", + ), + MessageTextInput( + name="user_id", display_name="User ID", info="Identifier for the user associated with the messages." + ), + MessageTextInput( + name="search_query", display_name="Search Query", info="Input text for searching related memories in Mem0." + ), + SecretStrInput( + name="mem0_api_key", + display_name="Mem0 API Key", + info="API key for Mem0 platform. Leave empty to use the local version.", + ), + DictInput( + name="metadata", + display_name="Metadata", + info="Additional metadata to associate with the ingested message.", + advanced=True, + ), + SecretStrInput( + name="openai_api_key", + display_name="OpenAI API Key", + required=False, + info="API key for OpenAI. 
Required if using OpenAI Embeddings without a provided configuration.",
+        ),
+    ]
+
+    outputs = [
+        Output(name="memory", display_name="Mem0 Memory", method="ingest_data"),
+        Output(
+            name="search_results",
+            display_name="Search Results",
+            method="build_search_results",
+        ),
+    ]
+
+    def build_mem0(self) -> Memory:
+        """Initializes a Mem0 memory instance based on provided configuration and API keys."""
+        if self.openai_api_key:
+            os.environ["OPENAI_API_KEY"] = self.openai_api_key
+
+        try:
+            if not self.mem0_api_key:
+                return Memory.from_config(config_dict=dict(self.mem0_config)) if self.mem0_config else Memory()
+            if self.mem0_config:
+                return MemoryClient.from_config(api_key=self.mem0_api_key, config_dict=dict(self.mem0_config))
+            return MemoryClient(api_key=self.mem0_api_key)
+        except ImportError as e:
+            msg = "Mem0 is not properly installed. Please install it with 'pip install -U mem0ai'."
+            raise ImportError(msg) from e
+
+    def ingest_data(self) -> Memory:
+        """Ingests a new message into Mem0 memory and returns the updated memory instance."""
+        mem0_memory = self.existing_memory if self.existing_memory else self.build_mem0()
+
+        if not self.ingest_message or not self.user_id:
+            logger.warning("Missing 'ingest_message' or 'user_id'; cannot ingest data.")
+            return mem0_memory
+
+        metadata = self.metadata if self.metadata else {}
+
+        logger.info("Ingesting message for user_id: %s", self.user_id)
+
+        try:
+            mem0_memory.add(self.ingest_message, user_id=self.user_id, metadata=metadata)
+        except Exception:
+            logger.exception("Failed to add message to Mem0 memory.")
+            raise
+
+        return mem0_memory
+
+    def build_search_results(self) -> Data:
+        """Searches the Mem0 memory for related messages based on the search query and returns the results."""
+        mem0_memory = self.ingest_data()
+        search_query = self.search_query
+        user_id = self.user_id
+
+        logger.info("Search query: %s", search_query)
+
+        try:
+            if search_query:
+                logger.info("Performing search with query.")
+                related_memories = mem0_memory.search(query=search_query, user_id=user_id)
+            else:
+                logger.info("Retrieving all memories for user_id: %s", user_id)
+                related_memories = mem0_memory.get_all(user_id=user_id)
+        except Exception:
+            logger.exception("Failed to retrieve related memories from Mem0.")
+            raise
+
+        logger.info("Related memories retrieved: %s", related_memories)
+        return related_memories
diff --git a/src/backend/base/langflow/components/memories/redis.py b/src/backend/base/langflow/components/memories/redis.py
new file mode 100644
index 000000000000..39838e871e79
--- /dev/null
+++ b/src/backend/base/langflow/components/memories/redis.py
@@ -0,0 +1,43 @@
+from urllib import parse
+
+from langchain_community.chat_message_histories.redis import RedisChatMessageHistory
+
+from langflow.base.memory.model import LCChatMemoryComponent
+from langflow.field_typing import BaseChatMessageHistory
+from langflow.inputs import IntInput, MessageTextInput, SecretStrInput, StrInput
+
+
+class RedisIndexChatMemory(LCChatMemoryComponent):
+    display_name = "Redis Chat Memory"
+    description = "Retrieves and stores chat messages from Redis."
+    name = "RedisChatMemory"
+    icon = "Redis"
+
+    inputs = [
+        StrInput(
+            name="host", display_name="Hostname", required=True, value="localhost", info="IP address or hostname."
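For the local, keyless path, build_mem0 above hands mem0_config directly to Memory.from_config. A usage sketch based on the neo4j example from the component's own info string (the endpoint and credentials are placeholders, and a reachable graph store is assumed):

# Sketch of driving the Mem0 paths shown above; values are placeholders.
from mem0 import Memory

mem0_config = {
    "graph_store": {
        "provider": "neo4j",
        "config": {
            "url": "neo4j+s://your-neo4j-url",
            "username": "neo4j",
            "password": "your-password",
        },
    },
    "version": "v1.1",
}

memory = Memory.from_config(config_dict=mem0_config)  # local instance, no Mem0 API key
memory.add("User prefers dark mode", user_id="user-1")
hits = memory.search(query="What theme does the user like?", user_id="user-1")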
+        ),
+        IntInput(name="port", display_name="Port", required=True, value=6379, info="Redis Port Number."),
+        StrInput(name="database", display_name="Database", required=True, value="0", info="Redis database."),
+        MessageTextInput(
+            name="username", display_name="Username", value="", info="The Redis user name.", advanced=True
+        ),
+        SecretStrInput(
+            name="password", display_name="Password", value="", info="The password for username.", advanced=True
+        ),
+        StrInput(name="key_prefix", display_name="Key Prefix", info="Key prefix.", advanced=True),
+        MessageTextInput(
+            name="session_id", display_name="Session ID", info="Session ID for the message.", advanced=True
+        ),
+    ]
+
+    def build_message_history(self) -> BaseChatMessageHistory:
+        kwargs = {}
+        password: str | None = self.password
+        if self.key_prefix:
+            kwargs["key_prefix"] = self.key_prefix
+        if password:
+            password = parse.quote_plus(password)  # quote so special characters cannot break the URL
+
+        url = f"redis://{self.username}:{password}@{self.host}:{self.port}/{self.database}"
+        return RedisChatMessageHistory(session_id=self.session_id, url=url, **kwargs)
diff --git a/src/backend/base/langflow/components/memories/zep.py b/src/backend/base/langflow/components/memories/zep.py
new file mode 100644
index 000000000000..d9a4905cba1b
--- /dev/null
+++ b/src/backend/base/langflow/components/memories/zep.py
@@ -0,0 +1,43 @@
+from langflow.base.memory.model import LCChatMemoryComponent
+from langflow.field_typing import BaseChatMessageHistory
+from langflow.inputs import DropdownInput, MessageTextInput, SecretStrInput
+
+
+class ZepChatMemory(LCChatMemoryComponent):
+    display_name = "Zep Chat Memory"
+    description = "Retrieves and stores chat messages from Zep."
+    name = "ZepChatMemory"
+    icon = "ZepMemory"
+
+    inputs = [
+        MessageTextInput(name="url", display_name="Zep URL", info="URL of the Zep instance."),
+        SecretStrInput(name="api_key", display_name="API Key", info="API Key for the Zep instance."),
+        DropdownInput(
+            name="api_base_path",
+            display_name="API Base Path",
+            options=["api/v1", "api/v2"],
+            value="api/v1",
+            advanced=True,
+        ),
+        MessageTextInput(
+            name="session_id", display_name="Session ID", info="Session ID for the message.", advanced=True
+        ),
+    ]
+
+    def build_message_history(self) -> BaseChatMessageHistory:
+        try:
+            # Monkeypatch API_BASE_PATH to
+            # avoid a 404.
+            # This is a workaround for the local Zep instance;
+            # Zep Cloud works with v2.
+            import zep_python.zep_client
+            from zep_python import ZepClient
+            from zep_python.langchain import ZepChatMessageHistory
+
+            zep_python.zep_client.API_BASE_PATH = self.api_base_path
+        except ImportError as e:
+            msg = "Could not import zep-python package. Please install it with `pip install zep-python`."
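A detail worth noting in the Redis component above: the password is URL-quoted with parse.quote_plus before being interpolated, because characters such as @ or / in a raw password would corrupt the connection URL. In isolation:

# Why the Redis password is quoted before being embedded in the URL;
# the sample password is invented.
from urllib import parse

password = "p@ss/word"
quoted = parse.quote_plus(password)
url = f"redis://default:{quoted}@localhost:6379/0"
assert quoted == "p%40ss%2Fword"
assert url.count("@") == 1  # only the host separator remains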
+ raise ImportError(msg) from e + + zep_client = ZepClient(api_url=self.url, api_key=self.api_key) + return ZepChatMessageHistory(session_id=self.session_id, zep_client=zep_client) diff --git a/src/backend/base/langflow/components/models/AIMLModel.py b/src/backend/base/langflow/components/models/AIMLModel.py deleted file mode 100644 index 65ac0176db09..000000000000 --- a/src/backend/base/langflow/components/models/AIMLModel.py +++ /dev/null @@ -1,108 +0,0 @@ -from langflow.field_typing.range_spec import RangeSpec -from langchain_openai import ChatOpenAI -from pydantic.v1 import SecretStr - -from langflow.base.models.aiml_constants import AIML_CHAT_MODELS -from langflow.base.models.model import LCModelComponent -from langflow.field_typing import LanguageModel -from langflow.inputs import ( - DictInput, - DropdownInput, - FloatInput, - IntInput, - SecretStrInput, - StrInput, -) - - -class AIMLModelComponent(LCModelComponent): - display_name = "AIML" - description = "Generates text using AIML LLMs." - icon = "AIML" - name = "AIMLModel" - documentation = "https://docs.aimlapi.com/api-reference" - - inputs = LCModelComponent._base_inputs + [ - IntInput( - name="max_tokens", - display_name="Max Tokens", - advanced=True, - info="The maximum number of tokens to generate. Set to 0 for unlimited tokens.", - range_spec=RangeSpec(min=0, max=128000), - ), - DictInput(name="model_kwargs", display_name="Model Kwargs", advanced=True), - DropdownInput( - name="model_name", - display_name="Model Name", - advanced=False, - options=AIML_CHAT_MODELS, - value=AIML_CHAT_MODELS[0], - ), - StrInput( - name="aiml_api_base", - display_name="AIML API Base", - advanced=True, - info="The base URL of the OpenAI API. Defaults to https://api.aimlapi.com . You can change this to use other APIs like JinaChat, LocalAI e Prem.", - ), - SecretStrInput( - name="api_key", - display_name="AIML API Key", - info="The AIML API Key to use for the OpenAI model.", - advanced=False, - value="AIML_API_KEY", - ), - FloatInput(name="temperature", display_name="Temperature", value=0.1), - IntInput( - name="seed", - display_name="Seed", - info="The seed controls the reproducibility of the job.", - advanced=True, - value=1, - ), - ] - - def build_model(self) -> LanguageModel: # type: ignore[type-var] - aiml_api_key = self.api_key - temperature = self.temperature - model_name: str = self.model_name - max_tokens = self.max_tokens - model_kwargs = self.model_kwargs or {} - aiml_api_base = self.aiml_api_base or "https://api.aimlapi.com" - seed = self.seed - - if isinstance(aiml_api_key, SecretStr): - openai_api_key = aiml_api_key.get_secret_value() - else: - openai_api_key = aiml_api_key - - model = ChatOpenAI( - model=model_name, - temperature=temperature, - api_key=openai_api_key, - base_url=aiml_api_base, - max_tokens=max_tokens or None, - seed=seed, - **model_kwargs, - ) - - return model # type: ignore - - def _get_exception_message(self, e: Exception): - """ - Get a message from an OpenAI exception. - - Args: - exception (Exception): The exception to get the message from. - - Returns: - str: The message from the exception. 
- """ - try: - from openai.error import BadRequestError - except ImportError: - return None - if isinstance(e, BadRequestError): - message = e.json_body.get("error", {}).get("message", "") # type: ignore - if message: - return message - return None diff --git a/src/backend/base/langflow/components/models/AmazonBedrockModel.py b/src/backend/base/langflow/components/models/AmazonBedrockModel.py deleted file mode 100644 index 8f39bfa08177..000000000000 --- a/src/backend/base/langflow/components/models/AmazonBedrockModel.py +++ /dev/null @@ -1,99 +0,0 @@ -from langflow.base.models.model import LCModelComponent -from langflow.field_typing import LanguageModel -from langflow.inputs import MessageTextInput, SecretStrInput -from langflow.io import DictInput, DropdownInput - - -class AmazonBedrockComponent(LCModelComponent): - display_name: str = "Amazon Bedrock" - description: str = "Generate text using Amazon Bedrock LLMs." - icon = "Amazon" - name = "AmazonBedrockModel" - - inputs = LCModelComponent._base_inputs + [ - DropdownInput( - name="model_id", - display_name="Model ID", - options=[ - "amazon.titan-text-express-v1", - "amazon.titan-text-lite-v1", - "amazon.titan-text-premier-v1:0", - "amazon.titan-embed-text-v1", - "amazon.titan-embed-text-v2:0", - "amazon.titan-embed-image-v1", - "amazon.titan-image-generator-v1", - "anthropic.claude-v2", - "anthropic.claude-v2:1", - "anthropic.claude-3-sonnet-20240229-v1:0", - "anthropic.claude-3-haiku-20240307-v1:0", - "anthropic.claude-3-opus-20240229-v1:0", - "anthropic.claude-instant-v1", - "ai21.j2-mid-v1", - "ai21.j2-ultra-v1", - "cohere.command-text-v14", - "cohere.command-light-text-v14", - "cohere.command-r-v1:0", - "cohere.command-r-plus-v1:0", - "cohere.embed-english-v3", - "cohere.embed-multilingual-v3", - "meta.llama2-13b-chat-v1", - "meta.llama2-70b-chat-v1", - "meta.llama3-8b-instruct-v1:0", - "meta.llama3-70b-instruct-v1:0", - "mistral.mistral-7b-instruct-v0:2", - "mistral.mixtral-8x7b-instruct-v0:1", - "mistral.mistral-large-2402-v1:0", - "mistral.mistral-small-2402-v1:0", - "stability.stable-diffusion-xl-v0", - "stability.stable-diffusion-xl-v1", - ], - value="anthropic.claude-3-haiku-20240307-v1:0", - ), - SecretStrInput(name="aws_access_key", display_name="Access Key"), - SecretStrInput(name="aws_secret_key", display_name="Secret Key"), - MessageTextInput(name="credentials_profile_name", display_name="Credentials Profile Name", advanced=True), - MessageTextInput(name="region_name", display_name="Region Name", value="us-east-1"), - DictInput(name="model_kwargs", display_name="Model Kwargs", advanced=True, is_list=True), - MessageTextInput(name="endpoint_url", display_name="Endpoint URL", advanced=True), - ] - - def build_model(self) -> LanguageModel: # type: ignore[type-var] - try: - from langchain_aws import ChatBedrock - except ImportError: - raise ImportError("langchain_aws is not installed. 
Please install it with `pip install langchain_aws`.") - if self.aws_access_key: - import boto3 # type: ignore - - session = boto3.Session( - aws_access_key_id=self.aws_access_key, - aws_secret_access_key=self.aws_secret_key, - ) - elif self.credentials_profile_name: - import boto3 - - session = boto3.Session(profile_name=self.credentials_profile_name) - else: - import boto3 - - session = boto3.Session() - - client_params = {} - if self.endpoint_url: - client_params["endpoint_url"] = self.endpoint_url - if self.region_name: - client_params["region_name"] = self.region_name - - boto3_client = session.client("bedrock-runtime", **client_params) - try: - output = ChatBedrock( # type: ignore - client=boto3_client, - model_id=self.model_id, - region_name=self.region_name, - model_kwargs=self.model_kwargs, - endpoint_url=self.endpoint_url, - streaming=self.stream, - ) - except Exception as e: - raise ValueError("Could not connect to AmazonBedrock API.") from e - return output # type: ignore diff --git a/src/backend/base/langflow/components/models/AnthropicModel.py b/src/backend/base/langflow/components/models/AnthropicModel.py deleted file mode 100644 index 8f6b2f1314bb..000000000000 --- a/src/backend/base/langflow/components/models/AnthropicModel.py +++ /dev/null @@ -1,99 +0,0 @@ -from pydantic.v1 import SecretStr - -from langflow.base.models.model import LCModelComponent -from langflow.field_typing import LanguageModel -from langflow.io import DropdownInput, FloatInput, IntInput, MessageTextInput, SecretStrInput - - -class AnthropicModelComponent(LCModelComponent): - display_name = "Anthropic" - description = "Generate text using Anthropic Chat&Completion LLMs with prefill support." - icon = "Anthropic" - name = "AnthropicModel" - - inputs = LCModelComponent._base_inputs + [ - IntInput( - name="max_tokens", - display_name="Max Tokens", - advanced=True, - value=4096, - info="The maximum number of tokens to generate. Set to 0 for unlimited tokens.", - ), - DropdownInput( - name="model", - display_name="Model Name", - options=[ - "claude-3-5-sonnet-20240620", - "claude-3-opus-20240229", - "claude-3-sonnet-20240229", - "claude-3-haiku-20240307", - ], - info="https://python.langchain.com/docs/integrations/chat/anthropic", - value="claude-3-5-sonnet-20240620", - ), - SecretStrInput( - name="anthropic_api_key", - display_name="Anthropic API Key", - info="Your Anthropic API key.", - ), - FloatInput(name="temperature", display_name="Temperature", value=0.1), - MessageTextInput( - name="anthropic_api_url", - display_name="Anthropic API URL", - advanced=True, - info="Endpoint of the Anthropic API. Defaults to 'https://api.anthropic.com' if not specified.", - ), - MessageTextInput( - name="prefill", - display_name="Prefill", - info="Prefill text to guide the model's response.", - advanced=True, - ), - ] - - def build_model(self) -> LanguageModel: # type: ignore[type-var] - try: - from langchain_anthropic.chat_models import ChatAnthropic - except ImportError: - raise ImportError( - "langchain_anthropic is not installed. Please install it with `pip install langchain_anthropic`." 
- ) - model = self.model - anthropic_api_key = self.anthropic_api_key - max_tokens = self.max_tokens - temperature = self.temperature - anthropic_api_url = self.anthropic_api_url or "https://api.anthropic.com" - - try: - output = ChatAnthropic( - model=model, - anthropic_api_key=(SecretStr(anthropic_api_key) if anthropic_api_key else None), - max_tokens_to_sample=max_tokens, # type: ignore - temperature=temperature, - anthropic_api_url=anthropic_api_url, - streaming=self.stream, - ) - except Exception as e: - raise ValueError("Could not connect to Anthropic API.") from e - - return output # type: ignore - - def _get_exception_message(self, exception: Exception) -> str | None: - """ - Get a message from an Anthropic exception. - - Args: - exception (Exception): The exception to get the message from. - - Returns: - str: The message from the exception. - """ - try: - from anthropic import BadRequestError - except ImportError: - return None - if isinstance(exception, BadRequestError): - message = exception.body.get("error", {}).get("message") # type: ignore - if message: - return message - return None diff --git a/src/backend/base/langflow/components/models/AzureOpenAIModel.py b/src/backend/base/langflow/components/models/AzureOpenAIModel.py deleted file mode 100644 index c55e7cbc9011..000000000000 --- a/src/backend/base/langflow/components/models/AzureOpenAIModel.py +++ /dev/null @@ -1,74 +0,0 @@ -from langchain_openai import AzureChatOpenAI -from langflow.base.models.model import LCModelComponent -from langflow.field_typing import LanguageModel -from langflow.inputs import MessageTextInput -from langflow.io import DropdownInput, FloatInput, IntInput, SecretStrInput - - -class AzureChatOpenAIComponent(LCModelComponent): - display_name: str = "Azure OpenAI" - description: str = "Generate text using Azure OpenAI LLMs." - documentation: str = "https://python.langchain.com/docs/integrations/llms/azure_openai" - beta = False - icon = "Azure" - name = "AzureOpenAIModel" - - AZURE_OPENAI_API_VERSIONS = [ - "2023-03-15-preview", - "2023-05-15", - "2023-06-01-preview", - "2023-07-01-preview", - "2023-08-01-preview", - "2023-09-01-preview", - "2023-12-01-preview", - "2024-04-09", - "2024-05-13", - ] - - inputs = LCModelComponent._base_inputs + [ - MessageTextInput( - name="azure_endpoint", - display_name="Azure Endpoint", - info="Your Azure endpoint, including the resource. Example: `https://example-resource.azure.openai.com/`", - required=True, - ), - MessageTextInput(name="azure_deployment", display_name="Deployment Name", required=True), - SecretStrInput(name="api_key", display_name="API Key"), - DropdownInput( - name="api_version", - display_name="API Version", - options=AZURE_OPENAI_API_VERSIONS, - value=AZURE_OPENAI_API_VERSIONS[-1], - ), - FloatInput(name="temperature", display_name="Temperature", value=0.7), - IntInput( - name="max_tokens", - display_name="Max Tokens", - advanced=True, - info="The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.", - ), - ] - - def build_model(self) -> LanguageModel: # type: ignore[type-var] - azure_endpoint = self.azure_endpoint - azure_deployment = self.azure_deployment - api_version = self.api_version - api_key = self.api_key - temperature = self.temperature - max_tokens = self.max_tokens - stream = self.stream - - try: - output = AzureChatOpenAI( - azure_endpoint=azure_endpoint, - azure_deployment=azure_deployment, - api_version=api_version, - api_key=api_key, - temperature=temperature, - max_tokens=max_tokens or None, - streaming=stream, - ) - except Exception as e: - raise ValueError(f"Could not connect to AzureOpenAI API: {str(e)}") from e - - return output # type: ignore diff --git a/src/backend/base/langflow/components/models/BaiduQianfanChatModel.py b/src/backend/base/langflow/components/models/BaiduQianfanChatModel.py deleted file mode 100644 index 6e638424858b..000000000000 --- a/src/backend/base/langflow/components/models/BaiduQianfanChatModel.py +++ /dev/null @@ -1,94 +0,0 @@ -from langchain_community.chat_models.baidu_qianfan_endpoint import QianfanChatEndpoint -from pydantic.v1 import SecretStr - -from langflow.base.models.model import LCModelComponent -from langflow.field_typing.constants import LanguageModel -from langflow.io import DropdownInput, FloatInput, MessageTextInput, SecretStrInput - - -class QianfanChatEndpointComponent(LCModelComponent): - display_name: str = "Qianfan" - description: str = "Generate text using Baidu Qianfan LLMs." - documentation: str = "https://python.langchain.com/docs/integrations/chat/baidu_qianfan_endpoint" - icon = "BaiduQianfan" - name = "BaiduQianfanChatModel" - - inputs = LCModelComponent._base_inputs + [ - DropdownInput( - name="model", - display_name="Model Name", - options=[ - "ERNIE-Bot", - "ERNIE-Bot-turbo", - "BLOOMZ-7B", - "Llama-2-7b-chat", - "Llama-2-13b-chat", - "Llama-2-70b-chat", - "Qianfan-BLOOMZ-7B-compressed", - "Qianfan-Chinese-Llama-2-7B", - "ChatGLM2-6B-32K", - "AquilaChat-7B", - ], - info="https://python.langchain.com/docs/integrations/chat/baidu_qianfan_endpoint", - value="ERNIE-Bot-turbo", - ), - SecretStrInput( - name="qianfan_ak", - display_name="Qianfan Ak", - info="which you could get from https://cloud.baidu.com/product/wenxinworkshop", - ), - SecretStrInput( - name="qianfan_sk", - display_name="Qianfan Sk", - info="which you could get from https://cloud.baidu.com/product/wenxinworkshop", - ), - FloatInput( - name="top_p", - display_name="Top p", - info="Model params, only supported in ERNIE-Bot and ERNIE-Bot-turbo", - value=0.8, - advanced=True, - ), - FloatInput( - name="temperature", - display_name="Temperature", - info="Model params, only supported in ERNIE-Bot and ERNIE-Bot-turbo", - value=0.95, - ), - FloatInput( - name="penalty_score", - display_name="Penalty Score", - info="Model params, only supported in ERNIE-Bot and ERNIE-Bot-turbo", - value=1.0, - advanced=True, - ), - MessageTextInput( - name="endpoint", - display_name="Endpoint", - info="Endpoint of the Qianfan LLM, required if custom model used.", - ), - ] - - def build_model(self) -> LanguageModel: # type: ignore[type-var] - model = self.model - qianfan_ak = self.qianfan_ak - qianfan_sk = self.qianfan_sk - top_p = self.top_p - temperature = self.temperature - penalty_score = self.penalty_score - endpoint = self.endpoint - - try: - output = QianfanChatEndpoint( # type: ignore - model=model, - qianfan_ak=SecretStr(qianfan_ak) if qianfan_ak else None, - qianfan_sk=SecretStr(qianfan_sk) if qianfan_sk else None, - top_p=top_p, 
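Several of the model components being removed here (AIML, Anthropic, Qianfan) wrap raw keys in pydantic's SecretStr before handing them to the LangChain client, which keeps the key masked in reprs and logs; the key value below is a fake:

# Why API keys are wrapped in SecretStr before reaching the client.
from pydantic.v1 import SecretStr

key = SecretStr("sk-not-a-real-key")
print(key)                     # ********** (masked in logs and reprs)
print(key.get_secret_value())  # explicit unwrap only where the raw value is needed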
- temperature=temperature, - penalty_score=penalty_score, - endpoint=endpoint, - ) - except Exception as e: - raise ValueError("Could not connect to Baidu Qianfan API.") from e - - return output # type: ignore diff --git a/src/backend/base/langflow/components/models/CohereModel.py b/src/backend/base/langflow/components/models/CohereModel.py deleted file mode 100644 index 498e47fd4aeb..000000000000 --- a/src/backend/base/langflow/components/models/CohereModel.py +++ /dev/null @@ -1,41 +0,0 @@ -from langchain_cohere import ChatCohere -from pydantic.v1 import SecretStr - -from langflow.base.models.model import LCModelComponent -from langflow.field_typing import LanguageModel -from langflow.io import FloatInput, SecretStrInput - - -class CohereComponent(LCModelComponent): - display_name = "Cohere" - description = "Generate text using Cohere LLMs." - documentation = "https://python.langchain.com/docs/modules/model_io/models/llms/integrations/cohere" - icon = "Cohere" - name = "CohereModel" - - inputs = LCModelComponent._base_inputs + [ - SecretStrInput( - name="cohere_api_key", - display_name="Cohere API Key", - info="The Cohere API Key to use for the Cohere model.", - advanced=False, - value="COHERE_API_KEY", - ), - FloatInput(name="temperature", display_name="Temperature", value=0.75), - ] - - def build_model(self) -> LanguageModel: # type: ignore[type-var] - cohere_api_key = self.cohere_api_key - temperature = self.temperature - - if cohere_api_key: - api_key = SecretStr(cohere_api_key) - else: - api_key = None - - output = ChatCohere( - temperature=temperature or 0.75, - cohere_api_key=api_key, - ) - - return output # type: ignore diff --git a/src/backend/base/langflow/components/models/GoogleGenerativeAIModel.py b/src/backend/base/langflow/components/models/GoogleGenerativeAIModel.py deleted file mode 100644 index 916831430665..000000000000 --- a/src/backend/base/langflow/components/models/GoogleGenerativeAIModel.py +++ /dev/null @@ -1,77 +0,0 @@ -from pydantic.v1 import SecretStr - -from langflow.base.models.model import LCModelComponent -from langflow.field_typing import LanguageModel -from langflow.inputs import DropdownInput, FloatInput, IntInput, SecretStrInput - - -class GoogleGenerativeAIComponent(LCModelComponent): - display_name = "Google Generative AI" - description = "Generate text using Google Generative AI." - icon = "GoogleGenerativeAI" - name = "GoogleGenerativeAIModel" - - inputs = LCModelComponent._base_inputs + [ - IntInput( - name="max_output_tokens", - display_name="Max Output Tokens", - info="The maximum number of tokens to generate.", - ), - DropdownInput( - name="model", - display_name="Model", - info="The name of the model to use.", - options=["gemini-1.5-pro", "gemini-1.5-flash", "gemini-1.0-pro", "gemini-1.0-pro-vision"], - value="gemini-1.5-pro", - ), - SecretStrInput( - name="google_api_key", - display_name="Google API Key", - info="The Google API Key to use for the Google Generative AI.", - ), - FloatInput( - name="top_p", - display_name="Top P", - info="The maximum cumulative probability of tokens to consider when sampling.", - advanced=True, - ), - FloatInput(name="temperature", display_name="Temperature", value=0.1), - IntInput( - name="n", - display_name="N", - info="Number of chat completions to generate for each prompt. 
Note that the API may not return the full n completions if duplicates are generated.", - advanced=True, - ), - IntInput( - name="top_k", - display_name="Top K", - info="Decode using top-k sampling: consider the set of top_k most probable tokens. Must be positive.", - advanced=True, - ), - ] - - def build_model(self) -> LanguageModel: # type: ignore[type-var] - try: - from langchain_google_genai import ChatGoogleGenerativeAI - except ImportError: - raise ImportError("The 'langchain_google_genai' package is required to use the Google Generative AI model.") - - google_api_key = self.google_api_key - model = self.model - max_output_tokens = self.max_output_tokens - temperature = self.temperature - top_k = self.top_k - top_p = self.top_p - n = self.n - - output = ChatGoogleGenerativeAI( # type: ignore - model=model, - max_output_tokens=max_output_tokens or None, - temperature=temperature, - top_k=top_k or None, - top_p=top_p or None, - n=n or 1, - google_api_key=SecretStr(google_api_key), - ) - - return output # type: ignore diff --git a/src/backend/base/langflow/components/models/GroqModel.py b/src/backend/base/langflow/components/models/GroqModel.py deleted file mode 100644 index 2e6d2df1e399..000000000000 --- a/src/backend/base/langflow/components/models/GroqModel.py +++ /dev/null @@ -1,98 +0,0 @@ -import requests -from typing import List -from langchain_groq import ChatGroq -from pydantic.v1 import SecretStr - -from langflow.base.models.model import LCModelComponent -from langflow.field_typing import LanguageModel -from langflow.io import DropdownInput, FloatInput, IntInput, MessageTextInput, SecretStrInput - - -class GroqModel(LCModelComponent): - display_name: str = "Groq" - description: str = "Generate text using Groq." - icon = "Groq" - name = "GroqModel" - - inputs = LCModelComponent._base_inputs + [ - SecretStrInput( - name="groq_api_key", - display_name="Groq API Key", - info="API key for the Groq API.", - ), - MessageTextInput( - name="groq_api_base", - display_name="Groq API Base", - info="Base URL path for API requests, leave blank if not using a proxy or service emulator.", - advanced=True, - value="https://api.groq.com", - ), - IntInput( - name="max_tokens", - display_name="Max Output Tokens", - info="The maximum number of tokens to generate.", - advanced=True, - ), - FloatInput( - name="temperature", - display_name="Temperature", - info="Run inference with this temperature. Must by in the closed interval [0.0, 1.0].", - value=0.1, - ), - IntInput( - name="n", - display_name="N", - info="Number of chat completions to generate for each prompt. 
Note that the API may not return the full n completions if duplicates are generated.", - advanced=True, - ), - DropdownInput( - name="model_name", - display_name="Model", - info="The name of the model to use.", - options=[], - refresh_button=True, - ), - ] - - def get_models(self) -> List[str]: - api_key = self.groq_api_key - base_url = self.groq_api_base or "https://api.groq.com" - url = f"{base_url}/openai/v1/models" - - headers = {"Authorization": f"Bearer {api_key}", "Content-Type": "application/json"} - - try: - response = requests.get(url, headers=headers) - response.raise_for_status() - model_list = response.json() - return [model["id"] for model in model_list.get("data", [])] - except requests.RequestException as e: - self.status = f"Error fetching models: {str(e)}" - return [] - - def update_build_config(self, build_config: dict, field_value: str, field_name: str | None = None): - if field_name == "groq_api_key" or field_name == "groq_api_base" or field_name == "model_name": - models = self.get_models() - build_config["model_name"]["options"] = models - return build_config - - def build_model(self) -> LanguageModel: # type: ignore[type-var] - groq_api_key = self.groq_api_key - model_name = self.model_name - max_tokens = self.max_tokens - temperature = self.temperature - groq_api_base = self.groq_api_base - n = self.n - stream = self.stream - - output = ChatGroq( # type: ignore - model=model_name, - max_tokens=max_tokens or None, - temperature=temperature, - base_url=groq_api_base, - n=n or 1, - api_key=SecretStr(groq_api_key), - streaming=stream, - ) - - return output # type: ignore diff --git a/src/backend/base/langflow/components/models/HuggingFaceModel.py b/src/backend/base/langflow/components/models/HuggingFaceModel.py deleted file mode 100644 index 83c27fbf8959..000000000000 --- a/src/backend/base/langflow/components/models/HuggingFaceModel.py +++ /dev/null @@ -1,60 +0,0 @@ -from tenacity import retry, stop_after_attempt, wait_fixed -from langchain_community.llms.huggingface_endpoint import HuggingFaceEndpoint - -from langflow.base.models.model import LCModelComponent -from langflow.field_typing import LanguageModel -from langflow.io import DictInput, DropdownInput, SecretStrInput, StrInput, IntInput - - -class HuggingFaceEndpointsComponent(LCModelComponent): - display_name: str = "HuggingFace" - description: str = "Generate text using Hugging Face Inference APIs." 
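The HuggingFace component removed below builds its tenacity retry decorator at call time, so the attempt count can come from the component's retry_attempts input rather than being fixed at import time. The shape of that pattern, extracted into a standalone sketch (create_with_retries is illustrative, not part of the diff):

# Call-time retry construction, as used by the HuggingFace component below.
from tenacity import retry, stop_after_attempt, wait_fixed

def create_with_retries(factory, retry_attempts: int):
    @retry(stop=stop_after_attempt(retry_attempts), wait=wait_fixed(2))
    def _attempt_create():
        return factory()

    return _attempt_create()

client = create_with_retries(lambda: object(), retry_attempts=3)  # succeeds on the first try here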
- icon = "HuggingFace" - name = "HuggingFaceModel" - - inputs = LCModelComponent._base_inputs + [ - StrInput( - name="model_id", - display_name="Model ID", - value="openai-community/gpt2", - ), - DropdownInput( - name="task", - display_name="Task", - options=["text2text-generation", "text-generation", "summarization", "translation"], - value="text-generation", - ), - SecretStrInput(name="huggingfacehub_api_token", display_name="API Token", password=True), - DictInput(name="model_kwargs", display_name="Model Keyword Arguments", advanced=True), - IntInput(name="retry_attempts", display_name="Retry Attempts", value=1, advanced=True), - ] - - def create_huggingface_endpoint( - self, model_id: str, task: str, huggingfacehub_api_token: str, model_kwargs: dict - ) -> HuggingFaceEndpoint: - retry_attempts = self.retry_attempts # Access the retry attempts input - endpoint_url = f"https://api-inference.huggingface.co/models/{model_id}" - - @retry(stop=stop_after_attempt(retry_attempts), wait=wait_fixed(2)) - def _attempt_create(): - return HuggingFaceEndpoint( - endpoint_url=endpoint_url, - task=task, - huggingfacehub_api_token=huggingfacehub_api_token, - model_kwargs=model_kwargs, - ) - - return _attempt_create() - - def build_model(self) -> LanguageModel: # type: ignore[type-var] - model_id = self.model_id - task = self.task - huggingfacehub_api_token = self.huggingfacehub_api_token - model_kwargs = self.model_kwargs or {} - - try: - llm = self.create_huggingface_endpoint(model_id, task, huggingfacehub_api_token, model_kwargs) - except Exception as e: - raise ValueError("Could not connect to HuggingFace Endpoints API.") from e - - return llm diff --git a/src/backend/base/langflow/components/models/Maritalk.py b/src/backend/base/langflow/components/models/Maritalk.py deleted file mode 100644 index e6b7c052e935..000000000000 --- a/src/backend/base/langflow/components/models/Maritalk.py +++ /dev/null @@ -1,52 +0,0 @@ -from langchain_community.chat_models import ChatMaritalk - -from langflow.base.models.model import LCModelComponent -from langflow.field_typing import LanguageModel -from langflow.field_typing.range_spec import RangeSpec -from langflow.inputs import DropdownInput, FloatInput, IntInput, SecretStrInput - - -class MaritalkModelComponent(LCModelComponent): - display_name = "Maritalk" - description = "Generates text using Maritalk LLMs." - icon = "Maritalk" - name = "Maritalk" - inputs = LCModelComponent._base_inputs + [ - IntInput( - name="max_tokens", - display_name="Max Tokens", - advanced=True, - value=512, - info="The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.", - ), - DropdownInput( - name="model_name", - display_name="Model Name", - advanced=False, - options=["sabia-2-small", "sabia-2-medium"], - value=["sabia-2-small"], - ), - SecretStrInput( - name="api_key", - display_name="Maritalk API Key", - info="The Maritalk API Key to use for the OpenAI model.", - advanced=False, - ), - FloatInput(name="temperature", display_name="Temperature", value=0.1, range_spec=RangeSpec(min=0, max=1)), - ] - - def build_model(self) -> LanguageModel: # type: ignore[type-var] - # self.output_schea is a list of dictionarie s - # let's convert it to a dictionary - api_key = self.api_key - temperature = self.temperature - model_name: str = self.model_name - max_tokens = self.max_tokens - - output = ChatMaritalk( - max_tokens=max_tokens, - model=model_name, - api_key=api_key, - temperature=temperature or 0.1, - ) - return output # type: ignore diff --git a/src/backend/base/langflow/components/models/MistralModel.py b/src/backend/base/langflow/components/models/MistralModel.py deleted file mode 100644 index 41d84e043556..000000000000 --- a/src/backend/base/langflow/components/models/MistralModel.py +++ /dev/null @@ -1,92 +0,0 @@ -from langchain_mistralai import ChatMistralAI -from pydantic.v1 import SecretStr - -from langflow.base.models.model import LCModelComponent -from langflow.field_typing import LanguageModel -from langflow.io import BoolInput, DropdownInput, FloatInput, IntInput, SecretStrInput, StrInput - - -class MistralAIModelComponent(LCModelComponent): - display_name = "MistralAI" - description = "Generates text using MistralAI LLMs." - icon = "MistralAI" - name = "MistralModel" - - inputs = LCModelComponent._base_inputs + [ - IntInput( - name="max_tokens", - display_name="Max Tokens", - advanced=True, - info="The maximum number of tokens to generate. Set to 0 for unlimited tokens.", - ), - DropdownInput( - name="model_name", - display_name="Model Name", - advanced=False, - options=[ - "open-mixtral-8x7b", - "open-mixtral-8x22b", - "mistral-small-latest", - "mistral-medium-latest", - "mistral-large-latest", - "codestral-latest", - ], - value="codestral-latest", - ), - StrInput( - name="mistral_api_base", - display_name="Mistral API Base", - advanced=True, - info=( - "The base URL of the Mistral API. Defaults to https://api.mistral.ai/v1. " - "You can change this to use other APIs like JinaChat, LocalAI and Prem." 
- ), - ), - SecretStrInput( - name="api_key", - display_name="Mistral API Key", - info="The Mistral API Key to use for the Mistral model.", - advanced=False, - ), - FloatInput(name="temperature", display_name="Temperature", advanced=False, value=0.5), - IntInput(name="max_retries", display_name="Max Retries", advanced=True, value=5), - IntInput(name="timeout", display_name="Timeout", advanced=True, value=60), - IntInput(name="max_concurrent_requests", display_name="Max Concurrent Requests", advanced=True, value=3), - FloatInput(name="top_p", display_name="Top P", advanced=True, value=1), - IntInput(name="random_seed", display_name="Random Seed", value=1, advanced=True), - BoolInput(name="safe_mode", display_name="Safe Mode", advanced=True), - ] - - def build_model(self) -> LanguageModel: # type: ignore[type-var] - mistral_api_key = self.api_key - temperature = self.temperature - model_name = self.model_name - max_tokens = self.max_tokens - mistral_api_base = self.mistral_api_base or "https://api.mistral.ai/v1" - max_retries = self.max_retries - timeout = self.timeout - max_concurrent_requests = self.max_concurrent_requests - top_p = self.top_p - random_seed = self.random_seed - safe_mode = self.safe_mode - - if mistral_api_key: - api_key = SecretStr(mistral_api_key) - else: - api_key = None - - output = ChatMistralAI( - max_tokens=max_tokens or None, - model_name=model_name, - endpoint=mistral_api_base, - api_key=api_key, - temperature=temperature, - max_retries=max_retries, - timeout=timeout, - max_concurrent_requests=max_concurrent_requests, - top_p=top_p, - random_seed=random_seed, - safe_mode=safe_mode, - ) - - return output # type: ignore diff --git a/src/backend/base/langflow/components/models/NvidiaModel.py b/src/backend/base/langflow/components/models/NvidiaModel.py deleted file mode 100644 index 40a841d7c51c..000000000000 --- a/src/backend/base/langflow/components/models/NvidiaModel.py +++ /dev/null @@ -1,81 +0,0 @@ -from typing import Any - -from langflow.base.models.model import LCModelComponent -from langflow.field_typing import LanguageModel -from langflow.inputs import DropdownInput, FloatInput, IntInput, SecretStrInput, StrInput -from langflow.schema.dotdict import dotdict - - -class NVIDIAModelComponent(LCModelComponent): - display_name = "NVIDIA" - description = "Generates text using NVIDIA LLMs." - icon = "NVIDIA" - - inputs = LCModelComponent._base_inputs + [ - IntInput( - name="max_tokens", - display_name="Max Tokens", - advanced=True, - info="The maximum number of tokens to generate. Set to 0 for unlimited tokens.", - ), - DropdownInput( - name="model_name", - display_name="Model Name", - advanced=False, - options=["mistralai/mixtral-8x7b-instruct-v0.1"], - value="mistralai/mixtral-8x7b-instruct-v0.1", - ), - StrInput( - name="base_url", - display_name="NVIDIA Base URL", - value="https://integrate.api.nvidia.com/v1", - refresh_button=True, - info="The base URL of the NVIDIA API. 
Defaults to https://integrate.api.nvidia.com/v1.", - ), - SecretStrInput( - name="nvidia_api_key", - display_name="NVIDIA API Key", - info="The NVIDIA API Key.", - advanced=False, - value="NVIDIA_API_KEY", - ), - FloatInput(name="temperature", display_name="Temperature", value=0.1), - IntInput( - name="seed", - display_name="Seed", - info="The seed controls the reproducibility of the job.", - advanced=True, - value=1, - ), - ] - - def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None): - if field_name == "base_url" and field_value: - try: - build_model = self.build_model() - ids = [model.id for model in build_model.available_models] # type: ignore - build_config["model_name"]["options"] = ids - build_config["model_name"]["value"] = ids[0] - except Exception as e: - raise ValueError(f"Error getting model names: {e}") - return build_config - - def build_model(self) -> LanguageModel: # type: ignore[type-var] - try: - from langchain_nvidia_ai_endpoints import ChatNVIDIA - except ImportError: - raise ImportError("Please install langchain-nvidia-ai-endpoints to use the NVIDIA model.") - nvidia_api_key = self.nvidia_api_key - temperature = self.temperature - model_name: str = self.model_name - max_tokens = self.max_tokens - seed = self.seed - output = ChatNVIDIA( - max_tokens=max_tokens or None, - model=model_name, - base_url=self.base_url, - api_key=nvidia_api_key, # type: ignore - temperature=temperature or 0.1, - seed=seed, - ) - return output # type: ignore diff --git a/src/backend/base/langflow/components/models/OllamaModel.py b/src/backend/base/langflow/components/models/OllamaModel.py deleted file mode 100644 index f2c07d10d016..000000000000 --- a/src/backend/base/langflow/components/models/OllamaModel.py +++ /dev/null @@ -1,257 +0,0 @@ -from typing import Any - -import httpx -from langchain_community.chat_models import ChatOllama - -from langflow.base.models.model import LCModelComponent -from langflow.field_typing import LanguageModel -from langflow.io import BoolInput, DictInput, DropdownInput, FloatInput, IntInput, StrInput - - -class ChatOllamaComponent(LCModelComponent): - display_name = "Ollama" - description = "Generate text using Ollama Local LLMs." 
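The deleted NvidiaModel.py refreshed its dropdown differently: instead of a raw HTTP call, `update_build_config()` built the `ChatNVIDIA` client and read its `available_models`. A minimal sketch of that pattern, assuming the `langchain-nvidia-ai-endpoints` package is installed and `NVIDIA_API_KEY` is set:

```python
# Minimal sketch of the NVIDIA refresh pattern: build the client, then read
# available_models to repopulate the model dropdown.
import os

from langchain_nvidia_ai_endpoints import ChatNVIDIA

llm = ChatNVIDIA(
    base_url="https://integrate.api.nvidia.com/v1",
    api_key=os.environ["NVIDIA_API_KEY"],
)
model_ids = [model.id for model in llm.available_models]
print(model_ids[:5])  # e.g. ["mistralai/mixtral-8x7b-instruct-v0.1", ...]
```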
- icon = "Ollama" - name = "OllamaModel" - - def update_build_config(self, build_config: dict, field_value: Any, field_name: str | None = None): - if field_name == "mirostat": - if field_value == "Disabled": - build_config["mirostat_eta"]["advanced"] = True - build_config["mirostat_tau"]["advanced"] = True - build_config["mirostat_eta"]["value"] = None - build_config["mirostat_tau"]["value"] = None - - else: - build_config["mirostat_eta"]["advanced"] = False - build_config["mirostat_tau"]["advanced"] = False - - if field_value == "Mirostat 2.0": - build_config["mirostat_eta"]["value"] = 0.2 - build_config["mirostat_tau"]["value"] = 10 - else: - build_config["mirostat_eta"]["value"] = 0.1 - build_config["mirostat_tau"]["value"] = 5 - - if field_name == "model_name": - base_url_dict = build_config.get("base_url", {}) - base_url_load_from_db = base_url_dict.get("load_from_db", False) - base_url_value = base_url_dict.get("value") - if base_url_load_from_db: - base_url_value = self.variables(base_url_value) - elif not base_url_value: - base_url_value = "http://localhost:11434" - build_config["model_name"]["options"] = self.get_model(base_url_value + "/api/tags") - - if field_name == "keep_alive_flag": - if field_value == "Keep": - build_config["keep_alive"]["value"] = "-1" - build_config["keep_alive"]["advanced"] = True - elif field_value == "Immediately": - build_config["keep_alive"]["value"] = "0" - build_config["keep_alive"]["advanced"] = True - else: - build_config["keep_alive"]["advanced"] = False - - return build_config - - def get_model(self, url: str) -> list[str]: - try: - with httpx.Client() as client: - response = client.get(url) - response.raise_for_status() - data = response.json() - - model_names = [model["name"] for model in data.get("models", [])] - return model_names - except Exception as e: - raise ValueError("Could not retrieve models. Please, make sure Ollama is running.") from e - - inputs = LCModelComponent._base_inputs + [ - StrInput( - name="base_url", - display_name="Base URL", - info="Endpoint of the Ollama API. Defaults to 'http://localhost:11434' if not specified.", - value="http://localhost:11434", - ), - DropdownInput( - name="model_name", - display_name="Model Name", - value="llama3.1", - info="Refer to https://ollama.com/library for more models.", - refresh_button=True, - ), - FloatInput( - name="temperature", - display_name="Temperature", - value=0.2, - info="Controls the creativity of model responses.", - ), - StrInput( - name="format", - display_name="Format", - info="Specify the format of the output (e.g., json).", - advanced=True, - ), - DictInput( - name="metadata", - display_name="Metadata", - info="Metadata to add to the run trace.", - advanced=True, - ), - DropdownInput( - name="mirostat", - display_name="Mirostat", - options=["Disabled", "Mirostat", "Mirostat 2.0"], - info="Enable/disable Mirostat sampling for controlling perplexity.", - value="Disabled", - advanced=True, - real_time_refresh=True, - ), - FloatInput( - name="mirostat_eta", - display_name="Mirostat Eta", - info="Learning rate for Mirostat algorithm. (Default: 0.1)", - advanced=True, - ), - FloatInput( - name="mirostat_tau", - display_name="Mirostat Tau", - info="Controls the balance between coherence and diversity of the output. (Default: 5.0)", - advanced=True, - ), - IntInput( - name="num_ctx", - display_name="Context Window Size", - info="Size of the context window for generating tokens. 
(Default: 2048)", - advanced=True, - ), - IntInput( - name="num_gpu", - display_name="Number of GPUs", - info="Number of GPUs to use for computation. (Default: 1 on macOS, 0 to disable)", - advanced=True, - ), - IntInput( - name="num_thread", - display_name="Number of Threads", - info="Number of threads to use during computation. (Default: detected for optimal performance)", - advanced=True, - ), - IntInput( - name="repeat_last_n", - display_name="Repeat Last N", - info="How far back the model looks to prevent repetition. (Default: 64, 0 = disabled, -1 = num_ctx)", - advanced=True, - ), - FloatInput( - name="repeat_penalty", - display_name="Repeat Penalty", - info="Penalty for repetitions in generated text. (Default: 1.1)", - advanced=True, - ), - FloatInput( - name="tfs_z", - display_name="TFS Z", - info="Tail free sampling value. (Default: 1)", - advanced=True, - ), - IntInput( - name="timeout", - display_name="Timeout", - info="Timeout for the request stream.", - advanced=True, - ), - IntInput( - name="top_k", - display_name="Top K", - info="Limits token selection to top K. (Default: 40)", - advanced=True, - ), - FloatInput( - name="top_p", - display_name="Top P", - info="Works together with top-k. (Default: 0.9)", - advanced=True, - ), - BoolInput( - name="verbose", - display_name="Verbose", - info="Whether to print out response text.", - ), - StrInput( - name="tags", - display_name="Tags", - info="Comma-separated list of tags to add to the run trace.", - advanced=True, - ), - StrInput( - name="stop_tokens", - display_name="Stop Tokens", - info="Comma-separated list of tokens to signal the model to stop generating text.", - advanced=True, - ), - StrInput( - name="system", - display_name="System", - info="System to use for generating text.", - advanced=True, - ), - StrInput( - name="template", - display_name="Template", - info="Template to use for generating text.", - advanced=True, - ), - ] - - def build_model(self) -> LanguageModel: # type: ignore[type-var] - # Mapping mirostat settings to their corresponding values - mirostat_options = {"Mirostat": 1, "Mirostat 2.0": 2} - - # Default to 0 for 'Disabled' - mirostat_value = mirostat_options.get(self.mirostat, 0) # type: ignore - - # Set mirostat_eta and mirostat_tau to None if mirostat is disabled - if mirostat_value == 0: - mirostat_eta = None - mirostat_tau = None - else: - mirostat_eta = self.mirostat_eta - mirostat_tau = self.mirostat_tau - - # Mapping system settings to their corresponding values - llm_params = { - "base_url": self.base_url, - "model": self.model_name, - "mirostat": mirostat_value, - "format": self.format, - "metadata": self.metadata, - "tags": self.tags.split(",") if self.tags else None, - "mirostat_eta": mirostat_eta, - "mirostat_tau": mirostat_tau, - "num_ctx": self.num_ctx or None, - "num_gpu": self.num_gpu or None, - "num_thread": self.num_thread or None, - "repeat_last_n": self.repeat_last_n or None, - "repeat_penalty": self.repeat_penalty or None, - "temperature": self.temperature or None, - "stop": self.stop_tokens.split(",") if self.stop_tokens else None, - "system": self.system, - "template": self.template, - "tfs_z": self.tfs_z or None, - "timeout": self.timeout or None, - "top_k": self.top_k or None, - "top_p": self.top_p or None, - "verbose": self.verbose, - } - - # Remove parameters with None values - llm_params = {k: v for k, v in llm_params.items() if v is not None} - - try: - output = ChatOllama(**llm_params) # type: ignore - except Exception as e: - raise ValueError("Could not initialize Ollama 
LLM.") from e - - return output # type: ignore diff --git a/src/backend/base/langflow/components/models/OpenAIModel.py b/src/backend/base/langflow/components/models/OpenAIModel.py deleted file mode 100644 index 6dea57358103..000000000000 --- a/src/backend/base/langflow/components/models/OpenAIModel.py +++ /dev/null @@ -1,133 +0,0 @@ -import operator -from functools import reduce - -from langflow.field_typing.range_spec import RangeSpec -from langchain_openai import ChatOpenAI -from pydantic.v1 import SecretStr - -from langflow.base.models.model import LCModelComponent -from langflow.base.models.openai_constants import OPENAI_MODEL_NAMES -from langflow.field_typing import LanguageModel -from langflow.inputs import ( - BoolInput, - DictInput, - DropdownInput, - FloatInput, - IntInput, - SecretStrInput, - StrInput, -) - - -class OpenAIModelComponent(LCModelComponent): - display_name = "OpenAI" - description = "Generates text using OpenAI LLMs." - icon = "OpenAI" - name = "OpenAIModel" - - inputs = LCModelComponent._base_inputs + [ - IntInput( - name="max_tokens", - display_name="Max Tokens", - advanced=True, - info="The maximum number of tokens to generate. Set to 0 for unlimited tokens.", - range_spec=RangeSpec(min=0, max=128000), - ), - DictInput(name="model_kwargs", display_name="Model Kwargs", advanced=True), - BoolInput( - name="json_mode", - display_name="JSON Mode", - advanced=True, - info="If True, it will output JSON regardless of passing a schema.", - ), - DictInput( - name="output_schema", - is_list=True, - display_name="Schema", - advanced=True, - info="The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.", - ), - DropdownInput( - name="model_name", - display_name="Model Name", - advanced=False, - options=OPENAI_MODEL_NAMES, - value=OPENAI_MODEL_NAMES[0], - ), - StrInput( - name="openai_api_base", - display_name="OpenAI API Base", - advanced=True, - info="The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. 
You can change this to use other APIs like JinaChat, LocalAI and Prem.", - ), - SecretStrInput( - name="api_key", - display_name="OpenAI API Key", - info="The OpenAI API Key to use for the OpenAI model.", - advanced=False, - value="OPENAI_API_KEY", - ), - FloatInput(name="temperature", display_name="Temperature", value=0.1), - IntInput( - name="seed", - display_name="Seed", - info="The seed controls the reproducibility of the job.", - advanced=True, - value=1, - ), - ] - - def build_model(self) -> LanguageModel: # type: ignore[type-var] - # self.output_schema is a list of dictionaries - # let's convert it to a dictionary - output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {}) - openai_api_key = self.api_key - temperature = self.temperature - model_name: str = self.model_name - max_tokens = self.max_tokens - model_kwargs = self.model_kwargs or {} - openai_api_base = self.openai_api_base or "https://api.openai.com/v1" - json_mode = bool(output_schema_dict) or self.json_mode - seed = self.seed - - if openai_api_key: - api_key = SecretStr(openai_api_key) - else: - api_key = None - output = ChatOpenAI( - max_tokens=max_tokens or None, - model_kwargs=model_kwargs, - model=model_name, - base_url=openai_api_base, - api_key=api_key, - temperature=temperature if temperature is not None else 0.1, - seed=seed, - ) - if json_mode: - if output_schema_dict: - output = output.with_structured_output(schema=output_schema_dict, method="json_mode") # type: ignore - else: - output = output.bind(response_format={"type": "json_object"}) # type: ignore - - return output # type: ignore - - def _get_exception_message(self, e: Exception): - """ - Get a message from an OpenAI exception. - - Args: - exception (Exception): The exception to get the message from. - - Returns: - str: The message from the exception. - """ - - try: - from openai import BadRequestError - except ImportError: - return - if isinstance(e, BadRequestError): - message = e.body.get("message") # type: ignore - if message: - return message - return diff --git a/src/backend/base/langflow/components/models/PerplexityModel.py b/src/backend/base/langflow/components/models/PerplexityModel.py deleted file mode 100644 index 7265db9b47ec..000000000000 --- a/src/backend/base/langflow/components/models/PerplexityModel.py +++ /dev/null @@ -1,83 +0,0 @@ -from langchain_community.chat_models import ChatPerplexity -from pydantic.v1 import SecretStr - -from langflow.base.models.model import LCModelComponent -from langflow.field_typing import LanguageModel -from langflow.io import FloatInput, SecretStrInput, DropdownInput, IntInput - - -class PerplexityComponent(LCModelComponent): - display_name = "Perplexity" - description = "Generate text using Perplexity LLMs." 
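The deleted OpenAIModel.py carries one non-obvious step: `output_schema` arrives as a list of single-key dicts, is folded into one dict with `operator.ior`, and a non-empty result forces JSON mode even when the `json_mode` toggle is off. A minimal sketch of just that fold (the field specs below are hypothetical):

```python
# Minimal sketch of the schema fold from the deleted build_model(): a list of
# single-key dicts becomes one dict, and a non-empty result implies JSON mode.
import operator
from functools import reduce

output_schema = [{"name": "str"}, {"age": "int"}]  # hypothetical field specs
output_schema_dict: dict[str, str] = reduce(operator.ior, output_schema or [], {})
json_mode = bool(output_schema_dict)

print(output_schema_dict)  # {'name': 'str', 'age': 'int'}
print(json_mode)           # True
```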
- documentation = "https://python.langchain.com/v0.2/docs/integrations/chat/perplexity/" - icon = "Perplexity" - name = "PerplexityModel" - - inputs = LCModelComponent._base_inputs + [ - DropdownInput( - name="model_name", - display_name="Model Name", - advanced=False, - options=[ - "llama-3.1-sonar-small-128k-online", - "llama-3.1-sonar-large-128k-online", - "llama-3.1-sonar-huge-128k-online", - "llama-3.1-sonar-small-128k-chat", - "llama-3.1-sonar-large-128k-chat", - "llama-3.1-8b-instruct", - "llama-3.1-70b-instruct", - ], - value="llama-3.1-sonar-small-128k-online", - ), - IntInput( - name="max_output_tokens", - display_name="Max Output Tokens", - info="The maximum number of tokens to generate.", - ), - SecretStrInput( - name="api_key", - display_name="Perplexity API Key", - info="The Perplexity API Key to use for the Perplexity model.", - advanced=False, - ), - FloatInput(name="temperature", display_name="Temperature", value=0.75), - FloatInput( - name="top_p", - display_name="Top P", - info="The maximum cumulative probability of tokens to consider when sampling.", - advanced=True, - ), - IntInput( - name="n", - display_name="N", - info="Number of chat completions to generate for each prompt. Note that the API may not return the full n completions if duplicates are generated.", - advanced=True, - ), - IntInput( - name="top_k", - display_name="Top K", - info="Decode using top-k sampling: consider the set of top_k most probable tokens. Must be positive.", - advanced=True, - ), - ] - - def build_model(self) -> LanguageModel: # type: ignore[type-var] - api_key = SecretStr(self.api_key).get_secret_value() - temperature = self.temperature - model = self.model_name - max_output_tokens = self.max_output_tokens - top_k = self.top_k - top_p = self.top_p - n = self.n - - output = ChatPerplexity( - model=model, - temperature=temperature or 0.75, - pplx_api_key=api_key, - top_k=top_k or None, - top_p=top_p or None, - n=n or 1, - max_output_tokens=max_output_tokens, - ) - - return output # type: ignore diff --git a/src/backend/base/langflow/components/models/VertexAiModel.py b/src/backend/base/langflow/components/models/VertexAiModel.py deleted file mode 100644 index 76f91cdad985..000000000000 --- a/src/backend/base/langflow/components/models/VertexAiModel.py +++ /dev/null @@ -1,71 +0,0 @@ -from typing import cast - -from langflow.base.models.model import LCModelComponent -from langflow.field_typing import LanguageModel -from langflow.inputs import MessageTextInput -from langflow.io import BoolInput, FileInput, FloatInput, IntInput, StrInput - - -class ChatVertexAIComponent(LCModelComponent): - display_name = "Vertex AI" - description = "Generate text using Vertex AI LLMs." - icon = "VertexAI" - name = "VertexAiModel" - - inputs = LCModelComponent._base_inputs + [ - FileInput( - name="credentials", - display_name="Credentials", - info="JSON credentials file. 
Leave empty to fallback to environment variables", - file_types=["json"], - ), - MessageTextInput(name="model_name", display_name="Model Name", value="gemini-1.5-pro"), - StrInput(name="project", display_name="Project", info="The project ID.", advanced=True), - StrInput(name="location", display_name="Location", value="us-central1", advanced=True), - IntInput(name="max_output_tokens", display_name="Max Output Tokens", advanced=True), - IntInput(name="max_retries", display_name="Max Retries", value=1, advanced=True), - FloatInput(name="temperature", value=0.0, display_name="Temperature"), - IntInput(name="top_k", display_name="Top K", advanced=True), - FloatInput(name="top_p", display_name="Top P", value=0.95, advanced=True), - BoolInput(name="verbose", display_name="Verbose", value=False, advanced=True), - ] - - def build_model(self) -> LanguageModel: - try: - from langchain_google_vertexai import ChatVertexAI - except ImportError: - raise ImportError( - "Please install the langchain-google-vertexai package to use the VertexAIEmbeddings component." - ) - location = self.location or None - if self.credentials: - from google.cloud import aiplatform - from google.oauth2 import service_account - - credentials = service_account.Credentials.from_service_account_file(self.credentials) - project = self.project or credentials.project_id - # ChatVertexAI sometimes skip manual credentials initialization - aiplatform.init( - project=project, - location=location, - credentials=credentials, - ) - else: - project = self.project or None - credentials = None - - return cast( - LanguageModel, - ChatVertexAI( - credentials=credentials, - location=location, - project=project, - max_output_tokens=self.max_output_tokens or None, - max_retries=self.max_retries, - model_name=self.model_name, - temperature=self.temperature, - top_k=self.top_k or None, - top_p=self.top_p, - verbose=self.verbose, - ), - ) diff --git a/src/backend/base/langflow/components/models/__init__.py b/src/backend/base/langflow/components/models/__init__.py index 08385063c664..974f32a9c7de 100644 --- a/src/backend/base/langflow/components/models/__init__.py +++ b/src/backend/base/langflow/components/models/__init__.py @@ -1,28 +1,37 @@ -from .AIMLModel import AIMLModelComponent -from .AmazonBedrockModel import AmazonBedrockComponent -from .AnthropicModel import AnthropicModelComponent -from .AzureOpenAIModel import AzureChatOpenAIComponent -from .BaiduQianfanChatModel import QianfanChatEndpointComponent -from .CohereModel import CohereComponent -from .GoogleGenerativeAIModel import GoogleGenerativeAIComponent -from .HuggingFaceModel import HuggingFaceEndpointsComponent -from .OllamaModel import ChatOllamaComponent -from .OpenAIModel import OpenAIModelComponent -from .VertexAiModel import ChatVertexAIComponent -from .PerplexityModel import PerplexityComponent +from .aiml import AIMLModelComponent +from .amazon_bedrock import AmazonBedrockComponent +from .anthropic import AnthropicModelComponent +from .azure_openai import AzureChatOpenAIComponent +from .baidu_qianfan_chat import QianfanChatEndpointComponent +from .cohere import CohereComponent +from .google_generative_ai import GoogleGenerativeAIComponent +from .groq import GroqModel +from .huggingface import HuggingFaceEndpointsComponent +from .lmstudiomodel import LMStudioModelComponent +from .maritalk import MaritalkModelComponent +from .mistral import MistralAIModelComponent +from .nvidia import NVIDIAModelComponent +from .ollama import ChatOllamaComponent +from .openai import 
OpenAIModelComponent +from .perplexity import PerplexityComponent +from .vertexai import ChatVertexAIComponent __all__ = [ "AIMLModelComponent", "AmazonBedrockComponent", "AnthropicModelComponent", "AzureChatOpenAIComponent", - "QianfanChatEndpointComponent", + "ChatOllamaComponent", + "ChatVertexAIComponent", "CohereComponent", "GoogleGenerativeAIComponent", + "GroqModel", "HuggingFaceEndpointsComponent", - "ChatOllamaComponent", + "LMStudioModelComponent", + "MaritalkModelComponent", + "MistralAIModelComponent", + "NVIDIAModelComponent", "OpenAIModelComponent", - "ChatVertexAIComponent", "PerplexityComponent", - "base", + "QianfanChatEndpointComponent", ] diff --git a/src/backend/base/langflow/components/models/aiml.py b/src/backend/base/langflow/components/models/aiml.py new file mode 100644 index 000000000000..7bf3500a2fb9 --- /dev/null +++ b/src/backend/base/langflow/components/models/aiml.py @@ -0,0 +1,112 @@ +from langchain_openai import ChatOpenAI +from pydantic.v1 import SecretStr + +from langflow.base.models.aiml_constants import AIML_CHAT_MODELS +from langflow.base.models.model import LCModelComponent +from langflow.field_typing import LanguageModel +from langflow.field_typing.range_spec import RangeSpec +from langflow.inputs import ( + DictInput, + DropdownInput, + FloatInput, + IntInput, + SecretStrInput, + StrInput, +) +from langflow.inputs.inputs import HandleInput + + +class AIMLModelComponent(LCModelComponent): + display_name = "AIML" + description = "Generates text using AIML LLMs." + icon = "AIML" + name = "AIMLModel" + documentation = "https://docs.aimlapi.com/api-reference" + + inputs = [ + *LCModelComponent._base_inputs, + IntInput( + name="max_tokens", + display_name="Max Tokens", + advanced=True, + info="The maximum number of tokens to generate. Set to 0 for unlimited tokens.", + range_spec=RangeSpec(min=0, max=128000), + ), + DictInput(name="model_kwargs", display_name="Model Kwargs", advanced=True), + DropdownInput( + name="model_name", + display_name="Model Name", + advanced=False, + options=AIML_CHAT_MODELS, + value=AIML_CHAT_MODELS[0], + ), + StrInput( + name="aiml_api_base", + display_name="AIML API Base", + advanced=True, + info="The base URL of the OpenAI API. Defaults to https://api.aimlapi.com . 
" + "You can change this to use other APIs like JinaChat, LocalAI and Prem.", + ), + SecretStrInput( + name="api_key", + display_name="AIML API Key", + info="The AIML API Key to use for the OpenAI model.", + advanced=False, + value="AIML_API_KEY", + ), + FloatInput(name="temperature", display_name="Temperature", value=0.1), + IntInput( + name="seed", + display_name="Seed", + info="The seed controls the reproducibility of the job.", + advanced=True, + value=1, + ), + HandleInput( + name="output_parser", + display_name="Output Parser", + info="The parser to use to parse the output of the model", + advanced=True, + input_types=["OutputParser"], + ), + ] + + def build_model(self) -> LanguageModel: # type: ignore[type-var] + aiml_api_key = self.api_key + temperature = self.temperature + model_name: str = self.model_name + max_tokens = self.max_tokens + model_kwargs = self.model_kwargs or {} + aiml_api_base = self.aiml_api_base or "https://api.aimlapi.com" + seed = self.seed + + openai_api_key = aiml_api_key.get_secret_value() if isinstance(aiml_api_key, SecretStr) else aiml_api_key + + return ChatOpenAI( + model=model_name, + temperature=temperature, + api_key=openai_api_key, + base_url=aiml_api_base, + max_tokens=max_tokens or None, + seed=seed, + **model_kwargs, + ) + + def _get_exception_message(self, e: Exception): + """Get a message from an OpenAI exception. + + Args: + e (Exception): The exception to get the message from. + + Returns: + str: The message from the exception. + """ + try: + from openai.error import BadRequestError + except ImportError: + return None + if isinstance(e, BadRequestError): + message = e.json_body.get("error", {}).get("message", "") + if message: + return message + return None diff --git a/src/backend/base/langflow/components/models/amazon_bedrock.py b/src/backend/base/langflow/components/models/amazon_bedrock.py new file mode 100644 index 000000000000..4c2c657360de --- /dev/null +++ b/src/backend/base/langflow/components/models/amazon_bedrock.py @@ -0,0 +1,130 @@ +from langflow.base.models.aws_constants import AWS_REGIONS, AWS_MODEL_IDs +from langflow.base.models.model import LCModelComponent +from langflow.field_typing import LanguageModel +from langflow.inputs import MessageTextInput, SecretStrInput +from langflow.inputs.inputs import HandleInput +from langflow.io import DictInput, DropdownInput + + +class AmazonBedrockComponent(LCModelComponent): + display_name: str = "Amazon Bedrock" + description: str = "Generate text using Amazon Bedrock LLMs." + icon = "Amazon" + name = "AmazonBedrockModel" + + inputs = [ + *LCModelComponent._base_inputs, + DropdownInput( + name="model_id", + display_name="Model ID", + options=AWS_MODEL_IDs, + value="anthropic.claude-3-haiku-20240307-v1:0", + info="List of available model IDs to choose from.", + ), + SecretStrInput( + name="aws_access_key_id", + display_name="AWS Access Key ID", + info="The access key for your AWS account." + "Usually set in Python code as the environment variable 'AWS_ACCESS_KEY_ID'.", + value="AWS_ACCESS_KEY_ID", + ), + SecretStrInput( + name="aws_secret_access_key", + display_name="AWS Secret Access Key", + info="The secret key for your AWS account. " + "Usually set in Python code as the environment variable 'AWS_SECRET_ACCESS_KEY'.", + value="AWS_SECRET_ACCESS_KEY", + ), + SecretStrInput( + name="aws_session_token", + display_name="AWS Session Token", + advanced=False, + info="The session key for your AWS account. " + "Only needed for temporary credentials. 
" + "Usually set in Python code as the environment variable 'AWS_SESSION_TOKEN'.", + load_from_db=False, + ), + SecretStrInput( + name="credentials_profile_name", + display_name="Credentials Profile Name", + advanced=True, + info="The name of the profile to use from your " + "~/.aws/credentials file. " + "If not provided, the default profile will be used.", + load_from_db=False, + ), + DropdownInput( + name="region_name", + display_name="Region Name", + value="us-east-1", + options=AWS_REGIONS, + info="The AWS region where your Bedrock resources are located.", + ), + DictInput( + name="model_kwargs", + display_name="Model Kwargs", + advanced=True, + is_list=True, + info="Additional keyword arguments to pass to the model.", + ), + MessageTextInput( + name="endpoint_url", + display_name="Endpoint URL", + advanced=True, + info="The URL of the Bedrock endpoint to use.", + ), + HandleInput( + name="output_parser", + display_name="Output Parser", + info="The parser to use to parse the output of the model", + advanced=True, + input_types=["OutputParser"], + ), + ] + + def build_model(self) -> LanguageModel: # type: ignore[type-var] + try: + from langchain_aws import ChatBedrock + except ImportError as e: + msg = "langchain_aws is not installed. Please install it with `pip install langchain_aws`." + raise ImportError(msg) from e + try: + import boto3 + except ImportError as e: + msg = "boto3 is not installed. Please install it with `pip install boto3`." + raise ImportError(msg) from e + if self.aws_access_key_id or self.aws_secret_access_key: + try: + session = boto3.Session( + aws_access_key_id=self.aws_access_key_id, + aws_secret_access_key=self.aws_secret_access_key, + aws_session_token=self.aws_session_token, + ) + except Exception as e: + msg = "Could not create a boto3 session." + raise ValueError(msg) from e + elif self.credentials_profile_name: + session = boto3.Session(profile_name=self.credentials_profile_name) + else: + session = boto3.Session() + + client_params = {} + if self.endpoint_url: + client_params["endpoint_url"] = self.endpoint_url + if self.region_name: + client_params["region_name"] = self.region_name + + boto3_client = session.client("bedrock-runtime", **client_params) + try: + output = ChatBedrock( + client=boto3_client, + model_id=self.model_id, + region_name=self.region_name, + model_kwargs=self.model_kwargs, + endpoint_url=self.endpoint_url, + streaming=self.stream, + ) + except Exception as e: + msg = "Could not connect to AmazonBedrock API." + raise ValueError(msg) from e + return output diff --git a/src/backend/base/langflow/components/models/anthropic.py b/src/backend/base/langflow/components/models/anthropic.py new file mode 100644 index 000000000000..7fcab7e371ff --- /dev/null +++ b/src/backend/base/langflow/components/models/anthropic.py @@ -0,0 +1,100 @@ +from pydantic.v1 import SecretStr + +from langflow.base.models.model import LCModelComponent +from langflow.field_typing import LanguageModel +from langflow.inputs.inputs import HandleInput +from langflow.io import DropdownInput, FloatInput, IntInput, MessageTextInput, SecretStrInput + + +class AnthropicModelComponent(LCModelComponent): + display_name = "Anthropic" + description = "Generate text using Anthropic Chat&Completion LLMs with prefill support." + icon = "Anthropic" + name = "AnthropicModel" + + inputs = [ + *LCModelComponent._base_inputs, + IntInput( + name="max_tokens", + display_name="Max Tokens", + advanced=True, + value=4096, + info="The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.", + ), + DropdownInput( + name="model", + display_name="Model Name", + options=[ + "claude-3-5-sonnet-20240620", + "claude-3-opus-20240229", + "claude-3-sonnet-20240229", + "claude-3-haiku-20240307", + ], + info="https://python.langchain.com/docs/integrations/chat/anthropic", + value="claude-3-5-sonnet-20240620", + ), + SecretStrInput(name="anthropic_api_key", display_name="Anthropic API Key", info="Your Anthropic API key."), + FloatInput(name="temperature", display_name="Temperature", value=0.1), + MessageTextInput( + name="anthropic_api_url", + display_name="Anthropic API URL", + advanced=True, + info="Endpoint of the Anthropic API. Defaults to 'https://api.anthropic.com' if not specified.", + ), + MessageTextInput( + name="prefill", display_name="Prefill", info="Prefill text to guide the model's response.", advanced=True + ), + HandleInput( + name="output_parser", + display_name="Output Parser", + info="The parser to use to parse the output of the model", + advanced=True, + input_types=["OutputParser"], + ), + ] + + def build_model(self) -> LanguageModel: # type: ignore[type-var] + try: + from langchain_anthropic.chat_models import ChatAnthropic + except ImportError as e: + msg = "langchain_anthropic is not installed. Please install it with `pip install langchain_anthropic`." + raise ImportError(msg) from e + model = self.model + anthropic_api_key = self.anthropic_api_key + max_tokens = self.max_tokens + temperature = self.temperature + anthropic_api_url = self.anthropic_api_url or "https://api.anthropic.com" + + try: + output = ChatAnthropic( + model=model, + anthropic_api_key=(SecretStr(anthropic_api_key).get_secret_value() if anthropic_api_key else None), + max_tokens_to_sample=max_tokens, + temperature=temperature, + anthropic_api_url=anthropic_api_url, + streaming=self.stream, + ) + except Exception as e: + msg = "Could not connect to Anthropic API." + raise ValueError(msg) from e + + return output + + def _get_exception_message(self, exception: Exception) -> str | None: + """Get a message from an Anthropic exception. + + Args: + exception (Exception): The exception to get the message from. + + Returns: + str: The message from the exception. + """ + try: + from anthropic import BadRequestError + except ImportError: + return None + if isinstance(exception, BadRequestError): + message = exception.body.get("error", {}).get("message") + if message: + return message + return None diff --git a/src/backend/base/langflow/components/models/azure_openai.py b/src/backend/base/langflow/components/models/azure_openai.py new file mode 100644 index 000000000000..da9d1cecc99e --- /dev/null +++ b/src/backend/base/langflow/components/models/azure_openai.py @@ -0,0 +1,92 @@ +from langchain_openai import AzureChatOpenAI + +from langflow.base.models.model import LCModelComponent +from langflow.field_typing import LanguageModel +from langflow.inputs import MessageTextInput +from langflow.inputs.inputs import HandleInput +from langflow.io import DropdownInput, FloatInput, IntInput, SecretStrInput + + +class AzureChatOpenAIComponent(LCModelComponent): + display_name: str = "Azure OpenAI" + description: str = "Generate text using Azure OpenAI LLMs." 
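The new `amazon_bedrock.py` above layers three credential paths: explicit keys, a named profile, or boto3's default chain, and hands the resulting `bedrock-runtime` client to `ChatBedrock`. A minimal sketch of the default-chain path, assuming `boto3` and `langchain-aws` are installed and AWS credentials are configured:

```python
# Minimal sketch of the Bedrock wiring via boto3's default credential chain
# (the component also supports explicit keys or a named profile).
import boto3
from langchain_aws import ChatBedrock

session = boto3.Session()  # falls back to env vars, ~/.aws, instance roles
client = session.client("bedrock-runtime", region_name="us-east-1")

llm = ChatBedrock(
    client=client,
    model_id="anthropic.claude-3-haiku-20240307-v1:0",
    region_name="us-east-1",
    streaming=False,
)
```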
+ documentation: str = "https://python.langchain.com/docs/integrations/llms/azure_openai" + beta = False + icon = "Azure" + name = "AzureOpenAIModel" + + AZURE_OPENAI_API_VERSIONS = [ + "2024-06-01", + "2024-07-01-preview", + "2024-08-01-preview", + "2024-09-01-preview", + "2024-10-01-preview", + "2023-05-15", + "2023-12-01-preview", + "2024-02-15-preview", + "2024-03-01-preview", + ] + + inputs = [ + *LCModelComponent._base_inputs, + MessageTextInput( + name="azure_endpoint", + display_name="Azure Endpoint", + info="Your Azure endpoint, including the resource. Example: `https://example-resource.azure.openai.com/`", + required=True, + ), + MessageTextInput(name="azure_deployment", display_name="Deployment Name", required=True), + SecretStrInput(name="api_key", display_name="API Key"), + DropdownInput( + name="api_version", + display_name="API Version", + options=sorted(AZURE_OPENAI_API_VERSIONS, reverse=True), + value=next( + ( + version + for version in sorted(AZURE_OPENAI_API_VERSIONS, reverse=True) + if not version.endswith("-preview") + ), + AZURE_OPENAI_API_VERSIONS[0], + ), + ), + FloatInput(name="temperature", display_name="Temperature", value=0.7), + IntInput( + name="max_tokens", + display_name="Max Tokens", + advanced=True, + info="The maximum number of tokens to generate. Set to 0 for unlimited tokens.", + ), + HandleInput( + name="output_parser", + display_name="Output Parser", + info="The parser to use to parse the output of the model", + advanced=True, + input_types=["OutputParser"], + ), + ] + + def build_model(self) -> LanguageModel: # type: ignore[type-var] + azure_endpoint = self.azure_endpoint + azure_deployment = self.azure_deployment + api_version = self.api_version + api_key = self.api_key + temperature = self.temperature + max_tokens = self.max_tokens + stream = self.stream + + try: + output = AzureChatOpenAI( + azure_endpoint=azure_endpoint, + azure_deployment=azure_deployment, + api_version=api_version, + api_key=api_key, + temperature=temperature, + max_tokens=max_tokens or None, + streaming=stream, + ) + except Exception as e: + msg = f"Could not connect to AzureOpenAI API: {e}" + raise ValueError(msg) from e + + return output diff --git a/src/backend/base/langflow/components/models/baidu_qianfan_chat.py b/src/backend/base/langflow/components/models/baidu_qianfan_chat.py new file mode 100644 index 000000000000..b5ae6b03a826 --- /dev/null +++ b/src/backend/base/langflow/components/models/baidu_qianfan_chat.py @@ -0,0 +1,102 @@ +from langchain_community.chat_models.baidu_qianfan_endpoint import QianfanChatEndpoint +from pydantic.v1 import SecretStr + +from langflow.base.models.model import LCModelComponent +from langflow.field_typing.constants import LanguageModel +from langflow.inputs.inputs import HandleInput +from langflow.io import DropdownInput, FloatInput, MessageTextInput, SecretStrInput + + +class QianfanChatEndpointComponent(LCModelComponent): + display_name: str = "Qianfan" + description: str = "Generate text using Baidu Qianfan LLMs." 
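The new `azure_openai.py` above defaults `api_version` with a small trick: sort the known versions newest-first and take the first one that does not end in `-preview`. Reproduced in isolation with a shortened version list:

```python
# The Azure default-version pick in isolation: newest first, skipping
# anything that ends in "-preview".
AZURE_OPENAI_API_VERSIONS = [
    "2024-06-01",
    "2024-07-01-preview",
    "2024-08-01-preview",
    "2023-05-15",
]

default_version = next(
    (v for v in sorted(AZURE_OPENAI_API_VERSIONS, reverse=True) if not v.endswith("-preview")),
    AZURE_OPENAI_API_VERSIONS[0],
)
print(default_version)  # "2024-06-01"
```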
+ documentation: str = "https://python.langchain.com/docs/integrations/chat/baidu_qianfan_endpoint" + icon = "BaiduQianfan" + name = "BaiduQianfanChatModel" + + inputs = [ + *LCModelComponent._base_inputs, + DropdownInput( + name="model", + display_name="Model Name", + options=[ + "ERNIE-Bot", + "ERNIE-Bot-turbo", + "BLOOMZ-7B", + "Llama-2-7b-chat", + "Llama-2-13b-chat", + "Llama-2-70b-chat", + "Qianfan-BLOOMZ-7B-compressed", + "Qianfan-Chinese-Llama-2-7B", + "ChatGLM2-6B-32K", + "AquilaChat-7B", + ], + info="https://python.langchain.com/docs/integrations/chat/baidu_qianfan_endpoint", + value="ERNIE-Bot-turbo", + ), + SecretStrInput( + name="qianfan_ak", + display_name="Qianfan Ak", + info="which you could get from https://cloud.baidu.com/product/wenxinworkshop", + ), + SecretStrInput( + name="qianfan_sk", + display_name="Qianfan Sk", + info="which you could get from https://cloud.baidu.com/product/wenxinworkshop", + ), + FloatInput( + name="top_p", + display_name="Top p", + info="Model params, only supported in ERNIE-Bot and ERNIE-Bot-turbo", + value=0.8, + advanced=True, + ), + FloatInput( + name="temperature", + display_name="Temperature", + info="Model params, only supported in ERNIE-Bot and ERNIE-Bot-turbo", + value=0.95, + ), + FloatInput( + name="penalty_score", + display_name="Penalty Score", + info="Model params, only supported in ERNIE-Bot and ERNIE-Bot-turbo", + value=1.0, + advanced=True, + ), + MessageTextInput( + name="endpoint", display_name="Endpoint", info="Endpoint of the Qianfan LLM, required if custom model used." + ), + HandleInput( + name="output_parser", + display_name="Output Parser", + info="The parser to use to parse the output of the model", + advanced=True, + input_types=["OutputParser"], + ), + ] + + def build_model(self) -> LanguageModel: # type: ignore[type-var] + model = self.model + qianfan_ak = self.qianfan_ak + qianfan_sk = self.qianfan_sk + top_p = self.top_p + temperature = self.temperature + penalty_score = self.penalty_score + endpoint = self.endpoint + + try: + output = QianfanChatEndpoint( + model=model, + qianfan_ak=SecretStr(qianfan_ak).get_secret_value() if qianfan_ak else None, + qianfan_sk=SecretStr(qianfan_sk).get_secret_value() if qianfan_sk else None, + top_p=top_p, + temperature=temperature, + penalty_score=penalty_score, + endpoint=endpoint, + ) + except Exception as e: + msg = "Could not connect to Baidu Qianfan API." + raise ValueError(msg) from e + + return output diff --git a/src/backend/base/langflow/components/models/cohere.py b/src/backend/base/langflow/components/models/cohere.py new file mode 100644 index 000000000000..5c224744cb55 --- /dev/null +++ b/src/backend/base/langflow/components/models/cohere.py @@ -0,0 +1,45 @@ +from langchain_cohere import ChatCohere +from pydantic.v1 import SecretStr + +from langflow.base.models.model import LCModelComponent +from langflow.field_typing import LanguageModel +from langflow.inputs.inputs import HandleInput +from langflow.io import FloatInput, SecretStrInput + + +class CohereComponent(LCModelComponent): + display_name = "Cohere" + description = "Generate text using Cohere LLMs." 
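The new `baidu_qianfan_chat.py` above, like several sibling components in this diff, wraps keys in pydantic's v1 `SecretStr` for masking and unwraps them with `get_secret_value()` only at the client boundary. A tiny sketch of that round trip; the key below is a hypothetical placeholder, not a real credential:

```python
# SecretStr round trip as used across these components.
from pydantic.v1 import SecretStr

qianfan_ak = SecretStr("qianfan-ak-example")   # hypothetical placeholder key
print(qianfan_ak)                     # **********
print(qianfan_ak.get_secret_value())  # qianfan-ak-example
```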
+ documentation = "https://python.langchain.com/docs/modules/model_io/models/llms/integrations/cohere" + icon = "Cohere" + name = "CohereModel" + + inputs = [ + *LCModelComponent._base_inputs, + SecretStrInput( + name="cohere_api_key", + display_name="Cohere API Key", + info="The Cohere API Key to use for the Cohere model.", + advanced=False, + value="COHERE_API_KEY", + ), + FloatInput(name="temperature", display_name="Temperature", value=0.75), + HandleInput( + name="output_parser", + display_name="Output Parser", + info="The parser to use to parse the output of the model", + advanced=True, + input_types=["OutputParser"], + ), + ] + + def build_model(self) -> LanguageModel: # type: ignore[type-var] + cohere_api_key = self.cohere_api_key + temperature = self.temperature + + api_key = SecretStr(cohere_api_key).get_secret_value() if cohere_api_key else None + + return ChatCohere( + temperature=temperature or 0.75, + cohere_api_key=api_key, + ) diff --git a/src/backend/base/langflow/components/models/google_generative_ai.py b/src/backend/base/langflow/components/models/google_generative_ai.py new file mode 100644 index 000000000000..a5e5f99965d1 --- /dev/null +++ b/src/backend/base/langflow/components/models/google_generative_ai.py @@ -0,0 +1,84 @@ +from pydantic.v1 import SecretStr + +from langflow.base.models.model import LCModelComponent +from langflow.field_typing import LanguageModel +from langflow.inputs import DropdownInput, FloatInput, IntInput, SecretStrInput +from langflow.inputs.inputs import HandleInput + + +class GoogleGenerativeAIComponent(LCModelComponent): + display_name = "Google Generative AI" + description = "Generate text using Google Generative AI." + icon = "GoogleGenerativeAI" + name = "GoogleGenerativeAIModel" + + inputs = [ + *LCModelComponent._base_inputs, + IntInput( + name="max_output_tokens", display_name="Max Output Tokens", info="The maximum number of tokens to generate." + ), + DropdownInput( + name="model", + display_name="Model", + info="The name of the model to use.", + options=["gemini-1.5-pro", "gemini-1.5-flash", "gemini-1.0-pro", "gemini-1.0-pro-vision"], + value="gemini-1.5-pro", + ), + SecretStrInput( + name="google_api_key", + display_name="Google API Key", + info="The Google API Key to use for the Google Generative AI.", + ), + FloatInput( + name="top_p", + display_name="Top P", + info="The maximum cumulative probability of tokens to consider when sampling.", + advanced=True, + ), + FloatInput(name="temperature", display_name="Temperature", value=0.1), + IntInput( + name="n", + display_name="N", + info="Number of chat completions to generate for each prompt. " + "Note that the API may not return the full n completions if duplicates are generated.", + advanced=True, + ), + IntInput( + name="top_k", + display_name="Top K", + info="Decode using top-k sampling: consider the set of top_k most probable tokens. Must be positive.", + advanced=True, + ), + HandleInput( + name="output_parser", + display_name="Output Parser", + info="The parser to use to parse the output of the model", + advanced=True, + input_types=["OutputParser"], + ), + ] + + def build_model(self) -> LanguageModel: # type: ignore[type-var] + try: + from langchain_google_genai import ChatGoogleGenerativeAI + except ImportError as e: + msg = "The 'langchain_google_genai' package is required to use the Google Generative AI model." 
+ raise ImportError(msg) from e + + google_api_key = self.google_api_key + model = self.model + max_output_tokens = self.max_output_tokens + temperature = self.temperature + top_k = self.top_k + top_p = self.top_p + n = self.n + + return ChatGoogleGenerativeAI( + model=model, + max_output_tokens=max_output_tokens or None, + temperature=temperature, + top_k=top_k or None, + top_p=top_p or None, + n=n or 1, + google_api_key=SecretStr(google_api_key).get_secret_value(), + ) diff --git a/src/backend/base/langflow/components/models/groq.py b/src/backend/base/langflow/components/models/groq.py new file mode 100644 index 000000000000..81a97b6fb0c3 --- /dev/null +++ b/src/backend/base/langflow/components/models/groq.py @@ -0,0 +1,103 @@ +import requests +from langchain_groq import ChatGroq +from pydantic.v1 import SecretStr +from typing_extensions import override + +from langflow.base.models.model import LCModelComponent +from langflow.field_typing import LanguageModel +from langflow.inputs.inputs import HandleInput +from langflow.io import DropdownInput, FloatInput, IntInput, MessageTextInput, SecretStrInput + + +class GroqModel(LCModelComponent): + display_name: str = "Groq" + description: str = "Generate text using Groq." + icon = "Groq" + name = "GroqModel" + + inputs = [ + *LCModelComponent._base_inputs, + SecretStrInput(name="groq_api_key", display_name="Groq API Key", info="API key for the Groq API."), + MessageTextInput( + name="groq_api_base", + display_name="Groq API Base", + info="Base URL path for API requests, leave blank if not using a proxy or service emulator.", + advanced=True, + value="https://api.groq.com", + ), + IntInput( + name="max_tokens", + display_name="Max Output Tokens", + info="The maximum number of tokens to generate.", + advanced=True, + ), + FloatInput( + name="temperature", + display_name="Temperature", + info="Run inference with this temperature. Must by in the closed interval [0.0, 1.0].", + value=0.1, + ), + IntInput( + name="n", + display_name="N", + info="Number of chat completions to generate for each prompt. 
" + "Note that the API may not return the full n completions if duplicates are generated.", + advanced=True, + ), + DropdownInput( + name="model_name", + display_name="Model", + info="The name of the model to use.", + options=[], + refresh_button=True, + ), + HandleInput( + name="output_parser", + display_name="Output Parser", + info="The parser to use to parse the output of the model", + advanced=True, + input_types=["OutputParser"], + ), + ] + + def get_models(self) -> list[str]: + api_key = self.groq_api_key + base_url = self.groq_api_base or "https://api.groq.com" + url = f"{base_url}/openai/v1/models" + + headers = {"Authorization": f"Bearer {api_key}", "Content-Type": "application/json"} + + try: + response = requests.get(url, headers=headers, timeout=10) + response.raise_for_status() + model_list = response.json() + return [model["id"] for model in model_list.get("data", [])] + except requests.RequestException as e: + self.status = f"Error fetching models: {e}" + return [] + + @override + def update_build_config(self, build_config: dict, field_value: str, field_name: str | None = None): + if field_name in {"groq_api_key", "groq_api_base", "model_name"}: + models = self.get_models() + build_config["model_name"]["options"] = models + return build_config + + def build_model(self) -> LanguageModel: # type: ignore[type-var] + groq_api_key = self.groq_api_key + model_name = self.model_name + max_tokens = self.max_tokens + temperature = self.temperature + groq_api_base = self.groq_api_base + n = self.n + stream = self.stream + + return ChatGroq( + model=model_name, + max_tokens=max_tokens or None, + temperature=temperature, + base_url=groq_api_base, + n=n or 1, + api_key=SecretStr(groq_api_key).get_secret_value(), + streaming=stream, + ) diff --git a/src/backend/base/langflow/components/models/huggingface.py b/src/backend/base/langflow/components/models/huggingface.py new file mode 100644 index 000000000000..e65c5ed454d8 --- /dev/null +++ b/src/backend/base/langflow/components/models/huggingface.py @@ -0,0 +1,151 @@ +from typing import Any + +from langchain_community.llms.huggingface_endpoint import HuggingFaceEndpoint +from tenacity import retry, stop_after_attempt, wait_fixed + +# TODO: langchain_community.llms.huggingface_endpoint is depreciated. +# Need to update to langchain_huggingface, but have dependency with langchain_core 0.3.0 +from langflow.base.models.model import LCModelComponent +from langflow.field_typing import LanguageModel +from langflow.inputs.inputs import HandleInput +from langflow.io import DictInput, DropdownInput, FloatInput, IntInput, SecretStrInput, StrInput + + +class HuggingFaceEndpointsComponent(LCModelComponent): + display_name: str = "HuggingFace" + description: str = "Generate text using Hugging Face Inference APIs." 
+ icon = "HuggingFace" + name = "HuggingFaceModel" + + inputs = [ + *LCModelComponent._base_inputs, + StrInput(name="model_id", display_name="Model ID", value="openai-community/gpt2"), + IntInput( + name="max_new_tokens", display_name="Max New Tokens", value=512, info="Maximum number of generated tokens" + ), + IntInput( + name="top_k", + display_name="Top K", + advanced=True, + info="The number of highest probability vocabulary tokens to keep for top-k-filtering", + ), + FloatInput( + name="top_p", + display_name="Top P", + value=0.95, + advanced=True, + info=( + "If set to < 1, only the smallest set of most probable tokens with " + "probabilities that add up to `top_p` or higher are kept for generation" + ), + ), + FloatInput( + name="typical_p", + display_name="Typical P", + value=0.95, + advanced=True, + info="Typical Decoding mass.", + ), + FloatInput( + name="temperature", + display_name="Temperature", + value=0.8, + advanced=True, + info="The value used to module the logits distribution", + ), + FloatInput( + name="repetition_penalty", + display_name="Repetition Penalty", + info="The parameter for repetition penalty. 1.0 means no penalty.", + advanced=True, + ), + StrInput( + name="inference_endpoint", + display_name="Inference Endpoint", + value="https://api-inference.huggingface.co/models/", + info="Custom inference endpoint URL.", + ), + DropdownInput( + name="task", + display_name="Task", + options=["text2text-generation", "text-generation", "summarization", "translation"], + advanced=True, + info="The task to call the model with. Should be a task that returns `generated_text` or `summary_text`.", + ), + SecretStrInput(name="huggingfacehub_api_token", display_name="API Token", password=True), + DictInput(name="model_kwargs", display_name="Model Keyword Arguments", advanced=True), + IntInput(name="retry_attempts", display_name="Retry Attempts", value=1, advanced=True), + HandleInput( + name="output_parser", + display_name="Output Parser", + info="The parser to use to parse the output of the model", + advanced=True, + input_types=["OutputParser"], + ), + ] + + def get_api_url(self) -> str: + if "huggingface" in self.inference_endpoint.lower(): + return f"{self.inference_endpoint}{self.model_id}" + return self.inference_endpoint + + def create_huggingface_endpoint( + self, + task: str | None, + huggingfacehub_api_token: str | None, + model_kwargs: dict[str, Any], + max_new_tokens: int, + top_k: int | None, + top_p: float, + typical_p: float | None, + temperature: float | None, + repetition_penalty: float | None, + ) -> HuggingFaceEndpoint: + retry_attempts = self.retry_attempts + endpoint_url = self.get_api_url() + + @retry(stop=stop_after_attempt(retry_attempts), wait=wait_fixed(2)) + def _attempt_create(): + return HuggingFaceEndpoint( + endpoint_url=endpoint_url, + task=task, + huggingfacehub_api_token=huggingfacehub_api_token, + model_kwargs=model_kwargs, + max_new_tokens=max_new_tokens, + top_k=top_k, + top_p=top_p, + typical_p=typical_p, + temperature=temperature, + repetition_penalty=repetition_penalty, + ) + + return _attempt_create() + + def build_model(self) -> LanguageModel: + task = self.task or None + huggingfacehub_api_token = self.huggingfacehub_api_token + model_kwargs = self.model_kwargs or {} + max_new_tokens = self.max_new_tokens + top_k = self.top_k or None + top_p = self.top_p + typical_p = self.typical_p or None + temperature = self.temperature or 0.8 + repetition_penalty = self.repetition_penalty or None + + try: + llm = self.create_huggingface_endpoint( + 
task=task, + huggingfacehub_api_token=huggingfacehub_api_token, + model_kwargs=model_kwargs, + max_new_tokens=max_new_tokens, + top_k=top_k, + top_p=top_p, + typical_p=typical_p, + temperature=temperature, + repetition_penalty=repetition_penalty, + ) + except Exception as e: + msg = "Could not connect to HuggingFace Endpoints API." + raise ValueError(msg) from e + + return llm diff --git a/src/backend/base/langflow/components/models/lmstudiomodel.py b/src/backend/base/langflow/components/models/lmstudiomodel.py new file mode 100644 index 000000000000..f4f64c5c9c65 --- /dev/null +++ b/src/backend/base/langflow/components/models/lmstudiomodel.py @@ -0,0 +1,134 @@ +from typing import Any +from urllib.parse import urljoin + +import httpx +from langchain_openai import ChatOpenAI +from pydantic.v1 import SecretStr +from typing_extensions import override + +from langflow.base.models.model import LCModelComponent +from langflow.field_typing import LanguageModel +from langflow.field_typing.range_spec import RangeSpec +from langflow.inputs import DictInput, DropdownInput, FloatInput, IntInput, SecretStrInput, StrInput +from langflow.inputs.inputs import HandleInput + + +class LMStudioModelComponent(LCModelComponent): + display_name = "LM Studio" + description = "Generate text using LM Studio Local LLMs." + icon = "LMStudio" + name = "LMStudioModel" + + @override + def update_build_config(self, build_config: dict, field_value: Any, field_name: str | None = None): + if field_name == "model_name": + base_url_dict = build_config.get("base_url", {}) + base_url_load_from_db = base_url_dict.get("load_from_db", False) + base_url_value = base_url_dict.get("value") + if base_url_load_from_db: + base_url_value = self.variables(base_url_value) + elif not base_url_value: + base_url_value = "http://localhost:1234/v1" + build_config["model_name"]["options"] = self.get_model(base_url_value) + + return build_config + + def get_model(self, base_url_value: str) -> list[str]: + try: + url = urljoin(base_url_value, "/v1/models") + with httpx.Client() as client: + response = client.get(url) + response.raise_for_status() + data = response.json() + + return [model["id"] for model in data.get("data", [])] + except Exception as e: + msg = "Could not retrieve models. Please, make sure the LM Studio server is running." + raise ValueError(msg) from e + + inputs = [ + *LCModelComponent._base_inputs, + IntInput( + name="max_tokens", + display_name="Max Tokens", + advanced=True, + info="The maximum number of tokens to generate. Set to 0 for unlimited tokens.", + range_spec=RangeSpec(min=0, max=128000), + ), + DictInput(name="model_kwargs", display_name="Model Kwargs", advanced=True), + DropdownInput( + name="model_name", + display_name="Model Name", + advanced=False, + refresh_button=True, + ), + StrInput( + name="base_url", + display_name="Base URL", + advanced=False, + info="Endpoint of the LM Studio API. 
Defaults to 'http://localhost:1234/v1' if not specified.", + value="http://localhost:1234/v1", + ), + SecretStrInput( + name="api_key", + display_name="LM Studio API Key", + info="The LM Studio API Key to use for LM Studio.", + advanced=True, + value="LMSTUDIO_API_KEY", + ), + FloatInput(name="temperature", display_name="Temperature", value=0.1), + IntInput( + name="seed", + display_name="Seed", + info="The seed controls the reproducibility of the job.", + advanced=True, + value=1, + ), + HandleInput( + name="output_parser", + display_name="Output Parser", + info="The parser to use to parse the output of the model", + advanced=True, + input_types=["OutputParser"], + ), + ] + + def build_model(self) -> LanguageModel: # type: ignore[type-var] + lmstudio_api_key = self.api_key + temperature = self.temperature + model_name: str = self.model_name + max_tokens = self.max_tokens + model_kwargs = self.model_kwargs or {} + base_url = self.base_url or "http://localhost:1234/v1" + seed = self.seed + + api_key = SecretStr(lmstudio_api_key) if lmstudio_api_key else None + + return ChatOpenAI( + max_tokens=max_tokens or None, + model_kwargs=model_kwargs, + model=model_name, + base_url=base_url, + api_key=api_key, + temperature=temperature if temperature is not None else 0.1, + seed=seed, + ) + + def _get_exception_message(self, e: Exception): + """Get a message from an LM Studio exception. + + Args: + e (Exception): The exception to get the message from. + + Returns: + str: The message from the exception. + """ + try: + from openai import BadRequestError + except ImportError: + return None + if isinstance(e, BadRequestError): + message = e.body.get("message") + if message: + return message + return None diff --git a/src/backend/base/langflow/components/models/maritalk.py b/src/backend/base/langflow/components/models/maritalk.py new file mode 100644 index 000000000000..80b37d2de989 --- /dev/null +++ b/src/backend/base/langflow/components/models/maritalk.py @@ -0,0 +1,60 @@ +from langchain_community.chat_models import ChatMaritalk + +from langflow.base.models.model import LCModelComponent +from langflow.field_typing import LanguageModel +from langflow.field_typing.range_spec import RangeSpec +from langflow.inputs import DropdownInput, FloatInput, IntInput, SecretStrInput +from langflow.inputs.inputs import HandleInput + + +class MaritalkModelComponent(LCModelComponent): + display_name = "Maritalk" + description = "Generates text using Maritalk LLMs." + icon = "Maritalk" + name = "Maritalk" + inputs = [ + *LCModelComponent._base_inputs, + IntInput( + name="max_tokens", + display_name="Max Tokens", + advanced=True, + value=512, + info="The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.",
+        ),
+        DropdownInput(
+            name="model_name",
+            display_name="Model Name",
+            advanced=False,
+            options=["sabia-2-small", "sabia-2-medium"],
+            value="sabia-2-small",
+        ),
+        SecretStrInput(
+            name="api_key",
+            display_name="Maritalk API Key",
+            info="The Maritalk API Key to use for the Maritalk model.",
+            advanced=False,
+        ),
+        FloatInput(name="temperature", display_name="Temperature", value=0.1, range_spec=RangeSpec(min=0, max=1)),
+        HandleInput(
+            name="output_parser",
+            display_name="Output Parser",
+            info="The parser to use to parse the output of the model",
+            advanced=True,
+            input_types=["OutputParser"],
+        ),
+    ]
+
+    def build_model(self) -> LanguageModel:  # type: ignore[type-var]
+        api_key = self.api_key
+        temperature = self.temperature
+        model_name: str = self.model_name
+        max_tokens = self.max_tokens
+
+        return ChatMaritalk(
+            max_tokens=max_tokens,
+            model=model_name,
+            api_key=api_key,
+            temperature=temperature or 0.1,
+        )
diff --git a/src/backend/base/langflow/components/models/mistral.py b/src/backend/base/langflow/components/models/mistral.py
new file mode 100644
index 000000000000..26f7226acf56
--- /dev/null
+++ b/src/backend/base/langflow/components/models/mistral.py
@@ -0,0 +1,94 @@
+from langchain_mistralai import ChatMistralAI
+from pydantic.v1 import SecretStr
+
+from langflow.base.models.model import LCModelComponent
+from langflow.field_typing import LanguageModel
+from langflow.inputs.inputs import HandleInput
+from langflow.io import BoolInput, DropdownInput, FloatInput, IntInput, SecretStrInput, StrInput
+
+
+class MistralAIModelComponent(LCModelComponent):
+    display_name = "MistralAI"
+    description = "Generates text using MistralAI LLMs."
+    icon = "MistralAI"
+    name = "MistralModel"
+
+    inputs = [
+        *LCModelComponent._base_inputs,
+        IntInput(
+            name="max_tokens",
+            display_name="Max Tokens",
+            advanced=True,
+            info="The maximum number of tokens to generate. Set to 0 for unlimited tokens.",
+        ),
+        DropdownInput(
+            name="model_name",
+            display_name="Model Name",
+            advanced=False,
+            options=[
+                "open-mixtral-8x7b",
+                "open-mixtral-8x22b",
+                "mistral-small-latest",
+                "mistral-medium-latest",
+                "mistral-large-latest",
+                "codestral-latest",
+            ],
+            value="codestral-latest",
+        ),
+        StrInput(
+            name="mistral_api_base",
+            display_name="Mistral API Base",
+            advanced=True,
+            info="The base URL of the Mistral API. Defaults to https://api.mistral.ai/v1. 
" + "You can change this to use other APIs like JinaChat, LocalAI and Prem.", + ), + SecretStrInput( + name="api_key", + display_name="Mistral API Key", + info="The Mistral API Key to use for the Mistral model.", + advanced=False, + ), + FloatInput(name="temperature", display_name="Temperature", advanced=False, value=0.5), + IntInput(name="max_retries", display_name="Max Retries", advanced=True, value=5), + IntInput(name="timeout", display_name="Timeout", advanced=True, value=60), + IntInput(name="max_concurrent_requests", display_name="Max Concurrent Requests", advanced=True, value=3), + FloatInput(name="top_p", display_name="Top P", advanced=True, value=1), + IntInput(name="random_seed", display_name="Random Seed", value=1, advanced=True), + BoolInput(name="safe_mode", display_name="Safe Mode", advanced=True), + HandleInput( + name="output_parser", + display_name="Output Parser", + info="The parser to use to parse the output of the model", + advanced=True, + input_types=["OutputParser"], + ), + ] + + def build_model(self) -> LanguageModel: # type: ignore[type-var] + mistral_api_key = self.api_key + temperature = self.temperature + model_name = self.model_name + max_tokens = self.max_tokens + mistral_api_base = self.mistral_api_base or "https://api.mistral.ai/v1" + max_retries = self.max_retries + timeout = self.timeout + max_concurrent_requests = self.max_concurrent_requests + top_p = self.top_p + random_seed = self.random_seed + safe_mode = self.safe_mode + + api_key = SecretStr(mistral_api_key).get_secret_value() if mistral_api_key else None + + return ChatMistralAI( + max_tokens=max_tokens or None, + model_name=model_name, + endpoint=mistral_api_base, + api_key=api_key, + temperature=temperature, + max_retries=max_retries, + timeout=timeout, + max_concurrent_requests=max_concurrent_requests, + top_p=top_p, + random_seed=random_seed, + safe_mode=safe_mode, + ) diff --git a/src/backend/base/langflow/components/models/nvidia.py b/src/backend/base/langflow/components/models/nvidia.py new file mode 100644 index 000000000000..998a59dd6cb2 --- /dev/null +++ b/src/backend/base/langflow/components/models/nvidia.py @@ -0,0 +1,91 @@ +from typing import Any + +from langflow.base.models.model import LCModelComponent +from langflow.field_typing import LanguageModel +from langflow.inputs import DropdownInput, FloatInput, IntInput, SecretStrInput, StrInput +from langflow.inputs.inputs import HandleInput +from langflow.schema.dotdict import dotdict + + +class NVIDIAModelComponent(LCModelComponent): + display_name = "NVIDIA" + description = "Generates text using NVIDIA LLMs." + icon = "NVIDIA" + + inputs = [ + *LCModelComponent._base_inputs, + IntInput( + name="max_tokens", + display_name="Max Tokens", + advanced=True, + info="The maximum number of tokens to generate. Set to 0 for unlimited tokens.", + ), + DropdownInput( + name="model_name", + display_name="Model Name", + advanced=False, + options=["mistralai/mixtral-8x7b-instruct-v0.1"], + value="mistralai/mixtral-8x7b-instruct-v0.1", + ), + StrInput( + name="base_url", + display_name="NVIDIA Base URL", + value="https://integrate.api.nvidia.com/v1", + refresh_button=True, + info="The base URL of the NVIDIA API. 
Defaults to https://integrate.api.nvidia.com/v1.", + ), + SecretStrInput( + name="nvidia_api_key", + display_name="NVIDIA API Key", + info="The NVIDIA API Key.", + advanced=False, + value="NVIDIA_API_KEY", + ), + FloatInput(name="temperature", display_name="Temperature", value=0.1), + IntInput( + name="seed", + display_name="Seed", + info="The seed controls the reproducibility of the job.", + advanced=True, + value=1, + ), + HandleInput( + name="output_parser", + display_name="Output Parser", + info="The parser to use to parse the output of the model", + advanced=True, + input_types=["OutputParser"], + ), + ] + + def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None): + if field_name == "base_url" and field_value: + try: + build_model = self.build_model() + ids = [model.id for model in build_model.available_models] + build_config["model_name"]["options"] = ids + build_config["model_name"]["value"] = ids[0] + except Exception as e: + msg = f"Error getting model names: {e}" + raise ValueError(msg) from e + return build_config + + def build_model(self) -> LanguageModel: # type: ignore[type-var] + try: + from langchain_nvidia_ai_endpoints import ChatNVIDIA + except ImportError as e: + msg = "Please install langchain-nvidia-ai-endpoints to use the NVIDIA model." + raise ImportError(msg) from e + nvidia_api_key = self.nvidia_api_key + temperature = self.temperature + model_name: str = self.model_name + max_tokens = self.max_tokens + seed = self.seed + return ChatNVIDIA( + max_tokens=max_tokens or None, + model=model_name, + base_url=self.base_url, + api_key=nvidia_api_key, + temperature=temperature or 0.1, + seed=seed, + ) diff --git a/src/backend/base/langflow/components/models/ollama.py b/src/backend/base/langflow/components/models/ollama.py new file mode 100644 index 000000000000..12efe8b55695 --- /dev/null +++ b/src/backend/base/langflow/components/models/ollama.py @@ -0,0 +1,228 @@ +from typing import Any +from urllib.parse import urljoin + +import httpx +from langchain_ollama import ChatOllama + +from langflow.base.models.model import LCModelComponent +from langflow.field_typing import LanguageModel +from langflow.inputs.inputs import HandleInput +from langflow.io import BoolInput, DictInput, DropdownInput, FloatInput, IntInput, StrInput + + +class ChatOllamaComponent(LCModelComponent): + display_name = "Ollama" + description = "Generate text using Ollama Local LLMs." 
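+    # Dropdown values map onto the Ollama API in build_model() below:
+    # "Disabled" -> mirostat 0, "Mirostat" -> 1, "Mirostat 2.0" -> 2, with
+    # mirostat_eta/mirostat_tau only sent while Mirostat sampling is enabled.
+    # The model list is refreshed from GET <base_url>/api/tags, e.g. a default
+    # local install is queried at http://localhost:11434/api/tags (illustrative).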
+ icon = "Ollama" + name = "OllamaModel" + + def update_build_config(self, build_config: dict, field_value: Any, field_name: str | None = None): + if field_name == "mirostat": + if field_value == "Disabled": + build_config["mirostat_eta"]["advanced"] = True + build_config["mirostat_tau"]["advanced"] = True + build_config["mirostat_eta"]["value"] = None + build_config["mirostat_tau"]["value"] = None + + else: + build_config["mirostat_eta"]["advanced"] = False + build_config["mirostat_tau"]["advanced"] = False + + if field_value == "Mirostat 2.0": + build_config["mirostat_eta"]["value"] = 0.2 + build_config["mirostat_tau"]["value"] = 10 + else: + build_config["mirostat_eta"]["value"] = 0.1 + build_config["mirostat_tau"]["value"] = 5 + + if field_name == "model_name": + base_url_dict = build_config.get("base_url", {}) + base_url_load_from_db = base_url_dict.get("load_from_db", False) + base_url_value = base_url_dict.get("value") + if base_url_load_from_db: + base_url_value = self.variables(base_url_value, field_name) + elif not base_url_value: + base_url_value = "http://localhost:11434" + build_config["model_name"]["options"] = self.get_model(base_url_value) + if field_name == "keep_alive_flag": + if field_value == "Keep": + build_config["keep_alive"]["value"] = "-1" + build_config["keep_alive"]["advanced"] = True + elif field_value == "Immediately": + build_config["keep_alive"]["value"] = "0" + build_config["keep_alive"]["advanced"] = True + else: + build_config["keep_alive"]["advanced"] = False + + return build_config + + def get_model(self, base_url_value: str) -> list[str]: + try: + url = urljoin(base_url_value, "/api/tags") + with httpx.Client() as client: + response = client.get(url) + response.raise_for_status() + data = response.json() + + return [model["name"] for model in data.get("models", [])] + except Exception as e: + msg = "Could not retrieve models. Please, make sure Ollama is running." + raise ValueError(msg) from e + + inputs = [ + StrInput( + name="base_url", + display_name="Base URL", + info="Endpoint of the Ollama API. Defaults to 'http://localhost:11434' if not specified.", + value="http://localhost:11434", + ), + DropdownInput( + name="model_name", + display_name="Model Name", + value="llama3.1", + info="Refer to https://ollama.com/library for more models.", + refresh_button=True, + ), + FloatInput( + name="temperature", + display_name="Temperature", + value=0.2, + info="Controls the creativity of model responses.", + ), + StrInput( + name="format", display_name="Format", info="Specify the format of the output (e.g., json).", advanced=True + ), + DictInput(name="metadata", display_name="Metadata", info="Metadata to add to the run trace.", advanced=True), + DropdownInput( + name="mirostat", + display_name="Mirostat", + options=["Disabled", "Mirostat", "Mirostat 2.0"], + info="Enable/disable Mirostat sampling for controlling perplexity.", + value="Disabled", + advanced=True, + real_time_refresh=True, + ), + FloatInput( + name="mirostat_eta", + display_name="Mirostat Eta", + info="Learning rate for Mirostat algorithm. (Default: 0.1)", + advanced=True, + ), + FloatInput( + name="mirostat_tau", + display_name="Mirostat Tau", + info="Controls the balance between coherence and diversity of the output. (Default: 5.0)", + advanced=True, + ), + IntInput( + name="num_ctx", + display_name="Context Window Size", + info="Size of the context window for generating tokens. 
(Default: 2048)", + advanced=True, + ), + IntInput( + name="num_gpu", + display_name="Number of GPUs", + info="Number of GPUs to use for computation. (Default: 1 on macOS, 0 to disable)", + advanced=True, + ), + IntInput( + name="num_thread", + display_name="Number of Threads", + info="Number of threads to use during computation. (Default: detected for optimal performance)", + advanced=True, + ), + IntInput( + name="repeat_last_n", + display_name="Repeat Last N", + info="How far back the model looks to prevent repetition. (Default: 64, 0 = disabled, -1 = num_ctx)", + advanced=True, + ), + FloatInput( + name="repeat_penalty", + display_name="Repeat Penalty", + info="Penalty for repetitions in generated text. (Default: 1.1)", + advanced=True, + ), + FloatInput(name="tfs_z", display_name="TFS Z", info="Tail free sampling value. (Default: 1)", advanced=True), + IntInput(name="timeout", display_name="Timeout", info="Timeout for the request stream.", advanced=True), + IntInput( + name="top_k", display_name="Top K", info="Limits token selection to top K. (Default: 40)", advanced=True + ), + FloatInput(name="top_p", display_name="Top P", info="Works together with top-k. (Default: 0.9)", advanced=True), + BoolInput(name="verbose", display_name="Verbose", info="Whether to print out response text.", advanced=True), + StrInput( + name="tags", + display_name="Tags", + info="Comma-separated list of tags to add to the run trace.", + advanced=True, + ), + StrInput( + name="stop_tokens", + display_name="Stop Tokens", + info="Comma-separated list of tokens to signal the model to stop generating text.", + advanced=True, + ), + StrInput(name="system", display_name="System", info="System to use for generating text.", advanced=True), + StrInput(name="template", display_name="Template", info="Template to use for generating text.", advanced=True), + HandleInput( + name="output_parser", + display_name="Output Parser", + info="The parser to use to parse the output of the model", + advanced=True, + input_types=["OutputParser"], + ), + *LCModelComponent._base_inputs, + ] + + def build_model(self) -> LanguageModel: # type: ignore[type-var] + # Mapping mirostat settings to their corresponding values + mirostat_options = {"Mirostat": 1, "Mirostat 2.0": 2} + + # Default to 0 for 'Disabled' + mirostat_value = mirostat_options.get(self.mirostat, 0) + + # Set mirostat_eta and mirostat_tau to None if mirostat is disabled + if mirostat_value == 0: + mirostat_eta = None + mirostat_tau = None + else: + mirostat_eta = self.mirostat_eta + mirostat_tau = self.mirostat_tau + + # Mapping system settings to their corresponding values + llm_params = { + "base_url": self.base_url, + "model": self.model_name, + "mirostat": mirostat_value, + "format": self.format, + "metadata": self.metadata, + "tags": self.tags.split(",") if self.tags else None, + "mirostat_eta": mirostat_eta, + "mirostat_tau": mirostat_tau, + "num_ctx": self.num_ctx or None, + "num_gpu": self.num_gpu or None, + "num_thread": self.num_thread or None, + "repeat_last_n": self.repeat_last_n or None, + "repeat_penalty": self.repeat_penalty or None, + "temperature": self.temperature or None, + "stop": self.stop_tokens.split(",") if self.stop_tokens else None, + "system": self.system, + "template": self.template, + "tfs_z": self.tfs_z or None, + "timeout": self.timeout or None, + "top_k": self.top_k or None, + "top_p": self.top_p or None, + "verbose": self.verbose, + } + + # Remove parameters with None values + llm_params = {k: v for k, v in llm_params.items() if v is not 
None} + + try: + output = ChatOllama(**llm_params) + except Exception as e: + msg = "Could not initialize Ollama LLM." + raise ValueError(msg) from e + + return output diff --git a/src/backend/base/langflow/components/models/openai.py b/src/backend/base/langflow/components/models/openai.py new file mode 100644 index 000000000000..63d999a4b7c6 --- /dev/null +++ b/src/backend/base/langflow/components/models/openai.py @@ -0,0 +1,138 @@ +import operator +from functools import reduce + +from langchain_openai import ChatOpenAI +from pydantic.v1 import SecretStr + +from langflow.base.models.model import LCModelComponent +from langflow.base.models.openai_constants import OPENAI_MODEL_NAMES +from langflow.field_typing import LanguageModel +from langflow.field_typing.range_spec import RangeSpec +from langflow.inputs import BoolInput, DictInput, DropdownInput, FloatInput, IntInput, SecretStrInput, StrInput +from langflow.inputs.inputs import HandleInput + + +class OpenAIModelComponent(LCModelComponent): + display_name = "OpenAI" + description = "Generates text using OpenAI LLMs." + icon = "OpenAI" + name = "OpenAIModel" + + inputs = [ + *LCModelComponent._base_inputs, + IntInput( + name="max_tokens", + display_name="Max Tokens", + advanced=True, + info="The maximum number of tokens to generate. Set to 0 for unlimited tokens.", + range_spec=RangeSpec(min=0, max=128000), + ), + DictInput( + name="model_kwargs", + display_name="Model Kwargs", + advanced=True, + info="Additional keyword arguments to pass to the model.", + ), + BoolInput( + name="json_mode", + display_name="JSON Mode", + advanced=True, + info="If True, it will output JSON regardless of passing a schema.", + ), + DictInput( + name="output_schema", + is_list=True, + display_name="Schema", + advanced=True, + info="The schema for the Output of the model. " + "You must pass the word JSON in the prompt. " + "If left blank, JSON mode will be disabled. [DEPRECATED]", + ), + DropdownInput( + name="model_name", + display_name="Model Name", + advanced=False, + options=OPENAI_MODEL_NAMES, + value=OPENAI_MODEL_NAMES[0], + ), + StrInput( + name="openai_api_base", + display_name="OpenAI API Base", + advanced=True, + info="The base URL of the OpenAI API. " + "Defaults to https://api.openai.com/v1. 
" + "You can change this to use other APIs like JinaChat, LocalAI and Prem.", + ), + SecretStrInput( + name="api_key", + display_name="OpenAI API Key", + info="The OpenAI API Key to use for the OpenAI model.", + advanced=False, + value="OPENAI_API_KEY", + ), + FloatInput(name="temperature", display_name="Temperature", value=0.1), + IntInput( + name="seed", + display_name="Seed", + info="The seed controls the reproducibility of the job.", + advanced=True, + value=1, + ), + HandleInput( + name="output_parser", + display_name="Output Parser", + info="The parser to use to parse the output of the model", + advanced=True, + input_types=["OutputParser"], + ), + ] + + def build_model(self) -> LanguageModel: # type: ignore[type-var] + # self.output_schema is a list of dictionaries + # let's convert it to a dictionary + output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {}) + openai_api_key = self.api_key + temperature = self.temperature + model_name: str = self.model_name + max_tokens = self.max_tokens + model_kwargs = self.model_kwargs or {} + openai_api_base = self.openai_api_base or "https://api.openai.com/v1" + json_mode = bool(output_schema_dict) or self.json_mode + seed = self.seed + + api_key = SecretStr(openai_api_key).get_secret_value() if openai_api_key else None + output = ChatOpenAI( + max_tokens=max_tokens or None, + model_kwargs=model_kwargs, + model=model_name, + base_url=openai_api_base, + api_key=api_key, + temperature=temperature if temperature is not None else 0.1, + seed=seed, + ) + if json_mode: + if output_schema_dict: + output = output.with_structured_output(schema=output_schema_dict, method="json_mode") + else: + output = output.bind(response_format={"type": "json_object"}) + + return output + + def _get_exception_message(self, e: Exception): + """Get a message from an OpenAI exception. + + Args: + e (Exception): The exception to get the message from. + + Returns: + str: The message from the exception. + """ + try: + from openai import BadRequestError + except ImportError: + return None + if isinstance(e, BadRequestError): + message = e.body.get("message") + if message: + return message + return None diff --git a/src/backend/base/langflow/components/models/perplexity.py b/src/backend/base/langflow/components/models/perplexity.py new file mode 100644 index 000000000000..a1adc50e2d48 --- /dev/null +++ b/src/backend/base/langflow/components/models/perplexity.py @@ -0,0 +1,89 @@ +from langchain_community.chat_models import ChatPerplexity +from pydantic.v1 import SecretStr + +from langflow.base.models.model import LCModelComponent +from langflow.field_typing import LanguageModel +from langflow.inputs.inputs import HandleInput +from langflow.io import DropdownInput, FloatInput, IntInput, SecretStrInput + + +class PerplexityComponent(LCModelComponent): + display_name = "Perplexity" + description = "Generate text using Perplexity LLMs." 
+ documentation = "https://python.langchain.com/v0.2/docs/integrations/chat/perplexity/" + icon = "Perplexity" + name = "PerplexityModel" + + inputs = [ + *LCModelComponent._base_inputs, + DropdownInput( + name="model_name", + display_name="Model Name", + advanced=False, + options=[ + "llama-3.1-sonar-small-128k-online", + "llama-3.1-sonar-large-128k-online", + "llama-3.1-sonar-huge-128k-online", + "llama-3.1-sonar-small-128k-chat", + "llama-3.1-sonar-large-128k-chat", + "llama-3.1-8b-instruct", + "llama-3.1-70b-instruct", + ], + value="llama-3.1-sonar-small-128k-online", + ), + IntInput( + name="max_output_tokens", display_name="Max Output Tokens", info="The maximum number of tokens to generate." + ), + SecretStrInput( + name="api_key", + display_name="Perplexity API Key", + info="The Perplexity API Key to use for the Perplexity model.", + advanced=False, + ), + FloatInput(name="temperature", display_name="Temperature", value=0.75), + FloatInput( + name="top_p", + display_name="Top P", + info="The maximum cumulative probability of tokens to consider when sampling.", + advanced=True, + ), + IntInput( + name="n", + display_name="N", + info="Number of chat completions to generate for each prompt. " + "Note that the API may not return the full n completions if duplicates are generated.", + advanced=True, + ), + IntInput( + name="top_k", + display_name="Top K", + info="Decode using top-k sampling: consider the set of top_k most probable tokens. Must be positive.", + advanced=True, + ), + HandleInput( + name="output_parser", + display_name="Output Parser", + info="The parser to use to parse the output of the model", + advanced=True, + input_types=["OutputParser"], + ), + ] + + def build_model(self) -> LanguageModel: # type: ignore[type-var] + api_key = SecretStr(self.api_key).get_secret_value() + temperature = self.temperature + model = self.model_name + max_output_tokens = self.max_output_tokens + top_k = self.top_k + top_p = self.top_p + n = self.n + + return ChatPerplexity( + model=model, + temperature=temperature or 0.75, + pplx_api_key=api_key, + top_k=top_k or None, + top_p=top_p or None, + n=n or 1, + max_output_tokens=max_output_tokens, + ) diff --git a/src/backend/base/langflow/components/models/vertexai.py b/src/backend/base/langflow/components/models/vertexai.py new file mode 100644 index 000000000000..638d9eb6de39 --- /dev/null +++ b/src/backend/base/langflow/components/models/vertexai.py @@ -0,0 +1,79 @@ +from typing import cast + +from langflow.base.models.model import LCModelComponent +from langflow.field_typing import LanguageModel +from langflow.inputs import MessageTextInput +from langflow.inputs.inputs import HandleInput +from langflow.io import BoolInput, FileInput, FloatInput, IntInput, StrInput + + +class ChatVertexAIComponent(LCModelComponent): + display_name = "Vertex AI" + description = "Generate text using Vertex AI LLMs." + icon = "VertexAI" + name = "VertexAiModel" + + inputs = [ + *LCModelComponent._base_inputs, + FileInput( + name="credentials", + display_name="Credentials", + info="JSON credentials file. 
Leave empty to fall back to environment variables.",
+            file_types=["json"],
+        ),
+        MessageTextInput(name="model_name", display_name="Model Name", value="gemini-1.5-pro"),
+        StrInput(name="project", display_name="Project", info="The project ID.", advanced=True),
+        StrInput(name="location", display_name="Location", value="us-central1", advanced=True),
+        IntInput(name="max_output_tokens", display_name="Max Output Tokens", advanced=True),
+        IntInput(name="max_retries", display_name="Max Retries", value=1, advanced=True),
+        FloatInput(name="temperature", value=0.0, display_name="Temperature"),
+        IntInput(name="top_k", display_name="Top K", advanced=True),
+        FloatInput(name="top_p", display_name="Top P", value=0.95, advanced=True),
+        BoolInput(name="verbose", display_name="Verbose", value=False, advanced=True),
+        HandleInput(
+            name="output_parser",
+            display_name="Output Parser",
+            info="The parser to use to parse the output of the model",
+            advanced=True,
+            input_types=["OutputParser"],
+        ),
+    ]
+
+    def build_model(self) -> LanguageModel:
+        try:
+            from langchain_google_vertexai import ChatVertexAI
+        except ImportError as e:
+            msg = "Please install the langchain-google-vertexai package to use the Vertex AI component."
+            raise ImportError(msg) from e
+        location = self.location or None
+        if self.credentials:
+            from google.cloud import aiplatform
+            from google.oauth2 import service_account
+
+            credentials = service_account.Credentials.from_service_account_file(self.credentials)
+            project = self.project or credentials.project_id
+            # ChatVertexAI sometimes skips manual credentials initialization, so initialize aiplatform explicitly
+            aiplatform.init(
+                project=project,
+                location=location,
+                credentials=credentials,
+            )
+        else:
+            project = self.project or None
+            credentials = None
+
+        return cast(
+            LanguageModel,
+            ChatVertexAI(
+                credentials=credentials,
+                location=location,
+                project=project,
+                max_output_tokens=self.max_output_tokens or None,
+                max_retries=self.max_retries,
+                model_name=self.model_name,
+                temperature=self.temperature,
+                top_k=self.top_k or None,
+                top_p=self.top_p,
+                verbose=self.verbose,
+            ),
+        )
diff --git a/src/backend/base/langflow/components/nvidia/__init__.py b/src/backend/base/langflow/components/nvidia/__init__.py
new file mode 100644
index 000000000000..1e6f375b25e0
--- /dev/null
+++ b/src/backend/base/langflow/components/nvidia/__init__.py
@@ -0,0 +1,3 @@
+from .nvidia_rerank import NvidiaRerankComponent
+
+__all__ = ["NvidiaRerankComponent"]
diff --git a/src/backend/base/langflow/components/nvidia/nvidia_rerank.py b/src/backend/base/langflow/components/nvidia/nvidia_rerank.py
new file mode 100644
index 000000000000..1092e03436e1
--- /dev/null
+++ b/src/backend/base/langflow/components/nvidia/nvidia_rerank.py
@@ -0,0 +1,98 @@
+from typing import Any, cast
+
+from langchain.retrievers import ContextualCompressionRetriever
+
+from langflow.base.vectorstores.model import (
+    LCVectorStoreComponent,
+    check_cached_vector_store,
+)
+from langflow.field_typing import Retriever, VectorStore
+from langflow.io import (
+    DropdownInput,
+    HandleInput,
+    MultilineInput,
+    SecretStrInput,
+    StrInput,
+)
+from langflow.schema import Data
+from langflow.schema.dotdict import dotdict
+from langflow.template.field.base import Output
+
+
+class NvidiaRerankComponent(LCVectorStoreComponent):
+    display_name = "NVIDIA Rerank"
+    description = "Rerank documents using the NVIDIA API and a retriever."
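+    # Reranking is wired through LangChain's ContextualCompressionRetriever in
+    # build_base_retriever() below: the connected retriever fetches candidate
+    # documents and NVIDIARerank re-scores them. A rough usage sketch (names
+    # illustrative):
+    #   retriever = component.build_base_retriever()
+    #   docs = await retriever.ainvoke("what is CUDA?")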
+ icon = "NVIDIA" + legacy: bool = True + + inputs = [ + MultilineInput( + name="search_query", + display_name="Search Query", + ), + StrInput( + name="base_url", + display_name="Base URL", + value="https://integrate.api.nvidia.com/v1", + refresh_button=True, + info="The base URL of the NVIDIA API. Defaults to https://integrate.api.nvidia.com/v1.", + ), + DropdownInput( + name="model", + display_name="Model", + options=["nv-rerank-qa-mistral-4b:1"], + value="nv-rerank-qa-mistral-4b:1", + ), + SecretStrInput(name="api_key", display_name="API Key"), + HandleInput(name="retriever", display_name="Retriever", input_types=["Retriever"]), + ] + + outputs = [ + Output( + display_name="Retriever", + name="base_retriever", + method="build_base_retriever", + ), + Output( + display_name="Search Results", + name="search_results", + method="search_documents", + ), + ] + + def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None): + if field_name == "base_url" and field_value: + try: + build_model = self.build_model() + ids = [model.id for model in build_model.available_models] + build_config["model"]["options"] = ids + build_config["model"]["value"] = ids[0] + except Exception as e: + msg = f"Error getting model names: {e}" + raise ValueError(msg) from e + return build_config + + def build_model(self): + try: + from langchain_nvidia_ai_endpoints import NVIDIARerank + except ImportError as e: + msg = "Please install langchain-nvidia-ai-endpoints to use the NVIDIA model." + raise ImportError(msg) from e + return NVIDIARerank(api_key=self.api_key, model=self.model, base_url=self.base_url) + + def build_base_retriever(self) -> Retriever: # type: ignore[type-var] + nvidia_reranker = self.build_model() + retriever = ContextualCompressionRetriever(base_compressor=nvidia_reranker, base_retriever=self.retriever) + return cast(Retriever, retriever) + + async def search_documents(self) -> list[Data]: # type: ignore[override] + retriever = self.build_base_retriever() + documents = await retriever.ainvoke(self.search_query, config={"callbacks": self.get_langchain_callbacks()}) + data = self.to_data(documents) + self.status = data + return data + + @check_cached_vector_store + def build_vector_store(self) -> VectorStore: + msg = "NVIDIA Rerank does not support vector stores." + raise NotImplementedError(msg) diff --git a/src/backend/base/langflow/components/output_parsers/__init__.py b/src/backend/base/langflow/components/output_parsers/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/src/backend/base/langflow/components/outputs/ChatOutput.py b/src/backend/base/langflow/components/outputs/ChatOutput.py deleted file mode 100644 index 45972712c8b7..000000000000 --- a/src/backend/base/langflow/components/outputs/ChatOutput.py +++ /dev/null @@ -1,81 +0,0 @@ -from langflow.base.io.chat import ChatComponent -from langflow.inputs import BoolInput -from langflow.io import DropdownInput, MessageTextInput, Output -from langflow.memory import store_message -from langflow.schema.message import Message -from langflow.utils.constants import MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER, MESSAGE_SENDER_AI - - -class ChatOutput(ChatComponent): - display_name = "Chat Output" - description = "Display a chat message in the Playground." 
- icon = "ChatOutput" - name = "ChatOutput" - - inputs = [ - MessageTextInput( - name="input_value", - display_name="Text", - info="Message to be passed as output.", - ), - BoolInput( - name="should_store_message", - display_name="Store Messages", - info="Store the message in the history.", - value=True, - advanced=True, - ), - DropdownInput( - name="sender", - display_name="Sender Type", - options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER], - value=MESSAGE_SENDER_AI, - advanced=True, - info="Type of sender.", - ), - MessageTextInput( - name="sender_name", - display_name="Sender Name", - info="Name of the sender.", - value=MESSAGE_SENDER_NAME_AI, - advanced=True, - ), - MessageTextInput( - name="session_id", - display_name="Session ID", - info="The session ID of the chat. If empty, the current session ID parameter will be used.", - advanced=True, - ), - MessageTextInput( - name="data_template", - display_name="Data Template", - value="{text}", - advanced=True, - info="Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.", - ), - ] - outputs = [ - Output(display_name="Message", name="message", method="message_response"), - ] - - def message_response(self) -> Message: - message = Message( - text=self.input_value, - sender=self.sender, - sender_name=self.sender_name, - session_id=self.session_id, - ) - if ( - self.session_id - and isinstance(message, Message) - and isinstance(message.text, str) - and self.should_store_message - ): - store_message( - message, - flow_id=self.graph.flow_id, - ) - self.message.value = message - - self.status = message - return message diff --git a/src/backend/base/langflow/components/outputs/TextOutput.py b/src/backend/base/langflow/components/outputs/TextOutput.py deleted file mode 100644 index ee8276b42b77..000000000000 --- a/src/backend/base/langflow/components/outputs/TextOutput.py +++ /dev/null @@ -1,28 +0,0 @@ -from langflow.base.io.text import TextComponent -from langflow.io import MessageTextInput, Output -from langflow.schema.message import Message - - -class TextOutputComponent(TextComponent): - display_name = "Text Output" - description = "Display a text output in the Playground." 
- icon = "type" - name = "TextOutput" - - inputs = [ - MessageTextInput( - name="input_value", - display_name="Text", - info="Text to be passed as output.", - ), - ] - outputs = [ - Output(display_name="Text", name="text", method="text_response"), - ] - - def text_response(self) -> Message: - message = Message( - text=self.input_value, - ) - self.status = self.input_value - return message diff --git a/src/backend/base/langflow/components/outputs/__init__.py b/src/backend/base/langflow/components/outputs/__init__.py index 35200e0feef1..0a48af2e4a21 100644 --- a/src/backend/base/langflow/components/outputs/__init__.py +++ b/src/backend/base/langflow/components/outputs/__init__.py @@ -1,4 +1,4 @@ -from .ChatOutput import ChatOutput -from .TextOutput import TextOutputComponent +from .chat import ChatOutput +from .text import TextOutputComponent __all__ = ["ChatOutput", "TextOutputComponent"] diff --git a/src/backend/base/langflow/components/outputs/chat.py b/src/backend/base/langflow/components/outputs/chat.py new file mode 100644 index 000000000000..b6751c231295 --- /dev/null +++ b/src/backend/base/langflow/components/outputs/chat.py @@ -0,0 +1,116 @@ +from langflow.base.io.chat import ChatComponent +from langflow.inputs import BoolInput +from langflow.io import DropdownInput, MessageInput, MessageTextInput, Output +from langflow.schema.message import Message +from langflow.schema.properties import Source +from langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER + + +class ChatOutput(ChatComponent): + display_name = "Chat Output" + description = "Display a chat message in the Playground." + icon = "MessagesSquare" + name = "ChatOutput" + + inputs = [ + MessageInput( + name="input_value", + display_name="Text", + info="Message to be passed as output.", + ), + BoolInput( + name="should_store_message", + display_name="Store Messages", + info="Store the message in the history.", + value=True, + advanced=True, + ), + DropdownInput( + name="sender", + display_name="Sender Type", + options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER], + value=MESSAGE_SENDER_AI, + advanced=True, + info="Type of sender.", + ), + MessageTextInput( + name="sender_name", + display_name="Sender Name", + info="Name of the sender.", + value=MESSAGE_SENDER_NAME_AI, + advanced=True, + ), + MessageTextInput( + name="session_id", + display_name="Session ID", + info="The session ID of the chat. If empty, the current session ID parameter will be used.", + advanced=True, + ), + MessageTextInput( + name="data_template", + display_name="Data Template", + value="{text}", + advanced=True, + info="Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.", + ), + MessageTextInput( + name="background_color", + display_name="Background Color", + info="The background color of the icon.", + advanced=True, + ), + MessageTextInput( + name="chat_icon", + display_name="Icon", + info="The icon of the message.", + advanced=True, + ), + MessageTextInput( + name="text_color", + display_name="Text Color", + info="The text color of the name", + advanced=True, + ), + ] + outputs = [ + Output( + display_name="Message", + name="message", + method="message_response", + ), + ] + + def _build_source(self, _id: str | None, display_name: str | None, source: str | None) -> Source: + source_dict = {} + if _id: + source_dict["id"] = _id + if display_name: + source_dict["display_name"] = display_name + if source: + source_dict["source"] = source + return Source(**source_dict) + + def message_response(self) -> Message: + _source, _icon, _display_name, _source_id = self.get_properties_from_source_component() + _background_color = self.background_color + _text_color = self.text_color + if self.chat_icon: + _icon = self.chat_icon + message = self.input_value if isinstance(self.input_value, Message) else Message(text=self.input_value) + message.sender = self.sender + message.sender_name = self.sender_name + message.session_id = self.session_id + message.flow_id = self.graph.flow_id if hasattr(self, "graph") else None + message.properties.source = self._build_source(_source_id, _display_name, _source) + message.properties.icon = _icon + message.properties.background_color = _background_color + message.properties.text_color = _text_color + if self.session_id and isinstance(message, Message) and self.should_store_message: + stored_message = self.send_message( + message, + ) + self.message.value = stored_message + message = stored_message + + self.status = message + return message diff --git a/src/backend/base/langflow/components/outputs/text.py b/src/backend/base/langflow/components/outputs/text.py new file mode 100644 index 000000000000..e387d50fbf31 --- /dev/null +++ b/src/backend/base/langflow/components/outputs/text.py @@ -0,0 +1,28 @@ +from langflow.base.io.text import TextComponent +from langflow.io import MultilineInput, Output +from langflow.schema.message import Message + + +class TextOutputComponent(TextComponent): + display_name = "Text Output" + description = "Display a text output in the Playground." 
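+    # Pass-through component: text_response() below simply wraps the incoming
+    # text in a Message and mirrors it to self.status for Playground display.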
+ icon = "type" + name = "TextOutput" + + inputs = [ + MultilineInput( + name="input_value", + display_name="Text", + info="Text to be passed as output.", + ), + ] + outputs = [ + Output(display_name="Text", name="text", method="text_response"), + ] + + def text_response(self) -> Message: + message = Message( + text=self.input_value, + ) + self.status = self.input_value + return message diff --git a/src/backend/base/langflow/components/processing/__init__.py b/src/backend/base/langflow/components/processing/__init__.py new file mode 100644 index 000000000000..727dac795a60 --- /dev/null +++ b/src/backend/base/langflow/components/processing/__init__.py @@ -0,0 +1,27 @@ +from .combine_text import CombineTextComponent +from .create_data import CreateDataComponent +from .extract_key import ExtractDataKeyComponent +from .filter_data_values import DataFilterComponent +from .json_cleaner import JSONCleaner +from .merge_data import MergeDataComponent +from .message_to_data import MessageToDataComponent +from .parse_data import ParseDataComponent +from .parse_json_data import ParseJSONDataComponent +from .select_data import SelectDataComponent +from .split_text import SplitTextComponent +from .update_data import UpdateDataComponent + +__all__ = [ + "CreateDataComponent", + "ExtractDataKeyComponent", + "DataFilterComponent", + "MergeDataComponent", + "MessageToDataComponent", + "ParseDataComponent", + "SelectDataComponent", + "UpdateDataComponent", + "ParseJSONDataComponent", + "JSONCleaner", + "CombineTextComponent", + "SplitTextComponent", +] diff --git a/src/backend/base/langflow/components/helpers/CombineText.py b/src/backend/base/langflow/components/processing/combine_text.py similarity index 100% rename from src/backend/base/langflow/components/helpers/CombineText.py rename to src/backend/base/langflow/components/processing/combine_text.py diff --git a/src/backend/base/langflow/components/processing/create_data.py b/src/backend/base/langflow/components/processing/create_data.py new file mode 100644 index 000000000000..6a437664d34e --- /dev/null +++ b/src/backend/base/langflow/components/processing/create_data.py @@ -0,0 +1,109 @@ +from typing import Any + +from langflow.custom import Component +from langflow.field_typing.range_spec import RangeSpec +from langflow.inputs.inputs import BoolInput, DictInput, IntInput, MessageTextInput +from langflow.io import Output +from langflow.schema import Data +from langflow.schema.dotdict import dotdict + + +class CreateDataComponent(Component): + display_name: str = "Create Data" + description: str = "Dynamically create a Data with a specified number of fields." 
+ name: str = "CreateData" + MAX_FIELDS = 15 # Define a constant for maximum number of fields + legacy = True + + inputs = [ + IntInput( + name="number_of_fields", + display_name="Number of Fields", + info="Number of fields to be added to the record.", + real_time_refresh=True, + value=1, + range_spec=RangeSpec(min=1, max=MAX_FIELDS, step=1, step_type="int"), + ), + MessageTextInput( + name="text_key", + display_name="Text Key", + info="Key that identifies the field to be used as the text content.", + advanced=True, + ), + BoolInput( + name="text_key_validator", + display_name="Text Key Validator", + advanced=True, + info="If enabled, checks if the given 'Text Key' is present in the given 'Data'.", + ), + ] + + outputs = [ + Output(display_name="Data", name="data", method="build_data"), + ] + + def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None): + if field_name == "number_of_fields": + default_keys = ["code", "_type", "number_of_fields", "text_key", "text_key_validator"] + try: + field_value_int = int(field_value) + except ValueError: + return build_config + existing_fields = {} + if field_value_int > self.MAX_FIELDS: + build_config["number_of_fields"]["value"] = self.MAX_FIELDS + msg = ( + f"Number of fields cannot exceed {self.MAX_FIELDS}. " + "Please adjust the number of fields to be within the allowed limit." + ) + raise ValueError(msg) + if len(build_config) > len(default_keys): + # back up the existing template fields + for key in build_config.copy(): + if key not in default_keys: + existing_fields[key] = build_config.pop(key) + + for i in range(1, field_value_int + 1): + key = f"field_{i}_key" + if key in existing_fields: + field = existing_fields[key] + build_config[key] = field + else: + field = DictInput( + display_name=f"Field {i}", + name=key, + info=f"Key for field {i}.", + input_types=["Text", "Data"], + ) + build_config[field.name] = field.to_dict() + + build_config["number_of_fields"]["value"] = field_value_int + return build_config + + async def build_data(self) -> Data: + data = self.get_data() + return_data = Data(data=data, text_key=self.text_key) + self.status = return_data + if self.text_key_validator: + self.validate_text_key() + return return_data + + def get_data(self): + """Function to get the Data from the attributes.""" + data = {} + for value_dict in self._attributes.values(): + if isinstance(value_dict, dict): + # Check if the value of the value_dict is a Data + _value_dict = { + key: value.get_text() if isinstance(value, Data) else value for key, value in value_dict.items() + } + data.update(_value_dict) + return data + + def validate_text_key(self) -> None: + """This function validates that the Text Key is one of the keys in the Data.""" + data_keys = self.get_data().keys() + if self.text_key not in data_keys and self.text_key != "": + formatted_data_keys = ", ".join(data_keys) + msg = f"Text Key: '{self.text_key}' not found in the Data keys: '{formatted_data_keys}'" + raise ValueError(msg) diff --git a/src/backend/base/langflow/components/processing/extract_key.py b/src/backend/base/langflow/components/processing/extract_key.py new file mode 100644 index 000000000000..eaeb82b72f07 --- /dev/null +++ b/src/backend/base/langflow/components/processing/extract_key.py @@ -0,0 +1,53 @@ +from langflow.custom import Component +from langflow.io import DataInput, Output, StrInput +from langflow.schema import Data + + +class ExtractDataKeyComponent(Component): + display_name = "Extract Key" + description = ( + "Extract 
a specific key from a Data object or a list of "
+        "Data objects and return the extracted value(s) as Data object(s)."
+    )
+    icon = "key"
+    name = "ExtractKey"
+    legacy = True
+
+    inputs = [
+        DataInput(
+            name="data_input",
+            display_name="Data Input",
+            info="The Data object or list of Data objects to extract the key from.",
+        ),
+        StrInput(
+            name="key",
+            display_name="Key to Extract",
+            info="The key in the Data object(s) to extract.",
+        ),
+    ]
+
+    outputs = [
+        Output(display_name="Extracted Data", name="extracted_data", method="extract_key"),
+    ]
+
+    def extract_key(self) -> Data | list[Data]:
+        key = self.key
+
+        if isinstance(self.data_input, list):
+            result = []
+            for item in self.data_input:
+                if isinstance(item, Data) and key in item.data:
+                    extracted_value = item.data[key]
+                    result.append(Data(data={key: extracted_value}))
+            self.status = result
+            return result
+        if isinstance(self.data_input, Data):
+            if key in self.data_input.data:
+                extracted_value = self.data_input.data[key]
+                result = Data(data={key: extracted_value})
+                self.status = result
+                return result
+            self.status = f"Key '{key}' not found in Data object."
+            return Data(data={"error": f"Key '{key}' not found in Data object."})
+        self.status = "Invalid input. Expected Data object or list of Data objects."
+        return Data(data={"error": "Invalid input. Expected Data object or list of Data objects."})
diff --git a/src/backend/base/langflow/components/processing/filter_data.py b/src/backend/base/langflow/components/processing/filter_data.py
new file mode 100644
index 000000000000..bbdfe681cbee
--- /dev/null
+++ b/src/backend/base/langflow/components/processing/filter_data.py
@@ -0,0 +1,41 @@
+from langflow.custom import Component
+from langflow.io import DataInput, MessageTextInput, Output
+from langflow.schema import Data
+
+
+class FilterDataComponent(Component):
+    display_name = "Filter Data"
+    description = "Filters a Data object based on a list of keys."
+    icon = "filter"
+    beta = True
+    name = "FilterData"
+
+    inputs = [
+        DataInput(
+            name="data",
+            display_name="Data",
+            info="Data object to filter.",
+        ),
+        MessageTextInput(
+            name="filter_criteria",
+            display_name="Filter Criteria",
+            info="List of keys to filter by.",
+            is_list=True,
+        ),
+    ]
+
+    outputs = [
+        Output(display_name="Filtered Data", name="filtered_data", method="filter_data"),
+    ]
+
+    def filter_data(self) -> Data:
+        filter_criteria: list[str] = self.filter_criteria
+        data = self.data.data if isinstance(self.data, Data) else {}
+
+        # Filter the data
+        filtered = {key: value for key, value in data.items() if key in filter_criteria}
+
+        # Create a new Data object with the filtered data
+        filtered_data = Data(data=filtered)
+        self.status = filtered_data
+        return filtered_data
diff --git a/src/backend/base/langflow/components/processing/filter_data_values.py b/src/backend/base/langflow/components/processing/filter_data_values.py
new file mode 100644
index 000000000000..ff3afb132382
--- /dev/null
+++ b/src/backend/base/langflow/components/processing/filter_data_values.py
@@ -0,0 +1,87 @@
+from typing import Any
+
+from langflow.custom import Component
+from langflow.io import DataInput, DropdownInput, MessageTextInput, Output
+from langflow.schema import Data
+
+
+class DataFilterComponent(Component):
+    display_name = "Filter Values"
+    description = (
+        "Filter a list of data items based on a specified key, filter value,"
+        " and comparison operator. Check advanced options to select the match comparison."
+ ) + icon = "filter" + beta = True + name = "FilterDataValues" + + inputs = [ + DataInput(name="input_data", display_name="Input Data", info="The list of data items to filter.", is_list=True), + MessageTextInput( + name="filter_key", + display_name="Filter Key", + info="The key to filter on (e.g., 'route').", + value="route", + input_types=["Data"], + ), + MessageTextInput( + name="filter_value", + display_name="Filter Value", + info="The value to filter by (e.g., 'CMIP').", + value="CMIP", + input_types=["Data"], + ), + DropdownInput( + name="operator", + display_name="Comparison Operator", + options=["equals", "not equals", "contains", "starts with", "ends with"], + info="The operator to apply for comparing the values.", + value="equals", + advanced=True, + ), + ] + + outputs = [ + Output(display_name="Filtered Data", name="filtered_data", method="filter_data"), + ] + + def compare_values(self, item_value: Any, filter_value: str, operator: str) -> bool: + if operator == "equals": + return str(item_value) == filter_value + if operator == "not equals": + return str(item_value) != filter_value + if operator == "contains": + return filter_value in str(item_value) + if operator == "starts with": + return str(item_value).startswith(filter_value) + if operator == "ends with": + return str(item_value).endswith(filter_value) + return False + + def filter_data(self) -> list[Data]: + # Extract inputs + input_data: list[Data] = self.input_data + filter_key: str = self.filter_key.text + filter_value: str = self.filter_value.text + operator: str = self.operator + + # Validate inputs + if not input_data: + self.status = "Input data is empty." + return [] + + if not filter_key or not filter_value: + self.status = "Filter key or value is missing." + return input_data + + # Filter the data + filtered_data = [] + for item in input_data: + if isinstance(item.data, dict) and filter_key in item.data: + if self.compare_values(item.data[filter_key], filter_value, operator): + filtered_data.append(item) + else: + self.status = f"Warning: Some items don't have the key '{filter_key}' or are not dictionaries." + + self.status = filtered_data + return filtered_data diff --git a/src/backend/base/langflow/components/processing/json_cleaner.py b/src/backend/base/langflow/components/processing/json_cleaner.py new file mode 100644 index 000000000000..d9d6051c8107 --- /dev/null +++ b/src/backend/base/langflow/components/processing/json_cleaner.py @@ -0,0 +1,99 @@ +import json +import re +import unicodedata + +from langflow.custom import Component +from langflow.inputs import BoolInput, MessageTextInput +from langflow.schema.message import Message +from langflow.template import Output + + +class JSONCleaner(Component): + icon = "braces" + display_name = "JSON Cleaner" + description = ( + "Cleans the messy and sometimes incorrect JSON strings produced by LLMs " + "so that they are fully compliant with the JSON spec." 
+    )
+
+    inputs = [
+        MessageTextInput(
+            name="json_str", display_name="JSON String", info="The JSON string to be cleaned.", required=True
+        ),
+        BoolInput(
+            name="remove_control_chars",
+            display_name="Remove Control Characters",
+            info="Remove control characters from the JSON string.",
+            required=False,
+        ),
+        BoolInput(
+            name="normalize_unicode",
+            display_name="Normalize Unicode",
+            info="Normalize Unicode characters in the JSON string.",
+            required=False,
+        ),
+        BoolInput(
+            name="validate_json",
+            display_name="Validate JSON",
+            info="Validate the JSON string to ensure it is well-formed.",
+            required=False,
+        ),
+    ]
+
+    outputs = [
+        Output(display_name="Cleaned JSON String", name="output", method="clean_json"),
+    ]
+
+    def clean_json(self) -> Message:
+        """Clean the input JSON string based on provided options and return the cleaned JSON string."""
+        try:
+            from json_repair import repair_json
+        except ImportError as e:
+            msg = "Could not import the json_repair package. Please install it with `pip install json_repair`."
+            raise ImportError(msg) from e
+
+        json_str = self.json_str
+        remove_control_chars = self.remove_control_chars
+        normalize_unicode = self.normalize_unicode
+        validate_json = self.validate_json
+
+        start = json_str.find("{")
+        end = json_str.rfind("}")
+        if start == -1 or end == -1:
+            msg = "Invalid JSON string: Missing '{' or '}'"
+            raise ValueError(msg)
+        try:
+            json_str = json_str[start : end + 1]
+
+            if remove_control_chars:
+                json_str = self._remove_control_characters(json_str)
+            if normalize_unicode:
+                json_str = self._normalize_unicode(json_str)
+            if validate_json:
+                json_str = self._validate_json(json_str)
+
+            cleaned_json_str = repair_json(json_str)
+            result = str(cleaned_json_str)
+
+            self.status = result
+            return Message(text=result)
+        except Exception as e:
+            msg = f"Error cleaning JSON string: {e}"
+            raise ValueError(msg) from e
+
+    def _remove_control_characters(self, s: str) -> str:
+        """Remove control characters from the string."""
+        return re.sub(r"[\x00-\x1F\x7F]", "", s)
+
+    def _normalize_unicode(self, s: str) -> str:
+        """Normalize Unicode characters in the string."""
+        return unicodedata.normalize("NFC", s)
+
+    def _validate_json(self, s: str) -> str:
+        """Validate the JSON string."""
+        try:
+            json.loads(s)
+        except json.JSONDecodeError as e:
+            msg = f"Invalid JSON string: {e}"
+            raise ValueError(msg) from e
+        return s
diff --git a/src/backend/base/langflow/components/processing/merge_data.py b/src/backend/base/langflow/components/processing/merge_data.py
new file mode 100644
index 000000000000..b82d8160aa92
--- /dev/null
+++ b/src/backend/base/langflow/components/processing/merge_data.py
@@ -0,0 +1,94 @@
+from loguru import logger
+
+from langflow.custom import Component
+from langflow.io import DataInput, Output
+from langflow.schema import Data
+
+
+class MergeDataComponent(Component):
+    """MergeDataComponent is responsible for combining multiple Data objects into a unified list of Data objects.
+
+    It ensures that all keys across the input Data objects are present in each merged Data object.
+    Missing keys are filled with empty strings to maintain consistency.
+    """
+
+    display_name = "Merge Data"
+    description = (
+        "Combines multiple Data objects into a unified list, ensuring all keys are present in each Data object."
+ ) + icon = "merge" + + inputs = [ + DataInput( + name="data_inputs", + display_name="Data Inputs", + is_list=True, + info="A list of Data inputs objects to be merged.", + ), + ] + + outputs = [ + Output( + display_name="Merged Data", + name="merged_data", + method="merge_data", + ), + ] + + def merge_data(self) -> list[Data]: + """Merges multiple Data objects into a single list of Data objects. + + Ensures that all keys from the input Data objects are present in each merged Data object. + Missing keys are filled with empty strings. + + Returns: + List[Data]: A list of merged Data objects with consistent keys. + """ + logger.info("Initiating the data merging process.") + + data_inputs: list[Data] = self.data_inputs + logger.debug(f"Received {len(data_inputs)} data input(s) for merging.") + + if not data_inputs: + logger.warning("No data inputs provided. Returning an empty list.") + return [] + + # Collect all unique keys from all Data objects + all_keys: set[str] = set() + for idx, data_input in enumerate(data_inputs): + if not isinstance(data_input, Data): + error_message = f"Data input at index {idx} is not of type Data." + logger.error(error_message) + type_error_message = ( + f"All items in data_inputs must be of type Data. Item at index {idx} is {type(data_input)}" + ) + raise TypeError(type_error_message) + all_keys.update(data_input.data.keys()) + logger.debug(f"Collected {len(all_keys)} unique key(s) from input data.") + + try: + # Create new list of Data objects with missing keys filled with empty strings + merged_data_list = [] + for idx, data_input in enumerate(data_inputs): + merged_data_dict = {} + + for key in all_keys: + # Use the existing value if the key exists, otherwise use an empty string + value = data_input.data.get(key, "") + if key not in data_input.data: + log_message = f"Key '{key}' missing in data input at index {idx}. " "Assigning empty string." 
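+                        # e.g. (hypothetical) this logs: "Key 'age' missing in data
+                        # input at index 0. Assigning empty string."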
+ logger.debug(log_message) + merged_data_dict[key] = value + + merged_data = Data( + text_key=data_input.text_key, data=merged_data_dict, default_value=data_input.default_value + ) + merged_data_list.append(merged_data) + logger.debug("Merged Data object created for input at index: " + str(idx)) + + except Exception: + logger.exception("An error occurred during the data merging process.") + raise + + logger.info("Data merging process completed successfully.") + return merged_data_list diff --git a/src/backend/base/langflow/components/processing/message_to_data.py b/src/backend/base/langflow/components/processing/message_to_data.py new file mode 100644 index 000000000000..f5303deb57c9 --- /dev/null +++ b/src/backend/base/langflow/components/processing/message_to_data.py @@ -0,0 +1,36 @@ +from loguru import logger + +from langflow.custom import Component +from langflow.io import MessageInput, Output +from langflow.schema import Data +from langflow.schema.message import Message + + +class MessageToDataComponent(Component): + display_name = "Message to Data" + description = "Convert a Message object to a Data object" + icon = "message-square-share" + beta = True + name = "MessagetoData" + + inputs = [ + MessageInput( + name="message", + display_name="Message", + info="The Message object to convert to a Data object", + ), + ] + + outputs = [ + Output(display_name="Data", name="data", method="convert_message_to_data"), + ] + + def convert_message_to_data(self) -> Data: + if isinstance(self.message, Message): + # Convert Message to Data + return Data(data=self.message.data) + + msg = "Error converting Message to Data: Input must be a Message object" + logger.opt(exception=True).debug(msg) + self.status = msg + return Data(data={"error": msg}) diff --git a/src/backend/base/langflow/components/processing/parse_data.py b/src/backend/base/langflow/components/processing/parse_data.py new file mode 100644 index 000000000000..8e1156bcc65d --- /dev/null +++ b/src/backend/base/langflow/components/processing/parse_data.py @@ -0,0 +1,35 @@ +from langflow.custom import Component +from langflow.helpers.data import data_to_text +from langflow.io import DataInput, MultilineInput, Output, StrInput +from langflow.schema.message import Message + + +class ParseDataComponent(Component): + display_name = "Parse Data" + description = "Convert Data into plain text following a specified template." + icon = "braces" + name = "ParseData" + + inputs = [ + DataInput(name="data", display_name="Data", info="The data to convert to text."), + MultilineInput( + name="template", + display_name="Template", + info="The template to use for formatting the data. 
" + "It can contain the keys {text}, {data} or any other key in the Data.", + value="{text}", + ), + StrInput(name="sep", display_name="Separator", advanced=True, value="\n"), + ] + + outputs = [ + Output(display_name="Text", name="text", method="parse_data"), + ] + + def parse_data(self) -> Message: + data = self.data if isinstance(self.data, list) else [self.data] + template = self.template + + result_string = data_to_text(template, data, sep=self.sep) + self.status = result_string + return Message(text=result_string) diff --git a/src/backend/base/langflow/components/processing/parse_json_data.py b/src/backend/base/langflow/components/processing/parse_json_data.py new file mode 100644 index 000000000000..0881b0b94a43 --- /dev/null +++ b/src/backend/base/langflow/components/processing/parse_json_data.py @@ -0,0 +1,90 @@ +import json +from json import JSONDecodeError + +import jq +from json_repair import repair_json +from loguru import logger + +from langflow.custom import Component +from langflow.inputs import HandleInput, MessageTextInput +from langflow.io import Output +from langflow.schema import Data +from langflow.schema.message import Message + + +class ParseJSONDataComponent(Component): + display_name = "Parse JSON" + description = "Convert and extract JSON fields." + icon = "braces" + name = "ParseJSONData" + legacy: bool = True + + inputs = [ + HandleInput( + name="input_value", + display_name="Input", + info="Data object to filter.", + required=True, + input_types=["Message", "Data"], + ), + MessageTextInput( + name="query", + display_name="JQ Query", + info="JQ Query to filter the data. The input is always a JSON list.", + required=True, + ), + ] + + outputs = [ + Output(display_name="Filtered Data", name="filtered_data", method="filter_data"), + ] + + def _parse_data(self, input_value) -> str: + if isinstance(input_value, Message) and isinstance(input_value.text, str): + return input_value.text + if isinstance(input_value, Data): + return json.dumps(input_value.data) + return str(input_value) + + def filter_data(self) -> list[Data]: + to_filter = self.input_value + if not to_filter: + return [] + # Check if input is a list + if isinstance(to_filter, list): + to_filter = [self._parse_data(f) for f in to_filter] + else: + to_filter = self._parse_data(to_filter) + + # If input is not a list, don't wrap it in a list + if not isinstance(to_filter, list): + to_filter = repair_json(to_filter) + try: + to_filter_as_dict = json.loads(to_filter) + except JSONDecodeError: + try: + to_filter_as_dict = json.loads(repair_json(to_filter)) + except JSONDecodeError as e: + msg = f"Invalid JSON: {e}" + raise ValueError(msg) from e + else: + to_filter = [repair_json(f) for f in to_filter] + to_filter_as_dict = [] + for f in to_filter: + try: + to_filter_as_dict.append(json.loads(f)) + except JSONDecodeError: + try: + to_filter_as_dict.append(json.loads(repair_json(f))) + except JSONDecodeError as e: + msg = f"Invalid JSON: {e}" + raise ValueError(msg) from e + to_filter = to_filter_as_dict + + full_filter_str = json.dumps(to_filter_as_dict) + + logger.info("to_filter: ", to_filter) + + results = jq.compile(self.query).input_text(full_filter_str).all() + logger.info("results: ", results) + return [Data(data=value) if isinstance(value, dict) else Data(text=str(value)) for value in results] diff --git a/src/backend/base/langflow/components/processing/select_data.py b/src/backend/base/langflow/components/processing/select_data.py new file mode 100644 index 000000000000..577c1d4fc812 --- /dev/null 
+++ b/src/backend/base/langflow/components/processing/select_data.py
@@ -0,0 +1,48 @@
+from langflow.custom import Component
+from langflow.field_typing.range_spec import RangeSpec
+from langflow.inputs.inputs import DataInput, IntInput
+from langflow.io import Output
+from langflow.schema import Data
+
+
+class SelectDataComponent(Component):
+    display_name: str = "Select Data"
+    description: str = "Select a single Data item from a list of Data objects."
+    name: str = "SelectData"
+    icon = "prototypes"
+    legacy = True
+
+    inputs = [
+        DataInput(
+            name="data_list",
+            display_name="Data List",
+            info="List of data to select from.",
+            is_list=True,  # Specify that this input takes a list of Data objects
+        ),
+        IntInput(
+            name="data_index",
+            display_name="Data Index",
+            info="Index of the data to select.",
+            value=0,  # Defaults to the first item in the list
+            range_spec=RangeSpec(min=0, max=15, step=1, step_type="int"),
+        ),
+    ]
+
+    outputs = [
+        Output(display_name="Selected Data", name="selected_data", method="select_data"),
+    ]
+
+    async def select_data(self) -> Data:
+        # Retrieve the selected index from the integer input
+        selected_index = int(self.data_index)
+        # self.data_list holds the Data objects provided to the input above
+
+        # Validate that the selected index is within bounds
+        if selected_index < 0 or selected_index >= len(self.data_list):
+            msg = f"Selected index {selected_index} is out of range."
+            raise ValueError(msg)
+
+        # Return the selected Data object
+        selected_data = self.data_list[selected_index]
+        self.status = selected_data  # Update the component status to reflect the selected data
+        return selected_data
diff --git a/src/backend/base/langflow/components/processing/split_text.py b/src/backend/base/langflow/components/processing/split_text.py
new file mode 100644
index 000000000000..36157538e0e8
--- /dev/null
+++ b/src/backend/base/langflow/components/processing/split_text.py
@@ -0,0 +1,63 @@
+from langchain_text_splitters import CharacterTextSplitter
+
+from langflow.custom import Component
+from langflow.io import HandleInput, IntInput, MessageTextInput, Output
+from langflow.schema import Data
+from langflow.utils.util import unescape_string
+
+
+class SplitTextComponent(Component):
+    display_name: str = "Split Text"
+    description: str = "Split text into chunks based on specified criteria."
+    icon = "scissors-line-dashed"
+    name = "SplitText"
+
+    inputs = [
+        HandleInput(
+            name="data_inputs",
+            display_name="Data Inputs",
+            info="The data to split.",
+            input_types=["Data"],
+            is_list=True,
+        ),
+        IntInput(
+            name="chunk_overlap",
+            display_name="Chunk Overlap",
+            info="Number of characters to overlap between chunks.",
+            value=200,
+        ),
+        IntInput(
+            name="chunk_size",
+            display_name="Chunk Size",
+            info="The maximum number of characters in each chunk.",
+            value=1000,
+        ),
+        MessageTextInput(
+            name="separator",
+            display_name="Separator",
+            info="The character to split on. 
Defaults to newline.", + value="\n", + ), + ] + + outputs = [ + Output(display_name="Chunks", name="chunks", method="split_text"), + ] + + def _docs_to_data(self, docs): + return [Data(text=doc.page_content, data=doc.metadata) for doc in docs] + + def split_text(self) -> list[Data]: + separator = unescape_string(self.separator) + + documents = [_input.to_lc_document() for _input in self.data_inputs if isinstance(_input, Data)] + + splitter = CharacterTextSplitter( + chunk_overlap=self.chunk_overlap, + chunk_size=self.chunk_size, + separator=separator, + ) + docs = splitter.split_documents(documents) + data = self._docs_to_data(docs) + self.status = data + return data diff --git a/src/backend/base/langflow/components/processing/update_data.py b/src/backend/base/langflow/components/processing/update_data.py new file mode 100644 index 000000000000..007d6d721617 --- /dev/null +++ b/src/backend/base/langflow/components/processing/update_data.py @@ -0,0 +1,157 @@ +from typing import Any + +from langflow.custom import Component +from langflow.field_typing.range_spec import RangeSpec +from langflow.inputs.inputs import ( + BoolInput, + DataInput, + DictInput, + IntInput, + MessageTextInput, +) +from langflow.io import Output +from langflow.schema import Data +from langflow.schema.dotdict import dotdict + + +class UpdateDataComponent(Component): + display_name: str = "Update Data" + description: str = "Dynamically update or append data with the specified fields." + name: str = "UpdateData" + MAX_FIELDS = 15 # Define a constant for maximum number of fields + + inputs = [ + DataInput( + name="old_data", + display_name="Data", + info="The record to update.", + is_list=True, # Changed to True to handle list of Data objects + ), + IntInput( + name="number_of_fields", + display_name="Number of Fields", + info="Number of fields to be added to the record.", + real_time_refresh=True, + value=0, + range_spec=RangeSpec(min=1, max=MAX_FIELDS, step=1, step_type="int"), + ), + MessageTextInput( + name="text_key", + display_name="Text Key", + info="Key that identifies the field to be used as the text content.", + advanced=True, + ), + BoolInput( + name="text_key_validator", + display_name="Text Key Validator", + advanced=True, + info="If enabled, checks if the given 'Text Key' is present in the given 'Data'.", + ), + ] + + outputs = [ + Output(display_name="Data", name="data", method="build_data"), + ] + + def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None): + """Update the build configuration when the number of fields changes. + + Args: + build_config (dotdict): The current build configuration. + field_value (Any): The new value for the field. + field_name (Optional[str]): The name of the field being updated. + """ + if field_name == "number_of_fields": + default_keys = { + "code", + "_type", + "number_of_fields", + "text_key", + "old_data", + "text_key_validator", + } + try: + field_value_int = int(field_value) + except ValueError: + return build_config + + if field_value_int > self.MAX_FIELDS: + build_config["number_of_fields"]["value"] = self.MAX_FIELDS + msg = f"Number of fields cannot exceed {self.MAX_FIELDS}. " "Try using a Component to combine two Data." 
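+                # e.g. (hypothetical) a request for number_of_fields=20 resets the
+                # UI value back to 15 before raising, leaving the node in a valid state.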
+ raise ValueError(msg) + + existing_fields = {} + # Back up the existing template fields + for key in list(build_config.keys()): + if key not in default_keys: + existing_fields[key] = build_config.pop(key) + + for i in range(1, field_value_int + 1): + key = f"field_{i}_key" + if key in existing_fields: + field = existing_fields[key] + build_config[key] = field + else: + field = DictInput( + display_name=f"Field {i}", + name=key, + info=f"Key for field {i}.", + input_types=["Text", "Data"], + ) + build_config[field.name] = field.to_dict() + + build_config["number_of_fields"]["value"] = field_value_int + return build_config + + async def build_data(self) -> Data | list[Data]: + """Build the updated data by combining the old data with new fields.""" + new_data = self.get_data() + if isinstance(self.old_data, list): + for data_item in self.old_data: + if not isinstance(data_item, Data): + continue # Skip invalid items + data_item.data.update(new_data) + if self.text_key: + data_item.text_key = self.text_key + self.validate_text_key(data_item) + self.status = self.old_data + return self.old_data # Returns List[Data] + if isinstance(self.old_data, Data): + self.old_data.data.update(new_data) + if self.text_key: + self.old_data.text_key = self.text_key + self.status = self.old_data + self.validate_text_key(self.old_data) + return self.old_data # Returns Data + msg = "old_data is not a Data object or list of Data objects." + raise ValueError(msg) + + def get_data(self): + """Function to get the Data from the attributes.""" + data = {} + default_keys = { + "code", + "_type", + "number_of_fields", + "text_key", + "old_data", + "text_key_validator", + } + for attr_name, attr_value in self._attributes.items(): + if attr_name in default_keys: + continue # Skip default attributes + if isinstance(attr_value, dict): + for key, value in attr_value.items(): + data[key] = value.get_text() if isinstance(value, Data) else value + elif isinstance(attr_value, Data): + data[attr_name] = attr_value.get_text() + else: + data[attr_name] = attr_value + return data + + def validate_text_key(self, data: Data) -> None: + """This function validates that the Text Key is one of the keys in the Data.""" + data_keys = data.data.keys() + if self.text_key and self.text_key not in data_keys: + msg = f"Text Key: '{self.text_key}' not found in the Data keys: " f"{', '.join(data_keys)}" + raise ValueError(msg) diff --git a/src/backend/base/langflow/components/prompts/LangChainHubPrompt.py b/src/backend/base/langflow/components/prompts/LangChainHubPrompt.py deleted file mode 100644 index 47c1ebec8526..000000000000 --- a/src/backend/base/langflow/components/prompts/LangChainHubPrompt.py +++ /dev/null @@ -1,109 +0,0 @@ -from typing import List - -from langflow.custom import Component -from langflow.inputs import StrInput, SecretStrInput, DefaultPromptField -from langflow.io import Output -from langflow.schema.message import Message - - -import re - - -class LangChainHubPromptComponent(Component): - display_name: str = "LangChain Hub Prompt Component" - description: str = "Prompt Component that uses LangChain Hub prompts" - beta = True - icon = "prompts" - trace_type = "prompt" - name = "LangChain Hub Prompt" - - inputs = [ - SecretStrInput( - name="langchain_api_key", - display_name="Your LangChain API Key", - info="The LangChain API Key to use.", - ), - StrInput( - name="langchain_hub_prompt", - display_name="LangChain Hub Prompt", - info="The LangChain Hub prompt to use.", - value="efriis/my-first-prompt", - refresh_button=True, - 
), - ] - - outputs = [ - Output(display_name="Build Prompt", name="prompt", method="build_prompt"), - ] - - def update_build_config(self, build_config: dict, field_value: str, field_name: str | None = None): - if field_name == "langchain_hub_prompt": - template = self._fetch_langchain_hub_template() - - # Extract the messages from the prompt data - prompt_template = [] - for message_data in template.messages: - prompt_template.append(message_data.prompt) - - # Regular expression to find all instances of {} - pattern = r"\{(.*?)\}" - - # Get all the custom fields - custom_fields: List[str] = [] - full_template = "" - for message in prompt_template: - # Find all matches - matches = re.findall(pattern, message.template) - custom_fields = custom_fields + matches - - # Create a string version of the full template - full_template = full_template + "\n" + message.template - - # No need to reprocess if we have them already - if all(["param_" + custom_field in build_config for custom_field in custom_fields]): - return build_config - - # Easter egg: Show template in info popup - build_config["langchain_hub_prompt"]["info"] = full_template - - # Remove old parameter inputs if any - for key, _ in build_config.copy().items(): - if key.startswith("param_"): - del build_config[key] - - # Now create inputs for each - for custom_field in custom_fields: - new_parameter = DefaultPromptField( - name=f"param_{custom_field}", - display_name=custom_field, - info="Fill in the value for {" + custom_field + "}", - ).to_dict() - - build_config[f"param_{custom_field}"] = new_parameter - - return build_config - - async def build_prompt( - self, - ) -> Message: - # Get the parameters that - template = self._fetch_langchain_hub_template() # TODO: doing this twice - original_params = {k[6:] if k.startswith("param_") else k: v for k, v in self._attributes.items()} - prompt_value = template.invoke(original_params) - - original_params["template"] = prompt_value.to_string() - - # Now pass the filtered attributes to the function - prompt = await Message.from_template_and_variables(**original_params) - - self.status = prompt.text - - return prompt - - def _fetch_langchain_hub_template(self): - import langchain.hub - - # Pull the prompt from LangChain Hub - prompt_data = langchain.hub.pull(self.langchain_hub_prompt, api_key=self.langchain_api_key) - - return prompt_data diff --git a/src/backend/base/langflow/components/prompts/Prompt.py b/src/backend/base/langflow/components/prompts/Prompt.py deleted file mode 100644 index 512398b0d38d..000000000000 --- a/src/backend/base/langflow/components/prompts/Prompt.py +++ /dev/null @@ -1,62 +0,0 @@ -from langflow.base.prompts.api_utils import process_prompt_template -from langflow.custom import Component -from langflow.inputs.inputs import DefaultPromptField -from langflow.io import Output, PromptInput -from langflow.schema.message import Message -from langflow.template.utils import update_template_values - - -class PromptComponent(Component): - display_name: str = "Prompt" - description: str = "Create a prompt template with dynamic variables." 
- icon = "prompts" - trace_type = "prompt" - name = "Prompt" - - inputs = [ - PromptInput(name="template", display_name="Template"), - ] - - outputs = [ - Output(display_name="Prompt Message", name="prompt", method="build_prompt"), - ] - - async def build_prompt( - self, - ) -> Message: - prompt = await Message.from_template_and_variables(**self._attributes) - self.status = prompt.text - return prompt - - def _update_template(self, frontend_node: dict): - prompt_template = frontend_node["template"]["template"]["value"] - custom_fields = frontend_node["custom_fields"] - frontend_node_template = frontend_node["template"] - _ = process_prompt_template( - template=prompt_template, - name="template", - custom_fields=custom_fields, - frontend_node_template=frontend_node_template, - ) - return frontend_node - - def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict): - """ - This function is called after the code validation is done. - """ - frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node) - template = frontend_node["template"]["template"]["value"] - # Kept it duplicated for backwards compatibility - _ = process_prompt_template( - template=template, - name="template", - custom_fields=frontend_node["custom_fields"], - frontend_node_template=frontend_node["template"], - ) - # Now that template is updated, we need to grab any values that were set in the current_frontend_node - # and update the frontend_node with those values - update_template_values(new_template=frontend_node, previous_template=current_frontend_node["template"]) - return frontend_node - - def _get_fallback_input(self, **kwargs): - return DefaultPromptField(**kwargs) diff --git a/src/backend/base/langflow/components/prompts/__init__.py b/src/backend/base/langflow/components/prompts/__init__.py index 23179872737b..89e6cec0f61a 100644 --- a/src/backend/base/langflow/components/prompts/__init__.py +++ b/src/backend/base/langflow/components/prompts/__init__.py @@ -1,4 +1,3 @@ -from .Prompt import PromptComponent -from .LangChainHubPrompt import LangChainHubPromptComponent +from .prompt import PromptComponent -__all__ = ["PromptComponent", "LangChainHubPromptComponent"] +__all__ = ["PromptComponent"] diff --git a/src/backend/base/langflow/components/prompts/prompt.py b/src/backend/base/langflow/components/prompts/prompt.py new file mode 100644 index 000000000000..01646b14d104 --- /dev/null +++ b/src/backend/base/langflow/components/prompts/prompt.py @@ -0,0 +1,58 @@ +from langflow.base.prompts.api_utils import process_prompt_template +from langflow.custom import Component +from langflow.inputs.inputs import DefaultPromptField +from langflow.io import Output, PromptInput +from langflow.schema.message import Message +from langflow.template.utils import update_template_values + + +class PromptComponent(Component): + display_name: str = "Prompt" + description: str = "Create a prompt template with dynamic variables." 
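+    # For illustration (hypothetical template): "Tell me a {adjective} story about {topic}."
+    # exposes `adjective` and `topic` as extra node fields, and build_prompt()
+    # renders their values into the resulting prompt Message.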
+ icon = "prompts" + trace_type = "prompt" + name = "Prompt" + + inputs = [ + PromptInput(name="template", display_name="Template"), + ] + + outputs = [ + Output(display_name="Prompt Message", name="prompt", method="build_prompt"), + ] + + async def build_prompt(self) -> Message: + prompt = Message.from_template(**self._attributes) + self.status = prompt.text + return prompt + + def _update_template(self, frontend_node: dict): + prompt_template = frontend_node["template"]["template"]["value"] + custom_fields = frontend_node["custom_fields"] + frontend_node_template = frontend_node["template"] + _ = process_prompt_template( + template=prompt_template, + name="template", + custom_fields=custom_fields, + frontend_node_template=frontend_node_template, + ) + return frontend_node + + def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict): + """This function is called after the code validation is done.""" + frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node) + template = frontend_node["template"]["template"]["value"] + # Kept it duplicated for backwards compatibility + _ = process_prompt_template( + template=template, + name="template", + custom_fields=frontend_node["custom_fields"], + frontend_node_template=frontend_node["template"], + ) + # Now that template is updated, we need to grab any values that were set in the current_frontend_node + # and update the frontend_node with those values + update_template_values(new_template=frontend_node, previous_template=current_frontend_node["template"]) + return frontend_node + + def _get_fallback_input(self, **kwargs): + return DefaultPromptField(**kwargs) diff --git a/src/backend/base/langflow/components/prototypes/ConditionalRouter.py b/src/backend/base/langflow/components/prototypes/ConditionalRouter.py deleted file mode 100644 index 133a84867e30..000000000000 --- a/src/backend/base/langflow/components/prototypes/ConditionalRouter.py +++ /dev/null @@ -1,83 +0,0 @@ -from langflow.custom import Component -from langflow.io import BoolInput, DropdownInput, MessageInput, MessageTextInput, Output -from langflow.schema.message import Message - - -class ConditionalRouterComponent(Component): - display_name = "Conditional Router" - description = "Routes an input message to a corresponding output based on text comparison." 
- icon = "equal" - name = "ConditionalRouter" - - inputs = [ - MessageTextInput( - name="input_text", - display_name="Input Text", - info="The primary text input for the operation.", - ), - MessageTextInput( - name="match_text", - display_name="Match Text", - info="The text input to compare against.", - ), - DropdownInput( - name="operator", - display_name="Operator", - options=["equals", "not equals", "contains", "starts with", "ends with"], - info="The operator to apply for comparing the texts.", - value="equals", - advanced=True, - ), - BoolInput( - name="case_sensitive", - display_name="Case Sensitive", - info="If true, the comparison will be case sensitive.", - value=False, - advanced=True, - ), - MessageInput( - name="message", - display_name="Message", - info="The message to pass through either route.", - ), - ] - - outputs = [ - Output(display_name="True Route", name="true_result", method="true_response"), - Output(display_name="False Route", name="false_result", method="false_response"), - ] - - def evaluate_condition(self, input_text: str, match_text: str, operator: str, case_sensitive: bool) -> bool: - if not case_sensitive: - input_text = input_text.lower() - match_text = match_text.lower() - - if operator == "equals": - return input_text == match_text - elif operator == "not equals": - return input_text != match_text - elif operator == "contains": - return match_text in input_text - elif operator == "starts with": - return input_text.startswith(match_text) - elif operator == "ends with": - return input_text.endswith(match_text) - return False - - def true_response(self) -> Message: - result = self.evaluate_condition(self.input_text, self.match_text, self.operator, self.case_sensitive) - if result: - self.status = self.message - return self.message - else: - self.stop("true_result") - return None # type: ignore - - def false_response(self) -> Message: - result = self.evaluate_condition(self.input_text, self.match_text, self.operator, self.case_sensitive) - if not result: - self.status = self.message - return self.message - else: - self.stop("false_result") - return None # type: ignore diff --git a/src/backend/base/langflow/components/prototypes/CreateData.py b/src/backend/base/langflow/components/prototypes/CreateData.py deleted file mode 100644 index 5ece555edd75..000000000000 --- a/src/backend/base/langflow/components/prototypes/CreateData.py +++ /dev/null @@ -1,89 +0,0 @@ -from typing import Any - -from langflow.custom import Component -from langflow.inputs.inputs import IntInput, MessageTextInput, DictInput -from langflow.io import Output - -from langflow.field_typing.range_spec import RangeSpec -from langflow.schema import Data -from langflow.schema.dotdict import dotdict - - -class CreateDataComponent(Component): - display_name: str = "Create Data" - description: str = "Dynamically create a Data with a specified number of fields." 
- name: str = "CreateData" - - inputs = [ - IntInput( - name="number_of_fields", - display_name="Number of Fields", - info="Number of fields to be added to the record.", - real_time_refresh=True, - value=0, - range_spec=RangeSpec(min=1, max=15, step=1, step_type="int"), - ), - MessageTextInput(name="text_key", display_name="Text Key", info="Key to be used as text.", advanced=True), - ] - - outputs = [ - Output(display_name="Data", name="data", method="build_data"), - ] - - def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None): - if field_name == "number_of_fields": - default_keys = ["code", "_type", "number_of_fields", "text_key"] - try: - field_value_int = int(field_value) - except ValueError: - return build_config - existing_fields = {} - if field_value_int > 15: - build_config["number_of_fields"]["value"] = 15 - raise ValueError("Number of fields cannot exceed 15. Try using a Component to combine two Data.") - if len(build_config) > len(default_keys): - # back up the existing template fields - for key in build_config.copy(): - if key not in default_keys: - existing_fields[key] = build_config.pop(key) - - for i in range(1, field_value_int + 1): - key = f"field_{i}_key" - if key in existing_fields: - field = existing_fields[key] - build_config[key] = field - else: - field = DictInput( - display_name=f"Field {i}", - name=key, - info=f"Key for field {i}.", - input_types=["Text", "Data"], - ) - build_config[field.name] = field.to_dict() - - build_config["number_of_fields"]["value"] = field_value_int - return build_config - - async def build_data(self) -> Data: - data = {} - for value_dict in self._attributes.values(): - if isinstance(value_dict, dict): - # Check if the value of the value_dict is a Data - value_dict = { - key: value.get_text() if isinstance(value, Data) else value for key, value in value_dict.items() - } - data.update(value_dict) - return_data = Data(data=data, text_key=self.text_key) - self.status = return_data - return return_data - - def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict): - """ - This function is called after the code validation is done. - """ - frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node) - frontend_node["template"] = self.update_build_config( - frontend_node["template"], frontend_node["template"]["number_of_fields"]["value"], "number_of_fields" - ) - frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node) - return frontend_node diff --git a/src/backend/base/langflow/components/prototypes/FlowTool.py b/src/backend/base/langflow/components/prototypes/FlowTool.py deleted file mode 100644 index 0498140ae9a7..000000000000 --- a/src/backend/base/langflow/components/prototypes/FlowTool.py +++ /dev/null @@ -1,95 +0,0 @@ -from typing import Any, List, Optional - -from langflow.base.langchain_utilities.model import LCToolComponent -from langflow.base.tools.flow_tool import FlowTool -from langflow.field_typing import Tool -from langflow.graph.graph.base import Graph -from langflow.helpers.flow import get_flow_inputs -from langflow.io import BoolInput, DropdownInput, Output, StrInput -from langflow.schema import Data -from langflow.schema.dotdict import dotdict - - -class FlowToolComponent(LCToolComponent): - display_name = "Flow as Tool" - description = "Construct a Tool from a function that runs the loaded Flow." 
- field_order = ["flow_name", "name", "description", "return_direct"] - trace_type = "tool" - name = "FlowTool" - beta = True - - def get_flow_names(self) -> List[str]: - flow_datas = self.list_flows() - return [flow_data.data["name"] for flow_data in flow_datas] - - def get_flow(self, flow_name: str) -> Optional[Data]: - """ - Retrieves a flow by its name. - - Args: - flow_name (str): The name of the flow to retrieve. - - Returns: - Optional[Text]: The flow record if found, None otherwise. - """ - flow_datas = self.list_flows() - for flow_data in flow_datas: - if flow_data.data["name"] == flow_name: - return flow_data - return None - - def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None): - if field_name == "flow_name": - build_config["flow_name"]["options"] = self.get_flow_names() - - return build_config - - inputs = [ - DropdownInput( - name="flow_name", display_name="Flow Name", info="The name of the flow to run.", refresh_button=True - ), - StrInput( - name="name", - display_name="Name", - info="The name of the tool.", - ), - StrInput( - name="description", - display_name="Description", - info="The description of the tool.", - ), - BoolInput( - name="return_direct", - display_name="Return Direct", - info="Return the result directly from the Tool.", - advanced=True, - ), - ] - - outputs = [ - Output(name="api_build_tool", display_name="Tool", method="build_tool"), - ] - - def build_tool(self) -> Tool: - FlowTool.update_forward_refs() - if "flow_name" not in self._attributes or not self._attributes["flow_name"]: - raise ValueError("Flow name is required") - flow_name = self._attributes["flow_name"] - flow_data = self.get_flow(flow_name) - if not flow_data: - raise ValueError("Flow not found.") - graph = Graph.from_payload(flow_data.data["data"]) - inputs = get_flow_inputs(graph) - tool = FlowTool( - name=self.name, - description=self.description, - graph=graph, - return_direct=self.return_direct, - inputs=inputs, - flow_id=str(flow_data.id), - user_id=str(self.user_id), - ) - description_repr = repr(tool.description).strip("'") - args_str = "\n".join([f"- {arg_name}: {arg_data['description']}" for arg_name, arg_data in tool.args.items()]) - self.status = f"{description_repr}\nArguments:\n{args_str}" - return tool # type: ignore diff --git a/src/backend/base/langflow/components/prototypes/JSONCleaner.py b/src/backend/base/langflow/components/prototypes/JSONCleaner.py deleted file mode 100644 index d3f1a7ac76a3..000000000000 --- a/src/backend/base/langflow/components/prototypes/JSONCleaner.py +++ /dev/null @@ -1,93 +0,0 @@ -import json -import re -import unicodedata -from langflow.custom import Component -from langflow.inputs import MessageTextInput, BoolInput -from langflow.template import Output -from langflow.schema.message import Message - - -class JSONCleaner(Component): - display_name = "JSON Cleaner" - description = "Cleans the messy and sometimes incorrect JSON strings produced by LLMs so that they are fully compliant with the JSON spec." 
- icon = "custom_components" - - inputs = [ - MessageTextInput( - name="json_str", display_name="JSON String", info="The JSON string to be cleaned.", required=True - ), - BoolInput( - name="remove_control_chars", - display_name="Remove Control Characters", - info="Remove control characters from the JSON string.", - required=False, - ), - BoolInput( - name="normalize_unicode", - display_name="Normalize Unicode", - info="Normalize Unicode characters in the JSON string.", - required=False, - ), - BoolInput( - name="validate_json", - display_name="Validate JSON", - info="Validate the JSON string to ensure it is well-formed.", - required=False, - ), - ] - - outputs = [ - Output(display_name="Cleaned JSON String", name="output", method="clean_json"), - ] - - def clean_json(self) -> Message: - try: - from json_repair import repair_json # type: ignore - except ImportError: - raise ImportError( - "Could not import the json_repair package." "Please install it with `pip install json_repair`." - ) - - """Clean the input JSON string based on provided options and return the cleaned JSON string.""" - json_str = self.json_str - remove_control_chars = self.remove_control_chars - normalize_unicode = self.normalize_unicode - validate_json = self.validate_json - - try: - start = json_str.find("{") - end = json_str.rfind("}") - if start == -1 or end == -1: - raise ValueError("Invalid JSON string: Missing '{' or '}'") - json_str = json_str[start : end + 1] - - if remove_control_chars: - json_str = self._remove_control_characters(json_str) - if normalize_unicode: - json_str = self._normalize_unicode(json_str) - if validate_json: - json_str = self._validate_json(json_str) - - cleaned_json_str = repair_json(json_str) - result = str(cleaned_json_str) - - self.status = result - return Message(text=result) - except Exception as e: - raise ValueError(f"Error cleaning JSON string: {str(e)}") - - def _remove_control_characters(self, s: str) -> str: - """Remove control characters from the string.""" - return re.sub(r"[\x00-\x1F\x7F]", "", s) - - def _normalize_unicode(self, s: str) -> str: - """Normalize Unicode characters in the string.""" - return unicodedata.normalize("NFC", s) - - def _validate_json(self, s: str) -> str: - """Validate the JSON string.""" - try: - json.loads(s) - return s - except json.JSONDecodeError as e: - raise ValueError(f"Invalid JSON string: {str(e)}") diff --git a/src/backend/base/langflow/components/prototypes/Listen.py b/src/backend/base/langflow/components/prototypes/Listen.py deleted file mode 100644 index e75ec070b8ac..000000000000 --- a/src/backend/base/langflow/components/prototypes/Listen.py +++ /dev/null @@ -1,28 +0,0 @@ -from langflow.custom import CustomComponent -from langflow.schema import Data - - -class ListenComponent(CustomComponent): - display_name = "Listen" - description = "A component to listen for a notification." 
- name = "Listen" - beta: bool = True - - def build_config(self): - return { - "name": { - "display_name": "Name", - "info": "The name of the notification to listen for.", - }, - } - - def build(self, name: str) -> Data: - state = self.get_state(name) - self._set_successors_ids() - self.status = state - return state - - def _set_successors_ids(self): - self._vertex.is_state = True - successors = self._vertex.graph.successor_map.get(self._vertex.id, []) - return successors + self._vertex.graph.activated_vertices diff --git a/src/backend/base/langflow/components/prototypes/Notify.py b/src/backend/base/langflow/components/prototypes/Notify.py deleted file mode 100644 index 72287a25509b..000000000000 --- a/src/backend/base/langflow/components/prototypes/Notify.py +++ /dev/null @@ -1,48 +0,0 @@ -from typing import Optional - -from langflow.custom import CustomComponent -from langflow.schema import Data - - -class NotifyComponent(CustomComponent): - display_name = "Notify" - description = "A component to generate a notification to Get Notified component." - icon = "Notify" - name = "Notify" - beta: bool = True - - def build_config(self): - return { - "name": {"display_name": "Name", "info": "The name of the notification."}, - "data": {"display_name": "Data", "info": "The data to store."}, - "append": { - "display_name": "Append", - "info": "If True, the record will be appended to the notification.", - }, - } - - def build(self, name: str, data: Optional[Data] = None, append: bool = False) -> Data: - if data and not isinstance(data, Data): - if isinstance(data, str): - data = Data(text=data) - elif isinstance(data, dict): - data = Data(data=data) - else: - data = Data(text=str(data)) - elif not data: - data = Data(text="") - if data: - if append: - self.append_state(name, data) - else: - self.update_state(name, data) - else: - self.status = "No record provided." - self.status = data - self._set_successors_ids() - return data - - def _set_successors_ids(self): - self._vertex.is_state = True - successors = self._vertex.graph.successor_map.get(self._vertex.id, []) - return successors + self._vertex.graph.activated_vertices diff --git a/src/backend/base/langflow/components/prototypes/PythonFunction.py b/src/backend/base/langflow/components/prototypes/PythonFunction.py deleted file mode 100644 index bc5576f89c33..000000000000 --- a/src/backend/base/langflow/components/prototypes/PythonFunction.py +++ /dev/null @@ -1,27 +0,0 @@ -from typing import Callable - -from langflow.custom import CustomComponent -from langflow.custom.utils import get_function -from langflow.field_typing import Code - - -class PythonFunctionComponent(CustomComponent): - display_name = "Python Function" - description = "Define a Python function." 
- icon = "Python" - name = "PythonFunction" - beta = True - - def build_config(self): - return { - "function_code": { - "display_name": "Code", - "info": "The code for the function.", - "show": True, - }, - } - - def build(self, function_code: Code) -> Callable: - self.status = function_code - func = get_function(function_code) - return func diff --git a/src/backend/base/langflow/components/prototypes/RunFlow.py b/src/backend/base/langflow/components/prototypes/RunFlow.py deleted file mode 100644 index 11c997696621..000000000000 --- a/src/backend/base/langflow/components/prototypes/RunFlow.py +++ /dev/null @@ -1,67 +0,0 @@ -from typing import Any, List, Optional - -from langflow.base.flow_processing.utils import build_data_from_run_outputs -from langflow.custom import Component -from langflow.graph.schema import RunOutputs -from langflow.io import DropdownInput, MessageTextInput, NestedDictInput, Output -from langflow.schema import Data, dotdict - - -class RunFlowComponent(Component): - display_name = "Run Flow" - description = "A component to run a flow." - name = "RunFlow" - beta: bool = True - - def get_flow_names(self) -> List[str]: - flow_data = self.list_flows() - return [flow_data.data["name"] for flow_data in flow_data] - - def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None): - if field_name == "flow_name": - build_config["flow_name"]["options"] = self.get_flow_names() - - return build_config - - inputs = [ - MessageTextInput( - name="input_value", - display_name="Input Value", - info="The input value to be processed by the flow.", - ), - DropdownInput( - name="flow_name", - display_name="Flow Name", - info="The name of the flow to run.", - options=[], - refresh_button=True, - ), - NestedDictInput( - name="tweaks", - display_name="Tweaks", - info="Tweaks to apply to the flow.", - ), - ] - - outputs = [ - Output(display_name="Run Outputs", name="run_outputs", method="generate_results"), - ] - - async def generate_results(self) -> List[Data]: - if "flow_name" not in self._attributes or not self._attributes["flow_name"]: - raise ValueError("Flow name is required") - flow_name = self._attributes["flow_name"] - - results: List[Optional[RunOutputs]] = await self.run_flow( - inputs={"input_value": self.input_value}, flow_name=flow_name, tweaks=self.tweaks - ) - if isinstance(results, list): - data = [] - for result in results: - if result: - data.extend(build_data_from_run_outputs(result)) - else: - data = build_data_from_run_outputs()(results) - - self.status = data - return data diff --git a/src/backend/base/langflow/components/prototypes/RunnableExecutor.py b/src/backend/base/langflow/components/prototypes/RunnableExecutor.py deleted file mode 100644 index 0e872080afbf..000000000000 --- a/src/backend/base/langflow/components/prototypes/RunnableExecutor.py +++ /dev/null @@ -1,137 +0,0 @@ -from langflow.custom import Component -from langflow.inputs import HandleInput, MessageTextInput, BoolInput -from langflow.schema.message import Message -from langflow.template import Output -from langchain.agents import AgentExecutor - - -class RunnableExecComponent(Component): - description = "Execute a runnable. It will try to guess the input and output keys." 
- display_name = "Runnable Executor" - name = "RunnableExecutor" - beta: bool = True - - inputs = [ - MessageTextInput(name="input_value", display_name="Input", required=True), - HandleInput( - name="runnable", - display_name="Agent Executor", - input_types=["Chain", "AgentExecutor", "Agent", "Runnable"], - required=True, - ), - MessageTextInput( - name="input_key", - display_name="Input Key", - value="input", - advanced=True, - ), - MessageTextInput( - name="output_key", - display_name="Output Key", - value="output", - advanced=True, - ), - BoolInput( - name="use_stream", - display_name="Stream", - value=False, - ), - ] - - outputs = [ - Output( - display_name="Text", - name="text", - method="build_executor", - ), - ] - - def get_output(self, result, input_key, output_key): - """ - Retrieves the output value from the given result dictionary based on the specified input and output keys. - - Args: - result (dict): The result dictionary containing the output value. - input_key (str): The key used to retrieve the input value from the result dictionary. - output_key (str): The key used to retrieve the output value from the result dictionary. - - Returns: - tuple: A tuple containing the output value and the status message. - - """ - possible_output_keys = ["answer", "response", "output", "result", "text"] - status = "" - result_value = None - - if output_key in result: - result_value = result.get(output_key) - elif len(result) == 2 and input_key in result: - # get the other key from the result dict - other_key = [k for k in result if k != input_key][0] - if other_key == output_key: - result_value = result.get(output_key) - else: - status += f"Warning: The output key is not '{output_key}'. The output key is '{other_key}'." - result_value = result.get(other_key) - elif len(result) == 1: - result_value = list(result.values())[0] - elif any(k in result for k in possible_output_keys): - for key in possible_output_keys: - if key in result: - result_value = result.get(key) - status += f"Output key: '{key}'." - break - if result_value is None: - result_value = result - status += f"Warning: The output key is not '{output_key}'." - else: - result_value = result - status += f"Warning: The output key is not '{output_key}'." - - return result_value, status - - def get_input_dict(self, runnable, input_key, input_value): - """ - Returns a dictionary containing the input key-value pair for the given runnable. - - Args: - runnable: The runnable object. - input_key: The key for the input value. - input_value: The value for the input key. - - Returns: - input_dict: A dictionary containing the input key-value pair. - status: A status message indicating if the input key is not in the runnable's input keys. - """ - input_dict = {} - status = "" - if hasattr(runnable, "input_keys"): - # Check if input_key is in the runnable's input_keys - if input_key in runnable.input_keys: - input_dict[input_key] = input_value - else: - input_dict = {k: input_value for k in runnable.input_keys} - status = f"Warning: The input key is not '{input_key}'. The input key is '{runnable.input_keys}'." 
- return input_dict, status - - async def build_executor(self) -> Message: - input_dict, status = self.get_input_dict(self.runnable, self.input_key, self.input_value) - if not isinstance(self.runnable, AgentExecutor): - raise ValueError("The runnable must be an AgentExecutor") - - if self.use_stream: - return self.astream_events(input_dict) - else: - result = await self.runnable.ainvoke(input_dict) - result_value, _status = self.get_output(result, self.input_key, self.output_key) - status += _status - status += f"\n\nOutput: {result_value}\n\nRaw Output: {result}" - self.status = status - return result_value - - async def astream_events(self, input): - async for event in self.runnable.astream_events(input, version="v1"): - if event.get("event") != "on_chat_model_stream": - continue - - yield event.get("data").get("chunk") diff --git a/src/backend/base/langflow/components/prototypes/SQLExecutor.py b/src/backend/base/langflow/components/prototypes/SQLExecutor.py deleted file mode 100644 index cafc92fb4095..000000000000 --- a/src/backend/base/langflow/components/prototypes/SQLExecutor.py +++ /dev/null @@ -1,70 +0,0 @@ -from langchain_community.tools.sql_database.tool import QuerySQLDataBaseTool -from langchain_community.utilities import SQLDatabase - -from langflow.custom import CustomComponent -from langflow.field_typing import Text - - -class SQLExecutorComponent(CustomComponent): - display_name = "SQL Executor" - description = "Execute SQL query." - name = "SQLExecutor" - beta: bool = True - - def build_config(self): - return { - "database_url": { - "display_name": "Database URL", - "info": "The URL of the database.", - }, - "include_columns": { - "display_name": "Include Columns", - "info": "Include columns in the result.", - }, - "passthrough": { - "display_name": "Passthrough", - "info": "If an error occurs, return the query instead of raising an exception.", - }, - "add_error": { - "display_name": "Add Error", - "info": "Add the error to the result.", - }, - } - - def clean_up_uri(self, uri: str) -> str: - if uri.startswith("postgresql://"): - uri = uri.replace("postgresql://", "postgres://") - return uri.strip() - - def build( - self, - query: str, - database_url: str, - include_columns: bool = False, - passthrough: bool = False, - add_error: bool = False, - ) -> Text: - error = None - try: - database = SQLDatabase.from_uri(database_url) - except Exception as e: - raise ValueError(f"An error occurred while connecting to the database: {e}") - try: - tool = QuerySQLDataBaseTool(db=database) - result = tool.run(query, include_columns=include_columns) - self.status = result - except Exception as e: - result = str(e) - self.status = result - if not passthrough: - raise e - error = repr(e) - - if add_error and error is not None: - result = f"{result}\n\nError: {error}\n\nQuery: {query}" - elif error is not None: - # Then we won't add the error to the result - # but since we are in passthrough mode, we will return the query - result = query - - return result diff --git a/src/backend/base/langflow/components/prototypes/SubFlow.py b/src/backend/base/langflow/components/prototypes/SubFlow.py deleted file mode 100644 index 4521a06ea998..000000000000 --- a/src/backend/base/langflow/components/prototypes/SubFlow.py +++ /dev/null @@ -1,107 +0,0 @@ -from typing import Any, List, Optional - -from loguru import logger - -from langflow.base.flow_processing.utils import build_data_from_result_data -from langflow.custom import Component -from langflow.graph.graph.base import Graph -from 
langflow.graph.vertex.base import Vertex -from langflow.helpers.flow import get_flow_inputs -from langflow.io import DropdownInput, Output -from langflow.schema import Data, dotdict - - -class SubFlowComponent(Component): - display_name = "Sub Flow" - description = "Generates a Component from a Flow, with all of its inputs, and " - name = "SubFlow" - beta: bool = True - - def get_flow_names(self) -> List[str]: - flow_data = self.list_flows() - return [flow_data.data["name"] for flow_data in flow_data] - - def get_flow(self, flow_name: str) -> Optional[Data]: - flow_datas = self.list_flows() - for flow_data in flow_datas: - if flow_data.data["name"] == flow_name: - return flow_data - return None - - def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None): - if field_name == "flow_name": - build_config["flow_name"]["options"] = self.get_flow_names() - - for key in list(build_config.keys()): - if key not in [x.name for x in self.inputs] + ["code", "_type", "get_final_results_only"]: - del build_config[key] - if field_value is not None and field_name == "flow_name": - try: - flow_data = self.get_flow(field_value) - if not flow_data: - raise ValueError(f"Flow {field_value} not found.") - graph = Graph.from_payload(flow_data.data["data"]) - # Get all inputs from the graph - inputs = get_flow_inputs(graph) - # Add inputs to the build config - build_config = self.add_inputs_to_build_config(inputs, build_config) - except Exception as e: - logger.error(f"Error getting flow {field_value}: {str(e)}") - - return build_config - - def add_inputs_to_build_config(self, inputs_vertex: List[Vertex], build_config: dotdict): - new_fields: list[dotdict] = [] - - for vertex in inputs_vertex: - new_vertex_inputs = [] - field_template = vertex.data["node"]["template"] - for inp in field_template.keys(): - if inp not in ["code", "_type"]: - field_template[inp]["display_name"] = ( - vertex.display_name + " - " + field_template[inp]["display_name"] - ) - field_template[inp]["name"] = vertex.id + "|" + inp - new_vertex_inputs.append(field_template[inp]) - new_fields += new_vertex_inputs - for field in new_fields: - build_config[field["name"]] = field - return build_config - - inputs = [ - DropdownInput( - name="flow_name", - display_name="Flow Name", - info="The name of the flow to run.", - options=[], - refresh_button=True, - real_time_refresh=True, - ), - ] - - outputs = [Output(name="flow_outputs", display_name="Flow Outputs", method="generate_results")] - - async def generate_results(self) -> List[Data]: - tweaks: dict = {} - for field in self._attributes.keys(): - if field != "flow_name": - [node, name] = field.split("|") - if node not in tweaks.keys(): - tweaks[node] = {} - tweaks[node][name] = self._attributes[field] - flow_name = self._attributes.get("flow_name") - run_outputs = await self.run_flow( - tweaks=tweaks, - flow_name=flow_name, - output_type="all", - ) - data: list[Data] = [] - if not run_outputs: - return data - run_output = run_outputs[0] - - if run_output is not None: - for output in run_output.outputs: - if output: - data.extend(build_data_from_result_data(output)) - return data diff --git a/src/backend/base/langflow/components/prototypes/UpdateData.py b/src/backend/base/langflow/components/prototypes/UpdateData.py deleted file mode 100644 index 10355d2df225..000000000000 --- a/src/backend/base/langflow/components/prototypes/UpdateData.py +++ /dev/null @@ -1,40 +0,0 @@ -from langflow.custom import CustomComponent -from langflow.schema import Data - - 
-class UpdateDataComponent(CustomComponent): - display_name = "Update Data" - description = "Update Data with text-based key/value pairs, similar to updating a Python dictionary." - name = "UpdateData" - - def build_config(self): - return { - "data": { - "display_name": "Data", - "info": "The record to update.", - }, - "new_data": { - "display_name": "New Data", - "info": "The new data to update the record with.", - "input_types": ["Text"], - }, - } - - def build( - self, - data: Data, - new_data: dict, - ) -> Data: - """ - Updates a record with new data. - - Args: - record (Data): The record to update. - new_data (dict): The new data to update the record with. - - Returns: - Data: The updated record. - """ - data.data.update(new_data) - self.status = data - return data diff --git a/src/backend/base/langflow/components/prototypes/__init__.py b/src/backend/base/langflow/components/prototypes/__init__.py index 252d1a1c5815..8ad61e02b796 100644 --- a/src/backend/base/langflow/components/prototypes/__init__.py +++ b/src/backend/base/langflow/components/prototypes/__init__.py @@ -1,27 +1,5 @@ -from .ConditionalRouter import ConditionalRouterComponent -from .FlowTool import FlowToolComponent -from .Listen import ListenComponent -from .Notify import NotifyComponent -from .Pass import PassMessageComponent -from .PythonFunction import PythonFunctionComponent -from .RunFlow import RunFlowComponent -from .RunnableExecutor import RunnableExecComponent -from .SQLExecutor import SQLExecutorComponent -from .SubFlow import SubFlowComponent -from .CreateData import CreateDataComponent -from .UpdateData import UpdateDataComponent +from .python_function import PythonFunctionComponent __all__ = [ - "ConditionalRouterComponent", - "FlowToolComponent", - "ListenComponent", - "NotifyComponent", - "PassMessageComponent", "PythonFunctionComponent", - "RunFlowComponent", - "RunnableExecComponent", - "SQLExecutorComponent", - "SubFlowComponent", - "CreateDataComponent", - "UpdateDataComponent", ] diff --git a/src/backend/base/langflow/components/prototypes/python_function.py b/src/backend/base/langflow/components/prototypes/python_function.py new file mode 100644 index 000000000000..cb415586e784 --- /dev/null +++ b/src/backend/base/langflow/components/prototypes/python_function.py @@ -0,0 +1,73 @@ +from collections.abc import Callable + +from loguru import logger + +from langflow.custom import Component +from langflow.custom.utils import get_function +from langflow.io import CodeInput, Output +from langflow.schema import Data, dotdict +from langflow.schema.message import Message + + +class PythonFunctionComponent(Component): + display_name = "Python Function" + description = "Define and execute a Python function that returns a Data object or a Message." 
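+    # For illustration (hypothetical function_code): a snippet such as
+    #     def hello():
+    #         return {"greeting": "hi"}
+    # yields Data(**{"greeting": "hi"}) from the Data output and the text
+    # "{'greeting': 'hi'}" from the Message output.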
+ icon = "Python" + name = "PythonFunction" + legacy = True + + inputs = [ + CodeInput( + name="function_code", + display_name="Function Code", + info="The code for the function.", + ), + ] + + outputs = [ + Output( + name="function_output", + display_name="Function Callable", + method="get_function_callable", + ), + Output( + name="function_output_data", + display_name="Function Output (Data)", + method="execute_function_data", + ), + Output( + name="function_output_str", + display_name="Function Output (Message)", + method="execute_function_message", + ), + ] + + def get_function_callable(self) -> Callable: + function_code = self.function_code + self.status = function_code + return get_function(function_code) + + def execute_function(self) -> list[dotdict | str] | dotdict | str: + function_code = self.function_code + + if not function_code: + return "No function code provided." + + try: + func = get_function(function_code) + return func() + except Exception as e: # noqa: BLE001 + logger.opt(exception=True).debug("Error executing function") + return f"Error executing function: {e}" + + def execute_function_data(self) -> list[Data]: + results = self.execute_function() + results = results if isinstance(results, list) else [results] + return [(Data(text=x) if isinstance(x, str) else Data(**x)) for x in results] + + def execute_function_message(self) -> Message: + results = self.execute_function() + results = results if isinstance(results, list) else [results] + results_list = [str(x) for x in results] + results_str = "\n".join(results_list) + return Message(text=results_str) diff --git a/src/backend/base/langflow/components/retrievers/AmazonKendra.py b/src/backend/base/langflow/components/retrievers/AmazonKendra.py deleted file mode 100644 index 8c4252c26481..000000000000 --- a/src/backend/base/langflow/components/retrievers/AmazonKendra.py +++ /dev/null @@ -1,52 +0,0 @@ -from typing import Optional, cast - -from langchain_community.retrievers import AmazonKendraRetriever - -from langflow.custom import CustomComponent -from langflow.field_typing import Retriever - - -class AmazonKendraRetrieverComponent(CustomComponent): - display_name: str = "Amazon Kendra Retriever" - description: str = "Retriever that uses the Amazon Kendra API." 
- name = "AmazonKendra" - icon = "Amazon" - - def build_config(self): - return { - "index_id": {"display_name": "Index ID"}, - "region_name": {"display_name": "Region Name"}, - "credentials_profile_name": {"display_name": "Credentials Profile Name"}, - "attribute_filter": { - "display_name": "Attribute Filter", - "field_type": "code", - }, - "top_k": {"display_name": "Top K", "field_type": "int"}, - "user_context": { - "display_name": "User Context", - "field_type": "code", - }, - "code": {"show": False}, - } - - def build( - self, - index_id: str, - top_k: int = 3, - region_name: Optional[str] = None, - credentials_profile_name: Optional[str] = None, - attribute_filter: Optional[dict] = None, - user_context: Optional[dict] = None, - ) -> Retriever: # type: ignore[type-var] - try: - output = AmazonKendraRetriever( - index_id=index_id, - top_k=top_k, - region_name=region_name, - credentials_profile_name=credentials_profile_name, - attribute_filter=attribute_filter, - user_context=user_context, - ) # type: ignore - except Exception as e: - raise ValueError("Could not connect to AmazonKendra API.") from e - return cast(Retriever, output) diff --git a/src/backend/base/langflow/components/retrievers/CohereRerank.py b/src/backend/base/langflow/components/retrievers/CohereRerank.py deleted file mode 100644 index a315532a1a69..000000000000 --- a/src/backend/base/langflow/components/retrievers/CohereRerank.py +++ /dev/null @@ -1,84 +0,0 @@ -from typing import List, cast - -from langchain.retrievers import ContextualCompressionRetriever -from langchain_cohere import CohereRerank - -from langflow.base.vectorstores.model import LCVectorStoreComponent -from langflow.field_typing import Retriever, VectorStore -from langflow.io import ( - DropdownInput, - HandleInput, - IntInput, - MessageTextInput, - MultilineInput, - SecretStrInput, -) -from langflow.schema import Data -from langflow.template.field.base import Output - - -class CohereRerankComponent(LCVectorStoreComponent): - display_name = "Cohere Rerank" - description = "Rerank documents using the Cohere API and a retriever." 
- name = "CohereRerank" - icon = "Cohere" - - inputs = [ - MultilineInput( - name="search_query", - display_name="Search Query", - ), - DropdownInput( - name="model", - display_name="Model", - options=[ - "rerank-english-v3.0", - "rerank-multilingual-v3.0", - "rerank-english-v2.0", - "rerank-multilingual-v2.0", - ], - value="rerank-english-v3.0", - ), - SecretStrInput(name="api_key", display_name="API Key"), - IntInput(name="top_n", display_name="Top N", value=3), - MessageTextInput( - name="user_agent", - display_name="User Agent", - value="langflow", - advanced=True, - ), - HandleInput(name="retriever", display_name="Retriever", input_types=["Retriever"]), - ] - - outputs = [ - Output( - display_name="Retriever", - name="base_retriever", - method="build_base_retriever", - ), - Output( - display_name="Search Results", - name="search_results", - method="search_documents", - ), - ] - - def build_base_retriever(self) -> Retriever: # type: ignore[type-var] - cohere_reranker = CohereRerank( - cohere_api_key=self.api_key, - model=self.model, - top_n=self.top_n, - user_agent=self.user_agent, - ) - retriever = ContextualCompressionRetriever(base_compressor=cohere_reranker, base_retriever=self.retriever) - return cast(Retriever, retriever) - - async def search_documents(self) -> List[Data]: # type: ignore - retriever = self.build_base_retriever() - documents = await retriever.ainvoke(self.search_query, config={"callbacks": self.get_langchain_callbacks()}) - data = self.to_data(documents) - self.status = data - return data - - def build_vector_store(self) -> VectorStore: - raise NotImplementedError("Cohere Rerank does not support vector stores.") diff --git a/src/backend/base/langflow/components/retrievers/MetalRetriever.py b/src/backend/base/langflow/components/retrievers/MetalRetriever.py deleted file mode 100644 index f5ce37d05e1a..000000000000 --- a/src/backend/base/langflow/components/retrievers/MetalRetriever.py +++ /dev/null @@ -1,29 +0,0 @@ -from typing import Optional, cast - -from langchain_community.retrievers import MetalRetriever -from metal_sdk.metal import Metal # type: ignore - -from langflow.custom import CustomComponent -from langflow.field_typing import Retriever - - -class MetalRetrieverComponent(CustomComponent): - display_name: str = "Metal Retriever" - description: str = "Retriever that uses the Metal API." 
- name = "MetalRetriever" - - def build_config(self): - return { - "api_key": {"display_name": "API Key", "password": True}, - "client_id": {"display_name": "Client ID", "password": True}, - "index_id": {"display_name": "Index ID"}, - "params": {"display_name": "Parameters"}, - "code": {"show": False}, - } - - def build(self, api_key: str, client_id: str, index_id: str, params: Optional[dict] = None) -> Retriever: # type: ignore[type-var] - try: - metal = Metal(api_key=api_key, client_id=client_id, index_id=index_id) - except Exception as e: - raise ValueError("Could not connect to Metal API.") from e - return cast(Retriever, MetalRetriever(client=metal, params=params or {})) diff --git a/src/backend/base/langflow/components/retrievers/MultiQueryRetriever.py b/src/backend/base/langflow/components/retrievers/MultiQueryRetriever.py deleted file mode 100644 index c662f701e339..000000000000 --- a/src/backend/base/langflow/components/retrievers/MultiQueryRetriever.py +++ /dev/null @@ -1,54 +0,0 @@ -from typing import Optional - -from langchain.retrievers import MultiQueryRetriever - -from langflow.custom import CustomComponent -from langflow.field_typing import BaseRetriever, LanguageModel, PromptTemplate, Text - - -class MultiQueryRetrieverComponent(CustomComponent): - display_name = "MultiQueryRetriever" - description = "Initialize from llm using default template." - documentation = "https://python.langchain.com/docs/modules/data_connection/retrievers/how_to/MultiQueryRetriever" - name = "MultiQueryRetriever" - - def build_config(self): - return { - "llm": {"display_name": "LLM"}, - "prompt": { - "display_name": "Prompt", - "default": { - "input_variables": ["question"], - "input_types": {}, - "output_parser": None, - "partial_variables": {}, - "template": "You are an AI language model assistant. Your task is \n" - "to generate 3 different versions of the given user \n" - "question to retrieve relevant documents from a vector database. \n" - "By generating multiple perspectives on the user question, \n" - "your goal is to help the user overcome some of the limitations \n" - "of distance-based similarity search. Provide these alternative \n" - "questions separated by newlines. 
Original question: {question}", - "template_format": "f-string", - "validate_template": False, - "_type": "prompt", - }, - }, - "retriever": {"display_name": "Retriever"}, - "parser_key": {"display_name": "Parser Key", "default": "lines"}, - } - - def build( - self, - llm: LanguageModel, - retriever: BaseRetriever, - prompt: Optional[Text] = None, - parser_key: str = "lines", - ) -> MultiQueryRetriever: - if not prompt: - return MultiQueryRetriever.from_llm(llm=llm, retriever=retriever, parser_key=parser_key) - else: - prompt_template = PromptTemplate.from_template(prompt) - return MultiQueryRetriever.from_llm( - llm=llm, retriever=retriever, prompt=prompt_template, parser_key=parser_key - ) diff --git a/src/backend/base/langflow/components/retrievers/NvidiaRerank.py b/src/backend/base/langflow/components/retrievers/NvidiaRerank.py deleted file mode 100644 index a654cf3f9fc3..000000000000 --- a/src/backend/base/langflow/components/retrievers/NvidiaRerank.py +++ /dev/null @@ -1,81 +0,0 @@ -from typing import Any, List, cast - -from langchain.retrievers import ContextualCompressionRetriever - -from langflow.base.vectorstores.model import LCVectorStoreComponent -from langflow.field_typing import Retriever, VectorStore -from langflow.io import DropdownInput, HandleInput, MultilineInput, SecretStrInput, StrInput -from langflow.schema import Data -from langflow.schema.dotdict import dotdict -from langflow.template.field.base import Output - - -class NvidiaRerankComponent(LCVectorStoreComponent): - display_name = "NVIDIA Rerank" - description = "Rerank documents using the NVIDIA API and a retriever." - icon = "NVIDIA" - - inputs = [ - MultilineInput( - name="search_query", - display_name="Search Query", - ), - StrInput( - name="base_url", - display_name="Base URL", - value="https://integrate.api.nvidia.com/v1", - refresh_button=True, - info="The base URL of the NVIDIA API. 
Defaults to https://integrate.api.nvidia.com/v1.", - ), - DropdownInput( - name="model", display_name="Model", options=["nv-rerank-qa-mistral-4b:1"], value="nv-rerank-qa-mistral-4b:1" - ), - SecretStrInput(name="api_key", display_name="API Key"), - HandleInput(name="retriever", display_name="Retriever", input_types=["Retriever"]), - ] - - outputs = [ - Output( - display_name="Retriever", - name="base_retriever", - method="build_base_retriever", - ), - Output( - display_name="Search Results", - name="search_results", - method="search_documents", - ), - ] - - def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None): - if field_name == "base_url" and field_value: - try: - build_model = self.build_model() - ids = [model.id for model in build_model.available_models] - build_config["model"]["options"] = ids - build_config["model"]["value"] = ids[0] - except Exception as e: - raise ValueError(f"Error getting model names: {e}") - return build_config - - def build_model(self): - try: - from langchain_nvidia_ai_endpoints import NVIDIARerank - except ImportError: - raise ImportError("Please install langchain-nvidia-ai-endpoints to use the NVIDIA model.") - return NVIDIARerank(api_key=self.api_key, model=self.model, base_url=self.base_url) - - def build_base_retriever(self) -> Retriever: # type: ignore[type-var] - nvidia_reranker = self.build_model() - retriever = ContextualCompressionRetriever(base_compressor=nvidia_reranker, base_retriever=self.retriever) - return cast(Retriever, retriever) - - async def search_documents(self) -> List[Data]: # type: ignore - retriever = self.build_base_retriever() - documents = await retriever.ainvoke(self.search_query, config={"callbacks": self.get_langchain_callbacks()}) - data = self.to_data(documents) - self.status = data - return data - - def build_vector_store(self) -> VectorStore: - raise NotImplementedError("NVIDIA Rerank does not support vector stores.") diff --git a/src/backend/base/langflow/components/retrievers/SelfQueryRetriever.py b/src/backend/base/langflow/components/retrievers/SelfQueryRetriever.py deleted file mode 100644 index 149c7325c853..000000000000 --- a/src/backend/base/langflow/components/retrievers/SelfQueryRetriever.py +++ /dev/null @@ -1,70 +0,0 @@ -# from langflow.field_typing import Data -from langchain.chains.query_constructor.base import AttributeInfo -from langchain.retrievers.self_query.base import SelfQueryRetriever -from langchain_core.vectorstores import VectorStore - -from langflow.custom import CustomComponent -from langflow.field_typing import LanguageModel, Text -from langflow.schema import Data -from langflow.schema.message import Message - - -class SelfQueryRetrieverComponent(CustomComponent): - display_name: str = "Self Query Retriever" - description: str = "Retriever that uses a vector store and an LLM to generate the vector store queries." 
- name = "SelfQueryRetriever" - icon = "LangChain" - - def build_config(self): - return { - "query": { - "display_name": "Query", - "input_types": ["Message", "Text"], - "info": "Query to be passed as input.", - }, - "vectorstore": { - "display_name": "Vector Store", - "info": "Vector Store to be passed as input.", - }, - "attribute_infos": { - "display_name": "Metadata Field Info", - "info": "Metadata Field Info to be passed as input.", - }, - "document_content_description": { - "display_name": "Document Content Description", - "info": "Document Content Description to be passed as input.", - }, - "llm": { - "display_name": "LLM", - "info": "LLM to be passed as input.", - }, - } - - def build( - self, - query: Message, - vectorstore: VectorStore, - attribute_infos: list[Data], - document_content_description: Text, - llm: LanguageModel, - ) -> Data: - metadata_field_infos = [AttributeInfo(**value.data) for value in attribute_infos] - self_query_retriever = SelfQueryRetriever.from_llm( - llm=llm, - vectorstore=vectorstore, - document_contents=document_content_description, - metadata_field_info=metadata_field_infos, - enable_limit=True, - ) - - if isinstance(query, Message): - input_text = query.text - elif isinstance(query, str): - input_text = query - - if not isinstance(query, str): - raise ValueError(f"Query type {type(query)} not supported.") - documents = self_query_retriever.invoke(input=input_text, config={"callbacks": self.get_langchain_callbacks()}) - data = [Data.from_document(document) for document in documents] - self.status = data - return data # type: ignore diff --git a/src/backend/base/langflow/components/retrievers/VectaraSelfQueryRetriver.py b/src/backend/base/langflow/components/retrievers/VectaraSelfQueryRetriver.py deleted file mode 100644 index d8580b55a3e9..000000000000 --- a/src/backend/base/langflow/components/retrievers/VectaraSelfQueryRetriver.py +++ /dev/null @@ -1,63 +0,0 @@ -import json -from typing import List, cast - -from langchain.chains.query_constructor.base import AttributeInfo -from langchain.retrievers.self_query.base import SelfQueryRetriever -from langchain_core.vectorstores import VectorStore - -from langflow.custom import CustomComponent -from langflow.field_typing import Retriever -from langflow.field_typing.constants import LanguageModel - - -class VectaraSelfQueryRetriverComponent(CustomComponent): - """ - A custom component for implementing Vectara Self Query Retriever using a vector store. 
- """ - - display_name: str = "Vectara Self Query Retriever for Vectara Vector Store" - description: str = "Implementation of Vectara Self Query Retriever" - documentation = "https://python.langchain.com/docs/integrations/retrievers/self_query/vectara_self_query" - name = "VectaraSelfQueryRetriver" - icon = "Vectara" - - field_config = { - "code": {"show": True}, - "vectorstore": {"display_name": "Vector Store", "info": "Input Vectara Vectore Store"}, - "llm": {"display_name": "LLM", "info": "For self query retriever"}, - "document_content_description": { - "display_name": "Document Content Description", - "info": "For self query retriever", - }, - "metadata_field_info": { - "display_name": "Metadata Field Info", - "info": 'Each metadata field info is a string in the form of key value pair dictionary containing additional search metadata.\nExample input: {"name":"speech","description":"what name of the speech","type":"string or list[string]"}.\nThe keys should remain constant(name, description, type)', - }, - } - - def build( - self, - vectorstore: VectorStore, - document_content_description: str, - llm: LanguageModel, - metadata_field_info: List[str], - ) -> Retriever: # type: ignore - metadata_field_obj = [] - - for meta in metadata_field_info: - meta_obj = json.loads(meta) - if "name" not in meta_obj or "description" not in meta_obj or "type" not in meta_obj: - raise Exception("Incorrect metadata field info format.") - attribute_info = AttributeInfo( - name=meta_obj["name"], - description=meta_obj["description"], - type=meta_obj["type"], - ) - metadata_field_obj.append(attribute_info) - - return cast( - Retriever, - SelfQueryRetriever.from_llm( - llm, vectorstore, document_content_description, metadata_field_obj, verbose=True - ), - ) diff --git a/src/backend/base/langflow/components/retrievers/VectorStoreRetriever.py b/src/backend/base/langflow/components/retrievers/VectorStoreRetriever.py deleted file mode 100644 index b4ef2c846495..000000000000 --- a/src/backend/base/langflow/components/retrievers/VectorStoreRetriever.py +++ /dev/null @@ -1,18 +0,0 @@ -from langchain_core.vectorstores import VectorStoreRetriever - -from langflow.custom import CustomComponent -from langflow.field_typing import VectorStore - - -class VectoStoreRetrieverComponent(CustomComponent): - display_name = "VectorStore Retriever" - description = "A vector store retriever" - name = "VectorStoreRetriever" - - def build_config(self): - return { - "vectorstore": {"display_name": "Vector Store", "type": VectorStore}, - } - - def build(self, vectorstore: VectorStore) -> VectorStoreRetriever: - return vectorstore.as_retriever() diff --git a/src/backend/base/langflow/components/retrievers/__init__.py b/src/backend/base/langflow/components/retrievers/__init__.py index 35030bc5ef1c..f5de9b4cdeb7 100644 --- a/src/backend/base/langflow/components/retrievers/__init__.py +++ b/src/backend/base/langflow/components/retrievers/__init__.py @@ -1,13 +1,9 @@ -from .AmazonKendra import AmazonKendraRetrieverComponent -from .MetalRetriever import MetalRetrieverComponent -from .MultiQueryRetriever import MultiQueryRetrieverComponent -from .VectaraSelfQueryRetriver import VectaraSelfQueryRetriverComponent -from .VectorStoreRetriever import VectoStoreRetrieverComponent +from .amazon_kendra import AmazonKendraRetrieverComponent +from .metal import MetalRetrieverComponent +from .multi_query import MultiQueryRetrieverComponent __all__ = [ "AmazonKendraRetrieverComponent", "MetalRetrieverComponent", "MultiQueryRetrieverComponent", - 
"VectaraSelfQueryRetriverComponent", - "VectoStoreRetrieverComponent", ] diff --git a/src/backend/base/langflow/components/retrievers/amazon_kendra.py b/src/backend/base/langflow/components/retrievers/amazon_kendra.py new file mode 100644 index 000000000000..21c12c00385a --- /dev/null +++ b/src/backend/base/langflow/components/retrievers/amazon_kendra.py @@ -0,0 +1,54 @@ +from typing import cast + +from langchain_community.retrievers import AmazonKendraRetriever + +from langflow.custom import CustomComponent +from langflow.field_typing import Retriever + + +class AmazonKendraRetrieverComponent(CustomComponent): + display_name: str = "Amazon Kendra Retriever" + description: str = "Retriever that uses the Amazon Kendra API." + name = "AmazonKendra" + icon = "Amazon" + legacy: bool = True + + def build_config(self): + return { + "index_id": {"display_name": "Index ID"}, + "region_name": {"display_name": "Region Name"}, + "credentials_profile_name": {"display_name": "Credentials Profile Name"}, + "attribute_filter": { + "display_name": "Attribute Filter", + "field_type": "code", + }, + "top_k": {"display_name": "Top K", "field_type": "int"}, + "user_context": { + "display_name": "User Context", + "field_type": "code", + }, + "code": {"show": False}, + } + + def build( + self, + index_id: str, + top_k: int = 3, + region_name: str | None = None, + credentials_profile_name: str | None = None, + attribute_filter: dict | None = None, + user_context: dict | None = None, + ) -> Retriever: # type: ignore[type-var] + try: + output = AmazonKendraRetriever( + index_id=index_id, + top_k=top_k, + region_name=region_name, + credentials_profile_name=credentials_profile_name, + attribute_filter=attribute_filter, + user_context=user_context, + ) + except Exception as e: + msg = "Could not connect to AmazonKendra API." + raise ValueError(msg) from e + return cast(Retriever, output) diff --git a/src/backend/base/langflow/components/retrievers/metal.py b/src/backend/base/langflow/components/retrievers/metal.py new file mode 100644 index 000000000000..f7dba35eb981 --- /dev/null +++ b/src/backend/base/langflow/components/retrievers/metal.py @@ -0,0 +1,31 @@ +from typing import cast + +from langchain_community.retrievers import MetalRetriever +from metal_sdk.metal import Metal + +from langflow.custom import CustomComponent +from langflow.field_typing import Retriever + + +class MetalRetrieverComponent(CustomComponent): + display_name: str = "Metal Retriever" + description: str = "Retriever that uses the Metal API." + name = "MetalRetriever" + legacy: bool = True + + def build_config(self): + return { + "api_key": {"display_name": "API Key", "password": True}, + "client_id": {"display_name": "Client ID", "password": True}, + "index_id": {"display_name": "Index ID"}, + "params": {"display_name": "Parameters"}, + "code": {"show": False}, + } + + def build(self, api_key: str, client_id: str, index_id: str, params: dict | None = None) -> Retriever: # type: ignore[type-var] + try: + metal = Metal(api_key=api_key, client_id=client_id, index_id=index_id) + except Exception as e: + msg = "Could not connect to Metal API." 
+ raise ValueError(msg) from e + return cast(Retriever, MetalRetriever(client=metal, params=params or {})) diff --git a/src/backend/base/langflow/components/retrievers/multi_query.py b/src/backend/base/langflow/components/retrievers/multi_query.py new file mode 100644 index 000000000000..6d381affcf8a --- /dev/null +++ b/src/backend/base/langflow/components/retrievers/multi_query.py @@ -0,0 +1,50 @@ +from langchain.retrievers import MultiQueryRetriever + +from langflow.custom import CustomComponent +from langflow.field_typing import BaseRetriever, LanguageModel, PromptTemplate, Text + + +class MultiQueryRetrieverComponent(CustomComponent): + display_name = "MultiQueryRetriever" + description = "Initialize from llm using default template." + documentation = "https://python.langchain.com/docs/modules/data_connection/retrievers/how_to/MultiQueryRetriever" + name = "MultiQueryRetriever" + legacy: bool = True + + def build_config(self): + return { + "llm": {"display_name": "LLM"}, + "prompt": { + "display_name": "Prompt", + "default": { + "input_variables": ["question"], + "input_types": {}, + "output_parser": None, + "partial_variables": {}, + "template": "You are an AI language model assistant. Your task is \n" + "to generate 3 different versions of the given user \n" + "question to retrieve relevant documents from a vector database. \n" + "By generating multiple perspectives on the user question, \n" + "your goal is to help the user overcome some of the limitations \n" + "of distance-based similarity search. Provide these alternative \n" + "questions separated by newlines. Original question: {question}", + "template_format": "f-string", + "validate_template": False, + "_type": "prompt", + }, + }, + "retriever": {"display_name": "Retriever"}, + "parser_key": {"display_name": "Parser Key", "default": "lines"}, + } + + def build( + self, + llm: LanguageModel, + retriever: BaseRetriever, + prompt: Text | None = None, + parser_key: str = "lines", + ) -> MultiQueryRetriever: + if not prompt: + return MultiQueryRetriever.from_llm(llm=llm, retriever=retriever, parser_key=parser_key) + prompt_template = PromptTemplate.from_template(prompt) + return MultiQueryRetriever.from_llm(llm=llm, retriever=retriever, prompt=prompt_template, parser_key=parser_key) diff --git a/src/backend/base/langflow/components/textsplitters/CharacterTextSplitter.py b/src/backend/base/langflow/components/textsplitters/CharacterTextSplitter.py deleted file mode 100644 index 1b70fcb3bd11..000000000000 --- a/src/backend/base/langflow/components/textsplitters/CharacterTextSplitter.py +++ /dev/null @@ -1,54 +0,0 @@ -from typing import Any - -from langchain_text_splitters import CharacterTextSplitter, TextSplitter - -from langflow.base.textsplitters.model import LCTextSplitterComponent -from langflow.inputs import DataInput, IntInput, MessageTextInput -from langflow.utils.util import unescape_string - - -class CharacterTextSplitterComponent(LCTextSplitterComponent): - display_name = "CharacterTextSplitter" - description = "Split text by number of characters." 
- documentation = "https://docs.langflow.org/components/text-splitters#charactertextsplitter" - name = "CharacterTextSplitter" - - inputs = [ - IntInput( - name="chunk_size", - display_name="Chunk Size", - info="The maximum length of each chunk.", - value=1000, - ), - IntInput( - name="chunk_overlap", - display_name="Chunk Overlap", - info="The amount of overlap between chunks.", - value=200, - ), - DataInput( - name="data_input", - display_name="Input", - info="The texts to split.", - input_types=["Document", "Data"], - ), - MessageTextInput( - name="separator", - display_name="Separator", - info='The characters to split on.\nIf left empty defaults to "\\n\\n".', - ), - ] - - def get_data_input(self) -> Any: - return self.data_input - - def build_text_splitter(self) -> TextSplitter: - if self.separator: - separator = unescape_string(self.separator) - else: - separator = "\n\n" - return CharacterTextSplitter( - chunk_overlap=self.chunk_overlap, - chunk_size=self.chunk_size, - separator=separator, - ) diff --git a/src/backend/base/langflow/components/textsplitters/LanguageRecursiveTextSplitter.py b/src/backend/base/langflow/components/textsplitters/LanguageRecursiveTextSplitter.py deleted file mode 100644 index f1fc9025251c..000000000000 --- a/src/backend/base/langflow/components/textsplitters/LanguageRecursiveTextSplitter.py +++ /dev/null @@ -1,47 +0,0 @@ -from typing import Any - -from langchain_text_splitters import Language, RecursiveCharacterTextSplitter, TextSplitter - -from langflow.base.textsplitters.model import LCTextSplitterComponent -from langflow.inputs import IntInput, DataInput, DropdownInput - - -class LanguageRecursiveTextSplitterComponent(LCTextSplitterComponent): - display_name: str = "Language Recursive Text Splitter" - description: str = "Split text into chunks of a specified length based on language." 
- documentation: str = "https://docs.langflow.org/components/text-splitters#languagerecursivetextsplitter" - name = "LanguageRecursiveTextSplitter" - - inputs = [ - IntInput( - name="chunk_size", - display_name="Chunk Size", - info="The maximum length of each chunk.", - value=1000, - ), - IntInput( - name="chunk_overlap", - display_name="Chunk Overlap", - info="The amount of overlap between chunks.", - value=200, - ), - DataInput( - name="data_input", - display_name="Input", - info="The texts to split.", - input_types=["Document", "Data"], - ), - DropdownInput( - name="code_language", display_name="Code Language", options=[x.value for x in Language], value="python" - ), - ] - - def get_data_input(self) -> Any: - return self.data_input - - def build_text_splitter(self) -> TextSplitter: - return RecursiveCharacterTextSplitter.from_language( - language=Language(self.code_language), - chunk_size=self.chunk_size, - chunk_overlap=self.chunk_overlap, - ) diff --git a/src/backend/base/langflow/components/textsplitters/NaturalLanguageTextSplitter.py b/src/backend/base/langflow/components/textsplitters/NaturalLanguageTextSplitter.py deleted file mode 100644 index e70e48bb8ea5..000000000000 --- a/src/backend/base/langflow/components/textsplitters/NaturalLanguageTextSplitter.py +++ /dev/null @@ -1,62 +0,0 @@ -from typing import Any - -from langchain_text_splitters import NLTKTextSplitter, TextSplitter - -from langflow.base.textsplitters.model import LCTextSplitterComponent -from langflow.inputs import DataInput, IntInput, MessageTextInput -from langflow.utils.util import unescape_string - - -class NaturalLanguageTextSplitterComponent(LCTextSplitterComponent): - display_name = "Natural Language Text Splitter" - description = "Split text based on natural language boundaries, optimized for a specified language." - documentation = ( - "https://python.langchain.com/v0.1/docs/modules/data_connection/document_transformers/split_by_token/#nltk" - ) - name = "NaturalLanguageTextSplitter" - - inputs = [ - IntInput( - name="chunk_size", - display_name="Chunk Size", - info="The maximum number of characters in each chunk after splitting.", - value=1000, - ), - IntInput( - name="chunk_overlap", - display_name="Chunk Overlap", - info="The number of characters that overlap between consecutive chunks.", - value=200, - ), - DataInput( - name="data_input", - display_name="Input", - info="The text data to be split.", - input_types=["Document", "Data"], - ), - MessageTextInput( - name="separator", - display_name="Separator", - info='The character(s) to use as a delimiter when splitting text.\nDefaults to "\\n\\n" if left empty.', - ), - MessageTextInput( - name="language", - display_name="Language", - info='The language of the text. Default is "English". 
Supports multiple languages for better text boundary recognition.', - ), - ] - - def get_data_input(self) -> Any: - return self.data_input - - def build_text_splitter(self) -> TextSplitter: - if self.separator: - separator = unescape_string(self.separator) - else: - separator = "\n\n" - return NLTKTextSplitter( - language=self.language.lower() if self.language else "english", - separator=separator, - chunk_size=self.chunk_size, - chunk_overlap=self.chunk_overlap, - ) diff --git a/src/backend/base/langflow/components/textsplitters/RecursiveCharacterTextSplitter.py b/src/backend/base/langflow/components/textsplitters/RecursiveCharacterTextSplitter.py deleted file mode 100644 index 50103a3f9f17..000000000000 --- a/src/backend/base/langflow/components/textsplitters/RecursiveCharacterTextSplitter.py +++ /dev/null @@ -1,56 +0,0 @@ -from typing import Any -from langchain_text_splitters import RecursiveCharacterTextSplitter, TextSplitter -from langflow.base.textsplitters.model import LCTextSplitterComponent -from langflow.inputs.inputs import DataInput, IntInput, MessageTextInput -from langflow.utils.util import unescape_string - - -class RecursiveCharacterTextSplitterComponent(LCTextSplitterComponent): - display_name: str = "Recursive Character Text Splitter" - description: str = "Split text trying to keep all related text together." - documentation: str = "https://docs.langflow.org/components/text-splitters#recursivecharactertextsplitter" - name = "RecursiveCharacterTextSplitter" - - inputs = [ - IntInput( - name="chunk_size", - display_name="Chunk Size", - info="The maximum length of each chunk.", - value=1000, - ), - IntInput( - name="chunk_overlap", - display_name="Chunk Overlap", - info="The amount of overlap between chunks.", - value=200, - ), - DataInput( - name="data_input", - display_name="Input", - info="The texts to split.", - input_types=["Document", "Data"], - ), - MessageTextInput( - name="separators", - display_name="Separators", - info='The characters to split on.\nIf left empty defaults to ["\\n\\n", "\\n", " ", ""].', - is_list=True, - ), - ] - - def get_data_input(self) -> Any: - return self.data_input - - def build_text_splitter(self) -> TextSplitter: - if not self.separators: - separators: list[str] | None = None - else: - # check if the separators list has escaped characters - # if there are escaped characters, unescape them - separators = [unescape_string(x) for x in self.separators] - - return RecursiveCharacterTextSplitter( - separators=separators, - chunk_size=self.chunk_size, - chunk_overlap=self.chunk_overlap, - ) diff --git a/src/backend/base/langflow/components/textsplitters/__init__.py b/src/backend/base/langflow/components/textsplitters/__init__.py index 1b3d02a72840..e69de29bb2d1 100644 --- a/src/backend/base/langflow/components/textsplitters/__init__.py +++ b/src/backend/base/langflow/components/textsplitters/__init__.py @@ -1,11 +0,0 @@ -from .CharacterTextSplitter import CharacterTextSplitterComponent -from .LanguageRecursiveTextSplitter import LanguageRecursiveTextSplitterComponent -from .RecursiveCharacterTextSplitter import RecursiveCharacterTextSplitterComponent -from .NaturalLanguageTextSplitter import NaturalLanguageTextSplitterComponent - -__all__ = [ - "CharacterTextSplitterComponent", - "LanguageRecursiveTextSplitterComponent", - "RecursiveCharacterTextSplitterComponent", - "NaturalLanguageTextSplitterComponent", -] diff --git a/src/backend/base/langflow/components/toolkits/ComposioAPI.py b/src/backend/base/langflow/components/toolkits/ComposioAPI.py 
deleted file mode 100644 index ea76eac5a784..000000000000 --- a/src/backend/base/langflow/components/toolkits/ComposioAPI.py +++ /dev/null @@ -1,175 +0,0 @@ -from typing import Any, Sequence - -from composio_langchain import Action, App, ComposioToolSet # type: ignore -from langchain_core.tools import Tool -from loguru import logger - -from langflow.base.langchain_utilities.model import LCToolComponent -from langflow.inputs import DropdownInput, MessageTextInput, MultiselectInput, SecretStrInput, StrInput - - -class ComposioAPIComponent(LCToolComponent): - display_name: str = "Composio Tools" - description: str = "Use Composio toolset to run actions with your agent" - name = "ComposioAPI" - icon = "Composio" - documentation: str = "https://docs.composio.dev" - - inputs = [ - MessageTextInput(name="entity_id", display_name="Entity ID", value="default", advanced=True), - SecretStrInput( - name="api_key", - display_name="Composio API Key", - required=True, - refresh_button=True, - info="Refer to https://docs.composio.dev/introduction/foundations/howtos/get_api_key", - ), - DropdownInput( - name="app_names", - display_name="App Name", - options=[app_name for app_name in App.__annotations__], - value="", - info="The app name to use. Please refresh after selecting app name", - refresh_button=True, - ), - MultiselectInput( - name="action_names", - display_name="Actions to use", - required=False, - options=[], - value=[], - info="The actions to pass to agent to execute", - ), - StrInput( - name="auth_status_config", - display_name="Auth status", - value="", - refresh_button=True, - info="Open link or enter api key. Then refresh button", - ), - ] - - def _check_for_authorization(self, app: str) -> str: - """ - Checks if the app is authorized. - - Args: - app (str): The app name to check authorization for. - - Returns: - str: The authorization status. - """ - toolset = self._build_wrapper() - entity = toolset.client.get_entity(id=self.entity_id) - try: - entity.get_connection(app=app) - return f"{app} CONNECTED" - except Exception: - return self._handle_authorization_failure(toolset, entity, app) - - def _handle_authorization_failure(self, toolset: ComposioToolSet, entity: Any, app: str) -> str: - """ - Handles the authorization failure by attempting to process API key auth or initiate default connection. - - Args: - toolset (ComposioToolSet): The toolset instance. - entity (Any): The entity instance. - app (str): The app name. - - Returns: - str: The result of the authorization failure message. - """ - try: - auth_schemes = toolset.client.apps.get(app).auth_schemes - if auth_schemes[0].auth_mode == "API_KEY": - return self._process_api_key_auth(entity, app) - else: - return self._initiate_default_connection(entity, app) - except Exception as exc: - logger.error(f"Authorization error: {str(exc)}") - return "Error" - - def _process_api_key_auth(self, entity: Any, app: str) -> str: - """ - Processes the API key authentication. - - Args: - entity (Any): The entity instance. - app (str): The app name. - - Returns: - str: The status of the API key authentication. 
- """ - auth_status_config = self.auth_status_config - is_url = "http" in auth_status_config or "https" in auth_status_config - is_different_app = "CONNECTED" in auth_status_config and app not in auth_status_config - is_default_api_key_message = "API Key" in auth_status_config - - if is_different_app or is_url or is_default_api_key_message: - return "Enter API Key" - else: - if not is_default_api_key_message: - entity.initiate_connection( - app_name=app, - auth_mode="API_KEY", - auth_config={"api_key": self.auth_status_config}, - use_composio_auth=False, - force_new_integration=True, - ) - return f"{app} CONNECTED" - else: - return "Enter API Key" - - def _initiate_default_connection(self, entity: Any, app: str) -> str: - connection = entity.initiate_connection(app_name=app, use_composio_auth=True, force_new_integration=True) - return connection.redirectUrl - - def _get_connected_app_names_for_entity(self) -> list[str]: - toolset = self._build_wrapper() - connections = toolset.client.get_entity(id=self.entity_id).get_connections() - return list(set(connection.appUniqueId for connection in connections)) - - def _update_app_names_with_connected_status(self, build_config: dict) -> dict: - connected_app_names = self._get_connected_app_names_for_entity() - - app_names = [ - f"{app_name}_CONNECTED" for app_name in App.__annotations__ if app_name.lower() in connected_app_names - ] - non_connected_app_names = [ - app_name for app_name in App.__annotations__ if app_name.lower() not in connected_app_names - ] - build_config["app_names"]["options"] = app_names + non_connected_app_names - build_config["app_names"]["value"] = app_names[0] if app_names else "" - return build_config - - def _get_normalized_app_name(self) -> str: - return self.app_names.replace("_CONNECTED", "").replace("_connected", "") - - def update_build_config(self, build_config: dict, field_value: Any, field_name: str | None = None) -> dict: - if field_name == "api_key": - if hasattr(self, "api_key") and self.api_key != "": - build_config = self._update_app_names_with_connected_status(build_config) - return build_config - - if field_name in {"app_names", "auth_status_config"}: - if hasattr(self, "api_key") and self.api_key != "": - build_config["auth_status_config"]["value"] = self._check_for_authorization( - self._get_normalized_app_name() - ) - all_action_names = [action_name for action_name in Action.__annotations__] - app_action_names = [ - action_name - for action_name in all_action_names - if action_name.lower().startswith(self._get_normalized_app_name().lower() + "_") - ] - build_config["action_names"]["options"] = app_action_names - build_config["action_names"]["value"] = [app_action_names[0]] if app_action_names else [""] - return build_config - - def build_tool(self) -> Sequence[Tool]: - composio_toolset = self._build_wrapper() - composio_tools = composio_toolset.get_actions(actions=self.action_names) - return composio_tools - - def _build_wrapper(self) -> ComposioToolSet: - return ComposioToolSet(api_key=self.api_key) diff --git a/src/backend/base/langflow/components/toolkits/Metaphor.py b/src/backend/base/langflow/components/toolkits/Metaphor.py deleted file mode 100644 index 73c7645120a5..000000000000 --- a/src/backend/base/langflow/components/toolkits/Metaphor.py +++ /dev/null @@ -1,53 +0,0 @@ -from typing import List, Union - -from langchain_community.agent_toolkits.base import BaseToolkit -from langchain_core.tools import Tool, tool -from metaphor_python import Metaphor # type: ignore - -from langflow.custom import 
CustomComponent - - -class MetaphorToolkit(CustomComponent): - display_name: str = "Metaphor" - description: str = "Metaphor Toolkit" - documentation = "https://python.langchain.com/docs/integrations/tools/metaphor_search" - beta: bool = True - name = "Metaphor" - # api key should be password = True - field_config = { - "metaphor_api_key": {"display_name": "Metaphor API Key", "password": True}, - "code": {"advanced": True}, - } - - def build( - self, - metaphor_api_key: str, - use_autoprompt: bool = True, - search_num_results: int = 5, - similar_num_results: int = 5, - ) -> Union[Tool, BaseToolkit]: - # If documents, then we need to create a Vectara instance using .from_documents - client = Metaphor(api_key=metaphor_api_key) - - @tool - def search(query: str): - """Call search engine with a query.""" - return client.search(query, use_autoprompt=use_autoprompt, num_results=search_num_results) - - @tool - def get_contents(ids: List[str]): - """Get contents of a webpage. - - The ids passed in should be a list of ids as fetched from `search`. - """ - return client.get_contents(ids) - - @tool - def find_similar(url: str): - """Get search results similar to a given URL. - - The url passed in should be a URL returned from `search` - """ - return client.find_similar(url, num_results=similar_num_results) - - return [search, get_contents, find_similar] # type: ignore diff --git a/src/backend/base/langflow/components/toolkits/VectorStoreInfo.py b/src/backend/base/langflow/components/toolkits/VectorStoreInfo.py deleted file mode 100644 index 41ab7ef7364a..000000000000 --- a/src/backend/base/langflow/components/toolkits/VectorStoreInfo.py +++ /dev/null @@ -1,44 +0,0 @@ -from langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreInfo -from langflow.custom import Component -from langflow.inputs import HandleInput, MultilineInput, MessageTextInput -from langflow.template import Output - - -class VectorStoreInfoComponent(Component): - display_name = "VectorStoreInfo" - description = "Information about a VectorStore" - name = "VectorStoreInfo" - - inputs = [ - MessageTextInput( - name="vectorstore_name", - display_name="Name", - info="Name of the VectorStore", - required=True, - ), - MultilineInput( - name="vectorstore_description", - display_name="Description", - info="Description of the VectorStore", - required=True, - ), - HandleInput( - name="input_vectorstore", - display_name="Vector Store", - input_types=["VectorStore"], - required=True, - ), - ] - - outputs = [ - Output(display_name="Vector Store Info", name="info", method="build_info"), - ] - - def build_info(self) -> VectorStoreInfo: - self.status = { - "name": self.vectorstore_name, - "description": self.vectorstore_description, - } - return VectorStoreInfo( - vectorstore=self.input_vectorstore, description=self.vectorstore_description, name=self.vectorstore_name - ) diff --git a/src/backend/base/langflow/components/toolkits/__init__.py b/src/backend/base/langflow/components/toolkits/__init__.py index 8d3e5c8cb342..e69de29bb2d1 100644 --- a/src/backend/base/langflow/components/toolkits/__init__.py +++ b/src/backend/base/langflow/components/toolkits/__init__.py @@ -1,9 +0,0 @@ -from .Metaphor import MetaphorToolkit -from .VectorStoreInfo import VectorStoreInfoComponent -from .ComposioAPI import ComposioAPIComponent - -__all__ = [ - "MetaphorToolkit", - "VectorStoreInfoComponent", - "ComposioAPIComponent", -] diff --git a/src/backend/base/langflow/components/tools/BingSearchAPI.py 
b/src/backend/base/langflow/components/tools/BingSearchAPI.py deleted file mode 100644 index b246cf13c0e6..000000000000 --- a/src/backend/base/langflow/components/tools/BingSearchAPI.py +++ /dev/null @@ -1,46 +0,0 @@ -from typing import List, cast - -from langchain_community.tools.bing_search import BingSearchResults -from langchain_community.utilities import BingSearchAPIWrapper - -from langflow.base.langchain_utilities.model import LCToolComponent -from langflow.field_typing import Tool -from langflow.inputs import IntInput, MessageTextInput, MultilineInput, SecretStrInput -from langflow.schema import Data - - -class BingSearchAPIComponent(LCToolComponent): - display_name = "Bing Search API" - description = "Call the Bing Search API." - name = "BingSearchAPI" - - inputs = [ - SecretStrInput(name="bing_subscription_key", display_name="Bing Subscription Key"), - MultilineInput( - name="input_value", - display_name="Input", - ), - MessageTextInput(name="bing_search_url", display_name="Bing Search URL", advanced=True), - IntInput(name="k", display_name="Number of results", value=4, required=True), - ] - - def run_model(self) -> List[Data]: - if self.bing_search_url: - wrapper = BingSearchAPIWrapper( - bing_search_url=self.bing_search_url, bing_subscription_key=self.bing_subscription_key - ) - else: - wrapper = BingSearchAPIWrapper(bing_subscription_key=self.bing_subscription_key) # type: ignore - results = wrapper.results(query=self.input_value, num_results=self.k) - data = [Data(data=result, text=result["snippet"]) for result in results] - self.status = data - return data - - def build_tool(self) -> Tool: - if self.bing_search_url: - wrapper = BingSearchAPIWrapper( - bing_search_url=self.bing_search_url, bing_subscription_key=self.bing_subscription_key - ) - else: - wrapper = BingSearchAPIWrapper(bing_subscription_key=self.bing_subscription_key) # type: ignore - return cast(Tool, BingSearchResults(api_wrapper=wrapper, num_results=self.k)) diff --git a/src/backend/base/langflow/components/tools/GoogleSearchAPI.py b/src/backend/base/langflow/components/tools/GoogleSearchAPI.py deleted file mode 100644 index 8284aaa8edfc..000000000000 --- a/src/backend/base/langflow/components/tools/GoogleSearchAPI.py +++ /dev/null @@ -1,45 +0,0 @@ -from typing import Union - -from langchain_core.tools import Tool - -from langflow.base.langchain_utilities.model import LCToolComponent -from langflow.inputs import SecretStrInput, MultilineInput, IntInput -from langflow.schema import Data - - -class GoogleSearchAPIComponent(LCToolComponent): - display_name = "Google Search API" - description = "Call Google Search API." 
- name = "GoogleSearchAPI" - - inputs = [ - SecretStrInput(name="google_api_key", display_name="Google API Key", required=True), - SecretStrInput(name="google_cse_id", display_name="Google CSE ID", required=True), - MultilineInput( - name="input_value", - display_name="Input", - ), - IntInput(name="k", display_name="Number of results", value=4, required=True), - ] - - def run_model(self) -> Union[Data, list[Data]]: - wrapper = self._build_wrapper() - results = wrapper.results(query=self.input_value, num_results=self.k) - data = [Data(data=result, text=result["snippet"]) for result in results] - self.status = data - return data - - def build_tool(self) -> Tool: - wrapper = self._build_wrapper() - return Tool( - name="google_search", - description="Search Google for recent results.", - func=wrapper.run, - ) - - def _build_wrapper(self): - try: - from langchain_google_community import GoogleSearchAPIWrapper # type: ignore - except ImportError: - raise ImportError("Please install langchain-google-community to use GoogleSearchAPIWrapper.") - return GoogleSearchAPIWrapper(google_api_key=self.google_api_key, google_cse_id=self.google_cse_id, k=self.k) diff --git a/src/backend/base/langflow/components/tools/GoogleSerperAPI.py b/src/backend/base/langflow/components/tools/GoogleSerperAPI.py deleted file mode 100644 index 7b47b62dfdc5..000000000000 --- a/src/backend/base/langflow/components/tools/GoogleSerperAPI.py +++ /dev/null @@ -1,42 +0,0 @@ -from typing import Union - -from langchain_community.utilities.google_serper import GoogleSerperAPIWrapper - -from langflow.base.langchain_utilities.model import LCToolComponent -from langflow.inputs import SecretStrInput, MultilineInput, IntInput -from langflow.schema import Data -from langflow.field_typing import Tool - - -class GoogleSerperAPIComponent(LCToolComponent): - display_name = "Google Serper API" - description = "Call the Serper.dev Google Search API." 
- name = "GoogleSerperAPI" - - inputs = [ - SecretStrInput(name="serper_api_key", display_name="Serper API Key", required=True), - MultilineInput( - name="input_value", - display_name="Input", - ), - IntInput(name="k", display_name="Number of results", value=4, required=True), - ] - - def run_model(self) -> Union[Data, list[Data]]: - wrapper = self._build_wrapper() - results = wrapper.results(query=self.input_value) - list_results = results.get("organic", []) - data = [Data(data=result, text=result["snippet"]) for result in list_results] - self.status = data - return data - - def build_tool(self) -> Tool: - wrapper = self._build_wrapper() - return Tool( - name="google_search", - description="Search Google for recent results.", - func=wrapper.run, - ) - - def _build_wrapper(self): - return GoogleSerperAPIWrapper(serper_api_key=self.serper_api_key, k=self.k) diff --git a/src/backend/base/langflow/components/tools/PythonCodeStructuredTool.py b/src/backend/base/langflow/components/tools/PythonCodeStructuredTool.py deleted file mode 100644 index 096d8abb41ee..000000000000 --- a/src/backend/base/langflow/components/tools/PythonCodeStructuredTool.py +++ /dev/null @@ -1,314 +0,0 @@ -import ast -import json -from typing import Any - -from langchain.agents import Tool -from langflow.base.langchain_utilities.model import LCToolComponent -from langflow.inputs.inputs import MultilineInput, MessageTextInput, BoolInput, DropdownInput, HandleInput, FieldTypes -from langchain_core.tools import StructuredTool -from pydantic.v1 import Field, create_model -from pydantic.v1.fields import Undefined - -from langflow.io import Output -from langflow.schema import Data -from langflow.schema.dotdict import dotdict - - -class PythonCodeStructuredTool(LCToolComponent): - DEFAULT_KEYS = [ - "code", - "_type", - "text_key", - "tool_code", - "tool_name", - "tool_description", - "return_direct", - "tool_function", - "global_variables", - "_classes", - "_functions", - ] - display_name = "Python Code Structured Tool" - description = "structuredtool dataclass code to tool" - documentation = "https://python.langchain.com/docs/modules/tools/custom_tools/#structuredtool-dataclass" - name = "PythonCodeStructuredTool" - icon = "🐍" - field_order = ["name", "description", "tool_code", "return_direct", "tool_function"] - - inputs = [ - MultilineInput( - name="tool_code", - display_name="Tool Code", - info="Enter the dataclass code.", - placeholder="def my_function(args):\n pass", - required=True, - real_time_refresh=True, - refresh_button=True, - ), - MessageTextInput(name="tool_name", display_name="Tool Name", info="Enter the name of the tool.", required=True), - MessageTextInput( - name="tool_description", - display_name="Description", - info="Enter the description of the tool.", - required=True, - ), - BoolInput( - name="return_direct", - display_name="Return Directly", - info="Should the tool return the function output directly?", - ), - DropdownInput( - name="tool_function", - display_name="Tool Function", - info="Select the function for additional expressions.", - options=[], - required=True, - real_time_refresh=True, - refresh_button=True, - ), - HandleInput( - name="global_variables", - display_name="Global Variables", - info="Enter the global variables or Create Data Component.", - input_types=["Data"], - field_type=FieldTypes.DICT, - is_list=True, - ), - MessageTextInput(name="_classes", display_name="Classes", advanced=True), - MessageTextInput(name="_functions", display_name="Functions", advanced=True), - ] - - outputs = 
[ - Output(display_name="Tool", name="result_tool", method="build_tool"), - ] - - def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None) -> dotdict: - if field_name is None: - return build_config - - if field_name != "tool_code" and field_name != "tool_function": - return build_config - - try: - named_functions = {} - [classes, functions] = self._parse_code(build_config["tool_code"]["value"]) - existing_fields = {} - if len(build_config) > len(self.DEFAULT_KEYS): - for key in build_config.copy(): - if key not in self.DEFAULT_KEYS: - existing_fields[key] = build_config.pop(key) - - names = [] - for func in functions: - named_functions[func["name"]] = func - names.append(func["name"]) - - for arg in func["args"]: - field_name = f"{func['name']}|{arg['name']}" - if field_name in existing_fields: - build_config[field_name] = existing_fields[field_name] - continue - - field = MessageTextInput( - display_name=f"{arg['name']}: Description", - name=field_name, - info=f"Enter the description for {arg['name']}", - required=True, - ) - build_config[field_name] = field.to_dict() - build_config["_functions"]["value"] = json.dumps(named_functions) - build_config["_classes"]["value"] = json.dumps(classes) - build_config["tool_function"]["options"] = names - except Exception as e: - self.status = f"Failed to extract names: {str(e)}" - build_config["tool_function"]["options"] = ["Failed to parse", str(e)] - return build_config - - async def build_tool(self) -> Tool: - _local_namespace = {} # type: ignore - modules = self._find_imports(self.tool_code) - import_code = "" - for module in modules["imports"]: - import_code += f"global {module}\nimport {module}\n" - for from_module in modules["from_imports"]: - for alias in from_module.names: - import_code += f"global {alias.name}\n" - import_code += ( - f"from {from_module.module} import {', '.join([alias.name for alias in from_module.names])}\n" - ) - exec(import_code, globals()) - exec(self.tool_code, globals(), _local_namespace) - - class PythonCodeToolFunc: - params: dict = {} - - def run(**kwargs): - for key in kwargs: - if key not in PythonCodeToolFunc.params: - PythonCodeToolFunc.params[key] = kwargs[key] - return _local_namespace[self.tool_function](**PythonCodeToolFunc.params) - - _globals = globals() - _local = {} # type: ignore - _local[self.tool_function] = PythonCodeToolFunc - _globals.update(_local) - - if isinstance(self.global_variables, list): - for data in self.global_variables: - if isinstance(data, Data): - _globals.update(data.data) - elif isinstance(self.global_variables, dict): - _globals.update(self.global_variables) - - classes = json.loads(self._attributes["_classes"]) - for class_dict in classes: - exec("\n".join(class_dict["code"]), _globals) - - named_functions = json.loads(self._attributes["_functions"]) - schema_fields = {} - - for attr in self._attributes: - if attr in self.DEFAULT_KEYS: - continue - - func_name = attr.split("|")[0] - field_name = attr.split("|")[1] - func_arg = self._find_arg(named_functions, func_name, field_name) - if func_arg is None: - raise Exception(f"Failed to find arg: {field_name}") - - field_annotation = func_arg["annotation"] - field_description = self._get_value(self._attributes[attr], str) - - if field_annotation: - exec(f"temp_annotation_type = {field_annotation}", _globals) - schema_annotation = _globals["temp_annotation_type"] - else: - schema_annotation = Any - schema_fields[field_name] = ( - schema_annotation, - Field( - default=func_arg["default"] 
if "default" in func_arg else Undefined, description=field_description - ), - ) - - if "temp_annotation_type" in _globals: - _globals.pop("temp_annotation_type") - - PythonCodeToolSchema = None - if schema_fields: - PythonCodeToolSchema = create_model("PythonCodeToolSchema", **schema_fields) # type: ignore - - tool = StructuredTool.from_function( - func=_local[self.tool_function].run, - args_schema=PythonCodeToolSchema, - name=self.tool_name, - description=self.tool_description, - return_direct=self.return_direct, - ) - return tool # type: ignore - - def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict): - """ - This function is called after the code validation is done. - """ - frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node) - frontend_node["template"] = self.update_build_config( - frontend_node["template"], frontend_node["template"]["tool_code"]["value"], "tool_code" - ) - frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node) - for key in frontend_node["template"]: - if key in self.DEFAULT_KEYS: - continue - frontend_node["template"] = self.update_build_config( - frontend_node["template"], frontend_node["template"][key]["value"], key - ) - frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node) - return frontend_node - - def _parse_code(self, code: str) -> tuple[list[dict], list[dict]]: - parsed_code = ast.parse(code) - lines = code.split("\n") - classes = [] - functions = [] - for node in parsed_code.body: - if isinstance(node, ast.ClassDef): - class_lines = lines[node.lineno - 1 : node.end_lineno] - class_lines[-1] = class_lines[-1][: node.end_col_offset] - class_lines[0] = class_lines[0][node.col_offset :] - classes.append( - { - "name": node.name, - "code": class_lines, - } - ) - continue - - if not isinstance(node, ast.FunctionDef): - continue - - func = {"name": node.name, "args": []} - for arg in node.args.args: - if arg.lineno != arg.end_lineno: - raise Exception("Multiline arguments are not supported") - - func_arg = { - "name": arg.arg, - "annotation": None, - } - - for default in node.args.defaults: - if ( - arg.lineno > default.lineno - or arg.col_offset > default.col_offset - or ( - arg.end_lineno is not None - and default.end_lineno is not None - and arg.end_lineno < default.end_lineno - ) - or ( - arg.end_col_offset is not None - and default.end_col_offset is not None - and arg.end_col_offset < default.end_col_offset - ) - ): - continue - - if isinstance(default, ast.Name): - func_arg["default"] = default.id - elif isinstance(default, ast.Constant): - func_arg["default"] = default.value - - if arg.annotation: - annotation_line = lines[arg.annotation.lineno - 1] - annotation_line = annotation_line[: arg.annotation.end_col_offset] - annotation_line = annotation_line[arg.annotation.col_offset :] - func_arg["annotation"] = annotation_line - if isinstance(func_arg["annotation"], str) and func_arg["annotation"].count("=") > 0: - func_arg["annotation"] = "=".join(func_arg["annotation"].split("=")[:-1]).strip() - if isinstance(func["args"], list): - func["args"].append(func_arg) - functions.append(func) - - return classes, functions - - def _find_imports(self, code: str) -> dotdict: - imports = [] - from_imports = [] - parsed_code = ast.parse(code) - for node in parsed_code.body: - if isinstance(node, ast.Import): - for alias in node.names: - imports.append(alias.name) - elif isinstance(node, ast.ImportFrom): - from_imports.append(node) - return 
dotdict({"imports": imports, "from_imports": from_imports}) - - def _get_value(self, value: Any, annotation: Any) -> Any: - return value if isinstance(value, annotation) else value["value"] - - def _find_arg(self, named_functions: dict, func_name: str, arg_name: str) -> dict | None: - for arg in named_functions[func_name]["args"]: - if arg["name"] == arg_name: - return arg - return None diff --git a/src/backend/base/langflow/components/tools/PythonREPLTool.py b/src/backend/base/langflow/components/tools/PythonREPLTool.py deleted file mode 100644 index 070630a75883..000000000000 --- a/src/backend/base/langflow/components/tools/PythonREPLTool.py +++ /dev/null @@ -1,68 +0,0 @@ -import importlib -from langchain_experimental.utilities import PythonREPL - -from langflow.base.tools.base import build_status_from_tool -from langflow.custom import CustomComponent -from langchain_core.tools import Tool - - -class PythonREPLToolComponent(CustomComponent): - display_name = "Python REPL Tool" - description = "A tool for running Python code in a REPL environment." - name = "PythonREPLTool" - - def build_config(self): - return { - "name": {"display_name": "Name", "info": "The name of the tool."}, - "description": {"display_name": "Description", "info": "A description of the tool."}, - "global_imports": { - "display_name": "Global Imports", - "info": "A list of modules to import globally, e.g. ['math', 'numpy'].", - }, - } - - def get_globals(self, globals: list[str]) -> dict: - """ - Retrieves the global variables from the specified modules. - - Args: - globals (list[str]): A list of module names. - - Returns: - dict: A dictionary containing the global variables from the specified modules. - """ - global_dict = {} - for module in globals: - try: - imported_module = importlib.import_module(module) - global_dict[imported_module.__name__] = imported_module - except ImportError: - raise ImportError(f"Could not import module {module}") - return global_dict - - def build( - self, - name: str = "python_repl", - description: str = "A Python shell. Use this to execute python commands. Input should be a valid python command. If you want to see the output of a value, you should print it out with `print(...)`.", - global_imports: list[str] = ["math"], - ) -> Tool: - """ - Builds a Python REPL tool. - - Args: - name (str, optional): The name of the tool. Defaults to "python_repl". - description (str, optional): The description of the tool. Defaults to "A Python shell. Use this to execute python commands. Input should be a valid python command. If you want to see the output of a value, you should print it out with `print(...)`. ". - global_imports (list[str], optional): A list of global imports to be available in the Python REPL. Defaults to ["math"]. - - Returns: - Tool: The built Python REPL tool. 
- """ - _globals = self.get_globals(global_imports) - python_repl = PythonREPL(_globals=_globals) - tool = Tool( - name=name, - description=description, - func=python_repl.run, - ) - self.status = build_status_from_tool(tool) - return tool diff --git a/src/backend/base/langflow/components/tools/RetrieverTool.py b/src/backend/base/langflow/components/tools/RetrieverTool.py deleted file mode 100644 index 43dfbcba636f..000000000000 --- a/src/backend/base/langflow/components/tools/RetrieverTool.py +++ /dev/null @@ -1,33 +0,0 @@ -from langchain_core.tools import create_retriever_tool - -from langflow.custom import CustomComponent -from langflow.field_typing import BaseRetriever, Tool - - -class RetrieverToolComponent(CustomComponent): - display_name = "RetrieverTool" - description = "Tool for interacting with retriever" - name = "RetrieverTool" - - def build_config(self): - return { - "retriever": { - "display_name": "Retriever", - "info": "Retriever to interact with", - "type": BaseRetriever, - }, - "name": {"display_name": "Name", "info": "Name of the tool"}, - "description": {"display_name": "Description", "info": "Description of the tool"}, - } - - def build( - self, - retriever: BaseRetriever, - name: str, - description: str, - ) -> Tool: - return create_retriever_tool( - retriever=retriever, - name=name, - description=description, - ) diff --git a/src/backend/base/langflow/components/tools/SearXNGTool.py b/src/backend/base/langflow/components/tools/SearXNGTool.py deleted file mode 100644 index 86b3cd9cf854..000000000000 --- a/src/backend/base/langflow/components/tools/SearXNGTool.py +++ /dev/null @@ -1,141 +0,0 @@ -from typing import Any -import requests -import json - -from pydantic.v1 import Field, create_model - -from langchain.agents import Tool -from langchain_core.tools import StructuredTool -from langflow.base.langchain_utilities.model import LCToolComponent -from langflow.inputs import MessageTextInput, MultiselectInput, DropdownInput, IntInput -from langflow.schema.dotdict import dotdict -from langflow.io import Output - - -class SearXNGToolComponent(LCToolComponent): - search_headers: dict = {} - display_name = "SearXNG Search Tool" - description = "A component that searches for tools using SearXNG." 
- name = "SearXNGTool" - - inputs = [ - MessageTextInput( - name="url", - display_name="URL", - value="http://localhost", - required=True, - refresh_button=True, - ), - IntInput( - name="max_results", - display_name="Max Results", - value=10, - required=True, - ), - MultiselectInput( - name="categories", - display_name="Categories", - options=[], - value=[], - ), - DropdownInput( - name="language", - display_name="Language", - options=[], - ), - ] - - outputs = [ - Output(display_name="Tool", name="result_tool", method="build_tool"), - ] - - def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None) -> dotdict: - if field_name is None: - return build_config - - if field_name != "url": - return build_config - - try: - url = f"{field_value}/config" - - response = requests.get(url=url, headers=self.search_headers.copy()) - data = None - if response.headers.get("Content-Encoding") == "zstd": - data = json.loads(response.content) - else: - data = response.json() - build_config["categories"]["options"] = data["categories"].copy() - for selected_category in build_config["categories"]["value"]: - if selected_category not in build_config["categories"]["options"]: - build_config["categories"]["value"].remove(selected_category) - languages = [] - for language in data["locales"]: - languages.append(language) - build_config["language"]["options"] = languages.copy() - except Exception as e: - self.status = f"Failed to extract names: {str(e)}" - build_config["categories"]["options"] = ["Failed to parse", str(e)] - return build_config - - def build_tool(self) -> Tool: - class SearxSearch: - _url: str = "" - _categories: list[str] = [] - _language: str = "" - _headers: dict = {} - _max_results: int = 10 - - @staticmethod - def search(query: str, categories: list[str] = []) -> list: - if not SearxSearch._categories and not categories: - raise ValueError("No categories provided.") - all_categories = SearxSearch._categories + list(set(categories) - set(SearxSearch._categories)) - try: - url = f"{SearxSearch._url}/" - headers = SearxSearch._headers.copy() - response = requests.get( - url=url, - headers=headers, - params={ - "q": query, - "categories": ",".join(all_categories), - "language": SearxSearch._language, - "format": "json", - }, - ).json() - - results = [] - num_results = min(SearxSearch._max_results, len(response["results"])) - for i in range(num_results): - results.append(response["results"][i]) - return results - except Exception as e: - return [f"Failed to search: {str(e)}"] - - SearxSearch._url = self.url - SearxSearch._categories = self.categories.copy() - SearxSearch._language = self.language - SearxSearch._headers = self.search_headers.copy() - SearxSearch._max_results = self.max_results - - _globals = globals() - _local = {} - _local["SearxSearch"] = SearxSearch - _globals.update(_local) - - schema_fields = { - "query": (str, Field(..., description="The query to search for.")), - "categories": (list[str], Field(default=[], description="The categories to search in.")), - } - - SearxSearchSchema = create_model("SearxSearchSchema", **schema_fields) # type: ignore - - tool = StructuredTool.from_function( - func=_local["SearxSearch"].search, - args_schema=SearxSearchSchema, - name="searxng_search_tool", - description="A tool that searches for tools using SearXNG.\nThe available categories are: " - + ", ".join(self.categories), - ) - return tool diff --git a/src/backend/base/langflow/components/tools/SearchAPI.py 
b/src/backend/base/langflow/components/tools/SearchAPI.py deleted file mode 100644 index fa85b7f1ac44..000000000000 --- a/src/backend/base/langflow/components/tools/SearchAPI.py +++ /dev/null @@ -1,44 +0,0 @@ -from typing import Union - -from langchain_community.utilities.searchapi import SearchApiAPIWrapper - -from langflow.base.langchain_utilities.model import LCToolComponent -from langflow.inputs import SecretStrInput, MultilineInput, DictInput, MessageTextInput -from langflow.schema import Data -from langflow.field_typing import Tool - - -class SearchAPIComponent(LCToolComponent): - display_name: str = "Search API" - description: str = "Call the searchapi.io API" - name = "SearchAPI" - documentation: str = "https://www.searchapi.io/docs/google" - - inputs = [ - MessageTextInput(name="engine", display_name="Engine", value="google"), - SecretStrInput(name="api_key", display_name="SearchAPI API Key", required=True), - MultilineInput( - name="input_value", - display_name="Input", - ), - DictInput(name="search_params", display_name="Search parameters", advanced=True, is_list=True), - ] - - def run_model(self) -> Union[Data, list[Data]]: - wrapper = self._build_wrapper() - results = wrapper.results(query=self.input_value, **(self.search_params or {})) - list_results = results.get("organic_results", []) - data = [Data(data=result, text=result["snippet"]) for result in list_results] - self.status = data - return data - - def build_tool(self) -> Tool: - wrapper = self._build_wrapper() - return Tool( - name="search_api", - description="Search for recent results.", - func=lambda x: wrapper.run(query=x, **(self.search_params or {})), - ) - - def _build_wrapper(self): - return SearchApiAPIWrapper(engine=self.engine, searchapi_api_key=self.api_key) diff --git a/src/backend/base/langflow/components/tools/SerpAPI.py b/src/backend/base/langflow/components/tools/SerpAPI.py deleted file mode 100644 index 5acb0d1f3c7e..000000000000 --- a/src/backend/base/langflow/components/tools/SerpAPI.py +++ /dev/null @@ -1,43 +0,0 @@ -from langchain_community.utilities.serpapi import SerpAPIWrapper - -from langflow.base.langchain_utilities.model import LCToolComponent -from langflow.inputs import SecretStrInput, DictInput, MultilineInput -from langflow.schema import Data -from langflow.field_typing import Tool - - -class SerpAPIComponent(LCToolComponent): - display_name = "Serp Search API" - description = "Call Serp Search API" - name = "SerpAPI" - - inputs = [ - SecretStrInput(name="serpapi_api_key", display_name="SerpAPI API Key", required=True), - MultilineInput( - name="input_value", - display_name="Input", - ), - DictInput(name="search_params", display_name="Parameters", advanced=True, is_list=True), - ] - - def run_model(self) -> list[Data]: - wrapper = self._build_wrapper() - results = wrapper.results(self.input_value) - list_results = results.get("organic_results", []) - data = [Data(data=result, text=result["snippet"]) for result in list_results] - self.status = data - return data - - def build_tool(self) -> Tool: - wrapper = self._build_wrapper() - return Tool(name="search_api", description="Search for recent results.", func=wrapper.run) - - def _build_wrapper(self) -> SerpAPIWrapper: - if self.search_params: - return SerpAPIWrapper( # type: ignore - serpapi_api_key=self.serpapi_api_key, - params=self.search_params, - ) - return SerpAPIWrapper( # type: ignore - serpapi_api_key=self.serpapi_api_key - ) diff --git a/src/backend/base/langflow/components/tools/WikipediaAPI.py 
b/src/backend/base/langflow/components/tools/WikipediaAPI.py deleted file mode 100644 index 21c1660501fb..000000000000 --- a/src/backend/base/langflow/components/tools/WikipediaAPI.py +++ /dev/null @@ -1,46 +0,0 @@ -from typing import cast -from langchain_community.tools import WikipediaQueryRun -from langchain_community.utilities.wikipedia import WikipediaAPIWrapper - -from langflow.base.langchain_utilities.model import LCToolComponent -from langflow.field_typing import Tool -from langflow.inputs import BoolInput, IntInput, MessageTextInput, MultilineInput -from langflow.schema import Data - - -class WikipediaAPIComponent(LCToolComponent): - display_name = "Wikipedia API" - description = "Call Wikipedia API." - name = "WikipediaAPI" - - inputs = [ - MultilineInput( - name="input_value", - display_name="Input", - ), - MessageTextInput(name="lang", display_name="Language", value="en"), - IntInput(name="k", display_name="Number of results", value=4, required=True), - BoolInput(name="load_all_available_meta", display_name="Load all available meta", value=False, advanced=True), - IntInput( - name="doc_content_chars_max", display_name="Document content characters max", value=4000, advanced=True - ), - ] - - def run_model(self) -> list[Data]: - wrapper = self._build_wrapper() - docs = wrapper.load(self.input_value) - data = [Data.from_document(doc) for doc in docs] - self.status = data - return data - - def build_tool(self) -> Tool: - wrapper = self._build_wrapper() - return cast(Tool, WikipediaQueryRun(api_wrapper=wrapper)) - - def _build_wrapper(self) -> WikipediaAPIWrapper: - return WikipediaAPIWrapper( # type: ignore - top_k_results=self.k, - lang=self.lang, - load_all_available_meta=self.load_all_available_meta, - doc_content_chars_max=self.doc_content_chars_max, - ) diff --git a/src/backend/base/langflow/components/tools/WolframAlphaAPI.py b/src/backend/base/langflow/components/tools/WolframAlphaAPI.py deleted file mode 100644 index 4f378b1a0475..000000000000 --- a/src/backend/base/langflow/components/tools/WolframAlphaAPI.py +++ /dev/null @@ -1,34 +0,0 @@ -from langchain_community.utilities.wolfram_alpha import WolframAlphaAPIWrapper - -from langflow.base.langchain_utilities.model import LCToolComponent -from langflow.field_typing import Tool -from langflow.inputs import MultilineInput, SecretStrInput -from langflow.schema import Data - - -class WolframAlphaAPIComponent(LCToolComponent): - display_name = "WolframAlphaAPI" - description = "Call Wolfram Alpha API." 
- name = "WolframAlphaAPI" - - inputs = [ - MultilineInput( - name="input_value", - display_name="Input", - ), - SecretStrInput(name="app_id", display_name="App ID", required=True), - ] - - def run_model(self) -> list[Data]: - wrapper = self._build_wrapper() - result_str = wrapper.run(self.input_value) - data = [Data(text=result_str)] - self.status = data - return data - - def build_tool(self) -> Tool: - wrapper = self._build_wrapper() - return Tool(name="wolfram_alpha_api", description="Answers mathematical questions.", func=wrapper.run) - - def _build_wrapper(self) -> WolframAlphaAPIWrapper: - return WolframAlphaAPIWrapper(wolfram_alpha_appid=self.app_id) # type: ignore diff --git a/src/backend/base/langflow/components/tools/YfinanceTool.py b/src/backend/base/langflow/components/tools/YfinanceTool.py deleted file mode 100644 index 2a7fc7c16b53..000000000000 --- a/src/backend/base/langflow/components/tools/YfinanceTool.py +++ /dev/null @@ -1,20 +0,0 @@ -from typing import cast - -from langchain_community.tools.yahoo_finance_news import YahooFinanceNewsTool - -from langflow.custom import Component -from langflow.field_typing import Tool -from langflow.io import Output - - -class YfinanceToolComponent(Component): - display_name = "Yahoo Finance News Tool" - description = "Tool for interacting with Yahoo Finance News." - name = "YFinanceTool" - - outputs = [ - Output(display_name="Tool", name="tool", method="build_tool"), - ] - - def build_tool(self) -> Tool: - return cast(Tool, YahooFinanceNewsTool()) diff --git a/src/backend/base/langflow/components/tools/__init__.py b/src/backend/base/langflow/components/tools/__init__.py index 6c411d6301ac..2ffabdb20b2d 100644 --- a/src/backend/base/langflow/components/tools/__init__.py +++ b/src/backend/base/langflow/components/tools/__init__.py @@ -1,26 +1,52 @@ -from .PythonREPLTool import PythonREPLToolComponent -from .RetrieverTool import RetrieverToolComponent -from .BingSearchAPI import BingSearchAPIComponent -from .GoogleSearchAPI import GoogleSearchAPIComponent -from .GoogleSerperAPI import GoogleSerperAPIComponent -from .PythonCodeStructuredTool import PythonCodeStructuredTool -from .SearchAPI import SearchAPIComponent -from .SearXNGTool import SearXNGToolComponent -from .SerpAPI import SerpAPIComponent -from .WikipediaAPI import WikipediaAPIComponent -from .WolframAlphaAPI import WolframAlphaAPIComponent +import warnings +from langchain_core._api.deprecation import LangChainDeprecationWarning + +from .bing_search_api import BingSearchAPIComponent +from .calculator import CalculatorToolComponent +from .duck_duck_go_search_run import DuckDuckGoSearchComponent +from .glean_search_api import GleanSearchAPIComponent +from .google_search_api import GoogleSearchAPIComponent +from .google_serper_api import GoogleSerperAPIComponent +from .metaphor import MetaphorToolkit +from .python_code_structured_tool import PythonCodeStructuredTool +from .python_repl import PythonREPLToolComponent +from .retriever import RetrieverToolComponent +from .search_api import SearchAPIComponent +from .searxng import SearXNGToolComponent +from .serp_api import SerpAPIComponent +from .tavily_search import TavilySearchToolComponent +from .wikidata_api import WikidataAPIComponent +from .wikipedia_api import WikipediaAPIComponent +from .wolfram_alpha_api import WolframAlphaAPIComponent +from .yahoo_finance import YfinanceToolComponent +from .youtube_transcripts import YouTubeTranscriptsComponent + +with warnings.catch_warnings(): + warnings.simplefilter("ignore", 
LangChainDeprecationWarning) + from .astradb import AstraDBToolComponent + from .astradb_cql import AstraDBCQLToolComponent __all__ = [ - "RetrieverToolComponent", + "AstraDBCQLToolComponent", + "AstraDBToolComponent", "BingSearchAPIComponent", + "CalculatorToolComponent", + "DuckDuckGoSearchComponent", + "GleanSearchAPIComponent", "GoogleSearchAPIComponent", "GoogleSerperAPIComponent", + "MetaphorToolkit", "PythonCodeStructuredTool", "PythonREPLToolComponent", + "RetrieverToolComponent", "SearchAPIComponent", "SearXNGToolComponent", "SerpAPIComponent", + "TavilySearchToolComponent", + "WikidataAPIComponent", "WikipediaAPIComponent", "WolframAlphaAPIComponent", + "YfinanceToolComponent", + "YouTubeTranscriptsComponent", ] diff --git a/src/backend/base/langflow/components/tools/astradb.py b/src/backend/base/langflow/components/tools/astradb.py new file mode 100644 index 000000000000..8355eb3550f3 --- /dev/null +++ b/src/backend/base/langflow/components/tools/astradb.py @@ -0,0 +1,153 @@ +from typing import Any + +from astrapy import Collection, DataAPIClient, Database +from langchain.pydantic_v1 import BaseModel, Field, create_model +from langchain_core.tools import StructuredTool + +from langflow.base.langchain_utilities.model import LCToolComponent +from langflow.io import DictInput, IntInput, SecretStrInput, StrInput +from langflow.schema import Data + + +class AstraDBToolComponent(LCToolComponent): + display_name: str = "Astra DB" + description: str = "Create a tool to get data from a DataStax Astra DB Collection" + documentation: str = "https://astra.datastax.com" + icon: str = "AstraDB" + + inputs = [ + StrInput( + name="tool_name", + display_name="Tool Name", + info="The name of the tool.", + required=True, + ), + StrInput( + name="tool_description", + display_name="Tool Description", + info="The description of the tool.", + required=True, + ), + StrInput( + name="namespace", + display_name="Namespace Name", + info="The name of the namespace within Astra where the collection is stored.", + value="default_keyspace", + advanced=True, + ), + StrInput( + name="collection_name", + display_name="Collection Name", + info="The name of the collection within Astra DB where the vectors will be stored.", + required=True, + ), + SecretStrInput( + name="token", + display_name="Astra DB Application Token", + info="Authentication token for accessing Astra DB.", + value="ASTRA_DB_APPLICATION_TOKEN", + required=True, + ), + StrInput( + name="api_endpoint", + display_name="API Endpoint", + info="API endpoint URL for the Astra DB service.", + value="ASTRA_DB_API_ENDPOINT", + required=True, + ), + StrInput( + name="projection_attributes", + display_name="Projection Attributes", + info="Attributes to return, separated by commas.", + required=True, + value="*", + advanced=True, + ), + DictInput( + name="tool_params", + info="Attributes to filter on, with a description for the model. Add !
for mandatory (e.g., !customerId)", + display_name="Tool params", + is_list=True, + ), + DictInput( + name="static_filters", + info="Attributes to filter on and their corresponding values", + display_name="Static filters", + is_list=True, + ), + IntInput( + name="number_of_results", + display_name="Number of Results", + info="Number of results to return.", + advanced=True, + value=5, + ), + ] + + _cached_client: DataAPIClient | None = None + _cached_db: Database | None = None + _cached_collection: Collection | None = None + + def _build_collection(self): + if self._cached_collection: + return self._cached_collection + + _cached_client = DataAPIClient(self.token) + _cached_db = _cached_client.get_database(self.api_endpoint, namespace=self.namespace) + self._cached_collection = _cached_db.get_collection(self.collection_name) + return self._cached_collection + + def create_args_schema(self) -> dict[str, BaseModel]: + args: dict[str, tuple[Any, Field] | list[str]] = {} + + for key in self.tool_params: + if key.startswith("!"): # Mandatory + args[key[1:]] = (str, Field(description=self.tool_params[key])) + else: # Optional + args[key] = (str | None, Field(description=self.tool_params[key], default=None)) + + model = create_model("ToolInput", **args, __base__=BaseModel) + return {"ToolInput": model} + + def build_tool(self) -> StructuredTool: + """Builds an Astra DB Collection tool. + + Returns: + Tool: The built Astra DB tool. + """ + schema_dict = self.create_args_schema() + + tool = StructuredTool.from_function( + name=self.tool_name, + args_schema=schema_dict["ToolInput"], + description=self.tool_description, + func=self.run_model, + return_direct=False, + ) + self.status = "Astra DB Tool created" + + return tool + + def projection_args(self, input_str: str) -> dict: + elements = input_str.split(",") + result = {} + + for element in elements: + if element.startswith("!"): + result[element[1:]] = False + else: + result[element] = True + + return result + + def run_model(self, **args) -> Data | list[Data]: + collection = self._build_collection() + results = collection.find( + ({**args, **self.static_filters}), + projection=self.projection_args(self.projection_attributes), + limit=self.number_of_results, + ) + + data: list[Data] = [Data(data=doc) for doc in results] + self.status = data + return data diff --git a/src/backend/base/langflow/components/tools/astradb_cql.py b/src/backend/base/langflow/components/tools/astradb_cql.py new file mode 100644 index 000000000000..5a7206e99278 --- /dev/null +++ b/src/backend/base/langflow/components/tools/astradb_cql.py @@ -0,0 +1,180 @@ +import urllib +from http import HTTPStatus +from typing import Any + +import requests +from langchain.pydantic_v1 import BaseModel, Field, create_model +from langchain_core.tools import StructuredTool + +from langflow.base.langchain_utilities.model import LCToolComponent +from langflow.io import DictInput, IntInput, SecretStrInput, StrInput +from langflow.schema import Data + + +class AstraDBCQLToolComponent(LCToolComponent): + display_name: str = "Astra DB CQL" + description: str = "Create a tool to get data from a DataStax Astra DB CQL Table" + documentation: str = "https://astra.datastax.com" + icon: str = "AstraDB" + + inputs = [ + StrInput(name="tool_name", display_name="Tool Name", info="The name of the tool.", required=True), + StrInput( + name="tool_description", + display_name="Tool Description", + info="The tool description to be passed to the model.", + required=True, + ), + StrInput( + name="keyspace",
display_name="Keyspace", + value="default_keyspace", + info="The keyspace name within Astra DB where the data is stored.", + required=True, + ), + StrInput( + name="table_name", + display_name="Table Name", + info="The name of the table within Astra DB where the data is stored.", + required=True, + ), + SecretStrInput( + name="token", + display_name="Astra DB Application Token", + info="Authentication token for accessing Astra DB.", + value="ASTRA_DB_APPLICATION_TOKEN", + required=True, + ), + StrInput( + name="api_endpoint", + display_name="API Endpoint", + info="API endpoint URL for the Astra DB service.", + value="ASTRA_DB_API_ENDPOINT", + required=True, + ), + StrInput( + name="projection_fields", + display_name="Projection Fields", + info="Attributes to return, separated by commas.", + required=True, + value="*", + ), + DictInput( + name="partition_keys", + display_name="Partition Keys", + is_list=True, + info="Field name and description for the model", + required=True, + ), + DictInput( + name="clustering_keys", + display_name="Clustering Keys", + is_list=True, + info="Field name and description for the model", + ), + DictInput( + name="static_filters", + display_name="Static Filters", + is_list=True, + info="Field name and value. When filled, it will not be generated by the LLM.", + ), + IntInput( + name="number_of_results", + display_name="Number of Results", + info="Number of results to return.", + advanced=True, + value=5, + ), + ] + + def astra_rest(self, args): + headers = {"Accept": "application/json", "X-Cassandra-Token": f"{self.token}"} + astra_url = f"{self.api_endpoint}/api/rest/v2/keyspaces/{self.keyspace}/{self.table_name}/" + key = [] + # Partition keys are mandatory + for k in self.partition_keys: + if k in args: + key.append(args[k]) + elif self.static_filters[k] is not None: + key.append(self.static_filters[k]) + else: + # TO-DO: Raise error - Missing information + key.append("none") + + # Clustering keys are optional + for k in self.clustering_keys: + if k in args: + key.append(args[k]) + elif self.static_filters[k] is not None: + key.append(self.static_filters[k]) + + url = f'{astra_url}{"/".join(key)}?page-size={self.number_of_results}' + + if self.projection_fields != "*": + url += f'&fields={urllib.parse.quote(self.projection_fields.replace(" ", ""))}' + + res = requests.request("GET", url=url, headers=headers, timeout=10) + + if int(res.status_code) >= HTTPStatus.BAD_REQUEST: + return res.text + + try: + res_data = res.json() + return res_data["data"] + except ValueError: + return res.status_code + + def create_args_schema(self) -> dict[str, BaseModel]: + args: dict[str, tuple[Any, Field]] = {} + + for key in self.partition_keys: + # Partition keys are mandatory if they don't have a static filter + if key not in self.static_filters: + args[key] = (str, Field(description=self.partition_keys[key])) + + for key in self.clustering_keys: + # Clustering keys are mandatory if they have the exclamation mark and don't have a static filter + if key not in self.static_filters: + if key.startswith("!"): # Mandatory + args[key[1:]] = (str, Field(description=self.clustering_keys[key])) + else: # Optional + args[key] = (str | None, Field(description=self.clustering_keys[key], default=None)) + + model = create_model("ToolInput", **args, __base__=BaseModel) + return {"ToolInput": model} + + def build_tool(self) -> StructuredTool: + """Builds an Astra DB CQL Table tool. + + Returns: + Tool: The built Astra DB CQL tool.
+ """ + schema_dict = self.create_args_schema() + return StructuredTool.from_function( + name=self.tool_name, + args_schema=schema_dict["ToolInput"], + description=self.tool_description, + func=self.run_model, + return_direct=False, + ) + + def projection_args(self, input_str: str) -> dict: + elements = input_str.split(",") + result = {} + + for element in elements: + if element.startswith("!"): + result[element[1:]] = False + else: + result[element] = True + + return result + + def run_model(self, **args) -> Data | list[Data]: + results = self.astra_rest(args) + data: list[Data] = [Data(data=doc) for doc in results] + self.status = data + return results diff --git a/src/backend/base/langflow/components/tools/bing_search_api.py b/src/backend/base/langflow/components/tools/bing_search_api.py new file mode 100644 index 000000000000..d408f20c5a01 --- /dev/null +++ b/src/backend/base/langflow/components/tools/bing_search_api.py @@ -0,0 +1,46 @@ +from typing import cast + +from langchain_community.tools.bing_search import BingSearchResults +from langchain_community.utilities import BingSearchAPIWrapper + +from langflow.base.langchain_utilities.model import LCToolComponent +from langflow.field_typing import Tool +from langflow.inputs import IntInput, MessageTextInput, MultilineInput, SecretStrInput +from langflow.schema import Data + + +class BingSearchAPIComponent(LCToolComponent): + display_name = "Bing Search API" + description = "Call the Bing Search API." + name = "BingSearchAPI" + + inputs = [ + SecretStrInput(name="bing_subscription_key", display_name="Bing Subscription Key"), + MultilineInput( + name="input_value", + display_name="Input", + ), + MessageTextInput(name="bing_search_url", display_name="Bing Search URL", advanced=True), + IntInput(name="k", display_name="Number of results", value=4, required=True), + ] + + def run_model(self) -> list[Data]: + if self.bing_search_url: + wrapper = BingSearchAPIWrapper( + bing_search_url=self.bing_search_url, bing_subscription_key=self.bing_subscription_key + ) + else: + wrapper = BingSearchAPIWrapper(bing_subscription_key=self.bing_subscription_key) + results = wrapper.results(query=self.input_value, num_results=self.k) + data = [Data(data=result, text=result["snippet"]) for result in results] + self.status = data + return data + + def build_tool(self) -> Tool: + if self.bing_search_url: + wrapper = BingSearchAPIWrapper( + bing_search_url=self.bing_search_url, bing_subscription_key=self.bing_subscription_key + ) + else: + wrapper = BingSearchAPIWrapper(bing_subscription_key=self.bing_subscription_key) + return cast(Tool, BingSearchResults(api_wrapper=wrapper, num_results=self.k)) diff --git a/src/backend/base/langflow/components/tools/calculator.py b/src/backend/base/langflow/components/tools/calculator.py new file mode 100644 index 000000000000..ffe0ea00b5d5 --- /dev/null +++ b/src/backend/base/langflow/components/tools/calculator.py @@ -0,0 +1,98 @@ +import ast +import operator + +from langchain.tools import StructuredTool +from langchain_core.tools import ToolException +from loguru import logger +from pydantic import BaseModel, Field + +from langflow.base.langchain_utilities.model import LCToolComponent +from langflow.field_typing import Tool +from langflow.inputs import MessageTextInput +from langflow.schema import Data + + +class CalculatorToolComponent(LCToolComponent): + display_name = "Calculator" + description = "Perform basic arithmetic operations on a given expression." 
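Both Astra components above build their tool's argument schema at runtime: `create_args_schema` turns the user-supplied key/description dicts into a pydantic v1 model, with a leading `!` promoting a field to mandatory. A minimal standalone sketch of that pattern, assuming Python 3.10+ and the pydantic v1 shim, and using hypothetical field names (`customerId`, `city`) for illustration:

from pydantic.v1 import BaseModel, Field, create_model

# Hypothetical tool_params dict, as a user might enter it in the component.
tool_params = {
    "!customerId": "The customer id to look up",  # "!" prefix -> mandatory
    "city": "Optional city filter",
}

fields = {}
for key, description in tool_params.items():
    if key.startswith("!"):  # mandatory field: required, no default
        fields[key[1:]] = (str, Field(description=description))
    else:  # optional field: defaults to None
        fields[key] = (str | None, Field(default=None, description=description))

ToolInput = create_model("ToolInput", **fields, __base__=BaseModel)
print(ToolInput(customerId="42"))  # -> customerId='42' city=None

Note that keys covered by `static_filters` never reach this schema: the LLM only sees the remaining fields, and the static values are merged back into the query on the server side.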
+ icon = "calculator" + name = "CalculatorTool" + + inputs = [ + MessageTextInput( + name="expression", + display_name="Expression", + info="The arithmetic expression to evaluate (e.g., '4*4*(33/22)+12-20').", + tool_mode=True, + ), + ] + + class CalculatorToolSchema(BaseModel): + expression: str = Field(..., description="The arithmetic expression to evaluate.") + + def run_model(self) -> list[Data]: + return self._evaluate_expression(self.expression) + + def build_tool(self) -> Tool: + return StructuredTool.from_function( + name="calculator", + description="Evaluate basic arithmetic expressions. Input should be a string containing the expression.", + func=self._eval_expr_with_error, + args_schema=self.CalculatorToolSchema, + ) + + def _eval_expr(self, node): + # Define the allowed operators + operators = { + ast.Add: operator.add, + ast.Sub: operator.sub, + ast.Mult: operator.mul, + ast.Div: operator.truediv, + ast.Pow: operator.pow, + } + if isinstance(node, ast.Num): + return node.n + if isinstance(node, ast.BinOp): + return operators[type(node.op)](self._eval_expr(node.left), self._eval_expr(node.right)) + if isinstance(node, ast.UnaryOp): + return operators[type(node.op)](self._eval_expr(node.operand)) + if isinstance(node, ast.Call): + msg = ( + "Function calls like sqrt(), sin(), cos() etc. are not supported. " + "Only basic arithmetic operations (+, -, *, /, **) are allowed." + ) + raise TypeError(msg) + msg = f"Unsupported operation or expression type: {type(node).__name__}" + raise TypeError(msg) + + def _eval_expr_with_error(self, expression: str) -> list[Data]: + try: + return self._evaluate_expression(expression) + except Exception as e: + raise ToolException(str(e)) from e + + def _evaluate_expression(self, expression: str) -> list[Data]: + try: + # Parse the expression and evaluate it + tree = ast.parse(expression, mode="eval") + result = self._eval_expr(tree.body) + + # Format the result to a reasonable number of decimal places + formatted_result = f"{result:.6f}".rstrip("0").rstrip(".") + + self.status = formatted_result + return [Data(data={"result": formatted_result})] + + except (SyntaxError, TypeError, KeyError) as e: + error_message = f"Invalid expression: {e}" + self.status = error_message + return [Data(data={"error": error_message, "input": expression})] + except ZeroDivisionError: + error_message = "Error: Division by zero" + self.status = error_message + return [Data(data={"error": error_message, "input": expression})] + except Exception as e: # noqa: BLE001 + logger.opt(exception=True).debug("Error evaluating expression") + error_message = f"Error: {e}" + self.status = error_message + return [Data(data={"error": error_message, "input": expression})] diff --git a/src/backend/base/langflow/components/tools/duck_duck_go_search_run.py b/src/backend/base/langflow/components/tools/duck_duck_go_search_run.py new file mode 100644 index 000000000000..02aae55044f3 --- /dev/null +++ b/src/backend/base/langflow/components/tools/duck_duck_go_search_run.py @@ -0,0 +1,76 @@ +from typing import Any + +from langchain.tools import StructuredTool +from langchain_community.tools import DuckDuckGoSearchRun +from langchain_core.tools import ToolException +from pydantic import BaseModel, Field + +from langflow.base.langchain_utilities.model import LCToolComponent +from langflow.field_typing import Tool +from langflow.inputs import IntInput, MessageTextInput +from langflow.schema import Data + + +class DuckDuckGoSearchComponent(LCToolComponent): + display_name: str = "DuckDuckGo 
Search" + description: str = "Perform web searches using the DuckDuckGo search engine with result limiting" + name = "DuckDuckGoSearch" + documentation: str = "https://python.langchain.com/docs/integrations/tools/ddg" + icon: str = "DuckDuckGo" + inputs = [ + MessageTextInput( + name="input_value", + display_name="Search Query", + required=True, + ), + IntInput(name="max_results", display_name="Max Results", value=5, advanced=True), + IntInput(name="max_snippet_length", display_name="Max Snippet Length", value=100, advanced=True), + ] + + class DuckDuckGoSearchSchema(BaseModel): + query: str = Field(..., description="The search query") + max_results: int = Field(5, description="Maximum number of results to return") + max_snippet_length: int = Field(100, description="Maximum length of each result snippet") + + def _build_wrapper(self): + return DuckDuckGoSearchRun() + + def build_tool(self) -> Tool: + wrapper = self._build_wrapper() + + def search_func(query: str, max_results: int = 5, max_snippet_length: int = 100) -> list[dict[str, Any]]: + try: + full_results = wrapper.run(f"{query} (site:*)") + result_list = full_results.split("\n")[:max_results] + limited_results = [] + for result in result_list: + limited_result = { + "snippet": result[:max_snippet_length], + } + limited_results.append(limited_result) + except Exception as e: + msg = f"Error in DuckDuckGo Search: {e!s}" + raise ToolException(msg) from e + return limited_results + + tool = StructuredTool.from_function( + name="duckduckgo_search", + description="Search for recent results using DuckDuckGo with result limiting", + func=search_func, + args_schema=self.DuckDuckGoSearchSchema, + ) + self.status = "DuckDuckGo Search Tool created" + return tool + + def run_model(self) -> list[Data]: + tool = self.build_tool() + results = tool.run( + { + "query": self.input_value, + "max_results": self.max_results, + "max_snippet_length": self.max_snippet_length, + } + ) + data_list = [Data(data=result, text=result.get("snippet", "")) for result in results] + self.status = data_list # type: ignore[assignment] + return data_list diff --git a/src/backend/base/langflow/components/tools/glean_search_api.py b/src/backend/base/langflow/components/tools/glean_search_api.py new file mode 100644 index 000000000000..6c0317f608bb --- /dev/null +++ b/src/backend/base/langflow/components/tools/glean_search_api.py @@ -0,0 +1,158 @@ +import json +from typing import Any +from urllib.parse import urljoin + +import httpx +from langchain_core.tools import StructuredTool, ToolException +from pydantic import BaseModel +from pydantic.v1 import Field + +from langflow.base.langchain_utilities.model import LCToolComponent +from langflow.field_typing import Tool +from langflow.inputs import IntInput, MultilineInput, NestedDictInput, SecretStrInput, StrInput +from langflow.schema import Data + + +class GleanSearchAPISchema(BaseModel): + query: str = Field(..., description="The search query") + page_size: int = Field(10, description="Maximum number of results to return") + request_options: dict[str, Any] | None = Field(default_factory=dict, description="Request Options") + + +class GleanAPIWrapper(BaseModel): + """Wrapper around Glean API.""" + + glean_api_url: str + glean_access_token: str + act_as: str = "langflow-component@datastax.com" # TODO: Detect this + + def _prepare_request( + self, + query: str, + page_size: int = 10, + request_options: dict[str, Any] | None = None, + ) -> dict: + # Ensure there's a trailing slash + url = self.glean_api_url + if not 
url.endswith("/"): + url += "/" + + return { + "url": urljoin(url, "search"), + "headers": { + "Authorization": f"Bearer {self.glean_access_token}", + "X-Scio-ActAs": self.act_as, + }, + "payload": { + "query": query, + "pageSize": page_size, + "requestOptions": request_options, + }, + } + + def results(self, query: str, **kwargs: Any) -> list[dict[str, Any]]: + results = self._search_api_results(query, **kwargs) + + if len(results) == 0: + msg = "No good Glean Search Result was found" + raise AssertionError(msg) + + return results + + def run(self, query: str, **kwargs: Any) -> list[dict[str, Any]]: + try: + results = self.results(query, **kwargs) + + processed_results = [] + for result in results: + if "title" in result: + result["snippets"] = result.get("snippets", [{"snippet": {"text": result["title"]}}]) + if "text" not in result["snippets"][0]: + result["snippets"][0]["text"] = result["title"] + + processed_results.append(result) + except Exception as e: + error_message = f"Error in Glean Search API: {e!s}" + raise ToolException(error_message) from e + + return processed_results + + def _search_api_results(self, query: str, **kwargs: Any) -> list[dict[str, Any]]: + request_details = self._prepare_request(query, **kwargs) + + response = httpx.post( + request_details["url"], + json=request_details["payload"], + headers=request_details["headers"], + ) + + response.raise_for_status() + response_json = response.json() + + return response_json.get("results", []) + + @staticmethod + def _result_as_string(result: dict) -> str: + return json.dumps(result, indent=4) + + +class GleanSearchAPIComponent(LCToolComponent): + display_name = "Glean Search API" + description = "Call Glean Search API" + name = "GleanAPI" + + inputs = [ + StrInput( + name="glean_api_url", + display_name="Glean API URL", + required=True, + ), + SecretStrInput(name="glean_access_token", display_name="Glean Access Token", required=True), + MultilineInput(name="query", display_name="Query", required=True), + IntInput(name="page_size", display_name="Page Size", value=10), + NestedDictInput(name="request_options", display_name="Request Options", required=False), + ] + + def build_tool(self) -> Tool: + wrapper = self._build_wrapper( + glean_api_url=self.glean_api_url, + glean_access_token=self.glean_access_token, + ) + + tool = StructuredTool.from_function( + name="glean_search_api", + description="Search Glean for relevant results.", + func=wrapper.run, + args_schema=GleanSearchAPISchema, + ) + + self.status = "Glean Search API Tool for Langchain" + + return tool + + def run_model(self) -> list[Data]: + tool = self.build_tool() + + results = tool.run( + { + "query": self.query, + "page_size": self.page_size, + "request_options": self.request_options, + } + ) + + # Build the data + data = [Data(data=result, text=result["snippets"][0]["text"]) for result in results] + self.status = data # type: ignore[assignment] + + return data + + def _build_wrapper( + self, + glean_api_url: str, + glean_access_token: str, + ): + return GleanAPIWrapper( + glean_api_url=glean_api_url, + glean_access_token=glean_access_token, + ) diff --git a/src/backend/base/langflow/components/tools/google_search_api.py b/src/backend/base/langflow/components/tools/google_search_api.py new file mode 100644 index 000000000000..7df0fef5fe6d --- /dev/null +++ b/src/backend/base/langflow/components/tools/google_search_api.py @@ -0,0 +1,44 @@ +from langchain_core.tools import Tool + +from langflow.base.langchain_utilities.model import LCToolComponent +from 
langflow.inputs import IntInput, MultilineInput, SecretStrInput +from langflow.schema import Data + + +class GoogleSearchAPIComponent(LCToolComponent): + display_name = "Google Search API" + description = "Call Google Search API." + name = "GoogleSearchAPI" + + inputs = [ + SecretStrInput(name="google_api_key", display_name="Google API Key", required=True), + SecretStrInput(name="google_cse_id", display_name="Google CSE ID", required=True), + MultilineInput( + name="input_value", + display_name="Input", + ), + IntInput(name="k", display_name="Number of results", value=4, required=True), + ] + + def run_model(self) -> Data | list[Data]: + wrapper = self._build_wrapper() + results = wrapper.results(query=self.input_value, num_results=self.k) + data = [Data(data=result, text=result["snippet"]) for result in results] + self.status = data + return data + + def build_tool(self) -> Tool: + wrapper = self._build_wrapper() + return Tool( + name="google_search", + description="Search Google for recent results.", + func=wrapper.run, + ) + + def _build_wrapper(self): + try: + from langchain_google_community import GoogleSearchAPIWrapper + except ImportError as e: + msg = "Please install langchain-google-community to use GoogleSearchAPIWrapper." + raise ImportError(msg) from e + return GoogleSearchAPIWrapper(google_api_key=self.google_api_key, google_cse_id=self.google_cse_id, k=self.k) diff --git a/src/backend/base/langflow/components/tools/google_serper_api.py b/src/backend/base/langflow/components/tools/google_serper_api.py new file mode 100644 index 000000000000..a7f53ce07a2d --- /dev/null +++ b/src/backend/base/langflow/components/tools/google_serper_api.py @@ -0,0 +1,40 @@ +from langchain_community.utilities.google_serper import GoogleSerperAPIWrapper + +from langflow.base.langchain_utilities.model import LCToolComponent +from langflow.field_typing import Tool +from langflow.inputs import IntInput, MultilineInput, SecretStrInput +from langflow.schema import Data + + +class GoogleSerperAPIComponent(LCToolComponent): + display_name = "Google Serper API" + description = "Call the Serper.dev Google Search API." 
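These search components all follow the same LCToolComponent contract: `run_model` calls the API wrapper directly and returns `Data` objects, while `build_tool` wraps the same callable in a LangChain `Tool` that an agent can invoke. A minimal sketch of the second path, with a hypothetical stand-in `fake_search` in place of a real API wrapper:

from langchain_core.tools import Tool

def fake_search(query: str) -> str:
    # Stand-in for wrapper.run; a real component calls the Google/Serper API here.
    return f"top results for {query!r}"

search_tool = Tool(
    name="google_search",
    description="Search Google for recent results.",
    func=fake_search,
)

print(search_tool.run("langflow release notes"))  # executes fake_search via the tool interface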
+ name = "GoogleSerperAPI" + + inputs = [ + SecretStrInput(name="serper_api_key", display_name="Serper API Key", required=True), + MultilineInput( + name="input_value", + display_name="Input", + ), + IntInput(name="k", display_name="Number of results", value=4, required=True), + ] + + def run_model(self) -> Data | list[Data]: + wrapper = self._build_wrapper() + results = wrapper.results(query=self.input_value) + list_results = results.get("organic", []) + data = [Data(data=result, text=result["snippet"]) for result in list_results] + self.status = data + return data + + def build_tool(self) -> Tool: + wrapper = self._build_wrapper() + return Tool( + name="google_search", + description="Search Google for recent results.", + func=wrapper.run, + ) + + def _build_wrapper(self): + return GoogleSerperAPIWrapper(serper_api_key=self.serper_api_key, k=self.k) diff --git a/src/backend/base/langflow/components/tools/metaphor.py b/src/backend/base/langflow/components/tools/metaphor.py new file mode 100644 index 000000000000..ad4c97159429 --- /dev/null +++ b/src/backend/base/langflow/components/tools/metaphor.py @@ -0,0 +1,67 @@ +from langchain_core.tools import tool +from metaphor_python import Metaphor + +from langflow.custom import Component +from langflow.field_typing import Tool +from langflow.io import BoolInput, IntInput, Output, SecretStrInput + + +class MetaphorToolkit(Component): + display_name = "Exa Search" + description = "Exa Search toolkit for search and content retrieval" + documentation = "https://python.langchain.com/docs/integrations/tools/metaphor_search" + beta = True + name = "ExaSearch" + + inputs = [ + SecretStrInput( + name="metaphor_api_key", + display_name="Exa Search API Key", + password=True, + ), + BoolInput( + name="use_autoprompt", + display_name="Use Autoprompt", + value=True, + ), + IntInput( + name="search_num_results", + display_name="Search Number of Results", + value=5, + ), + IntInput( + name="similar_num_results", + display_name="Similar Number of Results", + value=5, + ), + ] + + outputs = [ + Output(name="tools", display_name="Tools", method="build_toolkit"), + ] + + def build_toolkit(self) -> Tool: + client = Metaphor(api_key=self.metaphor_api_key) + + @tool + def search(query: str): + """Call search engine with a query.""" + return client.search(query, use_autoprompt=self.use_autoprompt, num_results=self.search_num_results) + + @tool + def get_contents(ids: list[str]): + """Get contents of a webpage. + + The ids passed in should be a list of ids as fetched from `search`. + """ + return client.get_contents(ids) + + @tool + def find_similar(url: str): + """Get search results similar to a given URL. 
+ + The url passed in should be a URL returned from `search` + """ + return client.find_similar(url, num_results=self.similar_num_results) + + return [search, get_contents, find_similar] diff --git a/src/backend/base/langflow/components/tools/python_code_structured_tool.py b/src/backend/base/langflow/components/tools/python_code_structured_tool.py new file mode 100644 index 000000000000..2e7006113c96 --- /dev/null +++ b/src/backend/base/langflow/components/tools/python_code_structured_tool.py @@ -0,0 +1,332 @@ +import ast +import json +from typing import Any + +from langchain.agents import Tool +from langchain_core.tools import StructuredTool +from loguru import logger +from pydantic.v1 import Field, create_model +from pydantic.v1.fields import Undefined +from typing_extensions import override + +from langflow.base.langchain_utilities.model import LCToolComponent +from langflow.inputs.inputs import ( + BoolInput, + DropdownInput, + FieldTypes, + HandleInput, + MessageTextInput, + MultilineInput, +) +from langflow.io import Output +from langflow.schema import Data +from langflow.schema.dotdict import dotdict + + +class PythonCodeStructuredTool(LCToolComponent): + DEFAULT_KEYS = [ + "code", + "_type", + "text_key", + "tool_code", + "tool_name", + "tool_description", + "return_direct", + "tool_function", + "global_variables", + "_classes", + "_functions", + ] + display_name = "Python Code Structured" + description = "structuredtool dataclass code to tool" + documentation = "https://python.langchain.com/docs/modules/tools/custom_tools/#structuredtool-dataclass" + name = "PythonCodeStructuredTool" + icon = "🐍" + field_order = ["name", "description", "tool_code", "return_direct", "tool_function"] + legacy: bool = True + + inputs = [ + MultilineInput( + name="tool_code", + display_name="Tool Code", + info="Enter the dataclass code.", + placeholder="def my_function(args):\n pass", + required=True, + real_time_refresh=True, + refresh_button=True, + ), + MessageTextInput( + name="tool_name", + display_name="Tool Name", + info="Enter the name of the tool.", + required=True, + ), + MessageTextInput( + name="tool_description", + display_name="Description", + info="Enter the description of the tool.", + required=True, + ), + BoolInput( + name="return_direct", + display_name="Return Directly", + info="Should the tool return the function output directly?", + ), + DropdownInput( + name="tool_function", + display_name="Tool Function", + info="Select the function for additional expressions.", + options=[], + required=True, + real_time_refresh=True, + refresh_button=True, + ), + HandleInput( + name="global_variables", + display_name="Global Variables", + info="Enter the global variables or Create Data Component.", + input_types=["Data"], + field_type=FieldTypes.DICT, + is_list=True, + ), + MessageTextInput(name="_classes", display_name="Classes", advanced=True), + MessageTextInput(name="_functions", display_name="Functions", advanced=True), + ] + + outputs = [ + Output(display_name="Tool", name="result_tool", method="build_tool"), + ] + + @override + def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None) -> dotdict: + if field_name is None: + return build_config + + if field_name not in {"tool_code", "tool_function"}: + return build_config + + try: + named_functions = {} + [classes, functions] = self._parse_code(build_config["tool_code"]["value"]) + existing_fields = {} + if len(build_config) > len(self.DEFAULT_KEYS): + for key in build_config.copy(): + if key 
not in self.DEFAULT_KEYS: + existing_fields[key] = build_config.pop(key) + + names = [] + for func in functions: + named_functions[func["name"]] = func + names.append(func["name"]) + + for arg in func["args"]: + field_name = f"{func['name']}|{arg['name']}" + if field_name in existing_fields: + build_config[field_name] = existing_fields[field_name] + continue + + field = MessageTextInput( + display_name=f"{arg['name']}: Description", + name=field_name, + info=f"Enter the description for {arg['name']}", + required=True, + ) + build_config[field_name] = field.to_dict() + build_config["_functions"]["value"] = json.dumps(named_functions) + build_config["_classes"]["value"] = json.dumps(classes) + build_config["tool_function"]["options"] = names + except Exception as e: # noqa: BLE001 + self.status = f"Failed to extract names: {e}" + logger.opt(exception=True).debug(self.status) + build_config["tool_function"]["options"] = ["Failed to parse", str(e)] + return build_config + + async def build_tool(self) -> Tool: + _local_namespace = {} # type: ignore[var-annotated] + modules = self._find_imports(self.tool_code) + import_code = "" + for module in modules["imports"]: + import_code += f"global {module}\nimport {module}\n" + for from_module in modules["from_imports"]: + for alias in from_module.names: + import_code += f"global {alias.name}\n" + import_code += ( + f"from {from_module.module} import {', '.join([alias.name for alias in from_module.names])}\n" + ) + exec(import_code, globals()) + exec(self.tool_code, globals(), _local_namespace) + + class PythonCodeToolFunc: + params: dict = {} + + def run(**kwargs): + for key, arg in kwargs.items(): + if key not in PythonCodeToolFunc.params: + PythonCodeToolFunc.params[key] = arg + return _local_namespace[self.tool_function](**PythonCodeToolFunc.params) + + _globals = globals() + _local = {} + _local[self.tool_function] = PythonCodeToolFunc + _globals.update(_local) + + if isinstance(self.global_variables, list): + for data in self.global_variables: + if isinstance(data, Data): + _globals.update(data.data) + elif isinstance(self.global_variables, dict): + _globals.update(self.global_variables) + + classes = json.loads(self._attributes["_classes"]) + for class_dict in classes: + exec("\n".join(class_dict["code"]), _globals) + + named_functions = json.loads(self._attributes["_functions"]) + schema_fields = {} + + for attr in self._attributes: + if attr in self.DEFAULT_KEYS: + continue + + func_name = attr.split("|")[0] + field_name = attr.split("|")[1] + func_arg = self._find_arg(named_functions, func_name, field_name) + if func_arg is None: + msg = f"Failed to find arg: {field_name}" + raise ValueError(msg) + + field_annotation = func_arg["annotation"] + field_description = self._get_value(self._attributes[attr], str) + + if field_annotation: + exec(f"temp_annotation_type = {field_annotation}", _globals) + schema_annotation = _globals["temp_annotation_type"] + else: + schema_annotation = Any + schema_fields[field_name] = ( + schema_annotation, + Field( + default=func_arg.get("default", Undefined), + description=field_description, + ), + ) + + if "temp_annotation_type" in _globals: + _globals.pop("temp_annotation_type") + + python_code_tool_schema = None + if schema_fields: + python_code_tool_schema = create_model("PythonCodeToolSchema", **schema_fields) + + return StructuredTool.from_function( + func=_local[self.tool_function].run, + args_schema=python_code_tool_schema, + name=self.tool_name, + description=self.tool_description, + 
return_direct=self.return_direct, + ) + + def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict): + """This function is called after the code validation is done.""" + frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node) + frontend_node["template"] = self.update_build_config( + frontend_node["template"], + frontend_node["template"]["tool_code"]["value"], + "tool_code", + ) + frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node) + for key in frontend_node["template"]: + if key in self.DEFAULT_KEYS: + continue + frontend_node["template"] = self.update_build_config( + frontend_node["template"], frontend_node["template"][key]["value"], key + ) + frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node) + return frontend_node + + def _parse_code(self, code: str) -> tuple[list[dict], list[dict]]: + parsed_code = ast.parse(code) + lines = code.split("\n") + classes = [] + functions = [] + for node in parsed_code.body: + if isinstance(node, ast.ClassDef): + class_lines = lines[node.lineno - 1 : node.end_lineno] + class_lines[-1] = class_lines[-1][: node.end_col_offset] + class_lines[0] = class_lines[0][node.col_offset :] + classes.append( + { + "name": node.name, + "code": class_lines, + } + ) + continue + + if not isinstance(node, ast.FunctionDef): + continue + + func = {"name": node.name, "args": []} + for arg in node.args.args: + if arg.lineno != arg.end_lineno: + msg = "Multiline arguments are not supported" + raise ValueError(msg) + + func_arg = { + "name": arg.arg, + "annotation": None, + } + + for default in node.args.defaults: + if ( + arg.lineno > default.lineno + or arg.col_offset > default.col_offset + or ( + arg.end_lineno is not None + and default.end_lineno is not None + and arg.end_lineno < default.end_lineno + ) + or ( + arg.end_col_offset is not None + and default.end_col_offset is not None + and arg.end_col_offset < default.end_col_offset + ) + ): + continue + + if isinstance(default, ast.Name): + func_arg["default"] = default.id + elif isinstance(default, ast.Constant): + func_arg["default"] = default.value + + if arg.annotation: + annotation_line = lines[arg.annotation.lineno - 1] + annotation_line = annotation_line[: arg.annotation.end_col_offset] + annotation_line = annotation_line[arg.annotation.col_offset :] + func_arg["annotation"] = annotation_line + if isinstance(func_arg["annotation"], str) and func_arg["annotation"].count("=") > 0: + func_arg["annotation"] = "=".join(func_arg["annotation"].split("=")[:-1]).strip() + if isinstance(func["args"], list): + func["args"].append(func_arg) + functions.append(func) + + return classes, functions + + def _find_imports(self, code: str) -> dotdict: + imports: list[str] = [] + from_imports = [] + parsed_code = ast.parse(code) + for node in parsed_code.body: + if isinstance(node, ast.Import): + imports.extend(alias.name for alias in node.names) + elif isinstance(node, ast.ImportFrom): + from_imports.append(node) + return dotdict({"imports": imports, "from_imports": from_imports}) + + def _get_value(self, value: Any, annotation: Any) -> Any: + return value if isinstance(value, annotation) else value["value"] + + def _find_arg(self, named_functions: dict, func_name: str, arg_name: str) -> dict | None: + for arg in named_functions[func_name]["args"]: + if arg["name"] == arg_name: + return arg + return None diff --git a/src/backend/base/langflow/components/tools/python_repl.py 
b/src/backend/base/langflow/components/tools/python_repl.py new file mode 100644 index 000000000000..25f64895f063 --- /dev/null +++ b/src/backend/base/langflow/components/tools/python_repl.py @@ -0,0 +1,95 @@ +import importlib + +from langchain.tools import StructuredTool +from langchain_core.tools import ToolException +from langchain_experimental.utilities import PythonREPL +from loguru import logger +from pydantic import BaseModel, Field + +from langflow.base.langchain_utilities.model import LCToolComponent +from langflow.field_typing import Tool +from langflow.inputs import StrInput +from langflow.schema import Data + + +class PythonREPLToolComponent(LCToolComponent): + display_name = "Python REPL" + description = "A tool for running Python code in a REPL environment." + name = "PythonREPLTool" + + inputs = [ + StrInput( + name="name", + display_name="Tool Name", + info="The name of the tool.", + value="python_repl", + ), + StrInput( + name="description", + display_name="Tool Description", + info="A description of the tool.", + value="A Python shell. Use this to execute python commands. " + "Input should be a valid python command. " + "If you want to see the output of a value, you should print it out with `print(...)`.", + ), + StrInput( + name="global_imports", + display_name="Global Imports", + info="A comma-separated list of modules to import globally, e.g. 'math,numpy'.", + value="math", + ), + StrInput( + name="code", + display_name="Python Code", + info="The Python code to execute.", + value="print('Hello, World!')", + ), + ] + + class PythonREPLSchema(BaseModel): + code: str = Field(..., description="The Python code to execute.") + + def get_globals(self, global_imports: str | list[str]) -> dict: + global_dict = {} + if isinstance(global_imports, str): + modules = [module.strip() for module in global_imports.split(",")] + elif isinstance(global_imports, list): + modules = global_imports + else: + msg = "global_imports must be either a string or a list" + raise TypeError(msg) + + for module in modules: + try: + imported_module = importlib.import_module(module) + global_dict[imported_module.__name__] = imported_module + except ImportError as e: + msg = f"Could not import module {module}" + raise ImportError(msg) from e + return global_dict + + def build_tool(self) -> Tool: + _globals = self.get_globals(self.global_imports) + python_repl = PythonREPL(_globals=_globals) + + def run_python_code(code: str) -> str: + try: + return python_repl.run(code) + except Exception as e: + logger.opt(exception=True).debug("Error running Python code") + raise ToolException(str(e)) from e + + tool = StructuredTool.from_function( + name=self.name, + description=self.description, + func=run_python_code, + args_schema=self.PythonREPLSchema, + ) + + self.status = f"Python REPL Tool created with global imports: {self.global_imports}" + return tool + + def run_model(self) -> list[Data]: + tool = self.build_tool() + result = tool.run(self.code) + return [Data(data={"result": result})] diff --git a/src/backend/base/langflow/components/tools/retriever.py b/src/backend/base/langflow/components/tools/retriever.py new file mode 100644 index 000000000000..051439b700fc --- /dev/null +++ b/src/backend/base/langflow/components/tools/retriever.py @@ -0,0 +1,30 @@ +from langchain_core.tools import create_retriever_tool + +from langflow.custom import CustomComponent +from langflow.field_typing import BaseRetriever, Tool + + +class RetrieverToolComponent(CustomComponent): + display_name = "RetrieverTool" + description 
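`PythonREPLToolComponent` above delegates execution to LangChain's experimental `PythonREPL`, preloading the modules named in `global_imports` into the REPL's globals. A minimal sketch of that wrapper on its own, assuming `langchain-experimental` is installed:

```python
# Direct use of the REPL the component wraps, with one preloaded module,
# mirroring what get_globals("math") produces.
import math

from langchain_experimental.utilities import PythonREPL

repl = PythonREPL(_globals={"math": math})
print(repl.run("print(math.sqrt(2))"))  # 1.4142135623730951
```

Note that `run()` returns whatever the code printed to stdout, which is why the tool description instructs the model to `print(...)` any value it wants to see.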
= "Tool for interacting with retriever" + name = "RetrieverTool" + + def build_config(self): + return { + "retriever": { + "display_name": "Retriever", + "info": "Retriever to interact with", + "type": BaseRetriever, + "input_types": ["Retriever"], + }, + "name": {"display_name": "Name", "info": "Name of the tool"}, + "description": {"display_name": "Description", "info": "Description of the tool"}, + } + + def build(self, retriever: BaseRetriever, name: str, description: str, **kwargs) -> Tool: + _ = kwargs + return create_retriever_tool( + retriever=retriever, + name=name, + description=description, + ) diff --git a/src/backend/base/langflow/components/tools/search_api.py b/src/backend/base/langflow/components/tools/search_api.py new file mode 100644 index 000000000000..f6c24936ea5a --- /dev/null +++ b/src/backend/base/langflow/components/tools/search_api.py @@ -0,0 +1,85 @@ +from typing import Any + +from langchain.tools import StructuredTool +from langchain_community.utilities.searchapi import SearchApiAPIWrapper +from pydantic import BaseModel, Field + +from langflow.base.langchain_utilities.model import LCToolComponent +from langflow.field_typing import Tool +from langflow.inputs import DictInput, IntInput, MessageTextInput, MultilineInput, SecretStrInput +from langflow.schema import Data + + +class SearchAPIComponent(LCToolComponent): + display_name: str = "Search API" + description: str = "Call the searchapi.io API with result limiting" + name = "SearchAPI" + documentation: str = "https://www.searchapi.io/docs/google" + + inputs = [ + MessageTextInput(name="engine", display_name="Engine", value="google"), + SecretStrInput(name="api_key", display_name="SearchAPI API Key", required=True), + MultilineInput( + name="input_value", + display_name="Input", + ), + DictInput(name="search_params", display_name="Search parameters", advanced=True, is_list=True), + IntInput(name="max_results", display_name="Max Results", value=5, advanced=True), + IntInput(name="max_snippet_length", display_name="Max Snippet Length", value=100, advanced=True), + ] + + class SearchAPISchema(BaseModel): + query: str = Field(..., description="The search query") + params: dict[str, Any] = Field(default_factory=dict, description="Additional search parameters") + max_results: int = Field(5, description="Maximum number of results to return") + max_snippet_length: int = Field(100, description="Maximum length of each result snippet") + + def _build_wrapper(self): + return SearchApiAPIWrapper(engine=self.engine, searchapi_api_key=self.api_key) + + def build_tool(self) -> Tool: + wrapper = self._build_wrapper() + + def search_func( + query: str, params: dict[str, Any] | None = None, max_results: int = 5, max_snippet_length: int = 100 + ) -> list[dict[str, Any]]: + params = params or {} + full_results = wrapper.results(query=query, **params) + organic_results = full_results.get("organic_results", [])[:max_results] + + limited_results = [] + for result in organic_results: + limited_result = { + "title": result.get("title", "")[:max_snippet_length], + "link": result.get("link", ""), + "snippet": result.get("snippet", "")[:max_snippet_length], + } + limited_results.append(limited_result) + + return limited_results + + tool = StructuredTool.from_function( + name="search_api", + description="Search for recent results using searchapi.io with result limiting", + func=search_func, + args_schema=self.SearchAPISchema, + ) + + self.status = f"Search API Tool created with engine: {self.engine}" + return tool + + def run_model(self) -> 
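The `RetrieverTool` component above is a thin wrapper over `create_retriever_tool`. A self-contained sketch with a toy retriever (the `TinyRetriever`/`docs_search` names are illustrative, not part of this PR):

```python
# Toy retriever returning a canned document, just to exercise the
# create_retriever_tool wrapper used by the component above.
from langchain_core.documents import Document
from langchain_core.retrievers import BaseRetriever
from langchain_core.tools import create_retriever_tool


class TinyRetriever(BaseRetriever):
    """Returns one stub document for any query."""

    def _get_relevant_documents(self, query: str, *, run_manager=None) -> list[Document]:
        return [Document(page_content=f"stub result for: {query}")]


tool = create_retriever_tool(
    retriever=TinyRetriever(),
    name="docs_search",
    description="Searches the project documentation.",
)
print(tool.invoke({"query": "hello"}))  # stub result for: hello
```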
list[Data]: + tool = self.build_tool() + results = tool.run( + { + "query": self.input_value, + "params": self.search_params or {}, + "max_results": self.max_results, + "max_snippet_length": self.max_snippet_length, + } + ) + + data_list = [Data(data=result, text=result.get("snippet", "")) for result in results] + + self.status = data_list + return data_list diff --git a/src/backend/base/langflow/components/tools/searxng.py b/src/backend/base/langflow/components/tools/searxng.py new file mode 100644 index 000000000000..392d1b342408 --- /dev/null +++ b/src/backend/base/langflow/components/tools/searxng.py @@ -0,0 +1,145 @@ +import json +from collections.abc import Sequence +from typing import Any + +import requests +from langchain.agents import Tool +from langchain_core.tools import StructuredTool +from loguru import logger +from pydantic.v1 import Field, create_model + +from langflow.base.langchain_utilities.model import LCToolComponent +from langflow.inputs import DropdownInput, IntInput, MessageTextInput, MultiselectInput +from langflow.io import Output +from langflow.schema.dotdict import dotdict + + +class SearXNGToolComponent(LCToolComponent): + search_headers: dict = {} + display_name = "SearXNG Search" + description = "A component that searches for tools using SearXNG." + name = "SearXNGTool" + legacy: bool = True + + inputs = [ + MessageTextInput( + name="url", + display_name="URL", + value="http://localhost", + required=True, + refresh_button=True, + ), + IntInput( + name="max_results", + display_name="Max Results", + value=10, + required=True, + ), + MultiselectInput( + name="categories", + display_name="Categories", + options=[], + value=[], + ), + DropdownInput( + name="language", + display_name="Language", + options=[], + ), + ] + + outputs = [ + Output(display_name="Tool", name="result_tool", method="build_tool"), + ] + + def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None) -> dotdict: + if field_name is None: + return build_config + + if field_name != "url": + return build_config + + try: + url = f"{field_value}/config" + + response = requests.get(url=url, headers=self.search_headers.copy(), timeout=10) + data = None + if response.headers.get("Content-Encoding") == "zstd": + data = json.loads(response.content) + else: + data = response.json() + build_config["categories"]["options"] = data["categories"].copy() + for selected_category in build_config["categories"]["value"]: + if selected_category not in build_config["categories"]["options"]: + build_config["categories"]["value"].remove(selected_category) + languages = list(data["locales"]) + build_config["language"]["options"] = languages.copy() + except Exception as e: # noqa: BLE001 + self.status = f"Failed to extract names: {e}" + logger.opt(exception=True).debug(self.status) + build_config["categories"]["options"] = ["Failed to parse", str(e)] + return build_config + + def build_tool(self) -> Tool: + class SearxSearch: + _url: str = "" + _categories: list[str] = [] + _language: str = "" + _headers: dict = {} + _max_results: int = 10 + + @staticmethod + def search(query: str, categories: Sequence[str] = ()) -> list: + if not SearxSearch._categories and not categories: + msg = "No categories provided." 
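`SearxSearch.search()` (continued just below) issues a plain GET against the instance root with `format=json`. The equivalent standalone request, assuming a SearXNG instance with the JSON output format enabled in its settings (URL and port are illustrative):

```python
# The same request SearxSearch.search() sends, issued standalone.
import requests

response = requests.get(
    "http://localhost:8080/",  # hypothetical instance URL
    params={"q": "langflow", "categories": "general", "language": "en", "format": "json"},
    timeout=10,
)
for result in response.json()["results"][:3]:
    print(result.get("title"), "->", result.get("url"))
```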
+ raise ValueError(msg) + all_categories = SearxSearch._categories + list(set(categories) - set(SearxSearch._categories)) + try: + url = f"{SearxSearch._url}/" + headers = SearxSearch._headers.copy() + response = requests.get( + url=url, + headers=headers, + params={ + "q": query, + "categories": ",".join(all_categories), + "language": SearxSearch._language, + "format": "json", + }, + timeout=10, + ).json() + + num_results = min(SearxSearch._max_results, len(response["results"])) + return [response["results"][i] for i in range(num_results)] + except Exception as e: # noqa: BLE001 + logger.opt(exception=True).debug("Error running SearXNG Search") + return [f"Failed to search: {e}"] + + SearxSearch._url = self.url + SearxSearch._categories = self.categories.copy() + SearxSearch._language = self.language + SearxSearch._headers = self.search_headers.copy() + SearxSearch._max_results = self.max_results + + _globals = globals() + _local = {} + _local["SearxSearch"] = SearxSearch + _globals.update(_local) + + schema_fields = { + "query": (str, Field(..., description="The query to search for.")), + "categories": ( + list[str], + Field(default=[], description="The categories to search in."), + ), + } + + searx_search_schema = create_model("SearxSearchSchema", **schema_fields) + + return StructuredTool.from_function( + func=_local["SearxSearch"].search, + args_schema=searx_search_schema, + name="searxng_search_tool", + description="A tool that searches for tools using SearXNG.\nThe available categories are: " + + ", ".join(self.categories), + ) diff --git a/src/backend/base/langflow/components/tools/serp_api.py b/src/backend/base/langflow/components/tools/serp_api.py new file mode 100644 index 000000000000..38ce52055272 --- /dev/null +++ b/src/backend/base/langflow/components/tools/serp_api.py @@ -0,0 +1,117 @@ +from typing import Any + +from langchain.tools import StructuredTool +from langchain_community.utilities.serpapi import SerpAPIWrapper +from langchain_core.tools import ToolException +from loguru import logger +from pydantic import BaseModel, Field + +from langflow.base.langchain_utilities.model import LCToolComponent +from langflow.field_typing import Tool +from langflow.inputs import DictInput, IntInput, MultilineInput, SecretStrInput +from langflow.schema import Data + + +class SerpAPISchema(BaseModel): + """Schema for SerpAPI search parameters.""" + + query: str = Field(..., description="The search query") + params: dict[str, Any] | None = Field( + default={ + "engine": "google", + "google_domain": "google.com", + "gl": "us", + "hl": "en", + }, + description="Additional search parameters", + ) + max_results: int = Field(5, description="Maximum number of results to return") + max_snippet_length: int = Field(100, description="Maximum length of each result snippet") + + +class SerpAPIComponent(LCToolComponent): + display_name = "Serp Search API" + description = "Call Serp Search API with result limiting" + name = "SerpAPI" + + inputs = [ + SecretStrInput(name="serpapi_api_key", display_name="SerpAPI API Key", required=True), + MultilineInput( + name="input_value", + display_name="Input", + ), + DictInput(name="search_params", display_name="Parameters", advanced=True, is_list=True), + IntInput(name="max_results", display_name="Max Results", value=5, advanced=True), + IntInput(name="max_snippet_length", display_name="Max Snippet Length", value=100, advanced=True), + ] + + def _build_wrapper(self, params: dict[str, Any] | None = None) -> SerpAPIWrapper: + """Build a SerpAPIWrapper with the 
provided parameters.""" + params = params or {} + if params: + return SerpAPIWrapper( + serpapi_api_key=self.serpapi_api_key, + params=params, + ) + return SerpAPIWrapper(serpapi_api_key=self.serpapi_api_key) + + def build_tool(self) -> Tool: + wrapper = self._build_wrapper(self.search_params) + + def search_func( + query: str, params: dict[str, Any] | None = None, max_results: int = 5, max_snippet_length: int = 100 + ) -> list[dict[str, Any]]: + try: + # rebuild the wrapper when call-time params are provided, binding it to a new name: + # assigning to `wrapper` here would make it local to search_func and raise + # UnboundLocalError whenever params is empty + active_wrapper = self._build_wrapper(params) if params else wrapper + + full_results = active_wrapper.results(query) + organic_results = full_results.get("organic_results", [])[:max_results] + + limited_results = [] + for result in organic_results: + limited_result = { + "title": result.get("title", "")[:max_snippet_length], + "link": result.get("link", ""), + "snippet": result.get("snippet", "")[:max_snippet_length], + } + limited_results.append(limited_result) + + except Exception as e: + error_message = f"Error in SerpAPI search: {e!s}" + logger.debug(error_message) + raise ToolException(error_message) from e + return limited_results + + tool = StructuredTool.from_function( + name="serp_search_api", + description="Search for recent results using SerpAPI with result limiting", + func=search_func, + args_schema=SerpAPISchema, + ) + + self.status = "SerpAPI Tool created" + return tool + + def run_model(self) -> list[Data]: + tool = self.build_tool() + try: + results = tool.run( + { + "query": self.input_value, + "params": self.search_params or {}, + "max_results": self.max_results, + "max_snippet_length": self.max_snippet_length, + } + ) + + data_list = [Data(data=result, text=result.get("snippet", "")) for result in results] + + except Exception as e: # noqa: BLE001 + logger.opt(exception=True).debug("Error running SerpAPI") + self.status = f"Error: {e}" + return [Data(data={"error": str(e)}, text=str(e))] + + self.status = data_list # type: ignore[assignment] + return data_list diff --git a/src/backend/base/langflow/components/tools/tavily_search.py b/src/backend/base/langflow/components/tools/tavily_search.py new file mode 100644 index 000000000000..21496dd30c7c --- /dev/null +++ b/src/backend/base/langflow/components/tools/tavily_search.py @@ -0,0 +1,205 @@ +from enum import Enum + +import httpx +from langchain.tools import StructuredTool +from langchain_core.tools import ToolException +from loguru import logger +from pydantic import BaseModel, Field + +from langflow.base.langchain_utilities.model import LCToolComponent +from langflow.field_typing import Tool +from langflow.inputs import BoolInput, DropdownInput, IntInput, MessageTextInput, SecretStrInput +from langflow.schema import Data + + +class TavilySearchDepth(Enum): + BASIC = "basic" + ADVANCED = "advanced" + + +class TavilySearchTopic(Enum): + GENERAL = "general" + NEWS = "news" + + +class TavilySearchSchema(BaseModel): + query: str = Field(..., description="The search query you want to execute with Tavily.") + search_depth: TavilySearchDepth = Field(TavilySearchDepth.BASIC, description="The depth of the search.") + topic: TavilySearchTopic = Field(TavilySearchTopic.GENERAL, description="The category of the search.") + max_results: int = Field(5, description="The maximum number of search results to return.") + include_images: bool = Field(default=False, description="Include a list of query-related images in the response.") + include_answer: bool = Field(default=False, description="Include a short answer to original query.") + + +class
TavilySearchToolComponent(LCToolComponent): + display_name = "Tavily AI Search" + description = """**Tavily AI** is a search engine optimized for LLMs and RAG, \ + aimed at efficient, quick, and persistent search results. It can be used independently or as an agent tool. + +Note: Check 'Advanced' for all options. +""" + icon = "TavilyIcon" + name = "TavilyAISearch" + documentation = "https://docs.tavily.com/" + + inputs = [ + SecretStrInput( + name="api_key", + display_name="Tavily API Key", + required=True, + info="Your Tavily API Key.", + ), + MessageTextInput( + name="query", + display_name="Search Query", + info="The search query you want to execute with Tavily.", + ), + DropdownInput( + name="search_depth", + display_name="Search Depth", + info="The depth of the search.", + options=list(TavilySearchDepth), + value=TavilySearchDepth.ADVANCED, + advanced=True, + ), + DropdownInput( + name="topic", + display_name="Search Topic", + info="The category of the search.", + options=list(TavilySearchTopic), + value=TavilySearchTopic.GENERAL, + advanced=True, + ), + IntInput( + name="max_results", + display_name="Max Results", + info="The maximum number of search results to return.", + value=5, + advanced=True, + ), + BoolInput( + name="include_images", + display_name="Include Images", + info="Include a list of query-related images in the response.", + value=True, + advanced=True, + ), + BoolInput( + name="include_answer", + display_name="Include Answer", + info="Include a short answer to original query.", + value=True, + advanced=True, + ), + ] + + def run_model(self) -> list[Data]: + # Convert string values to enum instances with validation + try: + search_depth_enum = ( + self.search_depth + if isinstance(self.search_depth, TavilySearchDepth) + else TavilySearchDepth(str(self.search_depth).lower()) + ) + except ValueError as e: + error_message = f"Invalid search depth value: {e!s}" + self.status = error_message + return [Data(data={"error": error_message})] + + try: + topic_enum = ( + self.topic if isinstance(self.topic, TavilySearchTopic) else TavilySearchTopic(str(self.topic).lower()) + ) + except ValueError as e: + error_message = f"Invalid topic value: {e!s}" + self.status = error_message + return [Data(data={"error": error_message})] + + return self._tavily_search( + self.query, + search_depth=search_depth_enum, + topic=topic_enum, + max_results=self.max_results, + include_images=self.include_images, + include_answer=self.include_answer, + ) + + def build_tool(self) -> Tool: + return StructuredTool.from_function( + name="tavily_search", + description="Perform a web search using the Tavily API.", + func=self._tavily_search, + args_schema=TavilySearchSchema, + ) + + def _tavily_search( + self, + query: str, + *, + search_depth: TavilySearchDepth = TavilySearchDepth.BASIC, + topic: TavilySearchTopic = TavilySearchTopic.GENERAL, + max_results: int = 5, + include_images: bool = False, + include_answer: bool = False, + ) -> list[Data]: + # Validate enum values + if not isinstance(search_depth, TavilySearchDepth): + msg = f"Invalid search_depth value: {search_depth}" + raise TypeError(msg) + if not isinstance(topic, TavilySearchTopic): + msg = f"Invalid topic value: {topic}" + raise TypeError(msg) + + try: + url = "https://api.tavily.com/search" + headers = { + "content-type": "application/json", + "accept": "application/json", + } + payload = { + "api_key": self.api_key, + "query": query, + "search_depth": search_depth.value, + "topic": topic.value, + "max_results": max_results, + 
"include_images": include_images, + "include_answer": include_answer, + } + + with httpx.Client() as client: + response = client.post(url, json=payload, headers=headers) + + response.raise_for_status() + search_results = response.json() + + data_results = [ + Data( + data={ + "title": result.get("title"), + "url": result.get("url"), + "content": result.get("content"), + "score": result.get("score"), + } + ) + for result in search_results.get("results", []) + ] + + if include_answer and search_results.get("answer"): + data_results.insert(0, Data(data={"answer": search_results["answer"]})) + + if include_images and search_results.get("images"): + data_results.append(Data(data={"images": search_results["images"]})) + + self.status = data_results # type: ignore[assignment] + + except httpx.HTTPStatusError as e: + error_message = f"HTTP error: {e.response.status_code} - {e.response.text}" + logger.debug(error_message) + self.status = error_message + raise ToolException(error_message) from e + except Exception as e: + error_message = f"Unexpected error: {e}" + logger.opt(exception=True).debug("Error running Tavily Search") + self.status = error_message + raise ToolException(error_message) from e + return data_results diff --git a/src/backend/base/langflow/components/tools/wikidata_api.py b/src/backend/base/langflow/components/tools/wikidata_api.py new file mode 100644 index 000000000000..394bcdba44c5 --- /dev/null +++ b/src/backend/base/langflow/components/tools/wikidata_api.py @@ -0,0 +1,113 @@ +from typing import Any + +import httpx +from langchain_core.tools import StructuredTool, ToolException +from pydantic import BaseModel, Field + +from langflow.base.langchain_utilities.model import LCToolComponent +from langflow.field_typing import Tool +from langflow.inputs import MultilineInput +from langflow.schema import Data + + +class WikidataSearchSchema(BaseModel): + query: str = Field(..., description="The search query for Wikidata") + + +class WikidataAPIWrapper(BaseModel): + """Wrapper around Wikidata API.""" + + wikidata_api_url: str = "https://www.wikidata.org/w/api.php" + + def results(self, query: str) -> list[dict[str, Any]]: + # Define request parameters for Wikidata API + params = { + "action": "wbsearchentities", + "format": "json", + "search": query, + "language": "en", + } + + # Send request to Wikidata API + response = httpx.get(self.wikidata_api_url, params=params) + response.raise_for_status() + response_json = response.json() + + # Extract and return search results + return response_json.get("search", []) + + def run(self, query: str) -> list[dict[str, Any]]: + try: + results = self.results(query) + if not results: + msg = "No search results found for the given query." + + raise ToolException(msg) + + # Process and structure the results + return [ + { + "label": result.get("label", ""), + "description": result.get("description", ""), + "concepturi": result.get("concepturi", ""), + "id": result.get("id", ""), + } + for result in results + ] + + except Exception as e: + error_message = f"Error in Wikidata Search API: {e!s}" + + raise ToolException(error_message) from e + + +class WikidataAPIComponent(LCToolComponent): + display_name = "Wikidata API" + description = "Performs a search using the Wikidata API." 
+ name = "WikidataAPI" + + inputs = [ + MultilineInput( + name="query", + display_name="Query", + info="The text query for similarity search on Wikidata.", + required=True, + ), + ] + + def build_tool(self) -> Tool: + wrapper = WikidataAPIWrapper() + + # Define the tool using StructuredTool and wrapper's run method + tool = StructuredTool.from_function( + name="wikidata_search_api", + description="Perform similarity search on Wikidata API", + func=wrapper.run, + args_schema=WikidataSearchSchema, + ) + + self.status = "Wikidata Search API Tool for Langchain" + + return tool + + def run_model(self) -> list[Data]: + tool = self.build_tool() + + results = tool.run({"query": self.query}) + + # Transform the API response into Data objects + data = [ + Data( + text=result["label"], + metadata={ + "id": result["id"], + "concepturi": result["concepturi"], + "description": result["description"], + }, + ) + for result in results + ] + + self.status = data # type: ignore[assignment] + + return data diff --git a/src/backend/base/langflow/components/tools/wikipedia_api.py b/src/backend/base/langflow/components/tools/wikipedia_api.py new file mode 100644 index 000000000000..51aafbec323c --- /dev/null +++ b/src/backend/base/langflow/components/tools/wikipedia_api.py @@ -0,0 +1,47 @@ +from typing import cast + +from langchain_community.tools import WikipediaQueryRun +from langchain_community.utilities.wikipedia import WikipediaAPIWrapper + +from langflow.base.langchain_utilities.model import LCToolComponent +from langflow.field_typing import Tool +from langflow.inputs import BoolInput, IntInput, MessageTextInput, MultilineInput +from langflow.schema import Data + + +class WikipediaAPIComponent(LCToolComponent): + display_name = "Wikipedia API" + description = "Call Wikipedia API." 
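`WikidataAPIWrapper.results()` above is a thin wrapper over MediaWiki's `wbsearchentities` action. The same request, issued directly (the query text is illustrative):

```python
# The same wbsearchentities request WikidataAPIWrapper.results() sends.
import httpx

response = httpx.get(
    "https://www.wikidata.org/w/api.php",
    params={
        "action": "wbsearchentities",
        "format": "json",
        "search": "Ada Lovelace",  # illustrative query
        "language": "en",
    },
)
response.raise_for_status()
for hit in response.json().get("search", [])[:3]:
    print(hit["id"], hit.get("label"), "-", hit.get("description"))
```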
+ name = "WikipediaAPI" + + inputs = [ + MultilineInput( + name="input_value", + display_name="Input", + ), + MessageTextInput(name="lang", display_name="Language", value="en"), + IntInput(name="k", display_name="Number of results", value=4, required=True), + BoolInput(name="load_all_available_meta", display_name="Load all available meta", value=False, advanced=True), + IntInput( + name="doc_content_chars_max", display_name="Document content characters max", value=4000, advanced=True + ), + ] + + def run_model(self) -> list[Data]: + wrapper = self._build_wrapper() + docs = wrapper.load(self.input_value) + data = [Data.from_document(doc) for doc in docs] + self.status = data + return data + + def build_tool(self) -> Tool: + wrapper = self._build_wrapper() + return cast(Tool, WikipediaQueryRun(api_wrapper=wrapper)) + + def _build_wrapper(self) -> WikipediaAPIWrapper: + return WikipediaAPIWrapper( + top_k_results=self.k, + lang=self.lang, + load_all_available_meta=self.load_all_available_meta, + doc_content_chars_max=self.doc_content_chars_max, + ) diff --git a/src/backend/base/langflow/components/tools/wolfram_alpha_api.py b/src/backend/base/langflow/components/tools/wolfram_alpha_api.py new file mode 100644 index 000000000000..27c398ec6066 --- /dev/null +++ b/src/backend/base/langflow/components/tools/wolfram_alpha_api.py @@ -0,0 +1,34 @@ +from langchain_community.utilities.wolfram_alpha import WolframAlphaAPIWrapper + +from langflow.base.langchain_utilities.model import LCToolComponent +from langflow.field_typing import Tool +from langflow.inputs import MultilineInput, SecretStrInput +from langflow.schema import Data + + +class WolframAlphaAPIComponent(LCToolComponent): + display_name = "WolframAlpha API" + description = """Enables queries to Wolfram Alpha for computational data, facts, and calculations across various \ +topics, delivering structured responses.""" + name = "WolframAlphaAPI" + + inputs = [ + MultilineInput( + name="input_value", display_name="Input Query", info="Example query: 'What is the population of France?'" + ), + SecretStrInput(name="app_id", display_name="App ID", required=True), + ] + + def run_model(self) -> list[Data]: + wrapper = self._build_wrapper() + result_str = wrapper.run(self.input_value) + data = [Data(text=result_str)] + self.status = data + return data + + def build_tool(self) -> Tool: + wrapper = self._build_wrapper() + return Tool(name="wolfram_alpha_api", description="Answers mathematical questions.", func=wrapper.run) + + def _build_wrapper(self) -> WolframAlphaAPIWrapper: + return WolframAlphaAPIWrapper(wolfram_alpha_appid=self.app_id) diff --git a/src/backend/base/langflow/components/tools/yahoo_finance.py b/src/backend/base/langflow/components/tools/yahoo_finance.py new file mode 100644 index 000000000000..110c890d5e1f --- /dev/null +++ b/src/backend/base/langflow/components/tools/yahoo_finance.py @@ -0,0 +1,123 @@ +import ast +import pprint +from enum import Enum + +import yfinance as yf +from langchain.tools import StructuredTool +from langchain_core.tools import ToolException +from loguru import logger +from pydantic import BaseModel, Field + +from langflow.base.langchain_utilities.model import LCToolComponent +from langflow.field_typing import Tool +from langflow.inputs import DropdownInput, IntInput, MessageTextInput +from langflow.schema import Data + + +class YahooFinanceMethod(Enum): + GET_INFO = "get_info" + GET_NEWS = "get_news" + GET_ACTIONS = "get_actions" + GET_ANALYSIS = "get_analysis" + GET_BALANCE_SHEET = "get_balance_sheet" + 
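The WolframAlpha component above reduces to `WolframAlphaAPIWrapper.run()`. A minimal sketch, assuming the `wolframalpha` package is installed and a valid App ID is exported in the (illustrative) `WOLFRAM_APP_ID` environment variable:

```python
# Direct use of the wrapper built by _build_wrapper() above.
import os

from langchain_community.utilities.wolfram_alpha import WolframAlphaAPIWrapper

wrapper = WolframAlphaAPIWrapper(wolfram_alpha_appid=os.environ["WOLFRAM_APP_ID"])
print(wrapper.run("What is the population of France?"))
```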
GET_CALENDAR = "get_calendar" + GET_CASHFLOW = "get_cashflow" + GET_INSTITUTIONAL_HOLDERS = "get_institutional_holders" + GET_RECOMMENDATIONS = "get_recommendations" + GET_SUSTAINABILITY = "get_sustainability" + GET_MAJOR_HOLDERS = "get_major_holders" + GET_MUTUALFUND_HOLDERS = "get_mutualfund_holders" + GET_INSIDER_PURCHASES = "get_insider_purchases" + GET_INSIDER_TRANSACTIONS = "get_insider_transactions" + GET_INSIDER_ROSTER_HOLDERS = "get_insider_roster_holders" + GET_DIVIDENDS = "get_dividends" + GET_CAPITAL_GAINS = "get_capital_gains" + GET_SPLITS = "get_splits" + GET_SHARES = "get_shares" + GET_FAST_INFO = "get_fast_info" + GET_SEC_FILINGS = "get_sec_filings" + GET_RECOMMENDATIONS_SUMMARY = "get_recommendations_summary" + GET_UPGRADES_DOWNGRADES = "get_upgrades_downgrades" + GET_EARNINGS = "get_earnings" + GET_INCOME_STMT = "get_income_stmt" + + +class YahooFinanceSchema(BaseModel): + symbol: str = Field(..., description="The stock symbol to retrieve data for.") + method: YahooFinanceMethod = Field(YahooFinanceMethod.GET_INFO, description="The type of data to retrieve.") + num_news: int | None = Field(5, description="The number of news articles to retrieve.") + + +class YfinanceToolComponent(LCToolComponent): + display_name = "Yahoo Finance" + description = """Uses [yfinance](https://pypi.org/project/yfinance/) (unofficial package) \ +to access financial data and market information from Yahoo Finance.""" + icon = "trending-up" + name = "YahooFinanceTool" + + inputs = [ + MessageTextInput( + name="symbol", + display_name="Stock Symbol", + info="The stock symbol to retrieve data for (e.g., AAPL, GOOG).", + ), + DropdownInput( + name="method", + display_name="Data Method", + info="The type of data to retrieve.", + options=list(YahooFinanceMethod), + value="get_news", + ), + IntInput( + name="num_news", + display_name="Number of News", + info="The number of news articles to retrieve (only applicable for get_news).", + value=5, + ), + ] + + def run_model(self) -> list[Data]: + return self._yahoo_finance_tool( + self.symbol, + self.method, + self.num_news, + ) + + def build_tool(self) -> Tool: + return StructuredTool.from_function( + name="yahoo_finance", + description="Access financial data and market information from Yahoo Finance.", + func=self._yahoo_finance_tool, + args_schema=YahooFinanceSchema, + ) + + def _yahoo_finance_tool( + self, + symbol: str, + method: YahooFinanceMethod, + num_news: int | None = 5, + ) -> list[Data]: + ticker = yf.Ticker(symbol) + + try: + if method == YahooFinanceMethod.GET_INFO: + result = ticker.info + elif method == YahooFinanceMethod.GET_NEWS: + result = ticker.news[:num_news] + else: + result = getattr(ticker, method.value)() + + result = pprint.pformat(result) + + if method == YahooFinanceMethod.GET_NEWS: + data_list = [Data(data=article) for article in ast.literal_eval(result)] + else: + data_list = [Data(data={"result": result})] + + except Exception as e: + error_message = f"Error retrieving data: {e}" + logger.debug(error_message) + self.status = error_message + raise ToolException(error_message) from e + + return data_list diff --git a/src/backend/base/langflow/components/tools/youtube_transcripts.py b/src/backend/base/langflow/components/tools/youtube_transcripts.py new file mode 100644 index 000000000000..ee7c1b15514a --- /dev/null +++ b/src/backend/base/langflow/components/tools/youtube_transcripts.py @@ -0,0 +1,173 @@ +from langchain.tools import StructuredTool +from langchain_community.document_loaders import YoutubeLoader +from 
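Apart from the `get_info`/`get_news` special cases, `_yahoo_finance_tool` above dispatches on the enum value with `getattr(ticker, method.value)()`. The same dispatch in isolation (requires the `yfinance` package and network access; symbol and method are illustrative):

```python
# Same getattr-based dispatch as _yahoo_finance_tool above, in isolation.
import yfinance as yf

ticker = yf.Ticker("AAPL")
method = "get_institutional_holders"  # any YahooFinanceMethod value
print(getattr(ticker, method)())
```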
langchain_community.document_loaders.youtube import TranscriptFormat +from langchain_core.tools import ToolException +from pydantic import BaseModel, Field + +from langflow.base.langchain_utilities.model import LCToolComponent +from langflow.field_typing import Tool +from langflow.inputs import DropdownInput, IntInput, MultilineInput +from langflow.schema import Data +from langflow.template import Output + + +class YoutubeApiSchema(BaseModel): + """Schema to define the input structure for the tool.""" + + url: str = Field(..., description="The YouTube URL to get transcripts from.") + transcript_format: TranscriptFormat = Field( + TranscriptFormat.TEXT, + description="The format of the transcripts. Either 'text' for a single " + "text output or 'chunks' for timestamped chunks.", + ) + chunk_size_seconds: int = Field( + 120, + description="The size of each transcript chunk in seconds. Only " + "applicable when 'Transcript Format' is set to 'chunks'.", + ) + language: str = Field( + "", + description="A comma-separated list of language codes in descending " "priority. Leave empty for default.", + ) + translation: str = Field( + "", description="Translate the transcripts to the specified language. " "Leave empty for no translation." + ) + + +class YouTubeTranscriptsComponent(LCToolComponent): + """A component that extracts spoken content from YouTube videos as transcripts.""" + + display_name: str = "YouTube Transcripts" + description: str = "Extracts spoken content from YouTube videos as transcripts." + icon: str = "YouTube" + + inputs = [ + MultilineInput( + name="url", display_name="Video URL", info="Enter the YouTube video URL to get transcripts from." + ), + DropdownInput( + name="transcript_format", + display_name="Transcript Format", + options=["text", "chunks"], + value="text", + info="The format of the transcripts. Either 'text' for a single output " + "or 'chunks' for timestamped chunks.", + ), + IntInput( + name="chunk_size_seconds", + display_name="Chunk Size (seconds)", + value=60, + advanced=True, + info="The size of each transcript chunk in seconds. Only applicable when " + "'Transcript Format' is set to 'chunks'.", + ), + MultilineInput( + name="language", + display_name="Language", + info="A comma-separated list of language codes in descending priority. " "Leave empty for default.", + ), + DropdownInput( + name="translation", + display_name="Translation Language", + advanced=True, + options=["", "en", "es", "fr", "de", "it", "pt", "ru", "ja", "ko", "hi", "ar", "id"], + info="Translate the transcripts to the specified language. " "Leave empty for no translation.", + ), + ] + + outputs = [ + Output(name="transcripts", display_name="Data", method="build_youtube_transcripts"), + Output(name="transcripts_tool", display_name="Tool", method="build_youtube_tool"), + ] + + def build_youtube_transcripts(self) -> Data | list[Data]: + """Method to build transcripts from the provided YouTube URL. + + Returns: + Data | list[Data]: The transcripts of the video, either as a single + Data object or a list of Data objects. 
+ """ + try: + loader = YoutubeLoader.from_youtube_url( + self.url, + transcript_format=TranscriptFormat.TEXT + if self.transcript_format == "text" + else TranscriptFormat.CHUNKS, + chunk_size_seconds=self.chunk_size_seconds, + language=self.language.split(",") if self.language else ["en"], + translation=self.translation if self.translation else None, + ) + + transcripts = loader.load() + + if self.transcript_format == "text": + # Extract only the page_content from the Document + return Data(data={"transcripts": transcripts[0].page_content}) + # For chunks, extract page_content and metadata separately + return [Data(data={"content": doc.page_content, "metadata": doc.metadata}) for doc in transcripts] + + except Exception as exc: # noqa: BLE001 + # Using a specific error type for the return value + return Data(data={"error": f"Failed to get YouTube transcripts: {exc!s}"}) + + def youtube_transcripts( + self, + url: str = "", + transcript_format: TranscriptFormat = TranscriptFormat.TEXT, + chunk_size_seconds: int = 120, + language: str = "", + translation: str = "", + ) -> Data | list[Data]: + """Helper method to handle transcripts outside of component calls. + + Args: + url: The YouTube URL to get transcripts from. + transcript_format: Format of transcripts ('text' or 'chunks'). + chunk_size_seconds: Size of each transcript chunk in seconds. + language: Comma-separated list of language codes. + translation: Target language for translation. + + Returns: + Data | list[Data]: Video transcripts as single Data or list of Data. + """ + try: + if isinstance(transcript_format, str): + transcript_format = TranscriptFormat(transcript_format) + loader = YoutubeLoader.from_youtube_url( + url, + transcript_format=TranscriptFormat.TEXT + if transcript_format == TranscriptFormat.TEXT + else TranscriptFormat.CHUNKS, + chunk_size_seconds=chunk_size_seconds, + language=language.split(",") if language else ["en"], + translation=translation if translation else None, + ) + + transcripts = loader.load() + if transcript_format == TranscriptFormat.TEXT and len(transcripts) > 0: + return Data(data={"transcript": transcripts[0].page_content}) + return [Data(data={"content": doc.page_content, "metadata": doc.metadata}) for doc in transcripts] + except Exception as exc: + msg = f"Failed to get YouTube transcripts: {exc!s}" + raise ToolException(msg) from exc + + def build_youtube_tool(self) -> Tool: + """Method to build the transcripts tool. + + Returns: + Tool: A structured tool that uses the transcripts method. + + Raises: + RuntimeError: If tool creation fails. 
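Both `build_youtube_transcripts` and the `youtube_transcripts` helper above reduce to a `YoutubeLoader.from_youtube_url(...)` call. A minimal standalone sketch, assuming a public video with captions (the URL is illustrative, and chunk metadata keys can vary across langchain-community versions):

```python
# Minimal sketch of the loader call made above, in CHUNKS mode.
from langchain_community.document_loaders import YoutubeLoader
from langchain_community.document_loaders.youtube import TranscriptFormat

loader = YoutubeLoader.from_youtube_url(
    "https://www.youtube.com/watch?v=VIDEO_ID",  # illustrative URL
    transcript_format=TranscriptFormat.CHUNKS,
    chunk_size_seconds=60,
    language=["en"],
)
for doc in loader.load()[:2]:
    # metadata carries per-chunk timing info (exact keys vary by version)
    print(doc.metadata, doc.page_content[:60])
```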
+ """ + try: + return StructuredTool.from_function( + name="youtube_transcripts", + description="Get transcripts from YouTube videos.", + func=self.youtube_transcripts, + args_schema=YoutubeApiSchema, + ) + + except Exception as exc: + msg = f"Failed to build the YouTube transcripts tool: {exc!s}" + raise RuntimeError(msg) from exc diff --git a/src/backend/base/langflow/components/unstructured/__init__.py b/src/backend/base/langflow/components/unstructured/__init__.py new file mode 100644 index 000000000000..a9bceaabc03b --- /dev/null +++ b/src/backend/base/langflow/components/unstructured/__init__.py @@ -0,0 +1,3 @@ +from .unstructured import UnstructuredComponent + +__all__ = ["UnstructuredComponent"] diff --git a/src/backend/base/langflow/components/unstructured/unstructured.py b/src/backend/base/langflow/components/unstructured/unstructured.py new file mode 100644 index 000000000000..e55cbf2d239a --- /dev/null +++ b/src/backend/base/langflow/components/unstructured/unstructured.py @@ -0,0 +1,54 @@ +from langchain_unstructured import UnstructuredLoader + +from langflow.custom import Component +from langflow.inputs import FileInput, SecretStrInput +from langflow.schema import Data +from langflow.template import Output + + +class UnstructuredComponent(Component): + display_name = "Unstructured" + description = "Uses Unstructured.io to extract clean text from raw source documents. Supports: PDF, DOCX, TXT" + documentation = "https://python.langchain.com/v0.2/docs/integrations/providers/unstructured/" + trace_type = "tool" + icon = "Unstructured" + name = "Unstructured" + + inputs = [ + FileInput( + name="file", + display_name="File", + required=True, + info="The path to the file with which you want to use Unstructured to parse. Supports: PDF, DOCX, TXT", + file_types=["pdf", "docx", "txt"], # TODO: Support all unstructured file types + ), + SecretStrInput( + name="api_key", + display_name="Unstructured.io Serverless API Key", + required=True, + info="Unstructured API Key. 
Create at: https://app.unstructured.io/", + ), + ] + + outputs = [ + Output(name="data", display_name="Data", method="load_documents"), + ] + + def build_unstructured(self) -> UnstructuredLoader: + file_paths = [self.file] + + return UnstructuredLoader( + file_paths, + api_key=self.api_key, + partition_via_api=True, + ) + + def load_documents(self) -> list[Data]: + unstructured = self.build_unstructured() + + documents = unstructured.load() + data = [Data.from_document(doc) for doc in documents] # Using the from_document method of Data + + self.status = data + + return data diff --git a/src/backend/base/langflow/components/vectara/__init__.py b/src/backend/base/langflow/components/vectara/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/src/backend/base/langflow/components/vectorstores/AstraDB.py b/src/backend/base/langflow/components/vectorstores/AstraDB.py deleted file mode 100644 index 6c7d3e451c64..000000000000 --- a/src/backend/base/langflow/components/vectorstores/AstraDB.py +++ /dev/null @@ -1,296 +0,0 @@ -from loguru import logger - -from langflow.base.vectorstores.model import LCVectorStoreComponent, check_cached_vector_store -from langflow.helpers import docs_to_data -from langflow.inputs import DictInput, FloatInput -from langflow.io import ( - BoolInput, - DataInput, - DropdownInput, - HandleInput, - IntInput, - MultilineInput, - SecretStrInput, - StrInput, -) -from langflow.schema import Data - - -class AstraVectorStoreComponent(LCVectorStoreComponent): - display_name: str = "Astra DB" - description: str = "Implementation of Vector Store using Astra DB with search capabilities" - documentation: str = "https://python.langchain.com/docs/integrations/vectorstores/astradb" - name = "AstraDB" - icon: str = "AstraDB" - - inputs = [ - StrInput( - name="collection_name", - display_name="Collection Name", - info="The name of the collection within Astra DB where the vectors will be stored.", - required=True, - ), - SecretStrInput( - name="token", - display_name="Astra DB Application Token", - info="Authentication token for accessing Astra DB.", - value="ASTRA_DB_APPLICATION_TOKEN", - required=True, - ), - SecretStrInput( - name="api_endpoint", - display_name="API Endpoint", - info="API endpoint URL for the Astra DB service.", - value="ASTRA_DB_API_ENDPOINT", - required=True, - ), - MultilineInput( - name="search_input", - display_name="Search Input", - ), - DataInput( - name="ingest_data", - display_name="Ingest Data", - is_list=True, - ), - StrInput( - name="namespace", - display_name="Namespace", - info="Optional namespace within Astra DB to use for the collection.", - advanced=True, - ), - DropdownInput( - name="metric", - display_name="Metric", - info="Optional distance metric for vector comparisons in the vector store.", - options=["cosine", "dot_product", "euclidean"], - advanced=True, - ), - IntInput( - name="batch_size", - display_name="Batch Size", - info="Optional number of data to process in a single batch.", - advanced=True, - ), - IntInput( - name="bulk_insert_batch_concurrency", - display_name="Bulk Insert Batch Concurrency", - info="Optional concurrency level for bulk insert operations.", - advanced=True, - ), - IntInput( - name="bulk_insert_overwrite_concurrency", - display_name="Bulk Insert Overwrite Concurrency", - info="Optional concurrency level for bulk insert operations that overwrite existing data.", - advanced=True, - ), - IntInput( - name="bulk_delete_concurrency", - display_name="Bulk Delete Concurrency", - info="Optional 
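The Unstructured component above builds an `UnstructuredLoader` over a single file path and partitions via the hosted API. A minimal sketch of the same call, assuming a local `example.pdf` and an API key in the (illustrative) `UNSTRUCTURED_API_KEY` environment variable:

```python
# Minimal sketch of the loader configured above, partitioning via the
# hosted API. File path and env-var name are illustrative assumptions.
import os

from langchain_unstructured import UnstructuredLoader

loader = UnstructuredLoader(
    ["./example.pdf"],
    api_key=os.environ["UNSTRUCTURED_API_KEY"],
    partition_via_api=True,
)
docs = loader.load()
print(len(docs), docs[0].metadata.get("filename") if docs else "no documents")
```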
concurrency level for bulk delete operations.", - advanced=True, - ), - DropdownInput( - name="setup_mode", - display_name="Setup Mode", - info="Configuration mode for setting up the vector store, with options like 'Sync', 'Async', or 'Off'.", - options=["Sync", "Async", "Off"], - advanced=True, - value="Sync", - ), - BoolInput( - name="pre_delete_collection", - display_name="Pre Delete Collection", - info="Boolean flag to determine whether to delete the collection before creating a new one.", - advanced=True, - ), - StrInput( - name="metadata_indexing_include", - display_name="Metadata Indexing Include", - info="Optional list of metadata fields to include in the indexing.", - advanced=True, - ), - HandleInput( - name="embedding", - display_name="Embedding or Astra Vectorize", - input_types=["Embeddings", "dict"], - info="Allows either an embedding model or an Astra Vectorize configuration.", # TODO: This should be optional, but need to refactor langchain-astradb first. - ), - StrInput( - name="metadata_indexing_exclude", - display_name="Metadata Indexing Exclude", - info="Optional list of metadata fields to exclude from the indexing.", - advanced=True, - ), - StrInput( - name="collection_indexing_policy", - display_name="Collection Indexing Policy", - info="Optional dictionary defining the indexing policy for the collection.", - advanced=True, - ), - IntInput( - name="number_of_results", - display_name="Number of Results", - info="Number of results to return.", - advanced=True, - value=4, - ), - DropdownInput( - name="search_type", - display_name="Search Type", - info="Search type to use", - options=["Similarity", "Similarity with score threshold", "MMR (Max Marginal Relevance)"], - value="Similarity", - advanced=True, - ), - FloatInput( - name="search_score_threshold", - display_name="Search Score Threshold", - info="Minimum similarity score threshold for search results. (when using 'Similarity with score threshold')", - value=0, - advanced=True, - ), - DictInput( - name="search_filter", - display_name="Search Metadata Filter", - info="Optional dictionary of filters to apply to the search query.", - advanced=True, - is_list=True, - ), - ] - - @check_cached_vector_store - def build_vector_store(self): - try: - from langchain_astradb import AstraDBVectorStore - from langchain_astradb.utils.astradb import SetupMode - except ImportError: - raise ImportError( - "Could not import langchain Astra DB integration package. " - "Please install it with `pip install langchain-astradb`." 
- ) - - try: - if not self.setup_mode: - self.setup_mode = self._inputs["setup_mode"].options[0] - - setup_mode_value = SetupMode[self.setup_mode.upper()] - except KeyError: - raise ValueError(f"Invalid setup mode: {self.setup_mode}") - - if not isinstance(self.embedding, dict): - embedding_dict = {"embedding": self.embedding} - else: - from astrapy.info import CollectionVectorServiceOptions - - dict_options = self.embedding.get("collection_vector_service_options", {}) - dict_options["authentication"] = { - k: v for k, v in dict_options.get("authentication", {}).items() if k and v - } - dict_options["parameters"] = {k: v for k, v in dict_options.get("parameters", {}).items() if k and v} - embedding_dict = { - "collection_vector_service_options": CollectionVectorServiceOptions.from_dict(dict_options) - } - collection_embedding_api_key = self.embedding.get("collection_embedding_api_key") - if collection_embedding_api_key: - embedding_dict["collection_embedding_api_key"] = collection_embedding_api_key - - vector_store_kwargs = { - **embedding_dict, - "collection_name": self.collection_name, - "token": self.token, - "api_endpoint": self.api_endpoint, - "namespace": self.namespace or None, - "metric": self.metric or None, - "batch_size": self.batch_size or None, - "bulk_insert_batch_concurrency": self.bulk_insert_batch_concurrency or None, - "bulk_insert_overwrite_concurrency": self.bulk_insert_overwrite_concurrency or None, - "bulk_delete_concurrency": self.bulk_delete_concurrency or None, - "setup_mode": setup_mode_value, - "pre_delete_collection": self.pre_delete_collection or False, - } - - if self.metadata_indexing_include: - vector_store_kwargs["metadata_indexing_include"] = self.metadata_indexing_include - elif self.metadata_indexing_exclude: - vector_store_kwargs["metadata_indexing_exclude"] = self.metadata_indexing_exclude - elif self.collection_indexing_policy: - vector_store_kwargs["collection_indexing_policy"] = self.collection_indexing_policy - - try: - vector_store = AstraDBVectorStore(**vector_store_kwargs) - except Exception as e: - raise ValueError(f"Error initializing AstraDBVectorStore: {str(e)}") from e - - self._add_documents_to_vector_store(vector_store) - return vector_store - - def _add_documents_to_vector_store(self, vector_store): - documents = [] - for _input in self.ingest_data or []: - if isinstance(_input, Data): - documents.append(_input.to_lc_document()) - else: - raise ValueError("Vector Store Inputs must be Data objects.") - - if documents: - logger.debug(f"Adding {len(documents)} documents to the Vector Store.") - try: - vector_store.add_documents(documents) - except Exception as e: - raise ValueError(f"Error adding documents to AstraDBVectorStore: {str(e)}") from e - else: - logger.debug("No documents to add to the Vector Store.") - - def _map_search_type(self): - if self.search_type == "Similarity with score threshold": - return "similarity_score_threshold" - elif self.search_type == "MMR (Max Marginal Relevance)": - return "mmr" - else: - return "similarity" - - def _build_search_args(self): - args = { - "k": self.number_of_results, - "score_threshold": self.search_score_threshold, - } - - if self.search_filter: - clean_filter = {k: v for k, v in self.search_filter.items() if k and v} - if len(clean_filter) > 0: - args["filter"] = clean_filter - return args - - def search_documents(self) -> list[Data]: - vector_store = self.build_vector_store() - - logger.debug(f"Search input: {self.search_input}") - logger.debug(f"Search type: {self.search_type}") - 
logger.debug(f"Number of results: {self.number_of_results}") - - if self.search_input and isinstance(self.search_input, str) and self.search_input.strip(): - try: - search_type = self._map_search_type() - search_args = self._build_search_args() - - docs = vector_store.search(query=self.search_input, search_type=search_type, **search_args) - except Exception as e: - raise ValueError(f"Error performing search in AstraDBVectorStore: {str(e)}") from e - - logger.debug(f"Retrieved documents: {len(docs)}") - - data = docs_to_data(docs) - logger.debug(f"Converted documents to data: {len(data)}") - self.status = data - return data - else: - logger.debug("No search input provided. Skipping search.") - return [] - - def get_retriever_kwargs(self): - search_args = self._build_search_args() - return { - "search_type": self._map_search_type(), - "search_kwargs": search_args, - } diff --git a/src/backend/base/langflow/components/vectorstores/Cassandra.py b/src/backend/base/langflow/components/vectorstores/Cassandra.py deleted file mode 100644 index 23374f2e01e5..000000000000 --- a/src/backend/base/langflow/components/vectorstores/Cassandra.py +++ /dev/null @@ -1,274 +0,0 @@ -from typing import List - -from langchain_community.vectorstores import Cassandra -from loguru import logger - -from langflow.base.vectorstores.model import LCVectorStoreComponent, check_cached_vector_store -from langflow.helpers.data import docs_to_data -from langflow.inputs import BoolInput, DictInput, FloatInput -from langflow.io import ( - DataInput, - DropdownInput, - HandleInput, - IntInput, - MessageTextInput, - MultilineInput, - SecretStrInput, -) -from langflow.schema import Data - - -class CassandraVectorStoreComponent(LCVectorStoreComponent): - display_name = "Cassandra" - description = "Cassandra Vector Store with search capabilities" - documentation = "https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/cassandra" - name = "Cassandra" - icon = "Cassandra" - - inputs = [ - MessageTextInput( - name="database_ref", - display_name="Contact Points / Astra Database ID", - info="Contact points for the database (or AstraDB database ID)", - required=True, - ), - MessageTextInput( - name="username", display_name="Username", info="Username for the database (leave empty for AstraDB)." 
- ), - SecretStrInput( - name="token", - display_name="Password / AstraDB Token", - info="User password for the database (or AstraDB token).", - required=True, - ), - MessageTextInput( - name="keyspace", - display_name="Keyspace", - info="Table Keyspace (or AstraDB namespace).", - required=True, - ), - MessageTextInput( - name="table_name", - display_name="Table Name", - info="The name of the table (or AstraDB collection) where vectors will be stored.", - required=True, - ), - IntInput( - name="ttl_seconds", - display_name="TTL Seconds", - info="Optional time-to-live for the added texts.", - advanced=True, - ), - IntInput( - name="batch_size", - display_name="Batch Size", - info="Optional number of data to process in a single batch.", - value=16, - advanced=True, - ), - DropdownInput( - name="setup_mode", - display_name="Setup Mode", - info="Configuration mode for setting up the Cassandra table, with options like 'Sync', 'Async', or 'Off'.", - options=["Sync", "Async", "Off"], - value="Sync", - advanced=True, - ), - DictInput( - name="cluster_kwargs", - display_name="Cluster arguments", - info="Optional dictionary of additional keyword arguments for the Cassandra cluster.", - advanced=True, - is_list=True, - ), - MultilineInput(name="search_query", display_name="Search Query"), - DataInput( - name="ingest_data", - display_name="Ingest Data", - is_list=True, - ), - HandleInput(name="embedding", display_name="Embedding", input_types=["Embeddings"]), - IntInput( - name="number_of_results", - display_name="Number of Results", - info="Number of results to return.", - value=4, - advanced=True, - ), - DropdownInput( - name="search_type", - display_name="Search Type", - info="Search type to use", - options=["Similarity", "Similarity with score threshold", "MMR (Max Marginal Relevance)"], - value="Similarity", - advanced=True, - ), - FloatInput( - name="search_score_threshold", - display_name="Search Score Threshold", - info="Minimum similarity score threshold for search results. (when using 'Similarity with score threshold')", - value=0, - advanced=True, - ), - DictInput( - name="search_filter", - display_name="Search Metadata Filter", - info="Optional dictionary of filters to apply to the search query.", - advanced=True, - is_list=True, - ), - MessageTextInput( - name="body_search", - display_name="Search Body", - info="Document textual search terms to apply to the search query.", - advanced=True, - ), - BoolInput( - name="enable_body_search", - display_name="Enable Body Search", - info="Flag to enable body search. This must be enabled BEFORE the table is created.", - value=False, - advanced=True, - ), - ] - - @check_cached_vector_store - def build_vector_store(self) -> Cassandra: - try: - import cassio - from langchain_community.utilities.cassandra import SetupMode - except ImportError: - raise ImportError( - "Could not import cassio integration package. " "Please install it with `pip install cassio`." 
- ) - - from uuid import UUID - - database_ref = self.database_ref - - try: - UUID(self.database_ref) - is_astra = True - except ValueError: - is_astra = False - if "," in self.database_ref: - # use a copy because we can't change the type of the parameter - database_ref = self.database_ref.split(",") - - if is_astra: - cassio.init( - database_id=database_ref, - token=self.token, - cluster_kwargs=self.cluster_kwargs, - ) - else: - cassio.init( - contact_points=database_ref, - username=self.username, - password=self.token, - cluster_kwargs=self.cluster_kwargs, - ) - documents = [] - - for _input in self.ingest_data or []: - if isinstance(_input, Data): - documents.append(_input.to_lc_document()) - else: - documents.append(_input) - - if self.enable_body_search: - body_index_options = [("index_analyzer", "STANDARD")] - else: - body_index_options = None - - if self.setup_mode == "Off": - setup_mode = SetupMode.OFF - elif self.setup_mode == "Sync": - setup_mode = SetupMode.SYNC - else: - setup_mode = SetupMode.ASYNC - - if documents: - logger.debug(f"Adding {len(documents)} documents to the Vector Store.") - table = Cassandra.from_documents( - documents=documents, - embedding=self.embedding, - table_name=self.table_name, - keyspace=self.keyspace, - ttl_seconds=self.ttl_seconds or None, - batch_size=self.batch_size, - body_index_options=body_index_options, - ) - else: - logger.debug("No documents to add to the Vector Store.") - table = Cassandra( - embedding=self.embedding, - table_name=self.table_name, - keyspace=self.keyspace, - ttl_seconds=self.ttl_seconds or None, - body_index_options=body_index_options, - setup_mode=setup_mode, - ) - return table - - def _map_search_type(self): - if self.search_type == "Similarity with score threshold": - return "similarity_score_threshold" - elif self.search_type == "MMR (Max Marginal Relevance)": - return "mmr" - else: - return "similarity" - - def search_documents(self) -> List[Data]: - vector_store = self.build_vector_store() - - logger.debug(f"Search input: {self.search_query}") - logger.debug(f"Search type: {self.search_type}") - logger.debug(f"Number of results: {self.number_of_results}") - - if self.search_query and isinstance(self.search_query, str) and self.search_query.strip(): - try: - search_type = self._map_search_type() - search_args = self._build_search_args() - - logger.debug(f"Search args: {str(search_args)}") - - docs = vector_store.search(query=self.search_query, search_type=search_type, **search_args) - except KeyError as e: - if "content" in str(e): - raise ValueError( - "You should ingest data through Langflow (or LangChain) to query it in Langflow. Your collection does not contain a field name 'content'." 
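The removed Cassandra component decided between Astra and self-hosted contact points by whether `database_ref` parses as a UUID, as seen above. That check in isolation:

```python
# Standalone version of the Astra-vs-contact-points check used above:
# an Astra database ID is a UUID; anything else is treated as contact points.
from uuid import UUID


def is_astra(database_ref: str) -> bool:
    try:
        UUID(database_ref)
    except ValueError:
        return False
    return True


print(is_astra("123e4567-e89b-12d3-a456-426614174000"))  # True  (Astra DB id)
print(is_astra("10.0.0.1,10.0.0.2"))                     # False (contact points)
```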
- ) - else: - raise e - - logger.debug(f"Retrieved documents: {len(docs)}") - - data = docs_to_data(docs) - self.status = data - return data - else: - return [] - - def _build_search_args(self): - args = { - "k": self.number_of_results, - "score_threshold": self.search_score_threshold, - } - - if self.search_filter: - clean_filter = {k: v for k, v in self.search_filter.items() if k and v} - if len(clean_filter) > 0: - args["filter"] = clean_filter - if self.body_search: - if not self.enable_body_search: - raise ValueError("You should enable body search when creating the table to search the body field.") - args["body_search"] = self.body_search - return args - - def get_retriever_kwargs(self): - search_args = self._build_search_args() - return { - "search_type": self._map_search_type(), - "search_kwargs": search_args, - } diff --git a/src/backend/base/langflow/components/vectorstores/Chroma.py b/src/backend/base/langflow/components/vectorstores/Chroma.py deleted file mode 100644 index 50686b6c63bc..000000000000 --- a/src/backend/base/langflow/components/vectorstores/Chroma.py +++ /dev/null @@ -1,172 +0,0 @@ -from copy import deepcopy -from typing import TYPE_CHECKING - -from chromadb.config import Settings -from langchain_chroma.vectorstores import Chroma -from loguru import logger - -from langflow.base.vectorstores.model import LCVectorStoreComponent, check_cached_vector_store -from langflow.base.vectorstores.utils import chroma_collection_to_data -from langflow.io import BoolInput, DataInput, DropdownInput, HandleInput, IntInput, StrInput, MultilineInput -from langflow.schema import Data - -if TYPE_CHECKING: - from langchain_chroma import Chroma - - -class ChromaVectorStoreComponent(LCVectorStoreComponent): - """ - Chroma Vector Store with search capabilities - """ - - display_name: str = "Chroma DB" - description: str = "Chroma Vector Store with search capabilities" - documentation = "https://python.langchain.com/docs/integrations/vectorstores/chroma" - name = "Chroma" - icon = "Chroma" - - inputs = [ - StrInput( - name="collection_name", - display_name="Collection Name", - value="langflow", - ), - StrInput( - name="persist_directory", - display_name="Persist Directory", - ), - MultilineInput( - name="search_query", - display_name="Search Query", - ), - DataInput( - name="ingest_data", - display_name="Ingest Data", - is_list=True, - ), - HandleInput(name="embedding", display_name="Embedding", input_types=["Embeddings"]), - StrInput( - name="chroma_server_cors_allow_origins", - display_name="Server CORS Allow Origins", - advanced=True, - ), - StrInput( - name="chroma_server_host", - display_name="Server Host", - advanced=True, - ), - IntInput( - name="chroma_server_http_port", - display_name="Server HTTP Port", - advanced=True, - ), - IntInput( - name="chroma_server_grpc_port", - display_name="Server gRPC Port", - advanced=True, - ), - BoolInput( - name="chroma_server_ssl_enabled", - display_name="Server SSL Enabled", - advanced=True, - ), - BoolInput( - name="allow_duplicates", - display_name="Allow Duplicates", - advanced=True, - info="If false, will not add documents that are already in the Vector Store.", - ), - DropdownInput( - name="search_type", - display_name="Search Type", - options=["Similarity", "MMR"], - value="Similarity", - advanced=True, - ), - IntInput( - name="number_of_results", - display_name="Number of Results", - info="Number of results to return.", - advanced=True, - value=10, - ), - IntInput( - name="limit", - display_name="Limit", - advanced=True, - info="Limit the 
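# Standalone sketch of the search dispatch used by the Cassandra component above
# (and mirrored by several components below): the UI label is mapped to
# LangChain's vector-store search_type string, and the kwargs are assembled from
# the advanced inputs. The function names are illustrative.
def map_search_type(label: str) -> str:
    if label == "Similarity with score threshold":
        return "similarity_score_threshold"
    if label == "MMR (Max Marginal Relevance)":
        return "mmr"
    return "similarity"

def build_search_kwargs(k: int = 4, score_threshold: float = 0.0, metadata_filter: dict | None = None) -> dict:
    args = {"k": k, "score_threshold": score_threshold}
    clean_filter = {key: value for key, value in (metadata_filter or {}).items() if key and value}
    if clean_filter:
        args["filter"] = clean_filter
    return args

# Usage against any LangChain vector store:
#   vector_store.search(query="...", search_type=map_search_type("MMR (Max Marginal Relevance)"),
#                       **build_search_kwargs(k=4))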
number of records to compare when Allow Duplicates is False.", - ), - ] - - @check_cached_vector_store - def build_vector_store(self) -> Chroma: - """ - Builds the Chroma object. - """ - try: - from chromadb import Client - from langchain_chroma import Chroma - except ImportError: - raise ImportError( - "Could not import Chroma integration package. " "Please install it with `pip install langchain-chroma`." - ) - # Chroma settings - chroma_settings = None - client = None - if self.chroma_server_host: - chroma_settings = Settings( - chroma_server_cors_allow_origins=self.chroma_server_cors_allow_origins or [], - chroma_server_host=self.chroma_server_host, - chroma_server_http_port=self.chroma_server_http_port or None, - chroma_server_grpc_port=self.chroma_server_grpc_port or None, - chroma_server_ssl_enabled=self.chroma_server_ssl_enabled, - ) - client = Client(settings=chroma_settings) - - # Check persist_directory and expand it if it is a relative path - if self.persist_directory is not None: - persist_directory = self.resolve_path(self.persist_directory) - else: - persist_directory = None - - chroma = Chroma( - persist_directory=persist_directory, - client=client, - embedding_function=self.embedding, - collection_name=self.collection_name, - ) - - self._add_documents_to_vector_store(chroma) - self.status = chroma_collection_to_data(chroma.get(limit=self.limit)) - return chroma - - def _add_documents_to_vector_store(self, vector_store: "Chroma") -> None: - """ - Adds documents to the Vector Store. - """ - if not self.ingest_data: - self.status = "" - return - - _stored_documents_without_id = [] - if self.allow_duplicates: - stored_data = [] - else: - stored_data = chroma_collection_to_data(vector_store.get(limit=self.limit)) - for value in deepcopy(stored_data): - del value.id - _stored_documents_without_id.append(value) - - documents = [] - for _input in self.ingest_data or []: - if isinstance(_input, Data): - if _input not in _stored_documents_without_id: - documents.append(_input.to_lc_document()) - else: - raise ValueError("Vector Store Inputs must be Data objects.") - - if documents and self.embedding is not None: - logger.debug(f"Adding {len(documents)} documents to the Vector Store.") - vector_store.add_documents(documents) - else: - logger.debug("No documents to add to the Vector Store.") diff --git a/src/backend/base/langflow/components/vectorstores/Couchbase.py b/src/backend/base/langflow/components/vectorstores/Couchbase.py deleted file mode 100644 index e0273fb80bc7..000000000000 --- a/src/backend/base/langflow/components/vectorstores/Couchbase.py +++ /dev/null @@ -1,108 +0,0 @@ -from datetime import timedelta -from typing import List - -from langchain_community.vectorstores import CouchbaseVectorStore - -from langflow.base.vectorstores.model import LCVectorStoreComponent, check_cached_vector_store -from langflow.helpers.data import docs_to_data -from langflow.io import HandleInput, IntInput, StrInput, SecretStrInput, DataInput, MultilineInput -from langflow.schema import Data - - -class CouchbaseVectorStoreComponent(LCVectorStoreComponent): - display_name = "Couchbase" - description = "Couchbase Vector Store with search capabilities" - documentation = "https://python.langchain.com/v0.1/docs/integrations/document_loaders/couchbase/" - name = "Couchbase" - icon = "Couchbase" - - inputs = [ - SecretStrInput( - name="couchbase_connection_string", display_name="Couchbase Cluster connection string", required=True - ), - StrInput(name="couchbase_username", display_name="Couchbase 
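# Standalone sketch of the Chroma de-duplication step above: previously stored
# records are compared with their ids stripped, so re-ingesting identical
# content is skipped when Allow Duplicates is off. Plain dicts stand in for
# langflow.schema.Data here.
from copy import deepcopy

def filter_new_records(incoming: list[dict], stored: list[dict]) -> list[dict]:
    stored_without_id = []
    for record in deepcopy(stored):
        record.pop("id", None)  # compare on content, not identity
        stored_without_id.append(record)
    return [record for record in incoming if record not in stored_without_id]

# filter_new_records([{"text": "hello"}, {"text": "world"}], [{"id": "1", "text": "hello"}])
# -> [{"text": "world"}]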
username", required=True), - SecretStrInput(name="couchbase_password", display_name="Couchbase password", required=True), - StrInput(name="bucket_name", display_name="Bucket Name", required=True), - StrInput(name="scope_name", display_name="Scope Name", required=True), - StrInput(name="collection_name", display_name="Collection Name", required=True), - StrInput(name="index_name", display_name="Index Name", required=True), - MultilineInput(name="search_query", display_name="Search Query"), - DataInput( - name="ingest_data", - display_name="Ingest Data", - is_list=True, - ), - HandleInput(name="embedding", display_name="Embedding", input_types=["Embeddings"]), - IntInput( - name="number_of_results", - display_name="Number of Results", - info="Number of results to return.", - value=4, - advanced=True, - ), - ] - - @check_cached_vector_store - def build_vector_store(self) -> CouchbaseVectorStore: - try: - from couchbase.auth import PasswordAuthenticator # type: ignore - from couchbase.cluster import Cluster # type: ignore - from couchbase.options import ClusterOptions # type: ignore - except ImportError as e: - raise ImportError( - "Failed to import Couchbase dependencies. Install it using `pip install langflow[couchbase] --pre`" - ) from e - - try: - auth = PasswordAuthenticator(self.couchbase_username, self.couchbase_password) - options = ClusterOptions(auth) - cluster = Cluster(self.couchbase_connection_string, options) - - cluster.wait_until_ready(timedelta(seconds=5)) - except Exception as e: - raise ValueError(f"Failed to connect to Couchbase: {e}") - - documents = [] - for _input in self.ingest_data or []: - if isinstance(_input, Data): - documents.append(_input.to_lc_document()) - else: - documents.append(_input) - - if documents: - couchbase_vs = CouchbaseVectorStore.from_documents( - documents=documents, - cluster=cluster, - bucket_name=self.bucket_name, - scope_name=self.scope_name, - collection_name=self.collection_name, - embedding=self.embedding, - index_name=self.index_name, - ) - - else: - couchbase_vs = CouchbaseVectorStore( - cluster=cluster, - bucket_name=self.bucket_name, - scope_name=self.scope_name, - collection_name=self.collection_name, - embedding=self.embedding, - index_name=self.index_name, - ) - - return couchbase_vs - - def search_documents(self) -> List[Data]: - vector_store = self.build_vector_store() - - if self.search_query and isinstance(self.search_query, str) and self.search_query.strip(): - docs = vector_store.similarity_search( - query=self.search_query, - k=self.number_of_results, - ) - - data = docs_to_data(docs) - self.status = data - return data - else: - return [] diff --git a/src/backend/base/langflow/components/vectorstores/FAISS.py b/src/backend/base/langflow/components/vectorstores/FAISS.py deleted file mode 100644 index 384e80190fdc..000000000000 --- a/src/backend/base/langflow/components/vectorstores/FAISS.py +++ /dev/null @@ -1,117 +0,0 @@ -from typing import List - -from langchain_community.vectorstores import FAISS -from loguru import logger - -from langflow.base.vectorstores.model import LCVectorStoreComponent, check_cached_vector_store -from langflow.helpers.data import docs_to_data -from langflow.io import BoolInput, DataInput, HandleInput, IntInput, MultilineInput, StrInput -from langflow.schema import Data - - -class FaissVectorStoreComponent(LCVectorStoreComponent): - """ - FAISS Vector Store with search capabilities - """ - - display_name: str = "FAISS" - description: str = "FAISS Vector Store with search capabilities" - documentation 
= "https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/faiss" - name = "FAISS" - icon = "FAISS" - - inputs = [ - StrInput( - name="index_name", - display_name="Index Name", - value="langflow_index", - ), - StrInput( - name="persist_directory", - display_name="Persist Directory", - info="Path to save the FAISS index. It will be relative to where Langflow is running.", - ), - MultilineInput( - name="search_query", - display_name="Search Query", - ), - DataInput( - name="ingest_data", - display_name="Ingest Data", - is_list=True, - ), - BoolInput( - name="allow_dangerous_deserialization", - display_name="Allow Dangerous Deserialization", - info="Set to True to allow loading pickle files from untrusted sources. Only enable this if you trust the source of the data.", - advanced=True, - value=True, - ), - HandleInput(name="embedding", display_name="Embedding", input_types=["Embeddings"]), - IntInput( - name="number_of_results", - display_name="Number of Results", - info="Number of results to return.", - advanced=True, - value=4, - ), - ] - - @check_cached_vector_store - def build_vector_store(self) -> FAISS: - """ - Builds the FAISS object. - """ - if not self.persist_directory: - raise ValueError("Folder path is required to save the FAISS index.") - path = self.resolve_path(self.persist_directory) - - documents = [] - - for _input in self.ingest_data or []: - if isinstance(_input, Data): - documents.append(_input.to_lc_document()) - else: - documents.append(_input) - - faiss = FAISS.from_documents(documents=documents, embedding=self.embedding) - faiss.save_local(str(path), self.index_name) - - return faiss - - def search_documents(self) -> List[Data]: - """ - Search for documents in the FAISS vector store. - """ - if not self.persist_directory: - raise ValueError("Folder path is required to load the FAISS index.") - path = self.resolve_path(self.persist_directory) - - vector_store = FAISS.load_local( - folder_path=path, - embeddings=self.embedding, - index_name=self.index_name, - allow_dangerous_deserialization=self.allow_dangerous_deserialization, - ) - - if not vector_store: - raise ValueError("Failed to load the FAISS index.") - - logger.debug(f"Search input: {self.search_query}") - logger.debug(f"Number of results: {self.number_of_results}") - - if self.search_query and isinstance(self.search_query, str) and self.search_query.strip(): - docs = vector_store.similarity_search( - query=self.search_query, - k=self.number_of_results, - ) - - logger.debug(f"Retrieved documents: {len(docs)}") - - data = docs_to_data(docs) - logger.debug(f"Converted documents to data: {len(data)}") - logger.debug(data) - return data # Return the search results data - else: - logger.debug("No search input provided. 
Skipping search.") - return [] diff --git a/src/backend/base/langflow/components/vectorstores/Milvus.py b/src/backend/base/langflow/components/vectorstores/Milvus.py deleted file mode 100644 index 879cb445190d..000000000000 --- a/src/backend/base/langflow/components/vectorstores/Milvus.py +++ /dev/null @@ -1,124 +0,0 @@ -from typing import List - -from langflow.base.vectorstores.model import LCVectorStoreComponent, check_cached_vector_store -from langflow.helpers.data import docs_to_data -from langflow.io import ( - DataInput, - StrInput, - IntInput, - FloatInput, - BoolInput, - DictInput, - MultilineInput, - DropdownInput, - SecretStrInput, - HandleInput, -) -from langflow.schema import Data - - -class MilvusVectorStoreComponent(LCVectorStoreComponent): - """Milvus vector store with search capabilities""" - - display_name: str = "Milvus" - description: str = "Milvus vector store with search capabilities" - documentation = "https://python.langchain.com/docs/integrations/vectorstores/milvus" - name = "Milvus" - icon = "Milvus" - - inputs = [ - StrInput(name="collection_name", display_name="Collection Name", value="langflow"), - StrInput(name="collection_description", display_name="Collection Description", value=""), - StrInput( - name="uri", - display_name="Connection URI", - value="http://localhost:19530", - ), - SecretStrInput( - name="password", - display_name="Connection Password", - value="", - info="Ignore this field if no password is required to make connection.", - ), - DictInput(name="connection_args", display_name="Other Connection Arguments", advanced=True), - StrInput(name="primary_field", display_name="Primary Field Name", value="pk"), - StrInput(name="text_field", display_name="Text Field Name", value="text"), - StrInput(name="vector_field", display_name="Vector Field Name", value="vector"), - DropdownInput( - name="consistency_level", - display_name="Consistencey Level", - options=["Bounded", "Session", "Strong", "Eventual"], - value="Session", - advanced=True, - ), - DictInput(name="index_params", display_name="Index Parameters", advanced=True), - DictInput(name="search_params", display_name="Search Parameters", advanced=True), - BoolInput(name="drop_old", display_name="Drop Old Collection", value=False, advanced=True), - FloatInput(name="timeout", display_name="Timeout", advanced=True), - MultilineInput(name="search_query", display_name="Search Query"), - DataInput( - name="ingest_data", - display_name="Ingest Data", - is_list=True, - ), - HandleInput(name="embedding", display_name="Embedding", input_types=["Embeddings"]), - IntInput( - name="number_of_results", - display_name="Number of Results", - info="Number of results to return.", - value=4, - advanced=True, - ), - ] - - @check_cached_vector_store - def build_vector_store(self): - try: - from langchain_milvus.vectorstores import Milvus as LangchainMilvus - except ImportError: - raise ImportError( - "Could not import Milvus integration package. " "Please install it with `pip install langchain-milvus`." 
- ) - self.connection_args.update(uri=self.uri, token=self.password) - milvus_store = LangchainMilvus( - embedding_function=self.embedding, - collection_name=self.collection_name, - collection_description=self.collection_description, - connection_args=self.connection_args, - consistency_level=self.consistency_level, - index_params=self.index_params, - search_params=self.search_params, - drop_old=self.drop_old, - auto_id=True, - primary_field=self.primary_field, - text_field=self.text_field, - vector_field=self.vector_field, - timeout=self.timeout, - ) - - documents = [] - for _input in self.ingest_data or []: - if isinstance(_input, Data): - documents.append(_input.to_lc_document()) - else: - documents.append(_input) - - if documents: - milvus_store.add_documents(documents) - - return milvus_store - - def search_documents(self) -> List[Data]: - vector_store = self.build_vector_store() - - if self.search_query and isinstance(self.search_query, str) and self.search_query.strip(): - docs = vector_store.similarity_search( - query=self.search_query, - k=self.number_of_results, - ) - - data = docs_to_data(docs) - self.status = data - return data - else: - return [] diff --git a/src/backend/base/langflow/components/vectorstores/MongoDBAtlasVector.py b/src/backend/base/langflow/components/vectorstores/MongoDBAtlasVector.py deleted file mode 100644 index 069e9ace5d60..000000000000 --- a/src/backend/base/langflow/components/vectorstores/MongoDBAtlasVector.py +++ /dev/null @@ -1,97 +0,0 @@ -from typing import List - -from langchain_community.vectorstores import MongoDBAtlasVectorSearch - -from langflow.base.vectorstores.model import LCVectorStoreComponent, check_cached_vector_store -from langflow.helpers.data import docs_to_data -from langflow.io import HandleInput, IntInput, StrInput, SecretStrInput, DataInput, MultilineInput -from langflow.schema import Data - - -class MongoVectorStoreComponent(LCVectorStoreComponent): - display_name = "MongoDB Atlas" - description = "MongoDB Atlas Vector Store with search capabilities" - documentation = "https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/mongodb_atlas" - name = "MongoDBAtlasVector" - icon = "MongoDB" - - inputs = [ - SecretStrInput(name="mongodb_atlas_cluster_uri", display_name="MongoDB Atlas Cluster URI", required=True), - StrInput(name="db_name", display_name="Database Name", required=True), - StrInput(name="collection_name", display_name="Collection Name", required=True), - StrInput(name="index_name", display_name="Index Name", required=True), - MultilineInput(name="search_query", display_name="Search Query"), - DataInput( - name="ingest_data", - display_name="Ingest Data", - is_list=True, - ), - HandleInput(name="embedding", display_name="Embedding", input_types=["Embeddings"]), - IntInput( - name="number_of_results", - display_name="Number of Results", - info="Number of results to return.", - value=4, - advanced=True, - ), - ] - - @check_cached_vector_store - def build_vector_store(self) -> MongoDBAtlasVectorSearch: - try: - from pymongo import MongoClient - except ImportError: - raise ImportError("Please install pymongo to use MongoDB Atlas Vector Store") - - try: - mongo_client: MongoClient = MongoClient(self.mongodb_atlas_cluster_uri) - collection = mongo_client[self.db_name][self.collection_name] - except Exception as e: - raise ValueError(f"Failed to connect to MongoDB Atlas: {e}") - - documents = [] - for _input in self.ingest_data or []: - if isinstance(_input, Data): - 
documents.append(_input.to_lc_document()) - else: - documents.append(_input) - - if documents: - vector_store = MongoDBAtlasVectorSearch.from_documents( - documents=documents, embedding=self.embedding, collection=collection, index_name=self.index_name - ) - else: - vector_store = MongoDBAtlasVectorSearch( - embedding=self.embedding, - collection=collection, - index_name=self.index_name, - ) - - return vector_store - - def search_documents(self) -> List[Data]: - from bson import ObjectId - - vector_store = self.build_vector_store() - - if self.search_query and isinstance(self.search_query, str): - docs = vector_store.similarity_search( - query=self.search_query, - k=self.number_of_results, - ) - for doc in docs: - doc.metadata = { - key: str(value) if isinstance(value, ObjectId) else value for key, value in doc.metadata.items() - } - - data = docs_to_data(docs) - self.status = data - return data - else: - return [] diff --git a/src/backend/base/langflow/components/vectorstores/Pinecone.py b/src/backend/base/langflow/components/vectorstores/Pinecone.py deleted file mode 100644 index d8f13a23941a..000000000000 --- a/src/backend/base/langflow/components/vectorstores/Pinecone.py +++ /dev/null @@ -1,101 +0,0 @@ -from typing import List - -from langchain_pinecone import Pinecone - -from langflow.base.vectorstores.model import LCVectorStoreComponent, check_cached_vector_store -from langflow.helpers.data import docs_to_data -from langflow.io import ( - DropdownInput, - HandleInput, - IntInput, - StrInput, - SecretStrInput, - DataInput, - MultilineInput, -) -from langflow.schema import Data - - -class PineconeVectorStoreComponent(LCVectorStoreComponent): - display_name = "Pinecone" - description = "Pinecone Vector Store with search capabilities" - documentation = "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pinecone/" - name = "Pinecone" - icon = "Pinecone" - - inputs = [ - StrInput(name="index_name", display_name="Index Name", required=True), - StrInput(name="namespace", display_name="Namespace", info="Namespace for the index."), - DropdownInput( - name="distance_strategy", - display_name="Distance Strategy", - options=["Cosine", "Euclidean", "Dot Product"], - value="Cosine", - advanced=True, - ), - SecretStrInput(name="pinecone_api_key", display_name="Pinecone API Key", required=True), - StrInput( - name="text_key", - display_name="Text Key", - info="Key in the record to use as text.", - value="text", - advanced=True, - ), - MultilineInput(name="search_query", display_name="Search Query"), - DataInput( - name="ingest_data", - display_name="Ingest Data", - is_list=True, - ), - HandleInput(name="embedding", display_name="Embedding", input_types=["Embeddings"]), - IntInput( - name="number_of_results", - display_name="Number of Results", - info="Number of results to return.", - value=4, - advanced=True, - ), - ] - - @check_cached_vector_store - def build_vector_store(self) -> Pinecone: - from langchain_pinecone._utilities import DistanceStrategy - from langchain_pinecone.vectorstores import Pinecone - - distance_strategy = self.distance_strategy.replace(" ", "_").upper() - _distance_strategy = DistanceStrategy[distance_strategy] - - pinecone = Pinecone( - index_name=self.index_name, - embedding=self.embedding, - text_key=self.text_key, - namespace=self.namespace, - distance_strategy=_distance_strategy, - pinecone_api_key=self.pinecone_api_key, -
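# Standalone sketch of the label-to-enum mapping in the Pinecone component
# above: the dropdown label ("Dot Product") is normalized into an enum member
# name ("DOT_PRODUCT"). The enum below is a stand-in for
# langchain_pinecone._utilities.DistanceStrategy.
from enum import Enum

class DistanceStrategy(str, Enum):
    COSINE = "cosine"
    EUCLIDEAN = "euclidean"
    DOT_PRODUCT = "dot_product"

def to_distance_strategy(label: str) -> DistanceStrategy:
    return DistanceStrategy[label.replace(" ", "_").upper()]

assert to_distance_strategy("Dot Product") is DistanceStrategy.DOT_PRODUCT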
) - - documents = [] - for _input in self.ingest_data or []: - if isinstance(_input, Data): - documents.append(_input.to_lc_document()) - else: - documents.append(_input) - - if documents: - pinecone.add_documents(documents) - return pinecone - - def search_documents(self) -> List[Data]: - vector_store = self.build_vector_store() - - if self.search_query and isinstance(self.search_query, str) and self.search_query.strip(): - docs = vector_store.similarity_search( - query=self.search_query, - k=self.number_of_results, - ) - - data = docs_to_data(docs) - self.status = data - return data - else: - return [] diff --git a/src/backend/base/langflow/components/vectorstores/Qdrant.py b/src/backend/base/langflow/components/vectorstores/Qdrant.py deleted file mode 100644 index 59ac3e13d3d3..000000000000 --- a/src/backend/base/langflow/components/vectorstores/Qdrant.py +++ /dev/null @@ -1,114 +0,0 @@ -from typing import List - -from langchain_community.vectorstores import Qdrant -from langflow.base.vectorstores.model import LCVectorStoreComponent, check_cached_vector_store -from langflow.helpers.data import docs_to_data -from langflow.io import ( - DropdownInput, - HandleInput, - IntInput, - StrInput, - SecretStrInput, - DataInput, - MultilineInput, -) -from langflow.schema import Data -from langchain.embeddings.base import Embeddings - - -class QdrantVectorStoreComponent(LCVectorStoreComponent): - display_name = "Qdrant" - description = "Qdrant Vector Store with search capabilities" - documentation = "https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/qdrant" - icon = "Qdrant" - - inputs = [ - StrInput(name="collection_name", display_name="Collection Name", required=True), - StrInput(name="host", display_name="Host", value="localhost", advanced=True), - IntInput(name="port", display_name="Port", value=6333, advanced=True), - IntInput(name="grpc_port", display_name="gRPC Port", value=6334, advanced=True), - SecretStrInput(name="api_key", display_name="API Key", advanced=True), - StrInput(name="prefix", display_name="Prefix", advanced=True), - IntInput(name="timeout", display_name="Timeout", advanced=True), - StrInput(name="path", display_name="Path", advanced=True), - StrInput(name="url", display_name="URL", advanced=True), - DropdownInput( - name="distance_func", - display_name="Distance Function", - options=["Cosine", "Euclidean", "Dot Product"], - value="Cosine", - advanced=True, - ), - StrInput(name="content_payload_key", display_name="Content Payload Key", value="page_content", advanced=True), - StrInput(name="metadata_payload_key", display_name="Metadata Payload Key", value="metadata", advanced=True), - MultilineInput(name="search_query", display_name="Search Query"), - DataInput( - name="ingest_data", - display_name="Ingest Data", - is_list=True, - ), - HandleInput(name="embedding", display_name="Embedding", input_types=["Embeddings"]), - IntInput( - name="number_of_results", - display_name="Number of Results", - info="Number of results to return.", - value=4, - advanced=True, - ), - ] - - @check_cached_vector_store - def build_vector_store(self) -> Qdrant: - qdrant_kwargs = { - "collection_name": self.collection_name, - "content_payload_key": self.content_payload_key, - "metadata_payload_key": self.metadata_payload_key, - } - - server_kwargs = { - "host": self.host if self.host else None, - "port": int(self.port), # Ensure port is an integer - "grpc_port": int(self.grpc_port), # Ensure grpc_port is an integer - "api_key": self.api_key, -
"prefix": self.prefix, - "timeout": int(self.timeout) if self.timeout else None, # Ensure timeout is an integer - "path": self.path if self.path else None, - "url": self.url if self.url else None, - } - - server_kwargs = {k: v for k, v in server_kwargs.items() if v is not None} - documents = [] - - for _input in self.ingest_data or []: - if isinstance(_input, Data): - documents.append(_input.to_lc_document()) - else: - documents.append(_input) - - if not isinstance(self.embedding, Embeddings): - raise ValueError("Invalid embedding object") - - if documents: - qdrant = Qdrant.from_documents(documents, embedding=self.embedding, **qdrant_kwargs) - else: - from qdrant_client import QdrantClient - - client = QdrantClient(**server_kwargs) - qdrant = Qdrant(embeddings=self.embedding, client=client, **qdrant_kwargs) - - return qdrant - - def search_documents(self) -> List[Data]: - vector_store = self.build_vector_store() - - if self.search_query and isinstance(self.search_query, str) and self.search_query.strip(): - docs = vector_store.similarity_search( - query=self.search_query, - k=self.number_of_results, - ) - - data = docs_to_data(docs) - self.status = data - return data - else: - return [] diff --git a/src/backend/base/langflow/components/vectorstores/Redis.py b/src/backend/base/langflow/components/vectorstores/Redis.py deleted file mode 100644 index 3e38efd309a6..000000000000 --- a/src/backend/base/langflow/components/vectorstores/Redis.py +++ /dev/null @@ -1,95 +0,0 @@ -from typing import List - -from langchain_community.vectorstores.redis import Redis - -from langflow.base.vectorstores.model import LCVectorStoreComponent, check_cached_vector_store -from langflow.helpers.data import docs_to_data -from langflow.io import HandleInput, IntInput, StrInput, SecretStrInput, DataInput, MultilineInput -from langflow.schema import Data -from langchain.text_splitter import CharacterTextSplitter - - -class RedisVectorStoreComponent(LCVectorStoreComponent): - """ - A custom component for implementing a Vector Store using Redis.
- """ - - display_name: str = "Redis" - description: str = "Implementation of Vector Store using Redis" - documentation = "https://python.langchain.com/docs/integrations/vectorstores/redis" - name = "Redis" - - inputs = [ - SecretStrInput(name="redis_server_url", display_name="Redis Server Connection String", required=True), - StrInput( - name="redis_index_name", - display_name="Redis Index", - ), - StrInput(name="code", display_name="Code", advanced=True), - StrInput( - name="schema", - display_name="Schema", - ), - MultilineInput(name="search_query", display_name="Search Query"), - DataInput( - name="ingest_data", - display_name="Ingest Data", - is_list=True, - ), - IntInput( - name="number_of_results", - display_name="Number of Results", - info="Number of results to return.", - value=4, - advanced=True, - ), - HandleInput(name="embedding", display_name="Embedding", input_types=["Embeddings"]), - ] - - @check_cached_vector_store - def build_vector_store(self) -> Redis: - documents = [] - - for _input in self.ingest_data or []: - if isinstance(_input, Data): - documents.append(_input.to_lc_document()) - else: - documents.append(_input) - with open("docuemnts.txt", "w") as f: - f.write(str(documents)) - - if not documents: - if self.schema is None: - raise ValueError("If no documents are provided, a schema must be provided.") - redis_vs = Redis.from_existing_index( - embedding=self.embedding, - index_name=self.redis_index_name, - schema=self.schema, - key_prefix=None, - redis_url=self.redis_server_url, - ) - else: - text_splitter = CharacterTextSplitter(chunk_size=1000, chunk_overlap=0) - docs = text_splitter.split_documents(documents) - redis_vs = Redis.from_documents( - documents=docs, - embedding=self.embedding, - redis_url=self.redis_server_url, - index_name=self.redis_index_name, - ) - return redis_vs - - def search_documents(self) -> List[Data]: - vector_store = self.build_vector_store() - - if self.search_query and isinstance(self.search_query, str) and self.search_query.strip(): - docs = vector_store.similarity_search( - query=self.search_query, - k=self.number_of_results, - ) - - data = docs_to_data(docs) - self.status = data - return data - else: - return [] diff --git a/src/backend/base/langflow/components/vectorstores/SupabaseVectorStore.py b/src/backend/base/langflow/components/vectorstores/SupabaseVectorStore.py deleted file mode 100644 index a03fc1fac2b1..000000000000 --- a/src/backend/base/langflow/components/vectorstores/SupabaseVectorStore.py +++ /dev/null @@ -1,82 +0,0 @@ -from typing import List - -from langchain_community.vectorstores import SupabaseVectorStore -from supabase.client import Client, create_client - -from langflow.base.vectorstores.model import LCVectorStoreComponent, check_cached_vector_store -from langflow.helpers.data import docs_to_data -from langflow.io import HandleInput, IntInput, StrInput, SecretStrInput, DataInput, MultilineInput -from langflow.schema import Data - - -class SupabaseVectorStoreComponent(LCVectorStoreComponent): - display_name = "Supabase" - description = "Supabase Vector Store with search capabilities" - documentation = "https://python.langchain.com/v0.2/docs/integrations/vectorstores/supabase/" - name = "SupabaseVectorStore" - icon = "Supabase" - - inputs = [ - StrInput(name="supabase_url", display_name="Supabase URL", required=True), - SecretStrInput(name="supabase_service_key", display_name="Supabase Service Key", required=True), - StrInput(name="table_name", display_name="Table Name", advanced=True), - 
StrInput(name="query_name", display_name="Query Name"), - MultilineInput(name="search_query", display_name="Search Query"), - DataInput( - name="ingest_data", - display_name="Ingest Data", - is_list=True, - ), - HandleInput(name="embedding", display_name="Embedding", input_types=["Embeddings"]), - IntInput( - name="number_of_results", - display_name="Number of Results", - info="Number of results to return.", - value=4, - advanced=True, - ), - ] - - @check_cached_vector_store - def build_vector_store(self) -> SupabaseVectorStore: - supabase: Client = create_client(self.supabase_url, supabase_key=self.supabase_service_key) - - documents = [] - for _input in self.ingest_data or []: - if isinstance(_input, Data): - documents.append(_input.to_lc_document()) - else: - documents.append(_input) - - if documents: - supabase_vs = SupabaseVectorStore.from_documents( - documents=documents, - embedding=self.embedding, - query_name=self.query_name, - client=supabase, - table_name=self.table_name, - ) - else: - supabase_vs = SupabaseVectorStore( - client=supabase, - embedding=self.embedding, - table_name=self.table_name, - query_name=self.query_name, - ) - - return supabase_vs - - def search_documents(self) -> List[Data]: - vector_store = self.build_vector_store() - - if self.search_query and isinstance(self.search_query, str) and self.search_query.strip(): - docs = vector_store.similarity_search( - query=self.search_query, - k=self.number_of_results, - ) - - data = docs_to_data(docs) - self.status = data - return data - else: - return [] diff --git a/src/backend/base/langflow/components/vectorstores/Upstash.py b/src/backend/base/langflow/components/vectorstores/Upstash.py deleted file mode 100644 index 45d3e089a898..000000000000 --- a/src/backend/base/langflow/components/vectorstores/Upstash.py +++ /dev/null @@ -1,131 +0,0 @@ -from typing import List - -from langchain_community.vectorstores import UpstashVectorStore - -from langflow.base.vectorstores.model import LCVectorStoreComponent, check_cached_vector_store -from langflow.helpers.data import docs_to_data -from langflow.io import ( - HandleInput, - IntInput, - StrInput, - SecretStrInput, - DataInput, - MultilineInput, -) -from langflow.schema import Data - - -class UpstashVectorStoreComponent(LCVectorStoreComponent): - display_name = "Upstash" - description = "Upstash Vector Store with search capabilities" - documentation = "https://python.langchain.com/v0.2/docs/integrations/vectorstores/upstash/" - name = "Upstash" - icon = "Upstash" - - inputs = [ - StrInput( - name="index_url", - display_name="Index URL", - info="The URL of the Upstash index.", - required=True, - ), - SecretStrInput( - name="index_token", - display_name="Index Token", - info="The token for the Upstash index.", - required=True, - ), - StrInput( - name="text_key", - display_name="Text Key", - info="The key in the record to use as text.", - value="text", - advanced=True, - ), - StrInput( - name="namespace", - display_name="Namespace", - info="Leave empty for default namespace.", - ), - MultilineInput(name="search_query", display_name="Search Query"), - MultilineInput( - name="metadata_filter", - display_name="Metadata Filter", - info="Filters documents by metadata. 
Look at the documentation for more information.", - ), - DataInput( - name="ingest_data", - display_name="Ingest Data", - is_list=True, - ), - HandleInput( - name="embedding", - display_name="Embedding", - input_types=["Embeddings"], - info="To use Upstash's embeddings, don't provide an embedding.", - ), - IntInput( - name="number_of_results", - display_name="Number of Results", - info="Number of results to return.", - value=4, - advanced=True, - ), - ] - - @check_cached_vector_store - def build_vector_store(self) -> UpstashVectorStore: - use_upstash_embedding = self.embedding is None - - documents = [] - for _input in self.ingest_data or []: - if isinstance(_input, Data): - documents.append(_input.to_lc_document()) - else: - documents.append(_input) - - if documents: - if use_upstash_embedding: - upstash_vs = UpstashVectorStore( - embedding=use_upstash_embedding, - text_key=self.text_key, - index_url=self.index_url, - index_token=self.index_token, - namespace=self.namespace, - ) - upstash_vs.add_documents(documents) - else: - upstash_vs = UpstashVectorStore.from_documents( - documents=documents, - embedding=self.embedding, - text_key=self.text_key, - index_url=self.index_url, - index_token=self.index_token, - namespace=self.namespace, - ) - else: - upstash_vs = UpstashVectorStore( - embedding=self.embedding or use_upstash_embedding, - text_key=self.text_key, - index_url=self.index_url, - index_token=self.index_token, - namespace=self.namespace, - ) - - return upstash_vs - - def search_documents(self) -> List[Data]: - vector_store = self.build_vector_store() - - if self.search_query and isinstance(self.search_query, str) and self.search_query.strip(): - docs = vector_store.similarity_search( - query=self.search_query, - k=self.number_of_results, - filter=self.metadata_filter, - ) - - data = docs_to_data(docs) - self.status = data - return data - else: - return [] diff --git a/src/backend/base/langflow/components/vectorstores/Vectara.py b/src/backend/base/langflow/components/vectorstores/Vectara.py deleted file mode 100644 index f077f11ebb7f..000000000000 --- a/src/backend/base/langflow/components/vectorstores/Vectara.py +++ /dev/null @@ -1,110 +0,0 @@ -from typing import TYPE_CHECKING, List - -from langchain_community.vectorstores import Vectara -from loguru import logger - -from langflow.base.vectorstores.model import LCVectorStoreComponent, check_cached_vector_store -from langflow.helpers.data import docs_to_data -from langflow.io import HandleInput, IntInput, MessageTextInput, SecretStrInput, StrInput -from langflow.schema import Data - -if TYPE_CHECKING: - from langchain_community.vectorstores import Vectara - - -class VectaraVectorStoreComponent(LCVectorStoreComponent): - """ - Vectara Vector Store with search capabilities - """ - - display_name: str = "Vectara" - description: str = "Vectara Vector Store with search capabilities" - documentation = "https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/vectara" - name = "Vectara" - icon = "Vectara" - - inputs = [ - StrInput(name="vectara_customer_id", display_name="Vectara Customer ID", required=True), - StrInput(name="vectara_corpus_id", display_name="Vectara Corpus ID", required=True), - SecretStrInput(name="vectara_api_key", display_name="Vectara API Key", required=True), - HandleInput( - name="embedding", - display_name="Embedding", - input_types=["Embeddings"], - ), - HandleInput( - name="ingest_data", - display_name="Ingest Data", - input_types=["Document", "Data"], - is_list=True, - ), - 
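# Clarifying sketch of the Upstash branch above: LangChain's UpstashVectorStore
# accepts either an Embeddings instance or a boolean for `embedding`, and
# passing True delegates embedding to Upstash's built-in models. URL and token
# values are placeholders.
from langchain_community.vectorstores import UpstashVectorStore

def make_upstash_store(embedding, index_url: str, index_token: str) -> UpstashVectorStore:
    return UpstashVectorStore(
        embedding=embedding if embedding is not None else True,  # True => server-side embeddings
        index_url=index_url,
        index_token=index_token,
    )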
MessageTextInput( - name="search_query", - display_name="Search Query", - ), - IntInput( - name="number_of_results", - display_name="Number of Results", - info="Number of results to return.", - value=4, - advanced=True, - ), - ] - - @check_cached_vector_store - def build_vector_store(self) -> "Vectara": - """ - Builds the Vectara object. - """ - try: - from langchain_community.vectorstores import Vectara - except ImportError: - raise ImportError("Could not import Vectara. Please install it with `pip install langchain-community`.") - - vectara = Vectara( - vectara_customer_id=self.vectara_customer_id, - vectara_corpus_id=self.vectara_corpus_id, - vectara_api_key=self.vectara_api_key, - ) - - self._add_documents_to_vector_store(vectara) - return vectara - - def _add_documents_to_vector_store(self, vector_store: "Vectara") -> None: - """ - Adds documents to the Vector Store. - """ - if not self.ingest_data: - self.status = "No documents to add to Vectara" - return - - documents = [] - for _input in self.ingest_data or []: - if isinstance(_input, Data): - documents.append(_input.to_lc_document()) - else: - documents.append(_input) - - if documents: - logger.debug(f"Adding {len(documents)} documents to Vectara.") - vector_store.add_documents(documents) - self.status = f"Added {len(documents)} documents to Vectara" - else: - logger.debug("No documents to add to Vectara.") - self.status = "No valid documents to add to Vectara" - - def search_documents(self) -> List[Data]: - vector_store = self.build_vector_store() - - if self.search_query and isinstance(self.search_query, str) and self.search_query.strip(): - docs = vector_store.similarity_search( - query=self.search_query, - k=self.number_of_results, - ) - - data = docs_to_data(docs) - self.status = f"Found {len(data)} results for the query: {self.search_query}" - return data - else: - self.status = "No search query provided" - return [] diff --git a/src/backend/base/langflow/components/vectorstores/Weaviate.py b/src/backend/base/langflow/components/vectorstores/Weaviate.py deleted file mode 100644 index 94266720cb43..000000000000 --- a/src/backend/base/langflow/components/vectorstores/Weaviate.py +++ /dev/null @@ -1,86 +0,0 @@ -from typing import List - -import weaviate # type: ignore -from langchain_community.vectorstores import Weaviate - -from langflow.base.vectorstores.model import LCVectorStoreComponent, check_cached_vector_store -from langflow.helpers.data import docs_to_data -from langflow.io import BoolInput, HandleInput, IntInput, StrInput, SecretStrInput, DataInput, MultilineInput -from langflow.schema import Data - - -class WeaviateVectorStoreComponent(LCVectorStoreComponent): - display_name = "Weaviate" - description = "Weaviate Vector Store with search capabilities" - documentation = "https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/weaviate" - name = "Weaviate" - icon = "Weaviate" - - inputs = [ - StrInput(name="url", display_name="Weaviate URL", value="http://localhost:8080", required=True), - SecretStrInput(name="api_key", display_name="API Key", required=False), - StrInput(name="index_name", display_name="Index Name", required=True), - StrInput(name="text_key", display_name="Text Key", value="text", advanced=True), - MultilineInput(name="search_query", display_name="Search Query"), - DataInput( - name="ingest_data", - display_name="Ingest Data", - is_list=True, - ), - HandleInput(name="embedding", display_name="Embedding", input_types=["Embeddings"]), - IntInput( - name="number_of_results", - 
display_name="Number of Results", - info="Number of results to return.", - value=4, - advanced=True, - ), - BoolInput(name="search_by_text", display_name="Search By Text", advanced=True), - ] - - @check_cached_vector_store - def build_vector_store(self) -> Weaviate: - if self.api_key: - auth_config = weaviate.AuthApiKey(api_key=self.api_key) - client = weaviate.Client(url=self.url, auth_client_secret=auth_config) - else: - client = weaviate.Client(url=self.url) - - documents = [] - for _input in self.ingest_data or []: - if isinstance(_input, Data): - documents.append(_input.to_lc_document()) - else: - documents.append(_input) - - if documents and self.embedding: - return Weaviate.from_documents( - client=client, - index_name=self.index_name, - documents=documents, - embedding=self.embedding, - by_text=self.search_by_text, - ) - - return Weaviate( - client=client, - index_name=self.index_name, - text_key=self.text_key, - embedding=self.embedding, - by_text=self.search_by_text, - ) - - def search_documents(self) -> List[Data]: - vector_store = self.build_vector_store() - - if self.search_query and isinstance(self.search_query, str) and self.search_query.strip(): - docs = vector_store.similarity_search( - query=self.search_query, - k=self.number_of_results, - ) - - data = docs_to_data(docs) - self.status = data - return data - else: - return [] diff --git a/src/backend/base/langflow/components/vectorstores/__init__.py b/src/backend/base/langflow/components/vectorstores/__init__.py index e69de29bb2d1..7ac64d890636 100644 --- a/src/backend/base/langflow/components/vectorstores/__init__.py +++ b/src/backend/base/langflow/components/vectorstores/__init__.py @@ -0,0 +1,49 @@ +from .astradb import AstraVectorStoreComponent +from .astradb_graph import AstraGraphVectorStoreComponent +from .cassandra import CassandraVectorStoreComponent +from .cassandra_graph import CassandraGraphVectorStoreComponent +from .chroma import ChromaVectorStoreComponent +from .clickhouse import ClickhouseVectorStoreComponent +from .couchbase import CouchbaseVectorStoreComponent +from .elasticsearch import ElasticsearchVectorStoreComponent +from .faiss import FaissVectorStoreComponent +from .hcd import HCDVectorStoreComponent +from .milvus import MilvusVectorStoreComponent +from .mongodb_atlas import MongoVectorStoreComponent +from .opensearch import OpenSearchVectorStoreComponent +from .pgvector import PGVectorStoreComponent +from .pinecone import PineconeVectorStoreComponent +from .qdrant import QdrantVectorStoreComponent +from .redis import RedisVectorStoreComponent +from .supabase import SupabaseVectorStoreComponent +from .upstash import UpstashVectorStoreComponent +from .vectara import VectaraVectorStoreComponent +from .vectara_rag import VectaraRagComponent +from .vectara_self_query import VectaraSelfQueryRetriverComponent +from .weaviate import WeaviateVectorStoreComponent + +__all__ = [ + "AstraGraphVectorStoreComponent", + "AstraVectorStoreComponent", + "CassandraGraphVectorStoreComponent", + "CassandraVectorStoreComponent", + "ChromaVectorStoreComponent", + "ClickhouseVectorStoreComponent", + "CouchbaseVectorStoreComponent", + "ElasticsearchVectorStoreComponent", + "FaissVectorStoreComponent", + "HCDVectorStoreComponent", + "MilvusVectorStoreComponent", + "MongoVectorStoreComponent", + "OpenSearchVectorStoreComponent", + "PGVectorStoreComponent", + "PineconeVectorStoreComponent", + "QdrantVectorStoreComponent", + "RedisVectorStoreComponent", + "SupabaseVectorStoreComponent", + "UpstashVectorStoreComponent", + 
"VectaraVectorStoreComponent", + "VectaraRagComponent", + "VectaraSelfQueryRetriverComponent", + "WeaviateVectorStoreComponent", +] diff --git a/src/backend/base/langflow/components/vectorstores/astradb.py b/src/backend/base/langflow/components/vectorstores/astradb.py new file mode 100644 index 000000000000..13797adfaa45 --- /dev/null +++ b/src/backend/base/langflow/components/vectorstores/astradb.py @@ -0,0 +1,528 @@ +import os + +import orjson +from astrapy.admin import parse_api_endpoint + +from langflow.base.vectorstores.model import LCVectorStoreComponent, check_cached_vector_store +from langflow.helpers import docs_to_data +from langflow.inputs import DictInput, FloatInput, MessageTextInput +from langflow.io import ( + BoolInput, + DataInput, + DropdownInput, + HandleInput, + IntInput, + MultilineInput, + SecretStrInput, + StrInput, +) +from langflow.schema import Data + + +class AstraVectorStoreComponent(LCVectorStoreComponent): + display_name: str = "Astra DB" + description: str = "Implementation of Vector Store using Astra DB with search capabilities" + documentation: str = "https://docs.langflow.org/starter-projects-vector-store-rag" + name = "AstraDB" + icon: str = "AstraDB" + + VECTORIZE_PROVIDERS_MAPPING = { + "Azure OpenAI": ["azureOpenAI", ["text-embedding-3-small", "text-embedding-3-large", "text-embedding-ada-002"]], + "Hugging Face - Dedicated": ["huggingfaceDedicated", ["endpoint-defined-model"]], + "Hugging Face - Serverless": [ + "huggingface", + [ + "sentence-transformers/all-MiniLM-L6-v2", + "intfloat/multilingual-e5-large", + "intfloat/multilingual-e5-large-instruct", + "BAAI/bge-small-en-v1.5", + "BAAI/bge-base-en-v1.5", + "BAAI/bge-large-en-v1.5", + ], + ], + "Jina AI": [ + "jinaAI", + [ + "jina-embeddings-v2-base-en", + "jina-embeddings-v2-base-de", + "jina-embeddings-v2-base-es", + "jina-embeddings-v2-base-code", + "jina-embeddings-v2-base-zh", + ], + ], + "Mistral AI": ["mistral", ["mistral-embed"]], + "NVIDIA": ["nvidia", ["NV-Embed-QA"]], + "OpenAI": ["openai", ["text-embedding-3-small", "text-embedding-3-large", "text-embedding-ada-002"]], + "Upstage": ["upstageAI", ["solar-embedding-1-large"]], + "Voyage AI": [ + "voyageAI", + ["voyage-large-2-instruct", "voyage-law-2", "voyage-code-2", "voyage-large-2", "voyage-2"], + ], + } + + inputs = [ + SecretStrInput( + name="token", + display_name="Astra DB Application Token", + info="Authentication token for accessing Astra DB.", + value="ASTRA_DB_APPLICATION_TOKEN", + required=True, + advanced=os.getenv("ASTRA_ENHANCED", "false").lower() == "true", + ), + SecretStrInput( + name="api_endpoint", + display_name="Database" if os.getenv("ASTRA_ENHANCED", "false").lower() == "true" else "API Endpoint", + info="API endpoint URL for the Astra DB service.", + value="ASTRA_DB_API_ENDPOINT", + required=True, + ), + StrInput( + name="collection_name", + display_name="Collection Name", + info="The name of the collection within Astra DB where the vectors will be stored.", + required=True, + ), + MultilineInput( + name="search_input", + display_name="Search Input", + ), + DataInput( + name="ingest_data", + display_name="Ingest Data", + is_list=True, + ), + StrInput( + name="namespace", + display_name="Namespace", + info="Optional namespace within Astra DB to use for the collection.", + advanced=True, + ), + DropdownInput( + name="embedding_choice", + display_name="Embedding Model or Astra Vectorize", + info="Determines whether to use Astra Vectorize for the collection.", + options=["Embedding Model", "Astra Vectorize"], + 
real_time_refresh=True, + value="Embedding Model", + ), + HandleInput( + name="embedding", + display_name="Embedding Model", + input_types=["Embeddings"], + info="Allows an embedding model configuration.", + ), + DropdownInput( + name="metric", + display_name="Metric", + info="Optional distance metric for vector comparisons in the vector store.", + options=["cosine", "dot_product", "euclidean"], + value="cosine", + advanced=True, + ), + IntInput( + name="batch_size", + display_name="Batch Size", + info="Optional number of data to process in a single batch.", + advanced=True, + ), + IntInput( + name="bulk_insert_batch_concurrency", + display_name="Bulk Insert Batch Concurrency", + info="Optional concurrency level for bulk insert operations.", + advanced=True, + ), + IntInput( + name="bulk_insert_overwrite_concurrency", + display_name="Bulk Insert Overwrite Concurrency", + info="Optional concurrency level for bulk insert operations that overwrite existing data.", + advanced=True, + ), + IntInput( + name="bulk_delete_concurrency", + display_name="Bulk Delete Concurrency", + info="Optional concurrency level for bulk delete operations.", + advanced=True, + ), + DropdownInput( + name="setup_mode", + display_name="Setup Mode", + info="Configuration mode for setting up the vector store, with options like 'Sync' or 'Off'.", + options=["Sync", "Off"], + advanced=True, + value="Sync", + ), + BoolInput( + name="pre_delete_collection", + display_name="Pre Delete Collection", + info="Boolean flag to determine whether to delete the collection before creating a new one.", + advanced=True, + ), + StrInput( + name="metadata_indexing_include", + display_name="Metadata Indexing Include", + info="Optional list of metadata fields to include in the indexing.", + is_list=True, + advanced=True, + ), + StrInput( + name="metadata_indexing_exclude", + display_name="Metadata Indexing Exclude", + info="Optional list of metadata fields to exclude from the indexing.", + is_list=True, + advanced=True, + ), + StrInput( + name="collection_indexing_policy", + display_name="Collection Indexing Policy", + info='Optional JSON string for the "indexing" field of the collection. ' + "See https://docs.datastax.com/en/astra-db-serverless/api-reference/collections.html#the-indexing-option", + advanced=True, + ), + IntInput( + name="number_of_results", + display_name="Number of Results", + info="Number of results to return.", + advanced=True, + value=4, + ), + DropdownInput( + name="search_type", + display_name="Search Type", + info="Search type to use", + options=["Similarity", "Similarity with score threshold", "MMR (Max Marginal Relevance)"], + value="Similarity", + advanced=True, + ), + FloatInput( + name="search_score_threshold", + display_name="Search Score Threshold", + info="Minimum similarity score threshold for search results. 
" + "(when using 'Similarity with score threshold')", + value=0, + advanced=True, + ), + DictInput( + name="search_filter", + display_name="Search Metadata Filter", + info="Optional dictionary of filters to apply to the search query.", + advanced=True, + is_list=True, + ), + ] + + def del_fields(self, build_config, field_list): + for field in field_list: + if field in build_config: + del build_config[field] + + return build_config + + def insert_in_dict(self, build_config, field_name, new_parameters): + # Insert the new key-value pair after the found key + for new_field_name, new_parameter in new_parameters.items(): + # Get all the items as a list of tuples (key, value) + items = list(build_config.items()) + + # Find the index of the key to insert after + idx = len(items) + for i, (key, _) in enumerate(items): + if key == field_name: + idx = i + 1 + break + + items.insert(idx, (new_field_name, new_parameter)) + + # Clear the original dictionary and update with the modified items + build_config.clear() + build_config.update(items) + + return build_config + + def update_build_config(self, build_config: dict, field_value: str, field_name: str | None = None): + if field_name == "embedding_choice": + if field_value == "Astra Vectorize": + self.del_fields(build_config, ["embedding"]) + + new_parameter = DropdownInput( + name="embedding_provider", + display_name="Embedding Provider", + options=self.VECTORIZE_PROVIDERS_MAPPING.keys(), + value="", + required=True, + real_time_refresh=True, + ).to_dict() + + self.insert_in_dict(build_config, "embedding_choice", {"embedding_provider": new_parameter}) + else: + self.del_fields( + build_config, + [ + "embedding_provider", + "model", + "z_01_model_parameters", + "z_02_api_key_name", + "z_03_provider_api_key", + "z_04_authentication", + ], + ) + + new_parameter = HandleInput( + name="embedding", + display_name="Embedding Model", + input_types=["Embeddings"], + info="Allows an embedding model configuration.", + ).to_dict() + + self.insert_in_dict(build_config, "embedding_choice", {"embedding": new_parameter}) + + elif field_name == "embedding_provider": + self.del_fields( + build_config, + ["model", "z_01_model_parameters", "z_02_api_key_name", "z_03_provider_api_key", "z_04_authentication"], + ) + + model_options = self.VECTORIZE_PROVIDERS_MAPPING[field_value][1] + + new_parameter = DropdownInput( + name="model", + display_name="Model", + info="The embedding model to use for the selected provider. Each provider has a different set of " + "models available (full list at " + "https://docs.datastax.com/en/astra-db-serverless/databases/embedding-generation.html):\n\n" + f"{', '.join(model_options)}", + options=model_options, + value=None, + required=True, + real_time_refresh=True, + ).to_dict() + + self.insert_in_dict(build_config, "embedding_provider", {"model": new_parameter}) + + elif field_name == "model": + self.del_fields( + build_config, + ["z_01_model_parameters", "z_02_api_key_name", "z_03_provider_api_key", "z_04_authentication"], + ) + + new_parameter_1 = DictInput( + name="z_01_model_parameters", + display_name="Model Parameters", + is_list=True, + ).to_dict() + + new_parameter_2 = MessageTextInput( + name="z_02_api_key_name", + display_name="API Key Name", + info="The name of the embeddings provider API key stored on Astra. 
" + "If set, it will override the 'ProviderKey' in the authentication parameters.", + ).to_dict() + + new_parameter_3 = SecretStrInput( + load_from_db=False, + name="z_03_provider_api_key", + display_name="Provider API Key", + info="An alternative to the Astra Authentication that passes an API key for the provider " + "with each request to Astra DB. " + "This may be used when Vectorize is configured for the collection, " + "but no corresponding provider secret is stored within Astra's key management system.", + ).to_dict() + + new_parameter_4 = DictInput( + name="z_04_authentication", + display_name="Authentication Parameters", + is_list=True, + ).to_dict() + + self.insert_in_dict( + build_config, + "model", + { + "z_01_model_parameters": new_parameter_1, + "z_02_api_key_name": new_parameter_2, + "z_03_provider_api_key": new_parameter_3, + "z_04_authentication": new_parameter_4, + }, + ) + + return build_config + + def build_vectorize_options(self, **kwargs): + for attribute in [ + "embedding_provider", + "model", + "z_01_model_parameters", + "z_02_api_key_name", + "z_03_provider_api_key", + "z_04_authentication", + ]: + if not hasattr(self, attribute): + setattr(self, attribute, None) + + # Fetch values from kwargs if any self.* attributes are None + provider_value = self.VECTORIZE_PROVIDERS_MAPPING.get(self.embedding_provider, [None])[0] or kwargs.get( + "embedding_provider" + ) + model_name = self.model or kwargs.get("model") + authentication = {**(self.z_04_authentication or kwargs.get("z_04_authentication", {}))} + parameters = self.z_01_model_parameters or kwargs.get("z_01_model_parameters", {}) + + # Set the API key name if provided + api_key_name = self.z_02_api_key_name or kwargs.get("z_02_api_key_name") + provider_key = self.z_03_provider_api_key or kwargs.get("z_03_provider_api_key") + if api_key_name: + authentication["providerKey"] = api_key_name + + # Set authentication and parameters to None if no values are provided + if not authentication: + authentication = None + if not parameters: + parameters = None + + return { + # must match astrapy.info.CollectionVectorServiceOptions + "collection_vector_service_options": { + "provider": provider_value, + "modelName": model_name, + "authentication": authentication, + "parameters": parameters, + }, + "collection_embedding_api_key": provider_key, + } + + @check_cached_vector_store + def build_vector_store(self, vectorize_options=None): + try: + from langchain_astradb import AstraDBVectorStore + from langchain_astradb.utils.astradb import SetupMode + except ImportError as e: + msg = ( + "Could not import langchain Astra DB integration package. " + "Please install it with `pip install langchain-astradb`." 
+ ) + raise ImportError(msg) from e + + try: + if not self.setup_mode: + self.setup_mode = self._inputs["setup_mode"].options[0] + + setup_mode_value = SetupMode[self.setup_mode.upper()] + except KeyError as e: + msg = f"Invalid setup mode: {self.setup_mode}" + raise ValueError(msg) from e + + if self.embedding_choice == "Embedding Model": + embedding_dict = {"embedding": self.embedding} + else: + from astrapy.info import CollectionVectorServiceOptions + + # Fetch values from kwargs if any self.* attributes are None + dict_options = vectorize_options or self.build_vectorize_options() + + # Set the embedding dictionary + embedding_dict = { + "collection_vector_service_options": CollectionVectorServiceOptions.from_dict( + dict_options.get("collection_vector_service_options") + ), + "collection_embedding_api_key": dict_options.get("collection_embedding_api_key"), + } + + try: + vector_store = AstraDBVectorStore( + collection_name=self.collection_name, + token=self.token, + api_endpoint=self.api_endpoint, + namespace=self.namespace or None, + environment=parse_api_endpoint(self.api_endpoint).environment if self.api_endpoint else None, + metric=self.metric or None, + batch_size=self.batch_size or None, + bulk_insert_batch_concurrency=self.bulk_insert_batch_concurrency or None, + bulk_insert_overwrite_concurrency=self.bulk_insert_overwrite_concurrency or None, + bulk_delete_concurrency=self.bulk_delete_concurrency or None, + setup_mode=setup_mode_value, + pre_delete_collection=self.pre_delete_collection, + metadata_indexing_include=[s for s in self.metadata_indexing_include if s] or None, + metadata_indexing_exclude=[s for s in self.metadata_indexing_exclude if s] or None, + collection_indexing_policy=orjson.dumps(self.collection_indexing_policy) + if self.collection_indexing_policy + else None, + **embedding_dict, + ) + except Exception as e: + msg = f"Error initializing AstraDBVectorStore: {e}" + raise ValueError(msg) from e + + self._add_documents_to_vector_store(vector_store) + + return vector_store + + def _add_documents_to_vector_store(self, vector_store) -> None: + documents = [] + for _input in self.ingest_data or []: + if isinstance(_input, Data): + documents.append(_input.to_lc_document()) + else: + msg = "Vector Store Inputs must be Data objects." 
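+ # Only Data inputs can be converted to LangChain Documents via to_lc_document(), so anything else is rejected early.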
+ raise TypeError(msg) + + if documents: + self.log(f"Adding {len(documents)} documents to the Vector Store.") + try: + vector_store.add_documents(documents) + except Exception as e: + msg = f"Error adding documents to AstraDBVectorStore: {e}" + raise ValueError(msg) from e + else: + self.log("No documents to add to the Vector Store.") + + def _map_search_type(self) -> str: + if self.search_type == "Similarity with score threshold": + return "similarity_score_threshold" + if self.search_type == "MMR (Max Marginal Relevance)": + return "mmr" + return "similarity" + + def _build_search_args(self): + args = { + "k": self.number_of_results, + "score_threshold": self.search_score_threshold, + } + + if self.search_filter: + clean_filter = {k: v for k, v in self.search_filter.items() if k and v} + if len(clean_filter) > 0: + args["filter"] = clean_filter + return args + + def search_documents(self, vector_store=None) -> list[Data]: + if not vector_store: + vector_store = self.build_vector_store() + + self.log(f"Search input: {self.search_input}") + self.log(f"Search type: {self.search_type}") + self.log(f"Number of results: {self.number_of_results}") + + if self.search_input and isinstance(self.search_input, str) and self.search_input.strip(): + try: + search_type = self._map_search_type() + search_args = self._build_search_args() + + docs = vector_store.search(query=self.search_input, search_type=search_type, **search_args) + except Exception as e: + msg = f"Error performing search in AstraDBVectorStore: {e}" + raise ValueError(msg) from e + + self.log(f"Retrieved documents: {len(docs)}") + + data = docs_to_data(docs) + self.log(f"Converted documents to data: {len(data)}") + self.status = data + return data + self.log("No search input provided. Skipping search.") + return [] + + def get_retriever_kwargs(self): + search_args = self._build_search_args() + return { + "search_type": self._map_search_type(), + "search_kwargs": search_args, + } diff --git a/src/backend/base/langflow/components/vectorstores/astradb_graph.py b/src/backend/base/langflow/components/vectorstores/astradb_graph.py new file mode 100644 index 000000000000..73a896c4f5cc --- /dev/null +++ b/src/backend/base/langflow/components/vectorstores/astradb_graph.py @@ -0,0 +1,307 @@ +import os + +import orjson +from astrapy.admin import parse_api_endpoint +from loguru import logger + +from langflow.base.vectorstores.model import LCVectorStoreComponent, check_cached_vector_store +from langflow.helpers import docs_to_data +from langflow.inputs import DictInput, FloatInput +from langflow.io import ( + BoolInput, + DataInput, + DropdownInput, + HandleInput, + IntInput, + MultilineInput, + SecretStrInput, + StrInput, +) +from langflow.schema import Data + + +class AstraGraphVectorStoreComponent(LCVectorStoreComponent): + display_name: str = "Astra DB Graph" + description: str = "Implementation of Graph Vector Store using Astra DB" + documentation: str = "https://python.langchain.com/api_reference/astradb/graph_vectorstores/langchain_astradb.graph_vectorstores.AstraDBGraphVectorStore.html" + name = "AstraDBGraph" + icon: str = "AstraDB" + + inputs = [ + SecretStrInput( + name="token", + display_name="Astra DB Application Token", + info="Authentication token for accessing Astra DB.", + value="ASTRA_DB_APPLICATION_TOKEN", + required=True, + advanced=os.getenv("ASTRA_ENHANCED", "false").lower() == "true", + ), + SecretStrInput( + name="api_endpoint", + display_name="Database" if os.getenv("ASTRA_ENHANCED", "false").lower() == "true" else "API 
Endpoint", + info="API endpoint URL for the Astra DB service.", + value="ASTRA_DB_API_ENDPOINT", + required=True, + ), + StrInput( + name="collection_name", + display_name="Collection Name", + info="The name of the collection within Astra DB where the vectors will be stored.", + required=True, + ), + StrInput( + name="metadata_incoming_links_key", + display_name="Metadata incoming links key", + info="Metadata key used for incoming links.", + advanced=True, + ), + StrInput( + name="namespace", + display_name="Namespace", + info="Optional namespace within Astra DB to use for the collection.", + advanced=True, + ), + MultilineInput( + name="search_input", + display_name="Search Input", + ), + DataInput( + name="ingest_data", + display_name="Ingest Data", + is_list=True, + ), + StrInput( + name="namespace", + display_name="Namespace", + info="Optional namespace within Astra DB to use for the collection.", + advanced=True, + ), + HandleInput( + name="embedding", + display_name="Embedding Model", + input_types=["Embeddings"], + info="Embedding model.", + required=True, + ), + DropdownInput( + name="metric", + display_name="Metric", + info="Optional distance metric for vector comparisons in the vector store.", + options=["cosine", "dot_product", "euclidean"], + value="cosine", + advanced=True, + ), + IntInput( + name="batch_size", + display_name="Batch Size", + info="Optional number of data to process in a single batch.", + advanced=True, + ), + IntInput( + name="bulk_insert_batch_concurrency", + display_name="Bulk Insert Batch Concurrency", + info="Optional concurrency level for bulk insert operations.", + advanced=True, + ), + IntInput( + name="bulk_insert_overwrite_concurrency", + display_name="Bulk Insert Overwrite Concurrency", + info="Optional concurrency level for bulk insert operations that overwrite existing data.", + advanced=True, + ), + IntInput( + name="bulk_delete_concurrency", + display_name="Bulk Delete Concurrency", + info="Optional concurrency level for bulk delete operations.", + advanced=True, + ), + DropdownInput( + name="setup_mode", + display_name="Setup Mode", + info="Configuration mode for setting up the vector store, with options like 'Sync', or 'Off'.", + options=["Sync", "Off"], + advanced=True, + value="Sync", + ), + BoolInput( + name="pre_delete_collection", + display_name="Pre Delete Collection", + info="Boolean flag to determine whether to delete the collection before creating a new one.", + advanced=True, + value=False, + ), + StrInput( + name="metadata_indexing_include", + display_name="Metadata Indexing Include", + info="Optional list of metadata fields to include in the indexing.", + advanced=True, + is_list=True, + ), + StrInput( + name="metadata_indexing_exclude", + display_name="Metadata Indexing Exclude", + info="Optional list of metadata fields to exclude from the indexing.", + advanced=True, + is_list=True, + ), + StrInput( + name="collection_indexing_policy", + display_name="Collection Indexing Policy", + info='Optional JSON string for the "indexing" field of the collection. 
' + "See https://docs.datastax.com/en/astra-db-serverless/api-reference/collections.html#the-indexing-option", + advanced=True, + ), + IntInput( + name="number_of_results", + display_name="Number of Results", + info="Number of results to return.", + advanced=True, + value=4, + ), + DropdownInput( + name="search_type", + display_name="Search Type", + info="Search type to use", + options=["Similarity", "Similarity with score threshold", "MMR (Max Marginal Relevance)"], + value="Similarity", + advanced=True, + ), + FloatInput( + name="search_score_threshold", + display_name="Search Score Threshold", + info="Minimum similarity score threshold for search results. " + "(when using 'Similarity with score threshold')", + value=0, + advanced=True, + ), + DictInput( + name="search_filter", + display_name="Search Metadata Filter", + info="Optional dictionary of filters to apply to the search query.", + advanced=True, + is_list=True, + ), + ] + + @check_cached_vector_store + def build_vector_store(self): + try: + from langchain_astradb import AstraDBGraphVectorStore + from langchain_astradb.utils.astradb import SetupMode + except ImportError as e: + msg = ( + "Could not import langchain Astra DB integration package. " + "Please install it with `pip install langchain-astradb`." + ) + raise ImportError(msg) from e + + try: + if not self.setup_mode: + self.setup_mode = self._inputs["setup_mode"].options[0] + + setup_mode_value = SetupMode[self.setup_mode.upper()] + except KeyError as e: + msg = f"Invalid setup mode: {self.setup_mode}" + raise ValueError(msg) from e + + try: + vector_store = AstraDBGraphVectorStore( + embedding=self.embedding, + collection_name=self.collection_name, + metadata_incoming_links_key=self.metadata_incoming_links_key or "incoming_links", + token=self.token, + api_endpoint=self.api_endpoint, + namespace=self.namespace or None, + environment=parse_api_endpoint(self.api_endpoint).environment if self.api_endpoint else None, + metric=self.metric or None, + batch_size=self.batch_size or None, + bulk_insert_batch_concurrency=self.bulk_insert_batch_concurrency or None, + bulk_insert_overwrite_concurrency=self.bulk_insert_overwrite_concurrency or None, + bulk_delete_concurrency=self.bulk_delete_concurrency or None, + setup_mode=setup_mode_value, + pre_delete_collection=self.pre_delete_collection, + metadata_indexing_include=[s for s in self.metadata_indexing_include if s] or None, + metadata_indexing_exclude=[s for s in self.metadata_indexing_exclude if s] or None, + collection_indexing_policy=orjson.dumps(self.collection_indexing_policy) + if self.collection_indexing_policy + else None, + ) + except Exception as e: + msg = f"Error initializing AstraDBGraphVectorStore: {e}" + raise ValueError(msg) from e + + self._add_documents_to_vector_store(vector_store) + + return vector_store + + def _add_documents_to_vector_store(self, vector_store) -> None: + documents = [] + for _input in self.ingest_data or []: + if isinstance(_input, Data): + documents.append(_input.to_lc_document()) + else: + msg = "Vector Store Inputs must be Data objects." 
+ raise TypeError(msg) + + if documents: + logger.debug(f"Adding {len(documents)} documents to the Vector Store.") + try: + vector_store.add_documents(documents) + except Exception as e: + msg = f"Error adding documents to AstraDBGraphVectorStore: {e}" + raise ValueError(msg) from e + else: + logger.debug("No documents to add to the Vector Store.") + + def _map_search_type(self) -> str: + if self.search_type == "Similarity with score threshold": + return "similarity_score_threshold" + if self.search_type == "MMR (Max Marginal Relevance)": + return "mmr" + return "similarity" + + def _build_search_args(self): + args = { + "k": self.number_of_results, + "score_threshold": self.search_score_threshold, + } + + if self.search_filter: + clean_filter = {k: v for k, v in self.search_filter.items() if k and v} + if len(clean_filter) > 0: + args["filter"] = clean_filter + return args + + def search_documents(self, vector_store=None) -> list[Data]: + if not vector_store: + vector_store = self.build_vector_store() + + logger.debug(f"Search input: {self.search_input}") + logger.debug(f"Search type: {self.search_type}") + logger.debug(f"Number of results: {self.number_of_results}") + + if self.search_input and isinstance(self.search_input, str) and self.search_input.strip(): + try: + search_type = self._map_search_type() + search_args = self._build_search_args() + + docs = vector_store.search(query=self.search_input, search_type=search_type, **search_args) + except Exception as e: + msg = f"Error performing search in AstraDBGraphVectorStore: {e}" + raise ValueError(msg) from e + + logger.debug(f"Retrieved documents: {len(docs)}") + + data = docs_to_data(docs) + logger.debug(f"Converted documents to data: {len(data)}") + self.status = data + return data + logger.debug("No search input provided. Skipping search.") + return [] + + def get_retriever_kwargs(self): + search_args = self._build_search_args() + return { + "search_type": self._map_search_type(), + "search_kwargs": search_args, + } diff --git a/src/backend/base/langflow/components/vectorstores/cassandra.py b/src/backend/base/langflow/components/vectorstores/cassandra.py new file mode 100644 index 000000000000..b2db432e91bb --- /dev/null +++ b/src/backend/base/langflow/components/vectorstores/cassandra.py @@ -0,0 +1,269 @@ +from langchain_community.vectorstores import Cassandra +from loguru import logger + +from langflow.base.vectorstores.model import LCVectorStoreComponent, check_cached_vector_store +from langflow.helpers.data import docs_to_data +from langflow.inputs import BoolInput, DictInput, FloatInput +from langflow.io import ( + DataInput, + DropdownInput, + HandleInput, + IntInput, + MessageTextInput, + MultilineInput, + SecretStrInput, +) +from langflow.schema import Data + + +class CassandraVectorStoreComponent(LCVectorStoreComponent): + display_name = "Cassandra" + description = "Cassandra Vector Store with search capabilities" + documentation = "https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/cassandra" + name = "Cassandra" + icon = "Cassandra" + + inputs = [ + MessageTextInput( + name="database_ref", + display_name="Contact Points / Astra Database ID", + info="Contact points for the database (or AstraDB database ID)", + required=True, + ), + MessageTextInput( + name="username", display_name="Username", info="Username for the database (leave empty for AstraDB)." 
+ ), + SecretStrInput( + name="token", + display_name="Password / AstraDB Token", + info="User password for the database (or AstraDB token).", + required=True, + ), + MessageTextInput( + name="keyspace", + display_name="Keyspace", + info="Table Keyspace (or AstraDB namespace).", + required=True, + ), + MessageTextInput( + name="table_name", + display_name="Table Name", + info="The name of the table (or AstraDB collection) where vectors will be stored.", + required=True, + ), + IntInput( + name="ttl_seconds", + display_name="TTL Seconds", + info="Optional time-to-live for the added texts.", + advanced=True, + ), + IntInput( + name="batch_size", + display_name="Batch Size", + info="Optional number of data to process in a single batch.", + value=16, + advanced=True, + ), + DropdownInput( + name="setup_mode", + display_name="Setup Mode", + info="Configuration mode for setting up the Cassandra table, with options like 'Sync', 'Async', or 'Off'.", + options=["Sync", "Async", "Off"], + value="Sync", + advanced=True, + ), + DictInput( + name="cluster_kwargs", + display_name="Cluster arguments", + info="Optional dictionary of additional keyword arguments for the Cassandra cluster.", + advanced=True, + is_list=True, + ), + MultilineInput(name="search_query", display_name="Search Query"), + DataInput( + name="ingest_data", + display_name="Ingest Data", + is_list=True, + ), + HandleInput(name="embedding", display_name="Embedding", input_types=["Embeddings"]), + IntInput( + name="number_of_results", + display_name="Number of Results", + info="Number of results to return.", + value=4, + advanced=True, + ), + DropdownInput( + name="search_type", + display_name="Search Type", + info="Search type to use", + options=["Similarity", "Similarity with score threshold", "MMR (Max Marginal Relevance)"], + value="Similarity", + advanced=True, + ), + FloatInput( + name="search_score_threshold", + display_name="Search Score Threshold", + info="Minimum similarity score threshold for search results. " + "(when using 'Similarity with score threshold')", + value=0, + advanced=True, + ), + DictInput( + name="search_filter", + display_name="Search Metadata Filter", + info="Optional dictionary of filters to apply to the search query.", + advanced=True, + is_list=True, + ), + MessageTextInput( + name="body_search", + display_name="Search Body", + info="Document textual search terms to apply to the search query.", + advanced=True, + ), + BoolInput( + name="enable_body_search", + display_name="Enable Body Search", + info="Flag to enable body search. This must be enabled BEFORE the table is created.", + value=False, + advanced=True, + ), + ] + + @check_cached_vector_store + def build_vector_store(self) -> Cassandra: + try: + import cassio + from langchain_community.utilities.cassandra import SetupMode + except ImportError as e: + msg = "Could not import cassio integration package. Please install it with `pip install cassio`." 
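+ # cassio.init() below registers the global Cassandra session that the LangChain vector store relies on.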
+ raise ImportError(msg) from e + + from uuid import UUID + + database_ref = self.database_ref + + try: + UUID(self.database_ref) + is_astra = True + except ValueError: + is_astra = False + if "," in self.database_ref: + # use a copy because we can't change the type of the parameter + database_ref = self.database_ref.split(",") + + if is_astra: + cassio.init( + database_id=database_ref, + token=self.token, + cluster_kwargs=self.cluster_kwargs, + ) + else: + cassio.init( + contact_points=database_ref, + username=self.username, + password=self.token, + cluster_kwargs=self.cluster_kwargs, + ) + documents = [] + + for _input in self.ingest_data or []: + if isinstance(_input, Data): + documents.append(_input.to_lc_document()) + else: + documents.append(_input) + + body_index_options = [("index_analyzer", "STANDARD")] if self.enable_body_search else None + + if self.setup_mode == "Off": + setup_mode = SetupMode.OFF + elif self.setup_mode == "Sync": + setup_mode = SetupMode.SYNC + else: + setup_mode = SetupMode.ASYNC + + if documents: + logger.debug(f"Adding {len(documents)} documents to the Vector Store.") + table = Cassandra.from_documents( + documents=documents, + embedding=self.embedding, + table_name=self.table_name, + keyspace=self.keyspace, + ttl_seconds=self.ttl_seconds or None, + batch_size=self.batch_size, + body_index_options=body_index_options, + ) + else: + logger.debug("No documents to add to the Vector Store.") + table = Cassandra( + embedding=self.embedding, + table_name=self.table_name, + keyspace=self.keyspace, + ttl_seconds=self.ttl_seconds or None, + body_index_options=body_index_options, + setup_mode=setup_mode, + ) + return table + + def _map_search_type(self) -> str: + if self.search_type == "Similarity with score threshold": + return "similarity_score_threshold" + if self.search_type == "MMR (Max Marginal Relevance)": + return "mmr" + return "similarity" + + def search_documents(self) -> list[Data]: + vector_store = self.build_vector_store() + + logger.debug(f"Search input: {self.search_query}") + logger.debug(f"Search type: {self.search_type}") + logger.debug(f"Number of results: {self.number_of_results}") + + if self.search_query and isinstance(self.search_query, str) and self.search_query.strip(): + try: + search_type = self._map_search_type() + search_args = self._build_search_args() + + logger.debug(f"Search args: {search_args}") + + docs = vector_store.search(query=self.search_query, search_type=search_type, **search_args) + except KeyError as e: + if "content" in str(e): + msg = ( + "You should ingest data through Langflow (or LangChain) to query it in Langflow. " + "Your collection does not contain a field name 'content'." + ) + raise ValueError(msg) from e + raise + + logger.debug(f"Retrieved documents: {len(docs)}") + + data = docs_to_data(docs) + self.status = data + return data + return [] + + def _build_search_args(self): + args = { + "k": self.number_of_results, + "score_threshold": self.search_score_threshold, + } + + if self.search_filter: + clean_filter = {k: v for k, v in self.search_filter.items() if k and v} + if len(clean_filter) > 0: + args["filter"] = clean_filter + if self.body_search: + if not self.enable_body_search: + msg = "You should enable body search when creating the table to search the body field." 
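+ # The body index is created together with the table, so it cannot be enabled retroactively from this component.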
+ raise ValueError(msg) + args["body_search"] = self.body_search + return args + + def get_retriever_kwargs(self): + search_args = self._build_search_args() + return { + "search_type": self._map_search_type(), + "search_kwargs": search_args, + } diff --git a/src/backend/base/langflow/components/vectorstores/cassandra_graph.py b/src/backend/base/langflow/components/vectorstores/cassandra_graph.py new file mode 100644 index 000000000000..740ed003e18f --- /dev/null +++ b/src/backend/base/langflow/components/vectorstores/cassandra_graph.py @@ -0,0 +1,244 @@ +from uuid import UUID + +from langchain_community.graph_vectorstores import CassandraGraphVectorStore +from loguru import logger + +from langflow.base.vectorstores.model import LCVectorStoreComponent, check_cached_vector_store +from langflow.helpers.data import docs_to_data +from langflow.inputs import DictInput, FloatInput +from langflow.io import ( + DataInput, + DropdownInput, + HandleInput, + IntInput, + MessageTextInput, + MultilineInput, + SecretStrInput, +) +from langflow.schema import Data + + +class CassandraGraphVectorStoreComponent(LCVectorStoreComponent): + display_name = "Cassandra Graph" + description = "Cassandra Graph Vector Store" + documentation = "https://python.langchain.com/v0.2/api_reference/community/graph_vectorstores.html" + name = "CassandraGraph" + icon = "Cassandra" + + inputs = [ + MessageTextInput( + name="database_ref", + display_name="Contact Points / Astra Database ID", + info="Contact points for the database (or AstraDB database ID)", + required=True, + ), + MessageTextInput( + name="username", display_name="Username", info="Username for the database (leave empty for AstraDB)." + ), + SecretStrInput( + name="token", + display_name="Password / AstraDB Token", + info="User password for the database (or AstraDB token).", + required=True, + ), + MessageTextInput( + name="keyspace", + display_name="Keyspace", + info="Table Keyspace (or AstraDB namespace).", + required=True, + ), + MessageTextInput( + name="table_name", + display_name="Table Name", + info="The name of the table (or AstraDB collection) where vectors will be stored.", + required=True, + ), + DropdownInput( + name="setup_mode", + display_name="Setup Mode", + info="Configuration mode for setting up the Cassandra table, with options like 'Sync' or 'Off'.", + options=["Sync", "Off"], + value="Sync", + advanced=True, + ), + DictInput( + name="cluster_kwargs", + display_name="Cluster arguments", + info="Optional dictionary of additional keyword arguments for the Cassandra cluster.", + advanced=True, + is_list=True, + ), + MultilineInput(name="search_query", display_name="Search Query"), + DataInput( + name="ingest_data", + display_name="Ingest Data", + is_list=True, + ), + HandleInput(name="embedding", display_name="Embedding", input_types=["Embeddings"]), + IntInput( + name="number_of_results", + display_name="Number of Results", + info="Number of results to return.", + value=4, + advanced=True, + ), + DropdownInput( + name="search_type", + display_name="Search Type", + info="Search type to use", + options=[ + "Traversal", + "MMR traversal", + "Similarity", + "Similarity with score threshold", + "MMR (Max Marginal Relevance)", + ], + value="Traversal", + advanced=True, + ), + IntInput( + name="depth", + display_name="Depth of traversal", + info="The maximum depth of edges to traverse. 
(when using 'Traversal' or 'MMR traversal')", + value=1, + advanced=True, + ), + FloatInput( + name="search_score_threshold", + display_name="Search Score Threshold", + info="Minimum similarity score threshold for search results. " + "(when using 'Similarity with score threshold')", + value=0, + advanced=True, + ), + DictInput( + name="search_filter", + display_name="Search Metadata Filter", + info="Optional dictionary of filters to apply to the search query.", + advanced=True, + is_list=True, + ), + ] + + @check_cached_vector_store + def build_vector_store(self) -> CassandraGraphVectorStore: + try: + import cassio + from langchain_community.utilities.cassandra import SetupMode + except ImportError as e: + msg = "Could not import cassio integration package. Please install it with `pip install cassio`." + raise ImportError(msg) from e + + database_ref = self.database_ref + + try: + UUID(self.database_ref) + is_astra = True + except ValueError: + is_astra = False + if "," in self.database_ref: + # use a copy because we can't change the type of the parameter + database_ref = self.database_ref.split(",") + + if is_astra: + cassio.init( + database_id=database_ref, + token=self.token, + cluster_kwargs=self.cluster_kwargs, + ) + else: + cassio.init( + contact_points=database_ref, + username=self.username, + password=self.token, + cluster_kwargs=self.cluster_kwargs, + ) + documents = [] + + for _input in self.ingest_data or []: + if isinstance(_input, Data): + documents.append(_input.to_lc_document()) + else: + documents.append(_input) + + setup_mode = SetupMode.OFF if self.setup_mode == "Off" else SetupMode.SYNC + + if documents: + logger.debug(f"Adding {len(documents)} documents to the Vector Store.") + store = CassandraGraphVectorStore.from_documents( + documents=documents, + embedding=self.embedding, + node_table=self.table_name, + keyspace=self.keyspace, + ) + else: + logger.debug("No documents to add to the Vector Store.") + store = CassandraGraphVectorStore( + embedding=self.embedding, + node_table=self.table_name, + keyspace=self.keyspace, + setup_mode=setup_mode, + ) + return store + + def _map_search_type(self) -> str: + if self.search_type == "Similarity": + return "similarity" + if self.search_type == "Similarity with score threshold": + return "similarity_score_threshold" + if self.search_type == "MMR (Max Marginal Relevance)": + return "mmr" + if self.search_type == "MMR traversal": + return "mmr_traversal" + return "traversal" + + def search_documents(self) -> list[Data]: + vector_store = self.build_vector_store() + + logger.debug(f"Search input: {self.search_query}") + logger.debug(f"Search type: {self.search_type}") + logger.debug(f"Number of results: {self.number_of_results}") + + if self.search_query and isinstance(self.search_query, str) and self.search_query.strip(): + try: + search_type = self._map_search_type() + search_args = self._build_search_args() + + logger.debug(f"Search args: {search_args}") + + docs = vector_store.search(query=self.search_query, search_type=search_type, **search_args) + except KeyError as e: + if "content" in str(e): + msg = ( + "You should ingest data through Langflow (or LangChain) to query it in Langflow. " + "Your collection does not contain a field named 'content'." 
+ ) + raise ValueError(msg) from e + raise + + logger.debug(f"Retrieved documents: {len(docs)}") + + data = docs_to_data(docs) + self.status = data + return data + return [] + + def _build_search_args(self): + args = { + "k": self.number_of_results, + "score_threshold": self.search_score_threshold, + "depth": self.depth, + } + + if self.search_filter: + clean_filter = {k: v for k, v in self.search_filter.items() if k and v} + if len(clean_filter) > 0: + args["filter"] = clean_filter + return args + + def get_retriever_kwargs(self): + search_args = self._build_search_args() + return { + "search_type": self._map_search_type(), + "search_kwargs": search_args, + } diff --git a/src/backend/base/langflow/components/vectorstores/chroma.py b/src/backend/base/langflow/components/vectorstores/chroma.py new file mode 100644 index 000000000000..de31f1503471 --- /dev/null +++ b/src/backend/base/langflow/components/vectorstores/chroma.py @@ -0,0 +1,159 @@ +from copy import deepcopy + +from chromadb.config import Settings +from langchain_chroma import Chroma +from loguru import logger + +from langflow.base.vectorstores.model import LCVectorStoreComponent, check_cached_vector_store +from langflow.base.vectorstores.utils import chroma_collection_to_data +from langflow.io import BoolInput, DataInput, DropdownInput, HandleInput, IntInput, MultilineInput, StrInput +from langflow.schema import Data + + +class ChromaVectorStoreComponent(LCVectorStoreComponent): + """Chroma Vector Store with search capabilities.""" + + display_name: str = "Chroma DB" + description: str = "Chroma Vector Store with search capabilities" + documentation = "https://python.langchain.com/docs/integrations/vectorstores/chroma" + name = "Chroma" + icon = "Chroma" + + inputs = [ + StrInput( + name="collection_name", + display_name="Collection Name", + value="langflow", + ), + StrInput( + name="persist_directory", + display_name="Persist Directory", + ), + MultilineInput( + name="search_query", + display_name="Search Query", + ), + DataInput( + name="ingest_data", + display_name="Ingest Data", + is_list=True, + ), + HandleInput(name="embedding", display_name="Embedding", input_types=["Embeddings"]), + StrInput( + name="chroma_server_cors_allow_origins", + display_name="Server CORS Allow Origins", + advanced=True, + ), + StrInput( + name="chroma_server_host", + display_name="Server Host", + advanced=True, + ), + IntInput( + name="chroma_server_http_port", + display_name="Server HTTP Port", + advanced=True, + ), + IntInput( + name="chroma_server_grpc_port", + display_name="Server gRPC Port", + advanced=True, + ), + BoolInput( + name="chroma_server_ssl_enabled", + display_name="Server SSL Enabled", + advanced=True, + ), + BoolInput( + name="allow_duplicates", + display_name="Allow Duplicates", + advanced=True, + info="If false, will not add documents that are already in the Vector Store.", + ), + DropdownInput( + name="search_type", + display_name="Search Type", + options=["Similarity", "MMR"], + value="Similarity", + advanced=True, + ), + IntInput( + name="number_of_results", + display_name="Number of Results", + info="Number of results to return.", + advanced=True, + value=10, + ), + IntInput( + name="limit", + display_name="Limit", + advanced=True, + info="Limit the number of records to compare when Allow Duplicates is False.", + ), + ] + + @check_cached_vector_store + def build_vector_store(self) -> Chroma: + """Builds the Chroma object.""" + try: + from chromadb import Client + from langchain_chroma import Chroma + except ImportError as 
e: + msg = "Could not import Chroma integration package. Please install it with `pip install langchain-chroma`." + raise ImportError(msg) from e + # Chroma settings + chroma_settings = None + client = None + if self.chroma_server_host: + chroma_settings = Settings( + chroma_server_cors_allow_origins=self.chroma_server_cors_allow_origins or [], + chroma_server_host=self.chroma_server_host, + chroma_server_http_port=self.chroma_server_http_port or None, + chroma_server_grpc_port=self.chroma_server_grpc_port or None, + chroma_server_ssl_enabled=self.chroma_server_ssl_enabled, + ) + client = Client(settings=chroma_settings) + + # Check persist_directory and expand it if it is a relative path + persist_directory = self.resolve_path(self.persist_directory) if self.persist_directory is not None else None + + chroma = Chroma( + persist_directory=persist_directory, + client=client, + embedding_function=self.embedding, + collection_name=self.collection_name, + ) + + self._add_documents_to_vector_store(chroma) + self.status = chroma_collection_to_data(chroma.get(limit=self.limit)) + return chroma + + def _add_documents_to_vector_store(self, vector_store: "Chroma") -> None: + """Adds documents to the Vector Store.""" + if not self.ingest_data: + self.status = "" + return + + _stored_documents_without_id = [] + if self.allow_duplicates: + stored_data = [] + else: + stored_data = chroma_collection_to_data(vector_store.get(limit=self.limit)) + for value in deepcopy(stored_data): + del value.id + _stored_documents_without_id.append(value) + + documents = [] + for _input in self.ingest_data or []: + if isinstance(_input, Data): + if _input not in _stored_documents_without_id: + documents.append(_input.to_lc_document()) + else: + msg = "Vector Store Inputs must be Data objects." 
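+ # Non-Data inputs cannot be deduplicated or converted above, so fail fast with a clear error.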
+ raise TypeError(msg) + + if documents and self.embedding is not None: + logger.debug(f"Adding {len(documents)} documents to the Vector Store.") + vector_store.add_documents(documents) + else: + logger.debug("No documents to add to the Vector Store.") diff --git a/src/backend/base/langflow/components/vectorstores/clickhouse.py b/src/backend/base/langflow/components/vectorstores/clickhouse.py new file mode 100644 index 000000000000..cfd81ff77692 --- /dev/null +++ b/src/backend/base/langflow/components/vectorstores/clickhouse.py @@ -0,0 +1,134 @@ +from langchain_community.vectorstores import Clickhouse, ClickhouseSettings + +from langflow.base.vectorstores.model import LCVectorStoreComponent, check_cached_vector_store +from langflow.helpers.data import docs_to_data +from langflow.inputs import BoolInput, FloatInput +from langflow.io import ( + DataInput, + DictInput, + DropdownInput, + HandleInput, + IntInput, + MultilineInput, + SecretStrInput, + StrInput, +) +from langflow.schema import Data + + +class ClickhouseVectorStoreComponent(LCVectorStoreComponent): + display_name = "Clickhouse" + description = "Clickhouse Vector Store with search capabilities" + documentation = "https://python.langchain.com/v0.2/docs/integrations/vectorstores/clickhouse/" + name = "Clickhouse" + icon = "Clickhouse" + + inputs = [ + StrInput(name="host", display_name="hostname", required=True, value="localhost"), + IntInput(name="port", display_name="port", required=True, value=8123), + StrInput(name="database", display_name="database", required=True), + StrInput(name="table", display_name="Table name", required=True), + StrInput(name="username", display_name="The ClickHouse user name.", required=True), + SecretStrInput(name="password", display_name="The password for username.", required=True), + DropdownInput( + name="index_type", + display_name="index_type", + options=["annoy", "vector_similarity"], + info="Type of the index.", + value="annoy", + advanced=True, + ), + DropdownInput( + name="metric", + display_name="metric", + options=["angular", "euclidean", "manhattan", "hamming", "dot"], + info="Metric to compute distance.", + value="angular", + advanced=True, + ), + BoolInput( + name="secure", + display_name="Use https/TLS. This overrides inferred values from the interface or port arguments.", + value=False, + advanced=True, + ), + StrInput(name="index_param", display_name="Param of the index", value="'L2Distance',100", advanced=True), + DictInput(name="index_query_params", display_name="index query params", advanced=True), + MultilineInput(name="search_query", display_name="Search Query"), + DataInput(name="ingest_data", display_name="Ingest Data", is_list=True), + HandleInput(name="embedding", display_name="Embedding", input_types=["Embeddings"]), + IntInput( + name="number_of_results", + display_name="Number of Results", + info="Number of results to return.", + value=4, + advanced=True, + ), + FloatInput(name="score_threshold", display_name="Score threshold", advanced=True), + ] + + @check_cached_vector_store + def build_vector_store(self) -> Clickhouse: + try: + import clickhouse_connect + except ImportError as e: + msg = ( + "Failed to import Clickhouse dependencies. 
" + "Install it using `pip install langflow[clickhouse-connect] --pre`" + ) + raise ImportError(msg) from e + + try: + client = clickhouse_connect.get_client(host=self.host, username=self.username, password=self.password) + client.command("SELECT 1") + except Exception as e: + msg = f"Failed to connect to Clickhouse: {e}" + raise ValueError(msg) from e + + documents = [] + for _input in self.ingest_data or []: + if isinstance(_input, Data): + documents.append(_input.to_lc_document()) + else: + documents.append(_input) + + kwargs = {} + if self.index_param: + kwargs["index_param"] = self.index_param.split(",") + if self.index_query_params: + kwargs["index_query_params"] = self.index_query_params + + settings = ClickhouseSettings( + table=self.table, + database=self.database, + host=self.host, + index_type=self.index_type, + metric=self.metric, + password=self.password, + port=self.port, + secure=self.secure, + username=self.username, + **kwargs, + ) + if documents: + clickhouse_vs = Clickhouse.from_documents(documents=documents, embedding=self.embedding, config=settings) + + else: + clickhouse_vs = Clickhouse(embedding=self.embedding, config=settings) + + return clickhouse_vs + + def search_documents(self) -> list[Data]: + vector_store = self.build_vector_store() + + if self.search_query and isinstance(self.search_query, str) and self.search_query.strip(): + kwargs = {} + if self.score_threshold: + kwargs["score_threshold"] = self.score_threshold + + docs = vector_store.similarity_search(query=self.search_query, k=self.number_of_results, **kwargs) + + data = docs_to_data(docs) + self.status = data + return data + return [] diff --git a/src/backend/base/langflow/components/vectorstores/couchbase.py b/src/backend/base/langflow/components/vectorstores/couchbase.py new file mode 100644 index 000000000000..622d582a7756 --- /dev/null +++ b/src/backend/base/langflow/components/vectorstores/couchbase.py @@ -0,0 +1,106 @@ +from datetime import timedelta + +from langchain_community.vectorstores import CouchbaseVectorStore + +from langflow.base.vectorstores.model import LCVectorStoreComponent, check_cached_vector_store +from langflow.helpers.data import docs_to_data +from langflow.io import DataInput, HandleInput, IntInput, MultilineInput, SecretStrInput, StrInput +from langflow.schema import Data + + +class CouchbaseVectorStoreComponent(LCVectorStoreComponent): + display_name = "Couchbase" + description = "Couchbase Vector Store with search capabilities" + documentation = "https://python.langchain.com/v0.1/docs/integrations/document_loaders/couchbase/" + name = "Couchbase" + icon = "Couchbase" + + inputs = [ + SecretStrInput( + name="couchbase_connection_string", display_name="Couchbase Cluster connection string", required=True + ), + StrInput(name="couchbase_username", display_name="Couchbase username", required=True), + SecretStrInput(name="couchbase_password", display_name="Couchbase password", required=True), + StrInput(name="bucket_name", display_name="Bucket Name", required=True), + StrInput(name="scope_name", display_name="Scope Name", required=True), + StrInput(name="collection_name", display_name="Collection Name", required=True), + StrInput(name="index_name", display_name="Index Name", required=True), + MultilineInput(name="search_query", display_name="Search Query"), + DataInput( + name="ingest_data", + display_name="Ingest Data", + is_list=True, + ), + HandleInput(name="embedding", display_name="Embedding", input_types=["Embeddings"]), + IntInput( + name="number_of_results", + 
display_name="Number of Results", + info="Number of results to return.", + value=4, + advanced=True, + ), + ] + + @check_cached_vector_store + def build_vector_store(self) -> CouchbaseVectorStore: + try: + from couchbase.auth import PasswordAuthenticator + from couchbase.cluster import Cluster + from couchbase.options import ClusterOptions + except ImportError as e: + msg = "Failed to import Couchbase dependencies. Install it using `pip install langflow[couchbase] --pre`" + raise ImportError(msg) from e + + try: + auth = PasswordAuthenticator(self.couchbase_username, self.couchbase_password) + options = ClusterOptions(auth) + cluster = Cluster(self.couchbase_connection_string, options) + + cluster.wait_until_ready(timedelta(seconds=5)) + except Exception as e: + msg = f"Failed to connect to Couchbase: {e}" + raise ValueError(msg) from e + + documents = [] + for _input in self.ingest_data or []: + if isinstance(_input, Data): + documents.append(_input.to_lc_document()) + else: + documents.append(_input) + + if documents: + couchbase_vs = CouchbaseVectorStore.from_documents( + documents=documents, + cluster=cluster, + bucket_name=self.bucket_name, + scope_name=self.scope_name, + collection_name=self.collection_name, + embedding=self.embedding, + index_name=self.index_name, + ) + + else: + couchbase_vs = CouchbaseVectorStore( + cluster=cluster, + bucket_name=self.bucket_name, + scope_name=self.scope_name, + collection_name=self.collection_name, + embedding=self.embedding, + index_name=self.index_name, + ) + + return couchbase_vs + + def search_documents(self) -> list[Data]: + vector_store = self.build_vector_store() + + if self.search_query and isinstance(self.search_query, str) and self.search_query.strip(): + docs = vector_store.similarity_search( + query=self.search_query, + k=self.number_of_results, + ) + + data = docs_to_data(docs) + self.status = data + return data + return [] diff --git a/src/backend/base/langflow/components/vectorstores/elasticsearch.py b/src/backend/base/langflow/components/vectorstores/elasticsearch.py new file mode 100644 index 000000000000..1558a412ae6b --- /dev/null +++ b/src/backend/base/langflow/components/vectorstores/elasticsearch.py @@ -0,0 +1,250 @@ +from typing import Any + +from langchain.schema import Document +from langchain_elasticsearch import ElasticsearchStore +from loguru import logger + +from langflow.base.vectorstores.model import LCVectorStoreComponent, check_cached_vector_store +from langflow.io import ( + DataInput, + DropdownInput, + FloatInput, + HandleInput, + IntInput, + MultilineInput, + SecretStrInput, + StrInput, +) +from langflow.schema import Data + + +class ElasticsearchVectorStoreComponent(LCVectorStoreComponent): + """Elasticsearch Vector Store with with advanced, customizable search capabilities.""" + + display_name: str = "Elasticsearch" + description: str = "Elasticsearch Vector Store with with advanced, customizable search capabilities." + documentation = "https://python.langchain.com/docs/integrations/vectorstores/elasticsearch" + name = "Elasticsearch" + icon = "ElasticsearchStore" + + inputs = [ + StrInput( + name="elasticsearch_url", + display_name="Elasticsearch URL", + value="http://localhost:9200", + info="URL for self-managed Elasticsearch deployments (e.g., http://localhost:9200). " + "Do not use with Elastic Cloud deployments, use Elastic Cloud ID instead.", + ), + SecretStrInput( + name="cloud_id", + display_name="Elastic Cloud ID", + value="", + info="Use this for Elastic Cloud deployments. 
Do not use together with 'Elasticsearch URL'.", + ), + StrInput( + name="index_name", + display_name="Index Name", + value="langflow", + info="The index name where the vectors will be stored in Elasticsearch cluster.", + ), + MultilineInput( + name="search_input", + display_name="Search Input", + info="Enter a search query. Leave empty to retrieve all documents.", + ), + StrInput( + name="username", + display_name="Username", + value="", + advanced=False, + info=( + "Elasticsearch username (e.g., 'elastic'). " + "Required for both local and Elastic Cloud setups unless API keys are used." + ), + ), + SecretStrInput( + name="password", + display_name="Password", + value="", + advanced=False, + info=( + "Elasticsearch password for the specified user. " + "Required for both local and Elastic Cloud setups unless API keys are used." + ), + ), + DataInput( + name="ingest_data", + display_name="Ingest Data", + is_list=True, + ), + HandleInput( + name="embedding", + display_name="Embedding", + input_types=["Embeddings"], + ), + DropdownInput( + name="search_type", + display_name="Search Type", + options=["similarity", "mmr"], + value="similarity", + advanced=True, + ), + IntInput( + name="number_of_results", + display_name="Number of Results", + info="Number of results to return.", + advanced=True, + value=4, + ), + FloatInput( + name="search_score_threshold", + display_name="Search Score Threshold", + info="Minimum similarity score threshold for search results.", + value=0.0, + advanced=True, + ), + SecretStrInput( + name="api_key", + display_name="Elastic API Key", + value="", + advanced=True, + info="API Key for Elastic Cloud authentication. If used, 'username' and 'password' are not required.", + ), + ] + + @check_cached_vector_store + def build_vector_store(self) -> ElasticsearchStore: + """Builds the Elasticsearch Vector Store object.""" + if self.cloud_id and self.elasticsearch_url: + msg = ( + "Both 'cloud_id' and 'elasticsearch_url' provided. " + "Please use only one based on your deployment (Cloud or Local)." + ) + raise ValueError(msg) + + es_params = { + "index_name": self.index_name, + "embedding": self.embedding, + "es_user": self.username or None, + "es_password": self.password or None, + } + + if self.cloud_id: + es_params["es_cloud_id"] = self.cloud_id + else: + es_params["es_url"] = self.elasticsearch_url + + if self.api_key: + es_params["api_key"] = self.api_key + + elasticsearch = ElasticsearchStore(**es_params) + + # If documents are provided, add them to the store + if self.ingest_data: + documents = self._prepare_documents() + if documents: + elasticsearch.add_documents(documents) + + return elasticsearch + + def _prepare_documents(self) -> list[Document]: + """Prepares documents from the input data to add to the vector store.""" + documents = [] + for data in self.ingest_data: + if isinstance(data, Data): + documents.append(data.to_lc_document()) + else: + error_message = "Vector Store Inputs must be Data objects." 
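+ # Log before raising so the failure is also visible in the component logs.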
+ logger.error(error_message) + raise TypeError(error_message) + return documents + + def _add_documents_to_vector_store(self, vector_store: "ElasticsearchStore") -> None: + """Adds documents to the Vector Store.""" + documents = self._prepare_documents() + if documents and self.embedding: + logger.debug(f"Adding {len(documents)} documents to the Vector Store.") + vector_store.add_documents(documents) + else: + logger.debug("No documents to add to the Vector Store.") + + def search(self, query: str | None = None) -> list[dict[str, Any]]: + """Search for similar documents in the vector store or retrieve all documents if no query is provided.""" + vector_store = self.build_vector_store() + search_kwargs = { + "k": self.number_of_results, + "score_threshold": self.search_score_threshold, + } + + if query: + search_type = self.search_type.lower() + if search_type not in {"similarity", "mmr"}: + msg = f"Invalid search type: {self.search_type}" + logger.error(msg) + raise ValueError(msg) + try: + if search_type == "similarity": + results = vector_store.similarity_search_with_score(query, **search_kwargs) + elif search_type == "mmr": + results = vector_store.max_marginal_relevance_search(query, **search_kwargs) + except Exception as e: + msg = ( + "Error occurred while querying the Elasticsearch VectorStore; " + "the store may contain no data." + ) + logger.exception(msg) + raise ValueError(msg) from e + return [ + {"page_content": doc.page_content, "metadata": doc.metadata, "score": score} for doc, score in results + ] + results = self.get_all_documents(vector_store, **search_kwargs) + return [{"page_content": doc.page_content, "metadata": doc.metadata, "score": score} for doc, score in results] + + def get_all_documents(self, vector_store: ElasticsearchStore, **kwargs) -> list[tuple[Document, float]]: + """Retrieve all documents from the vector store.""" + client = vector_store.client + index_name = self.index_name + + query = { + "query": {"match_all": {}}, + "size": kwargs.get("k", self.number_of_results), + } + + response = client.search(index=index_name, body=query) + + results = [] + for hit in response["hits"]["hits"]: + doc = Document( + page_content=hit["_source"].get("text", ""), + metadata=hit["_source"].get("metadata", {}), + ) + score = hit["_score"] + results.append((doc, score)) + + return results + + def search_documents(self) -> list[Data]: + """Search for documents in the vector store based on the search input. + + If no search input is provided, retrieve all documents. 
+ """ + results = self.search(self.search_input) + retrieved_data = [ + Data( + text=result["page_content"], + file_path=result["metadata"].get("file_path", ""), + ) + for result in results + ] + self.status = retrieved_data + return retrieved_data + + def get_retriever_kwargs(self): + """Get the keyword arguments for the retriever.""" + return { + "search_type": self.search_type.lower(), + "search_kwargs": { + "k": self.number_of_results, + "score_threshold": self.search_score_threshold, + }, + } diff --git a/src/backend/base/langflow/components/vectorstores/faiss.py b/src/backend/base/langflow/components/vectorstores/faiss.py new file mode 100644 index 000000000000..871ffb7ef2ec --- /dev/null +++ b/src/backend/base/langflow/components/vectorstores/faiss.py @@ -0,0 +1,112 @@ +from langchain_community.vectorstores import FAISS +from loguru import logger + +from langflow.base.vectorstores.model import LCVectorStoreComponent, check_cached_vector_store +from langflow.helpers.data import docs_to_data +from langflow.io import BoolInput, DataInput, HandleInput, IntInput, MultilineInput, StrInput +from langflow.schema import Data + + +class FaissVectorStoreComponent(LCVectorStoreComponent): + """FAISS Vector Store with search capabilities.""" + + display_name: str = "FAISS" + description: str = "FAISS Vector Store with search capabilities" + documentation = "https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/faiss" + name = "FAISS" + icon = "FAISS" + + inputs = [ + StrInput( + name="index_name", + display_name="Index Name", + value="langflow_index", + ), + StrInput( + name="persist_directory", + display_name="Persist Directory", + info="Path to save the FAISS index. It will be relative to where Langflow is running.", + ), + MultilineInput( + name="search_query", + display_name="Search Query", + ), + DataInput( + name="ingest_data", + display_name="Ingest Data", + is_list=True, + ), + BoolInput( + name="allow_dangerous_deserialization", + display_name="Allow Dangerous Deserialization", + info="Set to True to allow loading pickle files from untrusted sources. " + "Only enable this if you trust the source of the data.", + advanced=True, + value=True, + ), + HandleInput(name="embedding", display_name="Embedding", input_types=["Embeddings"]), + IntInput( + name="number_of_results", + display_name="Number of Results", + info="Number of results to return.", + advanced=True, + value=4, + ), + ] + + @check_cached_vector_store + def build_vector_store(self) -> FAISS: + """Builds the FAISS object.""" + if not self.persist_directory: + msg = "Folder path is required to save the FAISS index." + raise ValueError(msg) + path = self.resolve_path(self.persist_directory) + + documents = [] + + for _input in self.ingest_data or []: + if isinstance(_input, Data): + documents.append(_input.to_lc_document()) + else: + documents.append(_input) + + faiss = FAISS.from_documents(documents=documents, embedding=self.embedding) + faiss.save_local(str(path), self.index_name) + + return faiss + + def search_documents(self) -> list[Data]: + """Search for documents in the FAISS vector store.""" + if not self.persist_directory: + msg = "Folder path is required to load the FAISS index." 
+ raise ValueError(msg) + path = self.resolve_path(self.persist_directory) + + vector_store = FAISS.load_local( + folder_path=path, + embeddings=self.embedding, + index_name=self.index_name, + allow_dangerous_deserialization=self.allow_dangerous_deserialization, + ) + + if not vector_store: + msg = "Failed to load the FAISS index." + raise ValueError(msg) + + logger.debug(f"Search input: {self.search_query}") + logger.debug(f"Number of results: {self.number_of_results}") + + if self.search_query and isinstance(self.search_query, str) and self.search_query.strip(): + docs = vector_store.similarity_search( + query=self.search_query, + k=self.number_of_results, + ) + + logger.debug(f"Retrieved documents: {len(docs)}") + + data = docs_to_data(docs) + logger.debug(f"Converted documents to data: {len(data)}") + logger.debug(data) + return data # Return the search results data + logger.debug("No search input provided. Skipping search.") + return [] diff --git a/src/backend/base/langflow/components/vectorstores/hcd.py b/src/backend/base/langflow/components/vectorstores/hcd.py new file mode 100644 index 000000000000..488a586d9aa4 --- /dev/null +++ b/src/backend/base/langflow/components/vectorstores/hcd.py @@ -0,0 +1,325 @@ +from loguru import logger + +from langflow.base.vectorstores.model import LCVectorStoreComponent, check_cached_vector_store +from langflow.helpers import docs_to_data +from langflow.inputs import DictInput, FloatInput +from langflow.io import ( + BoolInput, + DataInput, + DropdownInput, + HandleInput, + IntInput, + MultilineInput, + SecretStrInput, + StrInput, +) +from langflow.schema import Data + + +class HCDVectorStoreComponent(LCVectorStoreComponent): + display_name: str = "Hyper-Converged Database" + description: str = "Implementation of Vector Store using Hyper-Converged Database (HCD) with search capabilities" + documentation: str = "https://python.langchain.com/docs/integrations/vectorstores/astradb" + name = "HCD" + icon: str = "HCD" + + inputs = [ + StrInput( + name="collection_name", + display_name="Collection Name", + info="The name of the collection within HCD where the vectors will be stored.", + required=True, + ), + StrInput( + name="username", + display_name="HCD Username", + info="Authentication username for accessing HCD.", + value="hcd-superuser", + required=True, + ), + SecretStrInput( + name="password", + display_name="HCD Password", + info="Authentication password for accessing HCD.", + value="HCD_PASSWORD", + required=True, + ), + SecretStrInput( + name="api_endpoint", + display_name="HCD API Endpoint", + info="API endpoint URL for the HCD service.", + value="HCD_API_ENDPOINT", + required=True, + ), + MultilineInput( + name="search_input", + display_name="Search Input", + ), + DataInput( + name="ingest_data", + display_name="Ingest Data", + is_list=True, + ), + StrInput( + name="namespace", + display_name="Namespace", + info="Optional namespace within HCD to use for the collection.", + value="default_namespace", + advanced=True, + ), + MultilineInput( + name="ca_certificate", + display_name="CA Certificate", + info="Optional CA certificate for TLS connections to HCD.", + advanced=True, + ), + DropdownInput( + name="metric", + display_name="Metric", + info="Optional distance metric for vector comparisons in the vector store.", + options=["cosine", "dot_product", "euclidean"], + advanced=True, + ), + IntInput( + name="batch_size", + display_name="Batch Size", + info="Optional number of data to process in a single batch.", + advanced=True, + ), + IntInput( + 
name="bulk_insert_batch_concurrency", + display_name="Bulk Insert Batch Concurrency", + info="Optional concurrency level for bulk insert operations.", + advanced=True, + ), + IntInput( + name="bulk_insert_overwrite_concurrency", + display_name="Bulk Insert Overwrite Concurrency", + info="Optional concurrency level for bulk insert operations that overwrite existing data.", + advanced=True, + ), + IntInput( + name="bulk_delete_concurrency", + display_name="Bulk Delete Concurrency", + info="Optional concurrency level for bulk delete operations.", + advanced=True, + ), + DropdownInput( + name="setup_mode", + display_name="Setup Mode", + info="Configuration mode for setting up the vector store, with options like 'Sync', 'Async', or 'Off'.", + options=["Sync", "Async", "Off"], + advanced=True, + value="Sync", + ), + BoolInput( + name="pre_delete_collection", + display_name="Pre Delete Collection", + info="Boolean flag to determine whether to delete the collection before creating a new one.", + advanced=True, + ), + StrInput( + name="metadata_indexing_include", + display_name="Metadata Indexing Include", + info="Optional list of metadata fields to include in the indexing.", + advanced=True, + ), + HandleInput( + name="embedding", + display_name="Embedding or Astra Vectorize", + input_types=["Embeddings", "dict"], + # TODO: This should be optional, but need to refactor langchain-astradb first. + info="Allows either an embedding model or an Astra Vectorize configuration.", + ), + StrInput( + name="metadata_indexing_exclude", + display_name="Metadata Indexing Exclude", + info="Optional list of metadata fields to exclude from the indexing.", + advanced=True, + ), + StrInput( + name="collection_indexing_policy", + display_name="Collection Indexing Policy", + info="Optional dictionary defining the indexing policy for the collection.", + advanced=True, + ), + IntInput( + name="number_of_results", + display_name="Number of Results", + info="Number of results to return.", + advanced=True, + value=4, + ), + DropdownInput( + name="search_type", + display_name="Search Type", + info="Search type to use", + options=["Similarity", "Similarity with score threshold", "MMR (Max Marginal Relevance)"], + value="Similarity", + advanced=True, + ), + FloatInput( + name="search_score_threshold", + display_name="Search Score Threshold", + info="Minimum similarity score threshold for search results. " + "(when using 'Similarity with score threshold')", + value=0, + advanced=True, + ), + DictInput( + name="search_filter", + display_name="Search Metadata Filter", + info="Optional dictionary of filters to apply to the search query.", + advanced=True, + is_list=True, + ), + ] + + @check_cached_vector_store + def build_vector_store(self): + try: + from langchain_astradb import AstraDBVectorStore + from langchain_astradb.utils.astradb import SetupMode + except ImportError as e: + msg = ( + "Could not import langchain Astra DB integration package. " + "Please install it with `pip install langchain-astradb`." + ) + raise ImportError(msg) from e + + try: + from astrapy.authentication import UsernamePasswordTokenProvider + from astrapy.constants import Environment + except ImportError as e: + msg = "Could not import astrapy integration package. Please install it with `pip install astrapy`." 
+ raise ImportError(msg) from e + + try: + if not self.setup_mode: + self.setup_mode = self._inputs["setup_mode"].options[0] + + setup_mode_value = SetupMode[self.setup_mode.upper()] + except KeyError as e: + msg = f"Invalid setup mode: {self.setup_mode}" + raise ValueError(msg) from e + + if not isinstance(self.embedding, dict): + embedding_dict = {"embedding": self.embedding} + else: + from astrapy.info import CollectionVectorServiceOptions + + dict_options = self.embedding.get("collection_vector_service_options", {}) + dict_options["authentication"] = { + k: v for k, v in dict_options.get("authentication", {}).items() if k and v + } + dict_options["parameters"] = {k: v for k, v in dict_options.get("parameters", {}).items() if k and v} + embedding_dict = { + "collection_vector_service_options": CollectionVectorServiceOptions.from_dict(dict_options) + } + collection_embedding_api_key = self.embedding.get("collection_embedding_api_key") + if collection_embedding_api_key: + embedding_dict["collection_embedding_api_key"] = collection_embedding_api_key + + token_provider = UsernamePasswordTokenProvider(self.username, self.password) + vector_store_kwargs = { + **embedding_dict, + "collection_name": self.collection_name, + "token": token_provider, + "api_endpoint": self.api_endpoint, + "namespace": self.namespace, + "metric": self.metric or None, + "batch_size": self.batch_size or None, + "bulk_insert_batch_concurrency": self.bulk_insert_batch_concurrency or None, + "bulk_insert_overwrite_concurrency": self.bulk_insert_overwrite_concurrency or None, + "bulk_delete_concurrency": self.bulk_delete_concurrency or None, + "setup_mode": setup_mode_value, + "pre_delete_collection": self.pre_delete_collection or False, + "environment": Environment.HCD, + } + + if self.metadata_indexing_include: + vector_store_kwargs["metadata_indexing_include"] = self.metadata_indexing_include + elif self.metadata_indexing_exclude: + vector_store_kwargs["metadata_indexing_exclude"] = self.metadata_indexing_exclude + elif self.collection_indexing_policy: + vector_store_kwargs["collection_indexing_policy"] = self.collection_indexing_policy + + try: + vector_store = AstraDBVectorStore(**vector_store_kwargs) + except Exception as e: + msg = f"Error initializing AstraDBVectorStore: {e}" + raise ValueError(msg) from e + + self._add_documents_to_vector_store(vector_store) + return vector_store + + def _add_documents_to_vector_store(self, vector_store) -> None: + documents = [] + for _input in self.ingest_data or []: + if isinstance(_input, Data): + documents.append(_input.to_lc_document()) + else: + msg = "Vector Store Inputs must be Data objects." 
+ raise TypeError(msg) + + if documents: + logger.debug(f"Adding {len(documents)} documents to the Vector Store.") + try: + vector_store.add_documents(documents) + except Exception as e: + msg = f"Error adding documents to AstraDBVectorStore: {e}" + raise ValueError(msg) from e + else: + logger.debug("No documents to add to the Vector Store.") + + def _map_search_type(self) -> str: + if self.search_type == "Similarity with score threshold": + return "similarity_score_threshold" + if self.search_type == "MMR (Max Marginal Relevance)": + return "mmr" + return "similarity" + + def _build_search_args(self): + args = { + "k": self.number_of_results, + "score_threshold": self.search_score_threshold, + } + + if self.search_filter: + clean_filter = {k: v for k, v in self.search_filter.items() if k and v} + if len(clean_filter) > 0: + args["filter"] = clean_filter + return args + + def search_documents(self) -> list[Data]: + vector_store = self.build_vector_store() + + logger.debug(f"Search input: {self.search_input}") + logger.debug(f"Search type: {self.search_type}") + logger.debug(f"Number of results: {self.number_of_results}") + + if self.search_input and isinstance(self.search_input, str) and self.search_input.strip(): + try: + search_type = self._map_search_type() + search_args = self._build_search_args() + + docs = vector_store.search(query=self.search_input, search_type=search_type, **search_args) + except Exception as e: + msg = f"Error performing search in AstraDBVectorStore: {e}" + raise ValueError(msg) from e + + logger.debug(f"Retrieved documents: {len(docs)}") + + data = docs_to_data(docs) + logger.debug(f"Converted documents to data: {len(data)}") + self.status = data + return data + logger.debug("No search input provided. Skipping search.") + return [] + + def get_retriever_kwargs(self): + search_args = self._build_search_args() + return { + "search_type": self._map_search_type(), + "search_kwargs": search_args, + } diff --git a/src/backend/base/langflow/components/vectorstores/milvus.py b/src/backend/base/langflow/components/vectorstores/milvus.py new file mode 100644 index 000000000000..ff83d961d64d --- /dev/null +++ b/src/backend/base/langflow/components/vectorstores/milvus.py @@ -0,0 +1,120 @@ +from langflow.base.vectorstores.model import LCVectorStoreComponent, check_cached_vector_store +from langflow.helpers.data import docs_to_data +from langflow.io import ( + BoolInput, + DataInput, + DictInput, + DropdownInput, + FloatInput, + HandleInput, + IntInput, + MultilineInput, + SecretStrInput, + StrInput, +) +from langflow.schema import Data + + +class MilvusVectorStoreComponent(LCVectorStoreComponent): + """Milvus vector store with search capabilities.""" + + display_name: str = "Milvus" + description: str = "Milvus vector store with search capabilities" + documentation = "https://python.langchain.com/docs/integrations/vectorstores/milvus" + name = "Milvus" + icon = "Milvus" + + inputs = [ + StrInput(name="collection_name", display_name="Collection Name", value="langflow"), + StrInput(name="collection_description", display_name="Collection Description", value=""), + StrInput( + name="uri", + display_name="Connection URI", + value="http://localhost:19530", + ), + SecretStrInput( + name="password", + display_name="Connection Password", + value="", + info="Ignore this field if no password is required to make connection.", + ), + DictInput(name="connection_args", display_name="Other Connection Arguments", advanced=True), + StrInput(name="primary_field", display_name="Primary Field 
Name", value="pk"), + StrInput(name="text_field", display_name="Text Field Name", value="text"), + StrInput(name="vector_field", display_name="Vector Field Name", value="vector"), + DropdownInput( + name="consistency_level", + display_name="Consistencey Level", + options=["Bounded", "Session", "Strong", "Eventual"], + value="Session", + advanced=True, + ), + DictInput(name="index_params", display_name="Index Parameters", advanced=True), + DictInput(name="search_params", display_name="Search Parameters", advanced=True), + BoolInput(name="drop_old", display_name="Drop Old Collection", value=False, advanced=True), + FloatInput(name="timeout", display_name="Timeout", advanced=True), + MultilineInput(name="search_query", display_name="Search Query"), + DataInput( + name="ingest_data", + display_name="Ingest Data", + is_list=True, + ), + HandleInput(name="embedding", display_name="Embedding", input_types=["Embeddings"]), + IntInput( + name="number_of_results", + display_name="Number of Results", + info="Number of results to return.", + value=4, + advanced=True, + ), + ] + + @check_cached_vector_store + def build_vector_store(self): + try: + from langchain_milvus.vectorstores import Milvus as LangchainMilvus + except ImportError as e: + msg = "Could not import Milvus integration package. Please install it with `pip install langchain-milvus`." + raise ImportError(msg) from e + self.connection_args.update(uri=self.uri, token=self.password) + milvus_store = LangchainMilvus( + embedding_function=self.embedding, + collection_name=self.collection_name, + collection_description=self.collection_description, + connection_args=self.connection_args, + consistency_level=self.consistency_level, + index_params=self.index_params, + search_params=self.search_params, + drop_old=self.drop_old, + auto_id=True, + primary_field=self.primary_field, + text_field=self.text_field, + vector_field=self.vector_field, + timeout=self.timeout, + ) + + documents = [] + for _input in self.ingest_data or []: + if isinstance(_input, Data): + documents.append(_input.to_lc_document()) + else: + documents.append(_input) + + if documents: + milvus_store.add_documents(documents) + + return milvus_store + + def search_documents(self) -> list[Data]: + vector_store = self.build_vector_store() + + if self.search_query and isinstance(self.search_query, str) and self.search_query.strip(): + docs = vector_store.similarity_search( + query=self.search_query, + k=self.number_of_results, + ) + + data = docs_to_data(docs) + self.status = data + return data + return [] diff --git a/src/backend/base/langflow/components/vectorstores/mongodb_atlas.py b/src/backend/base/langflow/components/vectorstores/mongodb_atlas.py new file mode 100644 index 000000000000..298a0a0cdf49 --- /dev/null +++ b/src/backend/base/langflow/components/vectorstores/mongodb_atlas.py @@ -0,0 +1,87 @@ +from langchain_community.vectorstores import MongoDBAtlasVectorSearch + +from langflow.base.vectorstores.model import LCVectorStoreComponent, check_cached_vector_store +from langflow.helpers.data import docs_to_data +from langflow.io import DataInput, HandleInput, IntInput, MultilineInput, SecretStrInput, StrInput +from langflow.schema import Data + + +class MongoVectorStoreComponent(LCVectorStoreComponent): + display_name = "MongoDB Atlas" + description = "MongoDB Atlas Vector Store with search capabilities" + documentation = "https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/mongodb_atlas" + name = "MongoDBAtlasVector" + icon = "MongoDB" + + 
inputs = [ + SecretStrInput(name="mongodb_atlas_cluster_uri", display_name="MongoDB Atlas Cluster URI", required=True), + StrInput(name="db_name", display_name="Database Name", required=True), + StrInput(name="collection_name", display_name="Collection Name", required=True), + StrInput(name="index_name", display_name="Index Name", required=True), + MultilineInput(name="search_query", display_name="Search Query"), + DataInput( + name="ingest_data", + display_name="Ingest Data", + is_list=True, + ), + HandleInput(name="embedding", display_name="Embedding", input_types=["Embeddings"]), + IntInput( + name="number_of_results", + display_name="Number of Results", + info="Number of results to return.", + value=4, + advanced=True, + ), + ] + + @check_cached_vector_store + def build_vector_store(self) -> MongoDBAtlasVectorSearch: + try: + from pymongo import MongoClient + except ImportError as e: + msg = "Please install pymongo to use MongoDB Atlas Vector Store" + raise ImportError(msg) from e + + try: + mongo_client: MongoClient = MongoClient(self.mongodb_atlas_cluster_uri) + collection = mongo_client[self.db_name][self.collection_name] + except Exception as e: + msg = f"Failed to connect to MongoDB Atlas: {e}" + raise ValueError(msg) from e + + documents = [] + for _input in self.ingest_data or []: + if isinstance(_input, Data): + documents.append(_input.to_lc_document()) + else: + documents.append(_input) + + if documents: + return MongoDBAtlasVectorSearch.from_documents( + documents=documents, embedding=self.embedding, collection=collection, index_name=self.index_name + ) + return MongoDBAtlasVectorSearch( + embedding=self.embedding, + collection=collection, + index_name=self.index_name, + ) + + def search_documents(self) -> list[Data]: + from bson.objectid import ObjectId + + vector_store = self.build_vector_store() + + if self.search_query and isinstance(self.search_query, str): + docs = vector_store.similarity_search( + query=self.search_query, + k=self.number_of_results, + ) + for doc in docs: + doc.metadata = { + key: str(value) if isinstance(value, ObjectId) else value for key, value in doc.metadata.items() + } + + data = docs_to_data(docs) + self.status = data + return data + return [] diff --git a/src/backend/base/langflow/components/vectorstores/opensearch.py b/src/backend/base/langflow/components/vectorstores/opensearch.py new file mode 100644 index 000000000000..856baaaa83ba --- /dev/null +++ b/src/backend/base/langflow/components/vectorstores/opensearch.py @@ -0,0 +1,254 @@ +import json +from typing import Any + +from langchain_community.vectorstores import OpenSearchVectorSearch +from loguru import logger + +from langflow.base.vectorstores.model import LCVectorStoreComponent, check_cached_vector_store +from langflow.io import ( + BoolInput, + DataInput, + DropdownInput, + FloatInput, + HandleInput, + IntInput, + MultilineInput, + SecretStrInput, + StrInput, +) +from langflow.schema import Data + + +class OpenSearchVectorStoreComponent(LCVectorStoreComponent): + """OpenSearch Vector Store with advanced, customizable search capabilities.""" + + display_name: str = "OpenSearch" + description: str = "OpenSearch Vector Store with advanced, customizable search capabilities." + documentation = "https://python.langchain.com/docs/integrations/vectorstores/opensearch" + name = "OpenSearch" + icon = "OpenSearch" + + inputs = [ + StrInput( + name="opensearch_url", + display_name="OpenSearch URL", + value="http://localhost:9200", + info="URL for OpenSearch cluster (e.g. 
https://192.168.1.1:9200).", + ), + StrInput( + name="index_name", + display_name="Index Name", + value="langflow", + info="The index name where the vectors will be stored in OpenSearch cluster.", + ), + MultilineInput( + name="search_input", + display_name="Search Input", + info=( + "Enter a search query. Leave empty to retrieve all documents. " + "If you need a more advanced search consider using Hybrid Search Query instead." + ), + value="", + ), + DataInput( + name="ingest_data", + display_name="Ingest Data", + is_list=True, + ), + HandleInput(name="embedding", display_name="Embedding", input_types=["Embeddings"]), + DropdownInput( + name="search_type", + display_name="Search Type", + options=["similarity", "similarity_score_threshold", "mmr"], + value="similarity", + advanced=True, + ), + IntInput( + name="number_of_results", + display_name="Number of Results", + info="Number of results to return.", + advanced=True, + value=4, + ), + FloatInput( + name="search_score_threshold", + display_name="Search Score Threshold", + info="Minimum similarity score threshold for search results.", + value=0.0, + advanced=True, + ), + StrInput( + name="username", + display_name="Username", + value="admin", + advanced=True, + ), + SecretStrInput( + name="password", + display_name="Password", + value="admin", + advanced=True, + ), + BoolInput( + name="use_ssl", + display_name="Use SSL", + value=True, + advanced=True, + ), + BoolInput( + name="verify_certs", + display_name="Verify Certificates", + value=False, + advanced=True, + ), + MultilineInput( + name="hybrid_search_query", + display_name="Hybrid Search Query", + value="", + advanced=True, + info=( + "Provide a custom hybrid search query in JSON format. This allows you to combine " + "vector similarity and keyword matching." 
+ ), + ), + ] + + @check_cached_vector_store + def build_vector_store(self) -> OpenSearchVectorSearch: + """Builds the OpenSearch Vector Store object.""" + try: + from langchain_community.vectorstores import OpenSearchVectorSearch + except ImportError as e: + error_message = f"Failed to import required modules: {e}" + logger.exception(error_message) + raise ImportError(error_message) from e + + try: + opensearch = OpenSearchVectorSearch( + index_name=self.index_name, + embedding_function=self.embedding, + opensearch_url=self.opensearch_url, + http_auth=(self.username, self.password), + use_ssl=self.use_ssl, + verify_certs=self.verify_certs, + ssl_assert_hostname=False, + ssl_show_warn=False, + ) + except Exception as e: + error_message = f"Failed to create OpenSearchVectorSearch instance: {e}" + logger.exception(error_message) + raise RuntimeError(error_message) from e + + if self.ingest_data: + self._add_documents_to_vector_store(opensearch) + + return opensearch + + def _add_documents_to_vector_store(self, vector_store: "OpenSearchVectorSearch") -> None: + """Adds documents to the Vector Store.""" + documents = [] + for _input in self.ingest_data or []: + if isinstance(_input, Data): + documents.append(_input.to_lc_document()) + else: + error_message = f"Expected Data object, got {type(_input)}" + logger.error(error_message) + raise TypeError(error_message) + + if documents and self.embedding is not None: + logger.debug(f"Adding {len(documents)} documents to the Vector Store.") + try: + vector_store.add_documents(documents) + except Exception as e: + error_message = f"Error adding documents to Vector Store: {e}" + logger.exception(error_message) + raise RuntimeError(error_message) from e + else: + logger.debug("No documents to add to the Vector Store.") + + def search(self, query: str | None = None) -> list[dict[str, Any]]: + """Search for similar documents in the vector store or retrieve all documents if no query is provided.""" + try: + vector_store = self.build_vector_store() + + query = query or "" + + if self.hybrid_search_query.strip(): + try: + hybrid_query = json.loads(self.hybrid_search_query) + except json.JSONDecodeError as e: + error_message = f"Invalid hybrid search query JSON: {e}" + logger.exception(error_message) + raise ValueError(error_message) from e + + results = vector_store.client.search(index=self.index_name, body=hybrid_query) + + processed_results = [] + for hit in results.get("hits", {}).get("hits", []): + source = hit.get("_source", {}) + text = source.get("text", "") + metadata = source.get("metadata", {}) + + if isinstance(text, dict): + text = text.get("text", "") + + processed_results.append( + { + "page_content": text, + "metadata": metadata, + } + ) + return processed_results + + search_kwargs = {"k": self.number_of_results} + search_type = self.search_type.lower() + + if search_type == "similarity": + results = vector_store.similarity_search(query, **search_kwargs) + return [{"page_content": doc.page_content, "metadata": doc.metadata} for doc in results] + if search_type == "similarity_score_threshold": + search_kwargs["score_threshold"] = self.search_score_threshold + results = vector_store.similarity_search_with_relevance_scores(query, **search_kwargs) + return [ + { + "page_content": doc.page_content, + "metadata": doc.metadata, + "score": score, + } + for doc, score in results + ] + if search_type == "mmr": + results = vector_store.max_marginal_relevance_search(query, **search_kwargs) + return [{"page_content": doc.page_content, "metadata": 
doc.metadata} for doc in results] + + except Exception as e: + error_message = f"Error during search: {e}" + logger.exception(error_message) + raise RuntimeError(error_message) from e + + error_message = f"Error during search. Invalid search type: {self.search_type}" + logger.error(error_message) + raise ValueError(error_message) + + def search_documents(self) -> list[Data]: + """Search for documents in the vector store based on the search input. + + If no search input is provided, retrieve all documents. + """ + try: + query = self.search_input.strip() if self.search_input else None + results = self.search(query) + retrieved_data = [ + Data( + file_path=result["metadata"].get("file_path", ""), + text=result["page_content"], + ) + for result in results + ] + except Exception as e: + error_message = f"Error during document search: {e}" + logger.exception(error_message) + raise RuntimeError(error_message) from e + + self.status = retrieved_data + return retrieved_data diff --git a/src/backend/base/langflow/components/vectorstores/pgvector.py b/src/backend/base/langflow/components/vectorstores/pgvector.py index 8a577cee9861..cb3c8838cc52 100644 --- a/src/backend/base/langflow/components/vectorstores/pgvector.py +++ b/src/backend/base/langflow/components/vectorstores/pgvector.py @@ -1,10 +1,8 @@ -from typing import List - from langchain_community.vectorstores import PGVector from langflow.base.vectorstores.model import LCVectorStoreComponent, check_cached_vector_store from langflow.helpers.data import docs_to_data -from langflow.io import HandleInput, IntInput, StrInput, SecretStrInput, DataInput, MultilineInput +from langflow.io import DataInput, HandleInput, IntInput, MultilineInput, SecretStrInput, StrInput from langflow.schema import Data from langflow.utils.connection_string_parser import transform_connection_string @@ -63,7 +61,7 @@ def build_vector_store(self) -> PGVector: return pgvector - def search_documents(self) -> List[Data]: + def search_documents(self) -> list[Data]: vector_store = self.build_vector_store() if self.search_query and isinstance(self.search_query, str) and self.search_query.strip(): @@ -75,5 +73,4 @@ def search_documents(self) -> List[Data]: data = docs_to_data(docs) self.status = data return data - else: - return [] + return [] diff --git a/src/backend/base/langflow/components/vectorstores/pinecone.py b/src/backend/base/langflow/components/vectorstores/pinecone.py new file mode 100644 index 000000000000..17eafefe58aa --- /dev/null +++ b/src/backend/base/langflow/components/vectorstores/pinecone.py @@ -0,0 +1,130 @@ +import numpy as np +from langchain_pinecone import Pinecone + +from langflow.base.vectorstores.model import LCVectorStoreComponent, check_cached_vector_store +from langflow.helpers.data import docs_to_data +from langflow.io import DataInput, DropdownInput, HandleInput, IntInput, MultilineInput, SecretStrInput, StrInput +from langflow.schema import Data + + +class PineconeVectorStoreComponent(LCVectorStoreComponent): + display_name = "Pinecone" + description = "Pinecone Vector Store with search capabilities" + documentation = "https://python.langchain.com/v0.2/docs/integrations/vectorstores/pinecone/" + name = "Pinecone" + icon = "Pinecone" + inputs = [ + StrInput(name="index_name", display_name="Index Name", required=True), + StrInput(name="namespace", display_name="Namespace", info="Namespace for the index."), + DropdownInput( + name="distance_strategy", + display_name="Distance Strategy", + options=["Cosine", "Euclidean", "Dot Product"], + 
value="Cosine", + advanced=True, + ), + SecretStrInput(name="pinecone_api_key", display_name="Pinecone API Key", required=True), + StrInput( + name="text_key", + display_name="Text Key", + info="Key in the record to use as text.", + value="text", + advanced=True, + ), + MultilineInput(name="search_query", display_name="Search Query"), + DataInput( + name="ingest_data", + display_name="Ingest Data", + is_list=True, + ), + HandleInput(name="embedding", display_name="Embedding", input_types=["Embeddings"]), + IntInput( + name="number_of_results", + display_name="Number of Results", + info="Number of results to return.", + value=4, + advanced=True, + ), + ] + + @check_cached_vector_store + def build_vector_store(self) -> Pinecone: + """Build and return a Pinecone vector store instance.""" + try: + from langchain_pinecone._utilities import DistanceStrategy + + # Wrap the embedding model to ensure float32 output + wrapped_embeddings = Float32Embeddings(self.embedding) + + # Convert distance strategy + distance_strategy = self.distance_strategy.replace(" ", "_").upper() + distance_strategy = DistanceStrategy[distance_strategy] + + # Initialize Pinecone instance with wrapped embeddings + pinecone = Pinecone( + index_name=self.index_name, + embedding=wrapped_embeddings, # Use wrapped embeddings + text_key=self.text_key, + namespace=self.namespace, + distance_strategy=distance_strategy, + pinecone_api_key=self.pinecone_api_key, + ) + except Exception as e: + error_msg = "Error building Pinecone vector store" + raise ValueError(error_msg) from e + else: + # Process documents if any + documents = [] + if self.ingest_data: + for doc in self.ingest_data: + if isinstance(doc, Data): + documents.append(doc.to_lc_document()) + else: + documents.append(doc) + + if documents: + pinecone.add_documents(documents) + + return pinecone + + def search_documents(self) -> list[Data]: + """Search documents in the vector store.""" + try: + if not self.search_query or not isinstance(self.search_query, str) or not self.search_query.strip(): + return [] + + vector_store = self.build_vector_store() + docs = vector_store.similarity_search( + query=self.search_query, + k=self.number_of_results, + ) + except Exception as e: + error_msg = "Error searching documents" + raise ValueError(error_msg) from e + else: + data = docs_to_data(docs) + self.status = data + return data + + +class Float32Embeddings: + """Wrapper class to ensure float32 embeddings.""" + + def __init__(self, base_embeddings): + self.base_embeddings = base_embeddings + + def embed_documents(self, texts): + embeddings = self.base_embeddings.embed_documents(texts) + if isinstance(embeddings, np.ndarray): + return [[self._force_float32(x) for x in vec] for vec in embeddings] + return [[self._force_float32(x) for x in vec] for vec in embeddings] + + def embed_query(self, text): + embedding = self.base_embeddings.embed_query(text) + if isinstance(embedding, np.ndarray): + return [self._force_float32(x) for x in embedding] + return [self._force_float32(x) for x in embedding] + + def _force_float32(self, value): + """Convert any numeric type to Python float.""" + return float(np.float32(value)) diff --git a/src/backend/base/langflow/components/vectorstores/qdrant.py b/src/backend/base/langflow/components/vectorstores/qdrant.py new file mode 100644 index 000000000000..bc025f704a8b --- /dev/null +++ b/src/backend/base/langflow/components/vectorstores/qdrant.py @@ -0,0 +1,114 @@ +from langchain.embeddings.base import Embeddings +from langchain_community.vectorstores 
import Qdrant + +from langflow.base.vectorstores.model import LCVectorStoreComponent, check_cached_vector_store +from langflow.helpers.data import docs_to_data +from langflow.io import ( + DataInput, + DropdownInput, + HandleInput, + IntInput, + MultilineInput, + SecretStrInput, + StrInput, +) +from langflow.schema import Data + + +class QdrantVectorStoreComponent(LCVectorStoreComponent): + display_name = "Qdrant" + description = "Qdrant Vector Store with search capabilities" + documentation = "https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/qdrant" + icon = "Qdrant" + + inputs = [ + StrInput(name="collection_name", display_name="Collection Name", required=True), + StrInput(name="host", display_name="Host", value="localhost", advanced=True), + IntInput(name="port", display_name="Port", value=6333, advanced=True), + IntInput(name="grpc_port", display_name="gRPC Port", value=6334, advanced=True), + SecretStrInput(name="api_key", display_name="API Key", advanced=True), + StrInput(name="prefix", display_name="Prefix", advanced=True), + IntInput(name="timeout", display_name="Timeout", advanced=True), + StrInput(name="path", display_name="Path", advanced=True), + StrInput(name="url", display_name="URL", advanced=True), + DropdownInput( + name="distance_func", + display_name="Distance Function", + options=["Cosine", "Euclidean", "Dot Product"], + value="Cosine", + advanced=True, + ), + StrInput(name="content_payload_key", display_name="Content Payload Key", value="page_content", advanced=True), + StrInput(name="metadata_payload_key", display_name="Metadata Payload Key", value="metadata", advanced=True), + MultilineInput(name="search_query", display_name="Search Query"), + DataInput( + name="ingest_data", + display_name="Ingest Data", + is_list=True, + ), + HandleInput(name="embedding", display_name="Embedding", input_types=["Embeddings"]), + IntInput( + name="number_of_results", + display_name="Number of Results", + info="Number of results to return.", + value=4, + advanced=True, + ), + ] + + @check_cached_vector_store + def build_vector_store(self) -> Qdrant: + qdrant_kwargs = { + "collection_name": self.collection_name, + "content_payload_key": self.content_payload_key, + "metadata_payload_key": self.metadata_payload_key, + } + + server_kwargs = { + "host": self.host or None, + "port": int(self.port), # Ensure port is an integer + "grpc_port": int(self.grpc_port), # Ensure grpc_port is an integer + "api_key": self.api_key, + "prefix": self.prefix, + # Ensure timeout is an integer + "timeout": int(self.timeout) if self.timeout else None, + "path": self.path or None, + "url": self.url or None, + } + + server_kwargs = {k: v for k, v in server_kwargs.items() if v is not None} + documents = [] + + for _input in self.ingest_data or []: + if isinstance(_input, Data): + documents.append(_input.to_lc_document()) + else: + documents.append(_input) + + if not isinstance(self.embedding, Embeddings): + msg = "Invalid embedding object" + raise TypeError(msg) + + if documents: + qdrant = Qdrant.from_documents(documents, embedding=self.embedding, **qdrant_kwargs, **server_kwargs) + else: + from qdrant_client import QdrantClient + + client = QdrantClient(**server_kwargs) + qdrant = Qdrant(embeddings=self.embedding, client=client, **qdrant_kwargs) + + return qdrant + + def search_documents(self) -> list[Data]: + vector_store = self.build_vector_store() + + if self.search_query and isinstance(self.search_query, str) and self.search_query.strip(): + docs = 
vector_store.similarity_search( + query=self.search_query, + k=self.number_of_results, + ) + + data = docs_to_data(docs) + self.status = data + return data + return [] diff --git a/src/backend/base/langflow/components/vectorstores/redis.py b/src/backend/base/langflow/components/vectorstores/redis.py new file mode 100644 index 000000000000..9e25f0bb227a --- /dev/null +++ b/src/backend/base/langflow/components/vectorstores/redis.py @@ -0,0 +1,92 @@ +from pathlib import Path + +from langchain.text_splitter import CharacterTextSplitter +from langchain_community.vectorstores.redis import Redis + +from langflow.base.vectorstores.model import LCVectorStoreComponent, check_cached_vector_store +from langflow.helpers.data import docs_to_data +from langflow.io import DataInput, HandleInput, IntInput, MultilineInput, SecretStrInput, StrInput +from langflow.schema import Data + + +class RedisVectorStoreComponent(LCVectorStoreComponent): + """A custom component for implementing a Vector Store using Redis.""" + + display_name: str = "Redis" + description: str = "Implementation of Vector Store using Redis" + documentation = "https://python.langchain.com/docs/integrations/vectorstores/redis" + name = "Redis" + + inputs = [ + SecretStrInput(name="redis_server_url", display_name="Redis Server Connection String", required=True), + StrInput( + name="redis_index_name", + display_name="Redis Index", + ), + StrInput(name="code", display_name="Code", advanced=True), + StrInput( + name="schema", + display_name="Schema", + ), + MultilineInput(name="search_query", display_name="Search Query"), + DataInput( + name="ingest_data", + display_name="Ingest Data", + is_list=True, + ), + IntInput( + name="number_of_results", + display_name="Number of Results", + info="Number of results to return.", + value=4, + advanced=True, + ), + HandleInput(name="embedding", display_name="Embedding", input_types=["Embeddings"]), + ] + + @check_cached_vector_store + def build_vector_store(self) -> Redis: + documents = [] + + for _input in self.ingest_data or []: + if isinstance(_input, Data): + documents.append(_input.to_lc_document()) + else: + documents.append(_input) + Path("docuemnts.txt").write_text(str(documents), encoding="utf-8") + + if not documents: + if self.schema is None: + msg = "If no documents are provided, a schema must be provided." 
+ raise ValueError(msg) + redis_vs = Redis.from_existing_index( + embedding=self.embedding, + index_name=self.redis_index_name, + schema=self.schema, + key_prefix=None, + redis_url=self.redis_server_url, + ) + else: + text_splitter = CharacterTextSplitter(chunk_size=1000, chunk_overlap=0) + docs = text_splitter.split_documents(documents) + redis_vs = Redis.from_documents( + documents=docs, + embedding=self.embedding, + redis_url=self.redis_server_url, + index_name=self.redis_index_name, + ) + return redis_vs + + def search_documents(self) -> list[Data]: + vector_store = self.build_vector_store() + + if self.search_query and isinstance(self.search_query, str) and self.search_query.strip(): + docs = vector_store.similarity_search( + query=self.search_query, + k=self.number_of_results, + ) + + data = docs_to_data(docs) + self.status = data + return data + return [] diff --git a/src/backend/base/langflow/components/vectorstores/supabase.py b/src/backend/base/langflow/components/vectorstores/supabase.py new file mode 100644 index 000000000000..b5da646a60ea --- /dev/null +++ b/src/backend/base/langflow/components/vectorstores/supabase.py @@ -0,0 +1,79 @@ +from langchain_community.vectorstores import SupabaseVectorStore +from supabase.client import Client, create_client + +from langflow.base.vectorstores.model import LCVectorStoreComponent, check_cached_vector_store +from langflow.helpers.data import docs_to_data +from langflow.io import DataInput, HandleInput, IntInput, MultilineInput, SecretStrInput, StrInput +from langflow.schema import Data + + +class SupabaseVectorStoreComponent(LCVectorStoreComponent): + display_name = "Supabase" + description = "Supabase Vector Store with search capabilities" + documentation = "https://python.langchain.com/v0.2/docs/integrations/vectorstores/supabase/" + name = "SupabaseVectorStore" + icon = "Supabase" + + inputs = [ + StrInput(name="supabase_url", display_name="Supabase URL", required=True), + SecretStrInput(name="supabase_service_key", display_name="Supabase Service Key", required=True), + StrInput(name="table_name", display_name="Table Name", advanced=True), + StrInput(name="query_name", display_name="Query Name"), + MultilineInput(name="search_query", display_name="Search Query"), + DataInput( + name="ingest_data", + display_name="Ingest Data", + is_list=True, + ), + HandleInput(name="embedding", display_name="Embedding", input_types=["Embeddings"]), + IntInput( + name="number_of_results", + display_name="Number of Results", + info="Number of results to return.", + value=4, + advanced=True, + ), + ] + + @check_cached_vector_store + def build_vector_store(self) -> SupabaseVectorStore: + supabase: Client = create_client(self.supabase_url, supabase_key=self.supabase_service_key) + + documents = [] + for _input in self.ingest_data or []: + if isinstance(_input, Data): + documents.append(_input.to_lc_document()) + else: + documents.append(_input) + + if documents: + supabase_vs = SupabaseVectorStore.from_documents( + documents=documents, + embedding=self.embedding, + query_name=self.query_name, + client=supabase, + table_name=self.table_name, + ) + else: + supabase_vs = SupabaseVectorStore( + client=supabase, + embedding=self.embedding, + table_name=self.table_name, + query_name=self.query_name, + ) + + return supabase_vs + + def search_documents(self) -> list[Data]: + vector_store = self.build_vector_store() + + if self.search_query and isinstance(self.search_query, str) and self.search_query.strip(): + docs = vector_store.similarity_search( + 
query=self.search_query, + k=self.number_of_results, + ) + + data = docs_to_data(docs) + self.status = data + return data + return [] diff --git a/src/backend/base/langflow/components/vectorstores/upstash.py b/src/backend/base/langflow/components/vectorstores/upstash.py new file mode 100644 index 000000000000..49001a8083bc --- /dev/null +++ b/src/backend/base/langflow/components/vectorstores/upstash.py @@ -0,0 +1,128 @@ +from langchain_community.vectorstores import UpstashVectorStore + +from langflow.base.vectorstores.model import LCVectorStoreComponent, check_cached_vector_store +from langflow.helpers.data import docs_to_data +from langflow.io import ( + DataInput, + HandleInput, + IntInput, + MultilineInput, + SecretStrInput, + StrInput, +) +from langflow.schema import Data + + +class UpstashVectorStoreComponent(LCVectorStoreComponent): + display_name = "Upstash" + description = "Upstash Vector Store with search capabilities" + documentation = "https://python.langchain.com/v0.2/docs/integrations/vectorstores/upstash/" + name = "Upstash" + icon = "Upstash" + + inputs = [ + StrInput( + name="index_url", + display_name="Index URL", + info="The URL of the Upstash index.", + required=True, + ), + SecretStrInput( + name="index_token", + display_name="Index Token", + info="The token for the Upstash index.", + required=True, + ), + StrInput( + name="text_key", + display_name="Text Key", + info="The key in the record to use as text.", + value="text", + advanced=True, + ), + StrInput( + name="namespace", + display_name="Namespace", + info="Leave empty for default namespace.", + ), + MultilineInput(name="search_query", display_name="Search Query"), + MultilineInput( + name="metadata_filter", + display_name="Metadata Filter", + info="Filters documents by metadata. 
Look at the documentation for more information.", + ), + DataInput( + name="ingest_data", + display_name="Ingest Data", + is_list=True, + ), + HandleInput( + name="embedding", + display_name="Embedding", + input_types=["Embeddings"], + info="To use Upstash's embeddings, don't provide an embedding.", + ), + IntInput( + name="number_of_results", + display_name="Number of Results", + info="Number of results to return.", + value=4, + advanced=True, + ), + ] + + @check_cached_vector_store + def build_vector_store(self) -> UpstashVectorStore: + use_upstash_embedding = self.embedding is None + + documents = [] + for _input in self.ingest_data or []: + if isinstance(_input, Data): + documents.append(_input.to_lc_document()) + else: + documents.append(_input) + + if documents: + if use_upstash_embedding: + upstash_vs = UpstashVectorStore( + embedding=use_upstash_embedding, + text_key=self.text_key, + index_url=self.index_url, + index_token=self.index_token, + namespace=self.namespace, + ) + upstash_vs.add_documents(documents) + else: + upstash_vs = UpstashVectorStore.from_documents( + documents=documents, + embedding=self.embedding, + text_key=self.text_key, + index_url=self.index_url, + index_token=self.index_token, + namespace=self.namespace, + ) + else: + upstash_vs = UpstashVectorStore( + embedding=self.embedding or use_upstash_embedding, + text_key=self.text_key, + index_url=self.index_url, + index_token=self.index_token, + namespace=self.namespace, + ) + + return upstash_vs + + def search_documents(self) -> list[Data]: + vector_store = self.build_vector_store() + + if self.search_query and isinstance(self.search_query, str) and self.search_query.strip(): + docs = vector_store.similarity_search( + query=self.search_query, + k=self.number_of_results, + filter=self.metadata_filter, + ) + + data = docs_to_data(docs) + self.status = data + return data + return [] diff --git a/src/backend/base/langflow/components/vectorstores/vectara.py b/src/backend/base/langflow/components/vectorstores/vectara.py new file mode 100644 index 000000000000..697bd4471c6e --- /dev/null +++ b/src/backend/base/langflow/components/vectorstores/vectara.py @@ -0,0 +1,104 @@ +from typing import TYPE_CHECKING + +from langchain_community.vectorstores import Vectara +from loguru import logger + +from langflow.base.vectorstores.model import LCVectorStoreComponent, check_cached_vector_store +from langflow.helpers.data import docs_to_data +from langflow.io import HandleInput, IntInput, MessageTextInput, SecretStrInput, StrInput +from langflow.schema import Data + +if TYPE_CHECKING: + from langchain_community.vectorstores import Vectara + + +class VectaraVectorStoreComponent(LCVectorStoreComponent): + """Vectara Vector Store with search capabilities.""" + + display_name: str = "Vectara" + description: str = "Vectara Vector Store with search capabilities" + documentation = "https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/vectara" + name = "Vectara" + icon = "Vectara" + + inputs = [ + StrInput(name="vectara_customer_id", display_name="Vectara Customer ID", required=True), + StrInput(name="vectara_corpus_id", display_name="Vectara Corpus ID", required=True), + SecretStrInput(name="vectara_api_key", display_name="Vectara API Key", required=True), + HandleInput( + name="embedding", + display_name="Embedding", + input_types=["Embeddings"], + ), + HandleInput( + name="ingest_data", + display_name="Ingest Data", + input_types=["Document", "Data"], + is_list=True, + ), + MessageTextInput( + 
name="search_query", + display_name="Search Query", + ), + IntInput( + name="number_of_results", + display_name="Number of Results", + info="Number of results to return.", + value=4, + advanced=True, + ), + ] + + @check_cached_vector_store + def build_vector_store(self) -> "Vectara": + """Builds the Vectara object.""" + try: + from langchain_community.vectorstores import Vectara + except ImportError as e: + msg = "Could not import Vectara. Please install it with `pip install langchain-community`." + raise ImportError(msg) from e + + vectara = Vectara( + vectara_customer_id=self.vectara_customer_id, + vectara_corpus_id=self.vectara_corpus_id, + vectara_api_key=self.vectara_api_key, + ) + + self._add_documents_to_vector_store(vectara) + return vectara + + def _add_documents_to_vector_store(self, vector_store: "Vectara") -> None: + """Adds documents to the Vector Store.""" + if not self.ingest_data: + self.status = "No documents to add to Vectara" + return + + documents = [] + for _input in self.ingest_data or []: + if isinstance(_input, Data): + documents.append(_input.to_lc_document()) + else: + documents.append(_input) + + if documents: + logger.debug(f"Adding {len(documents)} documents to Vectara.") + vector_store.add_documents(documents) + self.status = f"Added {len(documents)} documents to Vectara" + else: + logger.debug("No documents to add to Vectara.") + self.status = "No valid documents to add to Vectara" + + def search_documents(self) -> list[Data]: + vector_store = self.build_vector_store() + + if self.search_query and isinstance(self.search_query, str) and self.search_query.strip(): + docs = vector_store.similarity_search( + query=self.search_query, + k=self.number_of_results, + ) + + data = docs_to_data(docs) + self.status = f"Found {len(data)} results for the query: {self.search_query}" + return data + self.status = "No search query provided" + return [] diff --git a/src/backend/base/langflow/components/vectorstores/vectara_rag.py b/src/backend/base/langflow/components/vectorstores/vectara_rag.py index c620eb66948d..3ec48e98b355 100644 --- a/src/backend/base/langflow/components/vectorstores/vectara_rag.py +++ b/src/backend/base/langflow/components/vectorstores/vectara_rag.py @@ -1,6 +1,6 @@ from langflow.custom import Component from langflow.field_typing.range_spec import RangeSpec -from langflow.io import DropdownInput, FloatInput, IntInput, MessageTextInput, StrInput, SecretStrInput, Output +from langflow.io import DropdownInput, FloatInput, IntInput, MessageTextInput, Output, SecretStrInput, StrInput from langflow.schema.message import Message @@ -65,7 +65,8 @@ class VectaraRagComponent(Component): range_spec=RangeSpec(min=0.005, max=0.1, step=0.005), value=0.005, advanced=True, - info="How much to weigh lexical scores compared to the embedding score. 0 means lexical search is not used at all, and 1 means only lexical search is used.", + info="How much to weigh lexical scores compared to the embedding score. 
" + "0 means lexical search is not used at all, and 1 means only lexical search is used.", ), MessageTextInput( name="filter", @@ -118,7 +119,8 @@ class VectaraRagComponent(Component): options=SUMMARIZER_PROMPTS, value=SUMMARIZER_PROMPTS[0], advanced=True, - info="Only vectara-summary-ext-24-05-sml is for Growth customers; all other prompts are for Scale customers only.", + info="Only vectara-summary-ext-24-05-sml is for Growth customers; " + "all other prompts are for Scale customers only.", ), ] @@ -134,8 +136,9 @@ def generate_response( try: from langchain_community.vectorstores import Vectara from langchain_community.vectorstores.vectara import RerankConfig, SummaryConfig, VectaraQueryConfig - except ImportError: - raise ImportError("Could not import Vectara. Please install it with `pip install langchain-community`.") + except ImportError as e: + msg = "Could not import Vectara. Please install it with `pip install langchain-community`." + raise ImportError(msg) from e vectara = Vectara(self.vectara_customer_id, self.vectara_corpus_id, self.vectara_api_key) rerank_config = RerankConfig(self.reranker, self.reranker_k, self.diversity_bias) diff --git a/src/backend/base/langflow/components/vectorstores/vectara_self_query.py b/src/backend/base/langflow/components/vectorstores/vectara_self_query.py new file mode 100644 index 000000000000..d53d499a3f9c --- /dev/null +++ b/src/backend/base/langflow/components/vectorstores/vectara_self_query.py @@ -0,0 +1,67 @@ +import json +from typing import cast + +from langchain.chains.query_constructor.base import AttributeInfo +from langchain.retrievers.self_query.base import SelfQueryRetriever +from langchain_core.vectorstores import VectorStore + +from langflow.custom import CustomComponent +from langflow.field_typing import Retriever +from langflow.field_typing.constants import LanguageModel + + +class VectaraSelfQueryRetriverComponent(CustomComponent): + """A custom component for implementing Vectara Self Query Retriever using a vector store.""" + + display_name: str = "Vectara Self Query Retriever for Vectara Vector Store" + description: str = "Implementation of Vectara Self Query Retriever" + documentation = "https://python.langchain.com/docs/integrations/retrievers/self_query/vectara_self_query" + name = "VectaraSelfQueryRetriver" + icon = "Vectara" + legacy = True + + field_config = { + "code": {"show": True}, + "vectorstore": {"display_name": "Vector Store", "info": "Input Vectara Vectore Store"}, + "llm": {"display_name": "LLM", "info": "For self query retriever"}, + "document_content_description": { + "display_name": "Document Content Description", + "info": "For self query retriever", + }, + "metadata_field_info": { + "display_name": "Metadata Field Info", + "info": "Each metadata field info is a string in the form of key value pair dictionary containing " + "additional search metadata.\n" + 'Example input: {"name":"speech","description":"what name of the speech","type":' + '"string or list[string]"}.\n' + "The keys should remain constant(name, description, type)", + }, + } + + def build( + self, + vectorstore: VectorStore, + document_content_description: str, + llm: LanguageModel, + metadata_field_info: list[str], + ) -> Retriever: + metadata_field_obj = [] + + for meta in metadata_field_info: + meta_obj = json.loads(meta) + if "name" not in meta_obj or "description" not in meta_obj or "type" not in meta_obj: + msg = "Incorrect metadata field info format." 
+ raise ValueError(msg) + attribute_info = AttributeInfo( + name=meta_obj["name"], + description=meta_obj["description"], + type=meta_obj["type"], + ) + metadata_field_obj.append(attribute_info) + + return cast( + Retriever, + SelfQueryRetriever.from_llm( + llm, vectorstore, document_content_description, metadata_field_obj, verbose=True + ), + ) diff --git a/src/backend/base/langflow/components/vectorstores/weaviate.py b/src/backend/base/langflow/components/vectorstores/weaviate.py new file mode 100644 index 000000000000..ef01bc4443df --- /dev/null +++ b/src/backend/base/langflow/components/vectorstores/weaviate.py @@ -0,0 +1,92 @@ +import weaviate +from langchain_community.vectorstores import Weaviate + +from langflow.base.vectorstores.model import LCVectorStoreComponent, check_cached_vector_store +from langflow.helpers.data import docs_to_data +from langflow.io import BoolInput, DataInput, HandleInput, IntInput, MultilineInput, SecretStrInput, StrInput +from langflow.schema import Data + + +class WeaviateVectorStoreComponent(LCVectorStoreComponent): + display_name = "Weaviate" + description = "Weaviate Vector Store with search capabilities" + documentation = "https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/weaviate" + name = "Weaviate" + icon = "Weaviate" + + inputs = [ + StrInput(name="url", display_name="Weaviate URL", value="http://localhost:8080", required=True), + SecretStrInput(name="api_key", display_name="API Key", required=False), + StrInput( + name="index_name", + display_name="Index Name", + required=True, + info="Requires capitalized index name.", + ), + StrInput(name="text_key", display_name="Text Key", value="text", advanced=True), + MultilineInput(name="search_query", display_name="Search Query"), + DataInput( + name="ingest_data", + display_name="Ingest Data", + is_list=True, + ), + HandleInput(name="embedding", display_name="Embedding", input_types=["Embeddings"]), + IntInput( + name="number_of_results", + display_name="Number of Results", + info="Number of results to return.", + value=4, + advanced=True, + ), + BoolInput(name="search_by_text", display_name="Search By Text", advanced=True), + ] + + @check_cached_vector_store + def build_vector_store(self) -> Weaviate: + if self.api_key: + auth_config = weaviate.AuthApiKey(api_key=self.api_key) + client = weaviate.Client(url=self.url, auth_client_secret=auth_config) + else: + client = weaviate.Client(url=self.url) + + if self.index_name != self.index_name.capitalize(): + msg = f"Weaviate requires the index name to be capitalized. 
Use: {self.index_name.capitalize()}" + raise ValueError(msg) + + documents = [] + for _input in self.ingest_data or []: + if isinstance(_input, Data): + documents.append(_input.to_lc_document()) + else: + documents.append(_input) + + if documents and self.embedding: + return Weaviate.from_documents( + client=client, + index_name=self.index_name, + documents=documents, + embedding=self.embedding, + by_text=self.search_by_text, + ) + + return Weaviate( + client=client, + index_name=self.index_name, + text_key=self.text_key, + embedding=self.embedding, + by_text=self.search_by_text, + ) + + def search_documents(self) -> list[Data]: + vector_store = self.build_vector_store() + + if self.search_query and isinstance(self.search_query, str) and self.search_query.strip(): + docs = vector_store.similarity_search( + query=self.search_query, + k=self.number_of_results, + ) + + data = docs_to_data(docs) + self.status = data + return data + return [] diff --git a/src/backend/base/langflow/core/celery_app.py b/src/backend/base/langflow/core/celery_app.py index ef3fc6545610..74e29c5e08a4 100644 --- a/src/backend/base/langflow/core/celery_app.py +++ b/src/backend/base/langflow/core/celery_app.py @@ -1,4 +1,4 @@ -from celery import Celery # type: ignore +from celery import Celery def make_celery(app_name: str, config: str) -> Celery: diff --git a/src/backend/base/langflow/custom/__init__.py b/src/backend/base/langflow/custom/__init__.py index 0ba9d831da8c..55a2b1973425 100644 --- a/src/backend/base/langflow/custom/__init__.py +++ b/src/backend/base/langflow/custom/__init__.py @@ -1,4 +1,4 @@ from langflow.custom.custom_component.component import Component from langflow.custom.custom_component.custom_component import CustomComponent -__all__ = ["CustomComponent", "Component"] +__all__ = ["Component", "CustomComponent"] diff --git a/src/backend/base/langflow/custom/attributes.py b/src/backend/base/langflow/custom/attributes.py index d96500c78c96..800533921214 100644 --- a/src/backend/base/langflow/custom/attributes.py +++ b/src/backend/base/langflow/custom/attributes.py @@ -1,23 +1,24 @@ -import warnings -from typing import Callable +from collections.abc import Callable import emoji +from loguru import logger -def validate_icon(value: str, *args, **kwargs): +def validate_icon(value: str): # we are going to use the emoji library to validate the emoji # emojis can be defined using the :emoji_name: syntax if not value.startswith(":") and not value.endswith(":"): return value - elif not value.startswith(":") or not value.endswith(":"): + if not value.startswith(":") or not value.endswith(":"): # emoji should have both starting and ending colons # so if one of them is missing, we will raise - raise ValueError(f"Invalid emoji. {value} is not a valid emoji.") + msg = f"Invalid emoji. {value} is not a valid emoji." + raise ValueError(msg) emoji_value = emoji.emojize(value, variant="emoji_type") if value == emoji_value: - warnings.warn(f"Invalid emoji. {value} is not a valid emoji.") + logger.warning(f"Invalid emoji. 
{value} is not a valid emoji.") return value return emoji_value @@ -29,6 +30,7 @@ def getattr_return_str(value): def getattr_return_bool(value): if isinstance(value, bool): return value + return None def getattr_return_list_of_str(value): @@ -43,16 +45,33 @@ def getattr_return_list_of_object(value): return [] +def getattr_return_list_of_values_from_dict(value): + if isinstance(value, dict): + return list(value.values()) + return [] + + +def getattr_return_dict(value): + if isinstance(value, dict): + return value + return {} + + ATTR_FUNC_MAPPING: dict[str, Callable] = { "display_name": getattr_return_str, "description": getattr_return_str, "beta": getattr_return_bool, + "legacy": getattr_return_bool, "documentation": getattr_return_str, "icon": validate_icon, "frozen": getattr_return_bool, "is_input": getattr_return_bool, "is_output": getattr_return_bool, "conditional_paths": getattr_return_list_of_str, + "_outputs_map": getattr_return_list_of_values_from_dict, + "_inputs": getattr_return_list_of_values_from_dict, "outputs": getattr_return_list_of_object, "inputs": getattr_return_list_of_object, + "metadata": getattr_return_dict, + "tool_mode": getattr_return_bool, } diff --git a/src/backend/base/langflow/custom/code_parser/code_parser.py b/src/backend/base/langflow/custom/code_parser/code_parser.py index a5d8c5595449..72522d252c7d 100644 --- a/src/backend/base/langflow/custom/code_parser/code_parser.py +++ b/src/backend/base/langflow/custom/code_parser/code_parser.py @@ -1,7 +1,10 @@ import ast +import contextlib import inspect import traceback -from typing import Any, Dict, List, Type, Union +from itertools import starmap +from pathlib import Path +from typing import Any from cachetools import TTLCache, keys from fastapi import HTTPException @@ -29,8 +32,7 @@ def find_class_ast_node(class_obj): return None, [] # Read the source code from the file - with open(source_file, "r") as file: - source_code = file.read() + source_code = Path(source_file).read_text(encoding="utf-8") # Parse the source code into an AST tree = ast.parse(source_code) @@ -41,7 +43,7 @@ def find_class_ast_node(class_obj): for node in ast.walk(tree): if isinstance(node, ast.ClassDef) and node.name == class_obj.__name__: class_node = node - elif isinstance(node, (ast.Import, ast.ImportFrom)): + elif isinstance(node, ast.Import | ast.ImportFrom): import_nodes.append(node) return class_node, import_nodes @@ -55,22 +57,19 @@ def imports_key(*args, **kwargs): class CodeParser: - """ - A parser for Python source code, extracting code details. - """ + """A parser for Python source code, extracting code details.""" - def __init__(self, code: Union[str, Type]) -> None: - """ - Initializes the parser with the provided code. - """ + def __init__(self, code: str | type) -> None: + """Initializes the parser with the provided code.""" self.cache: TTLCache = TTLCache(maxsize=1024, ttl=60) if isinstance(code, type): if not inspect.isclass(code): - raise ValueError("The provided code must be a class.") + msg = "The provided code must be a class." + raise ValueError(msg) # If the code is a class, get its source code code = inspect.getsource(code) self.code = code - self.data: Dict[str, Any] = { + self.data: dict[str, Any] = { "imports": [], "functions": [], "classes": [], @@ -85,8 +84,8 @@ def __init__(self, code: Union[str, Type]) -> None: } def get_tree(self): - """ - Parses the provided code to validate its syntax. + """Parses the provided code to validate its syntax. + It tries to parse the code into an abstract syntax tree (AST). 
""" try: @@ -99,18 +98,13 @@ def get_tree(self): return tree - def parse_node(self, node: Union[ast.stmt, ast.AST]) -> None: - """ - Parses an AST node and updates the data - dictionary with the relevant information. - """ - if handler := self.handlers.get(type(node)): # type: ignore - handler(node) # type: ignore + def parse_node(self, node: ast.stmt | ast.AST) -> None: + """Parses an AST node and updates the data dictionary with the relevant information.""" + if handler := self.handlers.get(type(node)): + handler(node) # type: ignore[operator] - def parse_imports(self, node: Union[ast.Import, ast.ImportFrom]) -> None: - """ - Extracts "imports" from the code, including aliases. - """ + def parse_imports(self, node: ast.Import | ast.ImportFrom) -> None: + """Extracts "imports" from the code, including aliases.""" if isinstance(node, ast.Import): for alias in node.names: if alias.asname: @@ -125,15 +119,11 @@ def parse_imports(self, node: Union[ast.Import, ast.ImportFrom]) -> None: self.data["imports"].append((node.module, alias.name)) def parse_functions(self, node: ast.FunctionDef) -> None: - """ - Extracts "functions" from the code. - """ + """Extracts "functions" from the code.""" self.data["functions"].append(self.parse_callable_details(node)) def parse_arg(self, arg, default): - """ - Parses an argument and its default value. - """ + """Parses an argument and its default value.""" arg_dict = {"name": arg.arg, "default": default} if arg.annotation: arg_dict["type"] = ast.unparse(arg.annotation) @@ -141,7 +131,8 @@ def parse_arg(self, arg, default): # @cachedmethod(operator.attrgetter("cache")) def construct_eval_env(self, return_type_str: str, imports) -> dict: - """ + """Constructs an evaluation environment. + Constructs an evaluation environment with the necessary imports for the return type, taking into account module aliases. """ @@ -158,23 +149,19 @@ def construct_eval_env(self, return_type_str: str, imports) -> dict: if " as " in module: module, alias = module.split(" as ") if module in return_type_str or (alias and alias in return_type_str): - exec(f"import {module} as {alias if alias else module}", eval_env) + exec(f"import {module} as {alias or module}", eval_env) return eval_env - def parse_callable_details(self, node: ast.FunctionDef) -> Dict[str, Any]: - """ - Extracts details from a single function or method node. - """ + def parse_callable_details(self, node: ast.FunctionDef) -> dict[str, Any]: + """Extracts details from a single function or method node.""" return_type = None if node.returns: return_type_str = ast.unparse(node.returns) eval_env = self.construct_eval_env(return_type_str, tuple(self.data["imports"])) - try: - return_type = eval(return_type_str, eval_env) - except NameError: - # Handle cases where the type is not found in the constructed environment - pass + # Handle cases where the type is not found in the constructed environment + with contextlib.suppress(NameError): + return_type = eval(return_type_str, eval_env) # noqa: S307 func = CallableCodeDetails( name=node.name, @@ -187,10 +174,8 @@ def parse_callable_details(self, node: ast.FunctionDef) -> Dict[str, Any]: return func.model_dump() - def parse_function_args(self, node: ast.FunctionDef) -> List[Dict[str, Any]]: - """ - Parses the arguments of a function or method node. 
- """ + def parse_function_args(self, node: ast.FunctionDef) -> list[dict[str, Any]]: + """Parses the arguments of a function or method node.""" args = [] args += self.parse_positional_args(node) @@ -202,10 +187,8 @@ def parse_function_args(self, node: ast.FunctionDef) -> List[Dict[str, Any]]: return args - def parse_positional_args(self, node: ast.FunctionDef) -> List[Dict[str, Any]]: - """ - Parses the positional arguments of a function or method node. - """ + def parse_positional_args(self, node: ast.FunctionDef) -> list[dict[str, Any]]: + """Parses the positional arguments of a function or method node.""" num_args = len(node.args.args) num_defaults = len(node.args.defaults) num_missing_defaults = num_args - num_defaults @@ -217,13 +200,10 @@ def parse_positional_args(self, node: ast.FunctionDef) -> List[Dict[str, Any]]: defaults = missing_defaults + default_values - args = [self.parse_arg(arg, default) for arg, default in zip(node.args.args, defaults)] - return args + return list(starmap(self.parse_arg, zip(node.args.args, defaults, strict=True))) - def parse_varargs(self, node: ast.FunctionDef) -> List[Dict[str, Any]]: - """ - Parses the *args argument of a function or method node. - """ + def parse_varargs(self, node: ast.FunctionDef) -> list[dict[str, Any]]: + """Parses the *args argument of a function or method node.""" args = [] if node.args.vararg: @@ -231,21 +211,16 @@ def parse_varargs(self, node: ast.FunctionDef) -> List[Dict[str, Any]]: return args - def parse_keyword_args(self, node: ast.FunctionDef) -> List[Dict[str, Any]]: - """ - Parses the keyword-only arguments of a function or method node. - """ + def parse_keyword_args(self, node: ast.FunctionDef) -> list[dict[str, Any]]: + """Parses the keyword-only arguments of a function or method node.""" kw_defaults = [None] * (len(node.args.kwonlyargs) - len(node.args.kw_defaults)) + [ ast.unparse(default) if default else None for default in node.args.kw_defaults ] - args = [self.parse_arg(arg, default) for arg, default in zip(node.args.kwonlyargs, kw_defaults)] - return args + return list(starmap(self.parse_arg, zip(node.args.kwonlyargs, kw_defaults, strict=True))) - def parse_kwargs(self, node: ast.FunctionDef) -> List[Dict[str, Any]]: - """ - Parses the **kwargs argument of a function or method node. - """ + def parse_kwargs(self, node: ast.FunctionDef) -> list[dict[str, Any]]: + """Parses the **kwargs argument of a function or method node.""" args = [] if node.args.kwarg: @@ -253,86 +228,73 @@ def parse_kwargs(self, node: ast.FunctionDef) -> List[Dict[str, Any]]: return args - def parse_function_body(self, node: ast.FunctionDef) -> List[str]: - """ - Parses the body of a function or method node. - """ + def parse_function_body(self, node: ast.FunctionDef) -> list[str]: + """Parses the body of a function or method node.""" return [ast.unparse(line) for line in node.body] def parse_return_statement(self, node: ast.FunctionDef) -> bool: - """ - Parses the return statement of a function or method node, including nested returns. 
- """ + """Parses the return statement of a function or method node, including nested returns.""" def has_return(node): if isinstance(node, ast.Return): return True - elif isinstance(node, ast.If): + if isinstance(node, ast.If): return any(has_return(child) for child in node.body) or any(has_return(child) for child in node.orelse) - elif isinstance(node, ast.Try): + if isinstance(node, ast.Try): return ( any(has_return(child) for child in node.body) or any(has_return(child) for child in node.handlers) or any(has_return(child) for child in node.finalbody) ) - elif isinstance(node, (ast.For, ast.While)): + if isinstance(node, ast.For | ast.While): return any(has_return(child) for child in node.body) or any(has_return(child) for child in node.orelse) - elif isinstance(node, ast.With): + if isinstance(node, ast.With): return any(has_return(child) for child in node.body) - else: - return False + return False return any(has_return(child) for child in node.body) def parse_assign(self, stmt): - """ - Parses an Assign statement and returns a dictionary - with the target's name and value. - """ + """Parses an Assign statement and returns a dictionary with the target's name and value.""" for target in stmt.targets: if isinstance(target, ast.Name): return {"name": target.id, "value": ast.unparse(stmt.value)} + return None def parse_ann_assign(self, stmt): - """ - Parses an AnnAssign statement and returns a dictionary - with the target's name, value, and annotation. - """ + """Parses an AnnAssign statement and returns a dictionary with the target's name, value, and annotation.""" if isinstance(stmt.target, ast.Name): return { "name": stmt.target.id, "value": ast.unparse(stmt.value) if stmt.value else None, "annotation": ast.unparse(stmt.annotation), } + return None def parse_function_def(self, stmt): - """ - Parses a FunctionDef statement and returns the parsed - method and a boolean indicating if it's an __init__ method. + """Parse a FunctionDef statement. + + Parse a FunctionDef statement and return the parsed method and a boolean indicating if it's an __init__ method. """ method = self.parse_callable_details(stmt) return (method, True) if stmt.name == "__init__" else (method, False) def get_base_classes(self): - """ - Returns the base classes of the custom component class. - """ + """Returns the base classes of the custom component class.""" try: bases = self.execute_and_inspect_classes(self.code) - except Exception as e: + except Exception: # If the code cannot be executed, return an empty list bases = [] - raise e + raise return bases def parse_classes(self, node: ast.ClassDef) -> None: - """ - Extracts "classes" from the code, including inheritance and init methods. 
- """ + """Extracts "classes" from the code, including inheritance and init methods.""" bases = self.get_base_classes() nodes = [] for base in bases: - if base.__name__ == node.name or base.__name__ in ["CustomComponent", "Component", "BaseComponent"]: + if base.__name__ == node.name or base.__name__ in {"CustomComponent", "Component", "BaseComponent"}: continue try: class_node, import_nodes = find_class_ast_node(base) @@ -341,9 +303,8 @@ def parse_classes(self, node: ast.ClassDef) -> None: for import_node in import_nodes: self.parse_imports(import_node) nodes.append(class_node) - except Exception as exc: - logger.error(f"Error finding base class node: {exc}") - pass + except Exception: # noqa: BLE001 + logger.exception("Error finding base class node") nodes.insert(0, node) class_details = ClassCodeDetails( name=node.name, @@ -353,11 +314,11 @@ def parse_classes(self, node: ast.ClassDef) -> None: methods=[], init=None, ) - for node in nodes: - self.process_class_node(node, class_details) + for _node in nodes: + self.process_class_node(_node, class_details) self.data["classes"].append(class_details.model_dump()) - def process_class_node(self, node, class_details): + def process_class_node(self, node, class_details) -> None: for stmt in node.body: if isinstance(stmt, ast.Assign): if attr := self.parse_assign(stmt): @@ -365,7 +326,7 @@ def process_class_node(self, node, class_details): elif isinstance(stmt, ast.AnnAssign): if attr := self.parse_ann_assign(stmt): class_details.attributes.append(attr) - elif isinstance(stmt, (ast.FunctionDef, ast.AsyncFunctionDef)): + elif isinstance(stmt, ast.FunctionDef | ast.AsyncFunctionDef): method, is_init = self.parse_function_def(stmt) if is_init: class_details.init = method @@ -373,9 +334,7 @@ def process_class_node(self, node, class_details): class_details.methods.append(method) def parse_global_vars(self, node: ast.Assign) -> None: - """ - Extracts global variables from the code. - """ + """Extracts global variables from the code.""" global_var = { "targets": [t.id if hasattr(t, "id") else ast.dump(t) for t in node.targets], "value": ast.unparse(node.value), @@ -390,14 +349,11 @@ def execute_and_inspect_classes(self, code: str): bases = [] for base in dunder_class.__bases__: bases.append(base) - for bases_base in base.__bases__: - bases.append(bases_base) + bases.extend(base.__bases__) return bases - def parse_code(self) -> Dict[str, Any]: - """ - Runs all parsing operations and returns the resulting data. - """ + def parse_code(self) -> dict[str, Any]: + """Runs all parsing operations and returns the resulting data.""" tree = self.get_tree() for node in ast.walk(tree): diff --git a/src/backend/base/langflow/custom/custom_component/base_component.py b/src/backend/base/langflow/custom/custom_component/base_component.py index 2403d590a956..5d1d63117b99 100644 --- a/src/backend/base/langflow/custom/custom_component/base_component.py +++ b/src/backend/base/langflow/custom/custom_component/base_component.py @@ -1,10 +1,10 @@ import operator -from typing import Any, ClassVar, Optional +from typing import Any, ClassVar from uuid import UUID -import warnings from cachetools import TTLCache, cachedmethod from fastapi import HTTPException +from loguru import logger from langflow.custom.attributes import ATTR_FUNC_MAPPING from langflow.custom.code_parser import CodeParser @@ -24,24 +24,24 @@ class BaseComponent: ERROR_CODE_NULL: ClassVar[str] = "Python code must be provided." 
ERROR_FUNCTION_ENTRYPOINT_NAME_NULL: ClassVar[str] = "The name of the entrypoint function must be provided." - _code: Optional[str] = None + _code: str | None = None """The code of the component. Defaults to None.""" _function_entrypoint_name: str = "build" field_config: dict = {} - _user_id: Optional[str | UUID] = None + _user_id: str | UUID | None = None _template_config: dict = {} - def __init__(self, **data): - self.cache = TTLCache(maxsize=1024, ttl=60) + def __init__(self, **data) -> None: + self.cache: TTLCache = TTLCache(maxsize=1024, ttl=60) for key, value in data.items(): if key == "user_id": - setattr(self, "_user_id", value) + self._user_id = value else: setattr(self, key, value) - def __setattr__(self, key, value): - if key == "_user_id" and hasattr(self, "_user_id") and getattr(self, "_user_id") is not None: - warnings.warn("user_id is immutable and cannot be changed.") + def __setattr__(self, key, value) -> None: + if key == "_user_id" and self._user_id is not None: + logger.warning("user_id is immutable and cannot be changed.") super().__setattr__(key, value) @cachedmethod(cache=operator.attrgetter("cache")) @@ -69,9 +69,7 @@ def get_function(self): @staticmethod def get_template_config(component): - """ - Gets the template configuration for the custom component itself. - """ + """Gets the template configuration for the custom component itself.""" template_config = {} for attribute, func in ATTR_FUNC_MAPPING.items(): @@ -81,14 +79,13 @@ def get_template_config(component): template_config[attribute] = func(value=value) for key in template_config.copy(): - if key not in ATTR_FUNC_MAPPING.keys(): + if key not in ATTR_FUNC_MAPPING: template_config.pop(key, None) return template_config def build_template_config(self) -> dict: - """ - Builds the template configuration for the custom component. + """Builds the template configuration for the custom component. Returns: A dictionary representing the template configuration. 
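# Illustrative sketch (editor annotation, not part of the patch): given a
# hypothetical component class, get_template_config above keeps only attributes
# registered in ATTR_FUNC_MAPPING, each normalized by its getter:
#
#     class MyComponent(Component):        # hypothetical example class
#         display_name = "My Component"
#         beta = True
#         metadata = {"author": "me"}
#
#     BaseComponent.get_template_config(MyComponent())
#     # -> {"display_name": "My Component", "beta": True, "metadata": {"author": "me"}, ...}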
@@ -98,8 +95,7 @@ def build_template_config(self) -> dict: cc_class = eval_custom_component_code(self._code) component_instance = cc_class(_code=self._code) - template_config = self.get_template_config(component_instance) - return template_config + return self.get_template_config(component_instance) def build(self, *args: Any, **kwargs: Any) -> Any: raise NotImplementedError diff --git a/src/backend/base/langflow/custom/custom_component/component.py b/src/backend/base/langflow/custom/custom_component/component.py index f3e9ee23f693..67c3852fa721 100644 --- a/src/backend/base/langflow/custom/custom_component/component.py +++ b/src/backend/base/langflow/custom/custom_component/component.py @@ -1,17 +1,29 @@ +from __future__ import annotations + +import ast +import asyncio import inspect +from collections.abc import AsyncIterator, Iterator from copy import deepcopy -from typing import TYPE_CHECKING, Any, Callable, ClassVar, List, Optional, Union, get_type_hints -from uuid import UUID +from textwrap import dedent +from typing import TYPE_CHECKING, Any, ClassVar, NamedTuple, get_type_hints -import nanoid # type: ignore +import nanoid import yaml -from pydantic import BaseModel +from langchain_core.tools import StructuredTool +from pydantic import BaseModel, ValidationError +from langflow.base.tools.constants import TOOL_OUTPUT_DISPLAY_NAME, TOOL_OUTPUT_NAME +from langflow.custom.tree_visitor import RequiredInputsVisitor +from langflow.exceptions.component import StreamingError +from langflow.field_typing import Tool # noqa: TCH001 Needed by _add_toolkit_output from langflow.graph.state.model import create_state_model from langflow.helpers.custom import format_type +from langflow.memory import delete_message, store_message, update_messages from langflow.schema.artifact import get_artifact_type, post_process_raw from langflow.schema.data import Data -from langflow.schema.message import Message +from langflow.schema.message import ErrorMessage, Message +from langflow.schema.properties import Source from langflow.services.tracing.schema import Log from langflow.template.field.base import UNDEFINED, Input, Output from langflow.template.frontend_node.custom_components import ComponentFrontendNode @@ -21,24 +33,69 @@ from .custom_component import CustomComponent if TYPE_CHECKING: + from collections.abc import Callable + + from langflow.events.event_manager import EventManager from langflow.graph.edge.schema import EdgeData from langflow.graph.vertex.base import Vertex from langflow.inputs.inputs import InputTypes + from langflow.schema import dotdict + from langflow.schema.log import LoggableType + + +_ComponentToolkit = None + + +def _get_component_toolkit(): + global _ComponentToolkit # noqa: PLW0603 + if _ComponentToolkit is None: + from langflow.base.tools.component_tool import ComponentToolkit + + _ComponentToolkit = ComponentToolkit + return _ComponentToolkit + BACKWARDS_COMPATIBLE_ATTRIBUTES = ["user_id", "vertex", "tracing_service"] -CONFIG_ATTRIBUTES = ["_display_name", "_description", "_icon", "_name"] +CONFIG_ATTRIBUTES = ["_display_name", "_description", "_icon", "_name", "_metadata"] + + +class PlaceholderGraph(NamedTuple): + """A placeholder graph structure for components, providing backwards compatibility. + + and enabling component execution without a full graph object. + + This lightweight structure contains essential information typically found in a complete graph, + allowing components to function in isolation or in simplified contexts. 
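# Illustrative sketch (editor annotation, not part of the patch): __getattr__
# (later in this file) returns a PlaceholderGraph when no real graph is attached,
# so code that touches self.graph keeps working in isolation:
#
#     graph = PlaceholderGraph(flow_id=None, user_id=None, session_id=None,
#                              context={}, flow_name=None)
#     graph.context  # {} -- a plain dict, usable by the ctx helpers below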
+ + Attributes: + flow_id (str | None): Unique identifier for the flow, if applicable. + user_id (str | None): Identifier of the user associated with the flow, if any. + session_id (str | None): Identifier for the current session, if applicable. + context (dict): Additional contextual information for the component's execution. + flow_name (str | None): Name of the flow, if available. + """ + + flow_id: str | None + user_id: str | None + session_id: str | None + context: dict + flow_name: str | None class Component(CustomComponent): - inputs: List["InputTypes"] = [] - outputs: List[Output] = [] + inputs: list[InputTypes] = [] + outputs: list[Output] = [] code_class_base_inheritance: ClassVar[str] = "Component" - _output_logs: dict[str, Log] = {} - - def __init__(self, **kwargs): + _output_logs: dict[str, list[Log]] = {} + _current_output: str = "" + _metadata: dict = {} + _ctx: dict = {} + _code: str | None = None + _logs: list[Log] = [] + + def __init__(self, **kwargs) -> None: # if key starts with _ it is a config # else it is an input - self._reset_all_output_values() inputs = {} config = {} for key, value in kwargs.items(): @@ -48,13 +105,15 @@ def __init__(self, **kwargs): config[key[1:]] = value else: inputs[key] = value - self._inputs: dict[str, "InputTypes"] = {} - self._outputs: dict[str, Output] = {} + self._inputs: dict[str, InputTypes] = {} + self._outputs_map: dict[str, Output] = {} self._results: dict[str, Any] = {} self._attributes: dict[str, Any] = {} self._parameters = inputs or {} self._edges: list[EdgeData] = [] self._components: list[Component] = [] + self._current_output = "" + self._event_manager: EventManager | None = None self._state_model = None self.set_attributes(self._parameters) self._output_logs = {} @@ -63,6 +122,7 @@ def __init__(self, **kwargs): config |= {"_id": f"{self.__class__.__name__}-{nanoid.generate(size=5)}"} self.__inputs = inputs self.__config = config + self._reset_all_output_values() super().__init__(**config) if hasattr(self, "_trace_type"): self.trace_type = self._trace_type @@ -73,12 +133,64 @@ def __init__(self, **kwargs): if self.outputs is not None: self.map_outputs(self.outputs) # Set output types - self._set_output_types() + self._set_output_types(list(self._outputs_map.values())) self.set_class_code() + self._set_output_required_inputs() - def _reset_all_output_values(self): - for output in self.outputs: - setattr(output, "value", UNDEFINED) + @property + def ctx(self): + if not hasattr(self, "graph") or self.graph is None: + msg = "Graph not found. Please build the graph first." + raise ValueError(msg) + return self.graph.context + + def add_to_ctx(self, key: str, value: Any, *, overwrite: bool = False) -> None: + """Add a key-value pair to the context. + + Args: + key (str): The key to add. + value (Any): The value to associate with the key. + overwrite (bool, optional): Whether to overwrite the existing value. Defaults to False. + + Raises: + ValueError: If the graph is not built. + """ + if not hasattr(self, "graph") or self.graph is None: + msg = "Graph not found. Please build the graph first." + raise ValueError(msg) + if key in self.graph.context and not overwrite: + msg = f"Key {key} already exists in context. Set overwrite=True to overwrite." + raise ValueError(msg) + self.graph.context.update({key: value}) + + def update_ctx(self, value_dict: dict[str, Any]) -> None: + """Update the context with a dictionary of values. + + Args: + value_dict (dict[str, Any]): The dictionary of values to update. 
+ + Raises: + ValueError: If the graph is not built. + """ + if not hasattr(self, "graph") or self.graph is None: + msg = "Graph not found. Please build the graph first." + raise ValueError(msg) + if not isinstance(value_dict, dict): + msg = "Value dict must be a dictionary" + raise TypeError(msg) + + self.graph.context.update(value_dict) + + def _pre_run_setup(self): + pass + + def set_event_manager(self, event_manager: EventManager | None = None) -> None: + self._event_manager = event_manager + + def _reset_all_output_values(self) -> None: + if isinstance(self._outputs_map, dict): + for output in self._outputs_map.values(): + output.value = UNDEFINED def _build_state_model(self): if self._state_model: @@ -86,7 +198,7 @@ def _build_state_model(self): name = self.name or self.__class__.__name__ model_name = f"{name}StateModel" fields = {} - for output in self.outputs: + for output in self._outputs_map.values(): fields[output.name] = getattr(self, output.method) self._state_model = create_state_model(model_name=model_name, **fields) return self._state_model @@ -100,40 +212,41 @@ def _instance_getter(_): _instance_getter.__annotations__["return"] = state_model return _instance_getter - def __deepcopy__(self, memo): + def __deepcopy__(self, memo: dict) -> Component: if id(self) in memo: return memo[id(self)] - kwargs = deepcopy(self.__config) - kwargs["inputs"] = deepcopy(self.__inputs) + kwargs = deepcopy(self.__config, memo) + kwargs["inputs"] = deepcopy(self.__inputs, memo) new_component = type(self)(**kwargs) new_component._code = self._code - new_component._outputs = self._outputs + new_component._outputs_map = self._outputs_map new_component._inputs = self._inputs new_component._edges = self._edges new_component._components = self._components new_component._parameters = self._parameters new_component._attributes = self._attributes new_component._output_logs = self._output_logs - new_component._logs = self._logs + new_component._logs = self._logs # type: ignore[attr-defined] memo[id(self)] = new_component return new_component - def set_class_code(self): + def set_class_code(self) -> None: # Get the source code of the calling class if self._code: return try: module = inspect.getmodule(self.__class__) if module is None: - raise ValueError("Could not find module for class") + msg = "Could not find module for class" + raise ValueError(msg) class_code = inspect.getsource(module) self._code = class_code - except OSError: - raise ValueError(f"Could not find source code for {self.__class__.__name__}") + except OSError as e: + msg = f"Could not find source code for {self.__class__.__name__}" + raise ValueError(msg) from e def set(self, **kwargs): - """ - Connects the component to other components or sets parameters and attributes. + """Connects the component to other components or sets parameters and attributes. Args: **kwargs: Keyword arguments representing the connections, parameters, and attributes. @@ -149,29 +262,23 @@ def set(self, **kwargs): return self def list_inputs(self): - """ - Returns a list of input names. - """ + """Returns a list of input names.""" return [_input.name for _input in self.inputs] def list_outputs(self): - """ - Returns a list of output names. - """ - return [_output.name for _output in self.outputs] + """Returns a list of output names.""" + return [_output.name for _output in self._outputs_map.values()] async def run(self): - """ - Executes the component's logic and returns the result. + """Executes the component's logic and returns the result. 
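# Illustrative sketch (editor annotation, not part of the patch): typical use of
# the context helpers defined above from inside a component of a built graph:
#
#     self.add_to_ctx("seen_ids", [])                # ValueError if key exists
#     self.add_to_ctx("seen_ids", [1], overwrite=True)
#     self.update_ctx({"last_run": "2024-01-01"})    # TypeError if not a dict
#     self.ctx["seen_ids"]                           # read via the ctx property
#
# Per the docstrings above, all of these raise ValueError if no graph is built.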
Returns: The result of executing the component's logic. """ return await self._run() - def set_vertex(self, vertex: "Vertex"): - """ - Sets the vertex for the component. + def set_vertex(self, vertex: Vertex) -> None: + """Sets the vertex for the component. Args: vertex (Vertex): The vertex to set. @@ -182,8 +289,7 @@ def set_vertex(self, vertex: "Vertex"): self._vertex = vertex def get_input(self, name: str) -> Any: - """ - Retrieves the value of the input with the specified name. + """Retrieves the value of the input with the specified name. Args: name (str): The name of the input. @@ -196,11 +302,11 @@ def get_input(self, name: str) -> Any: """ if name in self._inputs: return self._inputs[name] - raise ValueError(f"Input {name} not found in {self.__class__.__name__}") + msg = f"Input {name} not found in {self.__class__.__name__}" + raise ValueError(msg) def get_output(self, name: str) -> Any: - """ - Retrieves the output with the specified name. + """Retrieves the output with the specified name. Args: name (str): The name of the output to retrieve. @@ -211,26 +317,28 @@ def get_output(self, name: str) -> Any: Raises: ValueError: If the output with the specified name is not found. """ - if name in self._outputs: - return self._outputs[name] - raise ValueError(f"Output {name} not found in {self.__class__.__name__}") + if name in self._outputs_map: + return self._outputs_map[name] + msg = f"Output {name} not found in {self.__class__.__name__}" + raise ValueError(msg) - def set_on_output(self, name: str, **kwargs): + def set_on_output(self, name: str, **kwargs) -> None: output = self.get_output(name) for key, value in kwargs.items(): if not hasattr(output, key): - raise ValueError(f"Output {name} does not have a method {key}") + msg = f"Output {name} does not have a method {key}" + raise ValueError(msg) setattr(output, key, value) - def set_output_value(self, name: str, value: Any): - if name in self._outputs: - self._outputs[name].value = value + def set_output_value(self, name: str, value: Any) -> None: + if name in self._outputs_map: + self._outputs_map[name].value = value else: - raise ValueError(f"Output {name} not found in {self.__class__.__name__}") + msg = f"Output {name} not found in {self.__class__.__name__}" + raise ValueError(msg) - def map_outputs(self, outputs: List[Output]): - """ - Maps the given list of outputs to the component. + def map_outputs(self, outputs: list[Output]) -> None: + """Maps the given list of outputs to the component. Args: outputs (List[Output]): The list of outputs to be mapped. @@ -241,15 +349,16 @@ def map_outputs(self, outputs: List[Output]): Returns: None """ - self.outputs = outputs for output in outputs: if output.name is None: - raise ValueError("Output name cannot be None.") - self._outputs[output.name] = output + msg = "Output name cannot be None." + raise ValueError(msg) + # Deepcopy is required to avoid modifying the original component; + # allows each instance of each component to modify its own output + self._outputs_map[output.name] = deepcopy(output) - def map_inputs(self, inputs: List["InputTypes"]): - """ - Maps the given inputs to the component. + def map_inputs(self, inputs: list[InputTypes]) -> None: + """Maps the given inputs to the component. Args: inputs (List[InputTypes]): A list of InputTypes objects representing the inputs. @@ -258,15 +367,14 @@ def map_inputs(self, inputs: List["InputTypes"]): ValueError: If the input name is None. 
""" - self.inputs = inputs for input_ in inputs: if input_.name is None: - raise ValueError("Input name cannot be None.") - self._inputs[input_.name] = input_ + msg = "Input name cannot be None." + raise ValueError(msg) + self._inputs[input_.name] = deepcopy(input_) - def validate(self, params: dict): - """ - Validates the component parameters. + def validate(self, params: dict) -> None: + """Validates the component parameters. Args: params (dict): A dictionary containing the component parameters. @@ -278,54 +386,190 @@ def validate(self, params: dict): self._validate_inputs(params) self._validate_outputs() - def _set_output_types(self): + def update_inputs( + self, + build_config: dotdict, + field_value: Any, + field_name: str | None = None, + ): + return self.update_build_config(build_config, field_value, field_name) + + def run_and_validate_update_outputs(self, frontend_node: dict, field_name: str, field_value: Any): + frontend_node = self.update_outputs(frontend_node, field_name, field_value) + if field_name == "tool_mode": + # Replace all outputs with the tool_output value if tool_mode is True + # else replace it with the original outputs + frontend_node["outputs"] = [self._build_tool_output()] if field_value else frontend_node["outputs"] + return self._validate_frontend_node(frontend_node) + + def _validate_frontend_node(self, frontend_node: dict): + # Check if all outputs are either Output or a valid Output model + for index, output in enumerate(frontend_node["outputs"]): + if isinstance(output, dict): + try: + _output = Output(**output) + self._set_output_return_type(_output) + _output_dict = _output.model_dump() + except ValidationError as e: + msg = f"Invalid output: {e}" + raise ValueError(msg) from e + elif isinstance(output, Output): + # we need to serialize it + self._set_output_return_type(output) + _output_dict = output.model_dump() + else: + msg = f"Invalid output type: {type(output)}" + raise TypeError(msg) + frontend_node["outputs"][index] = _output_dict + return frontend_node + + def update_outputs(self, frontend_node: dict, field_name: str, field_value: Any) -> dict: # noqa: ARG002 + """Default implementation for updating outputs based on field changes. + + Subclasses can override this to modify outputs based on field_name and field_value. 
+ """ + return frontend_node + + def _set_output_types(self, outputs: list[Output]) -> None: + for output in outputs: + self._set_output_return_type(output) + + def _set_output_return_type(self, output: Output) -> None: + if output.method is None: + msg = f"Output {output.name} does not have a method" + raise ValueError(msg) + return_types = self._get_method_return_type(output.method) + output.add_types(return_types) + output.set_selected() + + def _set_output_required_inputs(self) -> None: for output in self.outputs: - return_types = self._get_method_return_type(output.method) - output.add_types(return_types) - output.set_selected() + if not output.method: + continue + method = getattr(self, output.method, None) + if not method or not callable(method): + continue + try: + source_code = inspect.getsource(method) + ast_tree = ast.parse(dedent(source_code)) + except Exception: # noqa: BLE001 + ast_tree = ast.parse(dedent(self._code or "")) + + visitor = RequiredInputsVisitor(self._inputs) + visitor.visit(ast_tree) + output.required_inputs = sorted(visitor.required_inputs) def get_output_by_method(self, method: Callable): # method is a callable and output.method is a string # we need to find the output that has the same method - output = next((output for output in self.outputs if output.method == method.__name__), None) + output = next((output for output in self._outputs_map.values() if output.method == method.__name__), None) if output is None: method_name = method.__name__ if hasattr(method, "__name__") else str(method) - raise ValueError(f"Output with method {method_name} not found") + msg = f"Output with method {method_name} not found" + raise ValueError(msg) return output def _inherits_from_component(self, method: Callable): # check if the method is a method from a class that inherits from Component # and that it is an output of that class - inherits_from_component = hasattr(method, "__self__") and isinstance(method.__self__, Component) - return inherits_from_component + return hasattr(method, "__self__") and isinstance(method.__self__, Component) def _method_is_valid_output(self, method: Callable): # check if the method is a method from a class that inherits from Component # and that it is an output of that class - method_is_output = ( + return ( hasattr(method, "__self__") and isinstance(method.__self__, Component) and method.__self__.get_output_by_method(method) ) - return method_is_output - def _process_connection_or_parameter(self, key, value): + def _build_error_string_from_matching_pairs(self, matching_pairs: list[tuple[Output, Input]]): + text = "" + for output, input_ in matching_pairs: + text += f"{output.name}[{','.join(output.types)}]->{input_.name}[{','.join(input_.input_types or [])}]\n" + return text + + def _find_matching_output_method(self, input_name: str, value: Component): + """Find the output method from the given component and input name. + + Find the output method from the given component (`value`) that matches the specified input (`input_name`) + in the current component. + This method searches through all outputs of the provided component to find outputs whose types match + the input types of the specified input in the current component. If exactly one matching output is found, + it returns the corresponding method. If multiple matching outputs are found, it raises an error indicating + ambiguity. If no matching outputs are found, it raises an error indicating that no suitable output was found. 
+ + Args: + input_name (str): The name of the input in the current component to match. + value (Component): The component whose outputs are to be considered. + + Returns: + Callable: The method corresponding to the matching output. + + Raises: + ValueError: If multiple matching outputs are found, if no matching outputs are found, + or if the output method is invalid. + """ + # Retrieve all outputs from the given component + outputs = value._outputs_map.values() + # Prepare to collect matching output-input pairs + matching_pairs = [] + # Get the input object from the current component + input_ = self._inputs[input_name] + # Iterate over outputs to find matches based on types + matching_pairs = [ + (output, input_) + for output in outputs + for output_type in output.types + # Check if the output type matches the input's accepted types + if input_.input_types and output_type in input_.input_types + ] + # If multiple matches are found, raise an error indicating ambiguity + if len(matching_pairs) > 1: + matching_pairs_str = self._build_error_string_from_matching_pairs(matching_pairs) + msg = ( + f"There are multiple outputs from {value.__class__.__name__} " + f"that can connect to inputs in {self.__class__.__name__}: {matching_pairs_str}" + ) + raise ValueError(msg) + # If no matches are found, raise an error indicating no suitable output + if not matching_pairs: + msg = ( + f"No matching output from {value.__class__.__name__} found for input '{input_name}' " + f"in {self.__class__.__name__}." + ) + raise ValueError(msg) + # Get the matching output and input pair + output, input_ = matching_pairs[0] + # Ensure that the output method is a valid method name (string) + if not isinstance(output.method, str): + msg = f"Method {output.method} is not a valid output of {value.__class__.__name__}" + raise TypeError(msg) + return getattr(value, output.method) + + def _process_connection_or_parameter(self, key, value) -> None: _input = self._get_or_create_input(key) # We need to check if callable AND if it is a method from a class that inherits from Component + if isinstance(value, Component): + # We need to find the Output that can connect to an input of the current component + # if there's more than one output that matches, we need to raise an error + # because we don't know which one to connect to + value = self._find_matching_output_method(key, value) if callable(value) and self._inherits_from_component(value): try: self._method_is_valid_output(value) - except ValueError: - raise ValueError( - f"Method {value.__name__} is not a valid output of {value.__self__.__class__.__name__}" - ) + except ValueError as e: + msg = f"Method {value.__name__} is not a valid output of {value.__self__.__class__.__name__}" + raise ValueError(msg) from e self._connect_to_component(key, value, _input) else: self._set_parameter_or_attribute(key, value) - def _process_connection_or_parameters(self, key, value) -> None: # if value is a list of components, we need to process each component - if isinstance(value, list): + # Note: this update makes sure the value is not a list of str | int | float | bool | type(None) + if isinstance(value, list) and not any( + isinstance(val, str | int | float | bool | type(None) | Message | Data | StructuredTool) for val in value + ): for val in value: self._process_connection_or_parameter(key, val) else: @@ -340,13 +584,13 @@ def _get_or_create_input(self, key): self.inputs.append(_input) return _input - def _connect_to_component(self, key, value, _input): + def 
_connect_to_component(self, key, value, _input) -> None: component = value.__self__ self._components.append(component) output = component.get_output_by_method(value) self._add_edge(component, key, output, _input) - def _add_edge(self, component, key, output, _input): + def _add_edge(self, component, key, output, _input) -> None: self._edges.append( { "source": component._id, @@ -368,13 +612,14 @@ def _add_edge(self, component, key, output, _input): } ) - def _set_parameter_or_attribute(self, key, value): + def _set_parameter_or_attribute(self, key, value) -> None: if isinstance(value, Component): methods = ", ".join([f"'{output.method}'" for output in value.outputs]) - raise ValueError( + msg = ( f"You set {value.display_name} as value for `{key}`. " f"You should pass one of the following: {methods}" ) + raise TypeError(msg) self._set_input_value(key, value) self._parameters[key] = value self._attributes[key] = value @@ -387,11 +632,10 @@ def __call__(self, **kwargs): async def _run(self): # Resolve callable inputs for key, _input in self._inputs.items(): - if callable(_input.value): - result = _input.value() - if inspect.iscoroutine(result): - result = await result - self._inputs[key].value = result + if asyncio.iscoroutinefunction(_input.value): + self._inputs[key].value = await _input.value() + elif callable(_input.value): + self._inputs[key].value = await asyncio.to_thread(_input.value) self.set_attributes({}) @@ -402,54 +646,65 @@ def __getattr__(self, name: str) -> Any: return self.__dict__["_attributes"][name] if "_inputs" in self.__dict__ and name in self.__dict__["_inputs"]: return self.__dict__["_inputs"][name].value - if "_outputs" in self.__dict__ and name in self.__dict__["_outputs"]: - return self.__dict__["_outputs"][name] + if "_outputs_map" in self.__dict__ and name in self.__dict__["_outputs_map"]: + return self.__dict__["_outputs_map"][name] if name in BACKWARDS_COMPATIBLE_ATTRIBUTES: return self.__dict__[f"_{name}"] if name.startswith("_") and name[1:] in BACKWARDS_COMPATIBLE_ATTRIBUTES: return self.__dict__[name] - raise AttributeError(f"{name} not found in {self.__class__.__name__}") + if name == "graph": + # If it got up to here it means it was going to raise + session_id = self._session_id if hasattr(self, "_session_id") else None + user_id = self._user_id if hasattr(self, "_user_id") else None + flow_name = self._flow_name if hasattr(self, "_flow_name") else None + flow_id = self._flow_id if hasattr(self, "_flow_id") else None + return PlaceholderGraph( + flow_id=flow_id, user_id=str(user_id), session_id=session_id, context={}, flow_name=flow_name + ) + msg = f"{name} not found in {self.__class__.__name__}" + raise AttributeError(msg) - def _set_input_value(self, name: str, value: Any): + def _set_input_value(self, name: str, value: Any) -> None: if name in self._inputs: input_value = self._inputs[name].value if isinstance(input_value, Component): methods = ", ".join([f"'{output.method}'" for output in input_value.outputs]) - raise ValueError( + msg = ( f"You set {input_value.display_name} as value for `{name}`. 
" f"You should pass one of the following: {methods}" ) - if callable(input_value): - raise ValueError( - f"Input {name} is connected to {input_value.__self__.display_name}.{input_value.__name__}" - ) + raise ValueError(msg) + if callable(input_value) and hasattr(input_value, "__self__"): + msg = f"Input {name} is connected to {input_value.__self__.display_name}.{input_value.__name__}" + raise ValueError(msg) self._inputs[name].value = value if hasattr(self._inputs[name], "load_from_db"): self._inputs[name].load_from_db = False else: - raise ValueError(f"Input {name} not found in {self.__class__.__name__}") + msg = f"Input {name} not found in {self.__class__.__name__}" + raise ValueError(msg) - def _validate_outputs(self): + def _validate_outputs(self) -> None: # Raise Error if some rule isn't met pass - def _map_parameters_on_frontend_node(self, frontend_node: ComponentFrontendNode): + def _map_parameters_on_frontend_node(self, frontend_node: ComponentFrontendNode) -> None: for name, value in self._parameters.items(): frontend_node.set_field_value_in_template(name, value) - def _map_parameters_on_template(self, template: dict): + def _map_parameters_on_template(self, template: dict) -> None: for name, value in self._parameters.items(): try: template[name]["value"] = value - except KeyError: + except KeyError as e: close_match = find_closest_match(name, list(template.keys())) if close_match: - raise ValueError( - f"Parameter '{name}' not found in {self.__class__.__name__}. " f"Did you mean '{close_match}'?" - ) - raise ValueError(f"Parameter {name} not found in {self.__class__.__name__}. ") + msg = f"Parameter '{name}' not found in {self.__class__.__name__}. Did you mean '{close_match}'?" + raise ValueError(msg) from e + msg = f"Parameter {name} not found in {self.__class__.__name__}. " + raise ValueError(msg) from e - def _get_method_return_type(self, method_name: str) -> List[str]: + def _get_method_return_type(self, method_name: str) -> list[str]: method = getattr(self, method_name) return_type = get_type_hints(method)["return"] extracted_return_types = self._extract_return_type(return_type) @@ -459,11 +714,13 @@ def _update_template(self, frontend_node: dict): return frontend_node def to_frontend_node(self): - #! This part here is clunky but we need it like this for - #! backwards compatibility. We can change how prompt component - #! works and then update this later + # ! This part here is clunky but we need it like this for + # ! backwards compatibility. We can change how prompt component + # ! 
works and then update this later field_config = self.get_template_config(self) frontend_node = ComponentFrontendNode.from_inputs(**field_config) + for key in self._inputs: + frontend_node.set_field_load_from_db_in_template(key, value=False) self._map_parameters_on_frontend_node(frontend_node) frontend_node_dict = frontend_node.to_dict(keep_name=False) @@ -496,15 +753,16 @@ def to_frontend_node(self): frontend_node.validate_component() frontend_node.set_base_classes_from_outputs() - data = { + return { "data": { "node": frontend_node.to_dict(keep_name=False), "type": self.name or self.__class__.__name__, - } + "id": self._id, + }, + "id": self._id, } - return data - def _validate_inputs(self, params: dict): + def _validate_inputs(self, params: dict) -> None: # Params keys are the `name` attribute of the Input objects for key, value in params.copy().items(): if key not in self._inputs: @@ -515,26 +773,27 @@ def _validate_inputs(self, params: dict): input_.value = value params[input_.name] = input_.value - def set_attributes(self, params: dict): + def set_attributes(self, params: dict) -> None: self._validate_inputs(params) _attributes = {} for key, value in params.items(): if key in self.__dict__ and value != getattr(self, key): - raise ValueError( + msg = ( f"{self.__class__.__name__} defines an input parameter named '{key}' " f"that is a reserved word and cannot be used." ) + raise ValueError(msg) _attributes[key] = value for key, input_obj in self._inputs.items(): - if key not in _attributes: + if key not in _attributes and key not in self._attributes: _attributes[key] = input_obj.value or None - self._attributes = _attributes + self._attributes.update(_attributes) - def _set_outputs(self, outputs: List[dict]): + def _set_outputs(self, outputs: list[dict]) -> None: self.outputs = [Output(**output) for output in outputs] for output in self.outputs: setattr(self, output.name, output) - self._outputs[output.name] = output + self._outputs_map[output.name] = output def get_trace_as_inputs(self): predefined_inputs = { @@ -566,15 +825,43 @@ async def _build_without_tracing(self): return await self._build_results() async def build_results(self): - if self._tracing_service: - return await self._build_with_tracing() - return await self._build_without_tracing() + """Build the results of the component.""" + if hasattr(self, "graph"): + session_id = self.graph.session_id + elif hasattr(self, "_session_id"): + session_id = self._session_id + else: + session_id = None + try: + if self._tracing_service: + return await self._build_with_tracing() + return await self._build_without_tracing() + except StreamingError as e: + self.send_error( + exception=e.cause, + session_id=session_id, + trace_name=getattr(self, "trace_name", None), + source=e.source, + ) + raise e.cause # noqa: B904 + except Exception as e: + self.send_error( + exception=e, + session_id=session_id, + source=Source(id=self._id, display_name=self.display_name, source=self.display_name), + trace_name=getattr(self, "trace_name", None), + ) + raise - async def _build_results(self): + async def _build_results(self) -> tuple[dict, dict]: _results = {} _artifacts = {} + if hasattr(self, "_pre_run_setup"): + self._pre_run_setup() if hasattr(self, "outputs"): - for output in self.outputs: + if any(getattr(_input, "tool_mode", False) for _input in self.inputs): + self._append_tool_to_outputs_map() + for output in self._outputs_map.values(): # Build the output if it's connected to some other vertex # or if it's not connected to any vertex if ( @@ 
-583,16 +870,19 @@ async def _build_results(self): or output.name in self._vertex.edges_source_names ): if output.method is None: - raise ValueError(f"Output {output.name} does not have a method defined.") + msg = f"Output {output.name} does not have a method defined." + raise ValueError(msg) + self._current_output = output.name method: Callable = getattr(self, output.method) if output.cache and output.value != UNDEFINED: _results[output.name] = output.value result = output.value else: - result = method() # If the method is asynchronous, we need to await it if inspect.iscoroutinefunction(method): - result = await result + result = await method() + else: + result = await asyncio.to_thread(method) if ( self._vertex is not None and isinstance(result, Message) @@ -602,8 +892,9 @@ async def _build_results(self): result.set_flow_id(self._vertex.graph.flow_id) _results[output.name] = result output.value = result + custom_repr = self.custom_repr() - if custom_repr is None and isinstance(result, (dict, Data, str)): + if custom_repr is None and isinstance(result, dict | Data | str): custom_repr = result if not isinstance(custom_repr, str): custom_repr = str(custom_repr) @@ -621,7 +912,7 @@ async def _build_results(self): elif hasattr(raw, "model_dump") and raw is not None: raw = raw.model_dump() - if raw is None and isinstance(result, (dict, Data, str)): + if raw is None and isinstance(result, dict | Data | str): raw = result.data if isinstance(result, Data) else result artifact_type = get_artifact_type(artifact_value, result) raw, artifact_type = post_process_raw(raw, artifact_type) @@ -629,6 +920,7 @@ async def _build_results(self): _artifacts[output.name] = artifact self._output_logs[output.name] = self._logs self._logs = [] + self._current_output = "" self._artifacts = _artifacts self._results = _results if self._tracing_service: @@ -646,12 +938,8 @@ def custom_repr(self): return str(self.repr_value) return self.repr_value - def build_inputs(self, user_id: Optional[Union[str, UUID]] = None): - """ - Builds the inputs for the custom component. - - Args: - user_id (Optional[Union[str, UUID]], optional): The user ID. Defaults to None. + def build_inputs(self): + """Builds the inputs for the custom component. Returns: List[Input]: The list of inputs. 
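# Illustrative sketch (editor annotation, not part of the patch): standalone
# equivalent of the output-method dispatch in _build_results above -- async
# methods are awaited directly, sync ones are pushed off the event loop:

import asyncio
import inspect


async def call_output_method(method):
    # Await coroutine functions; run plain callables in a worker thread
    if inspect.iscoroutinefunction(method):
        return await method()
    return await asyncio.to_thread(method)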
@@ -661,8 +949,7 @@ def build_inputs(self, user_id: Optional[Union[str, UUID]] = None): self.inputs = self.template_config.get("inputs", []) if not self.inputs: return {} - build_config = {_input.name: _input.model_dump(by_alias=True, exclude_none=True) for _input in self.inputs} - return build_config + return {_input.name: _input.model_dump(by_alias=True, exclude_none=True) for _input in self.inputs} def _get_field_order(self): try: @@ -671,19 +958,185 @@ def _get_field_order(self): except KeyError: return [] - def build(self, **kwargs): + def build(self, **kwargs) -> None: self.set_attributes(kwargs) def _get_fallback_input(self, **kwargs): return Input(**kwargs) - def to_tool(self): - # TODO: This is a temporary solution to avoid circular imports - from langflow.base.tools.component_tool import ComponentTool - - return ComponentTool(component=self) + def to_toolkit(self) -> list[Tool]: + component_toolkit = _get_component_toolkit() + return component_toolkit(component=self).get_tools(callbacks=self.get_langchain_callbacks()) def get_project_name(self): - if hasattr(self, "_tracing_service"): + if hasattr(self, "_tracing_service") and self._tracing_service: return self._tracing_service.project_name return "Langflow" + + def log(self, message: LoggableType | list[LoggableType], name: str | None = None) -> None: + """Logs a message. + + Args: + message (LoggableType | list[LoggableType]): The message to log. + name (str, optional): The name of the log. Defaults to None. + """ + if name is None: + name = f"Log {len(self._logs) + 1}" + log = Log(message=message, type=get_artifact_type(message), name=name) + self._logs.append(log) + if self._tracing_service and self._vertex: + self._tracing_service.add_log(trace_name=self.trace_name, log=log) + if self._event_manager is not None and self._current_output: + data = log.model_dump() + data["output"] = self._current_output + data["component_id"] = self._id + self._event_manager.on_log(data=data) + + def _append_tool_output(self) -> None: + if next((output for output in self.outputs if output.name == TOOL_OUTPUT_NAME), None) is None: + self.outputs.append( + Output( + name=TOOL_OUTPUT_NAME, + display_name=TOOL_OUTPUT_DISPLAY_NAME, + method="to_toolkit", + types=["Tool"], + ) + ) + + def send_message(self, message: Message, id_: str | None = None): + if (hasattr(self, "graph") and self.graph.session_id) and (message is not None and not message.session_id): + message.session_id = self.graph.session_id + stored_message = self._store_message(message) + + self._stored_message_id = stored_message.id + try: + complete_message = "" + if ( + self._should_stream_message(stored_message, message) + and message is not None + and isinstance(message.text, AsyncIterator | Iterator) + ): + complete_message = self._stream_message(message.text, stored_message) + stored_message.text = complete_message + stored_message = self._update_stored_message(stored_message) + else: + # Only send message event for non-streaming messages + self._send_message_event(stored_message, id_=id_) + except Exception: + # remove the message from the database + delete_message(stored_message.id) + raise + self.status = stored_message + return stored_message + + def _store_message(self, message: Message) -> Message: + flow_id = self.graph.flow_id if hasattr(self, "graph") else None + messages = store_message(message, flow_id=flow_id) + if len(messages) != 1: + msg = "Only one message can be stored at a time." 
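# Illustrative sketch (editor annotation, not part of the patch): shape of the
# chunk accumulation done by _stream_message/_process_chunk below, with the
# event plumbing elided:
#
#     complete, first = "", True
#     for chunk in iterator:
#         complete += chunk.content   # accumulate the streamed text
#         # the first chunk also emits the initial message event; every chunk
#         # emits an on_token event carrying only its own text
#         first = False
#     stored_message.text = complete  # written back after the loop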
+ raise ValueError(msg) + + return messages[0] + + def _send_message_event(self, message: Message, id_: str | None = None, category: str | None = None) -> None: + if hasattr(self, "_event_manager") and self._event_manager: + data_dict = message.data.copy() if hasattr(message, "data") else message.model_dump() + if id_ and not data_dict.get("id"): + data_dict["id"] = id_ + category = category or data_dict.get("category", None) + match category: + case "error": + self._event_manager.on_error(data=data_dict) + case "remove_message": + self._event_manager.on_remove_message(data={"id": data_dict["id"]}) + case _: + self._event_manager.on_message(data=data_dict) + + def _should_stream_message(self, stored_message: Message, original_message: Message) -> bool: + return bool( + hasattr(self, "_event_manager") + and self._event_manager + and stored_message.id + and not isinstance(original_message.text, str) + ) + + def _update_stored_message(self, stored_message: Message) -> Message: + message_tables = update_messages(stored_message) + if len(message_tables) != 1: + msg = "Only one message can be updated at a time." + raise ValueError(msg) + message_table = message_tables[0] + return Message(**message_table.model_dump()) + + def _stream_message(self, iterator: AsyncIterator | Iterator, message: Message) -> str: + if not isinstance(iterator, AsyncIterator | Iterator): + msg = "The message must be an iterator or an async iterator." + raise TypeError(msg) + + if isinstance(iterator, AsyncIterator): + return run_until_complete(self._handle_async_iterator(iterator, message.id, message)) + try: + complete_message = "" + first_chunk = True + for chunk in iterator: + complete_message = self._process_chunk( + chunk.content, complete_message, message.id, message, first_chunk=first_chunk + ) + first_chunk = False + except Exception as e: + raise StreamingError(cause=e, source=message.properties.source) from e + else: + return complete_message + + async def _handle_async_iterator(self, iterator: AsyncIterator, message_id: str, message: Message) -> str: + complete_message = "" + first_chunk = True + async for chunk in iterator: + complete_message = self._process_chunk( + chunk.content, complete_message, message_id, message, first_chunk=first_chunk + ) + first_chunk = False + return complete_message + + def _process_chunk( + self, chunk: str, complete_message: str, message_id: str, message: Message, *, first_chunk: bool = False + ) -> str: + complete_message += chunk + if self._event_manager: + if first_chunk: + # Send the initial message only on the first chunk + msg_copy = message.model_copy() + msg_copy.text = complete_message + self._send_message_event(msg_copy, id_=message_id) + self._event_manager.on_token( + data={ + "chunk": chunk, + "id": str(message_id), + } + ) + return complete_message + + def send_error( + self, + exception: Exception, + session_id: str, + trace_name: str, + source: Source, + ) -> Message: + """Send an error message to the frontend.""" + flow_id = self.graph.flow_id if hasattr(self, "graph") else None + error_message = ErrorMessage( + flow_id=flow_id, + exception=exception, + session_id=session_id, + trace_name=trace_name, + source=source, + ) + self.send_message(error_message) + return error_message + + def _append_tool_to_outputs_map(self): + self._outputs_map[TOOL_OUTPUT_NAME] = self._build_tool_output() + + def _build_tool_output(self) -> Output: + return Output(name=TOOL_OUTPUT_NAME, display_name=TOOL_OUTPUT_DISPLAY_NAME, method="to_toolkit", types=["Tool"]) diff --git 
a/src/backend/base/langflow/custom/custom_component/component_with_cache.py b/src/backend/base/langflow/custom/custom_component/component_with_cache.py new file mode 100644 index 000000000000..e8a6888d9039 --- /dev/null +++ b/src/backend/base/langflow/custom/custom_component/component_with_cache.py @@ -0,0 +1,8 @@ +from langflow.custom import Component +from langflow.services.deps import get_shared_component_cache_service + + +class ComponentWithCache(Component): + def __init__(self, **data) -> None: + super().__init__(**data) + self._shared_component_cache = get_shared_component_cache_service() diff --git a/src/backend/base/langflow/custom/custom_component/custom_component.py b/src/backend/base/langflow/custom/custom_component/custom_component.py index 1d428932f1fd..9e790d708250 100644 --- a/src/backend/base/langflow/custom/custom_component/custom_component.py +++ b/src/backend/base/langflow/custom/custom_component/custom_component.py @@ -1,5 +1,8 @@ +from __future__ import annotations + +from collections.abc import Callable, Sequence from pathlib import Path -from typing import TYPE_CHECKING, Any, Callable, ClassVar, List, Optional, Sequence, Union +from typing import TYPE_CHECKING, Any, ClassVar import yaml from cachetools import TTLCache @@ -9,13 +12,8 @@ from langflow.custom.custom_component.base_component import BaseComponent from langflow.helpers.flow import list_flows, load_flow, run_flow from langflow.schema import Data -from langflow.schema.artifact import get_artifact_type -from langflow.schema.dotdict import dotdict -from langflow.schema.log import LoggableType -from langflow.schema.schema import OutputValue from langflow.services.deps import get_storage_service, get_variable_service, session_scope from langflow.services.storage.service import StorageService -from langflow.services.tracing.schema import Log from langflow.template.utils import update_frontend_node_with_template_values from langflow.type_extraction.type_extraction import post_process_type from langflow.utils import validate @@ -25,13 +23,15 @@ from langflow.graph.graph.base import Graph from langflow.graph.vertex.base import Vertex + from langflow.schema.dotdict import dotdict + from langflow.schema.schema import OutputValue from langflow.services.storage.service import StorageService + from langflow.services.tracing.schema import Log from langflow.services.tracing.service import TracingService class CustomComponent(BaseComponent): - """ - Represents a custom component in Langflow. + """Represents a custom component in Langflow. Attributes: name (Optional[str]): This attribute helps the frontend apply styles to known components. @@ -48,102 +48,117 @@ class CustomComponent(BaseComponent): _tree (Optional[dict]): The code tree of the custom component. """ - name: Optional[str] = None + name: str | None = None """The name of the component used to styles. Defaults to None.""" - display_name: Optional[str] = None + display_name: str | None = None """The display name of the component. Defaults to None.""" - description: Optional[str] = None + description: str | None = None """The description of the component. Defaults to None.""" - icon: Optional[str] = None + icon: str | None = None """The icon of the component. It should be an emoji. Defaults to None.""" - is_input: Optional[bool] = None + is_input: bool | None = None """The input state of the component. Defaults to None. 
If True, the component must have a field named 'input_value'.""" - is_output: Optional[bool] = None + add_tool_output: bool | None = False + """Indicates whether the component will be treated as a tool. Defaults to False.""" + is_output: bool | None = None """The output state of the component. Defaults to None. If True, the component must have a field named 'input_value'.""" field_config: dict = {} """The field configuration of the component. Defaults to an empty dictionary.""" - field_order: Optional[List[str]] = None + field_order: list[str] | None = None """The field order of the component. Defaults to an empty list.""" - frozen: Optional[bool] = False + frozen: bool | None = False """The default frozen state of the component. Defaults to False.""" - build_parameters: Optional[dict] = None + build_parameters: dict | None = None """The build parameters of the component. Defaults to None.""" - _vertex: Optional["Vertex"] = None + _vertex: Vertex | None = None """The edge target parameter of the component. Defaults to None.""" _code_class_base_inheritance: ClassVar[str] = "CustomComponent" function_entrypoint_name: ClassVar[str] = "build" - function: Optional[Callable] = None - repr_value: Optional[Any] = "" - status: Optional[Any] = None + function: Callable | None = None + repr_value: Any | None = "" + status: Any | None = None """The status of the component. This is displayed on the frontend. Defaults to None.""" - _flows_data: Optional[List[Data]] = None - _outputs: List[OutputValue] = [] - _logs: List[Log] = [] - _output_logs: dict[str, Log] = {} - _tracing_service: Optional["TracingService"] = None - _tree: Optional[dict] = None - - def __init__(self, **data): - """ - Initializes a new instance of the CustomComponent class. + _flows_data: list[Data] | None = None + _outputs: list[OutputValue] = [] + _logs: list[Log] = [] + _output_logs: dict[str, list[Log] | Log] = {} + _tracing_service: TracingService | None = None + _tree: dict | None = None + + def __init__(self, **data) -> None: + """Initializes a new instance of the CustomComponent class. Args: **data: Additional keyword arguments to initialize the custom component. 
""" - self.cache = TTLCache(maxsize=1024, ttl=60) - self._logs = [] - self._results = {} - self._artifacts = {} + self.cache: TTLCache = TTLCache(maxsize=1024, ttl=60) + self._logs: list[Log] = [] + self._results: dict = {} + self._artifacts: dict = {} super().__init__(**data) - def set_attributes(self, parameters: dict): + def set_attributes(self, parameters: dict) -> None: pass - def set_parameters(self, parameters: dict): + def set_parameters(self, parameters: dict) -> None: self._parameters = parameters self.set_attributes(self._parameters) @property - def trace_name(self): - return f"{self.display_name} ({self._vertex.id})" - - def update_state(self, name: str, value: Any): + def trace_name(self) -> str: + if hasattr(self, "_id") and self._id is None: + msg = "Component id is not set" + raise ValueError(msg) + if hasattr(self, "_id"): + return f"{self.display_name} ({self._id})" + return f"{self.display_name}" + + def update_state(self, name: str, value: Any) -> None: if not self._vertex: - raise ValueError("Vertex is not set") + msg = "Vertex is not set" + raise ValueError(msg) try: self._vertex.graph.update_state(name=name, record=value, caller=self._vertex.id) except Exception as e: - raise ValueError(f"Error updating state: {e}") + msg = f"Error updating state: {e}" + raise ValueError(msg) from e - def stop(self, output_name: str | None = None): + def stop(self, output_name: str | None = None) -> None: if not output_name and self._vertex and len(self._vertex.outputs) == 1: output_name = self._vertex.outputs[0]["name"] elif not output_name: - raise ValueError("You must specify an output name to call stop") + msg = "You must specify an output name to call stop" + raise ValueError(msg) if not self._vertex: - raise ValueError("Vertex is not set") + msg = "Vertex is not set" + raise ValueError(msg) try: self.graph.mark_branch(vertex_id=self._vertex.id, output_name=output_name, state="INACTIVE") except Exception as e: - raise ValueError(f"Error stopping {self.display_name}: {e}") + msg = f"Error stopping {self.display_name}: {e}" + raise ValueError(msg) from e - def append_state(self, name: str, value: Any): + def append_state(self, name: str, value: Any) -> None: if not self._vertex: - raise ValueError("Vertex is not set") + msg = "Vertex is not set" + raise ValueError(msg) try: self._vertex.graph.append_state(name=name, record=value, caller=self._vertex.id) except Exception as e: - raise ValueError(f"Error appending state: {e}") + msg = f"Error appending state: {e}" + raise ValueError(msg) from e def get_state(self, name: str): if not self._vertex: - raise ValueError("Vertex is not set") + msg = "Vertex is not set" + raise ValueError(msg) try: return self._vertex.graph.get_state(name=name) except Exception as e: - raise ValueError(f"Error getting state: {e}") + msg = f"Error getting state: {e}" + raise ValueError(msg) from e @staticmethod def resolve_path(path: str) -> str: @@ -159,7 +174,7 @@ def resolve_path(path: str) -> str: return str(path_object) def get_full_path(self, path: str) -> str: - storage_svc: "StorageService" = get_storage_service() + storage_svc: StorageService = get_storage_service() flow_id, file_name = path.split("/", 1) return storage_svc.build_full_path(flow_id, file_name) @@ -186,8 +201,7 @@ def _get_field_order(self): return self.field_order or list(self.field_config.keys()) def custom_repr(self): - """ - Returns the custom representation of the custom component. + """Returns the custom representation of the custom component. 
Returns: str: The custom representation of the custom component. @@ -203,8 +217,7 @@ def custom_repr(self): return self.repr_value def build_config(self): - """ - Builds the configuration for the custom component. + """Builds the configuration for the custom component. Returns: dict: The configuration for the custom component. @@ -215,32 +228,33 @@ def update_build_config( self, build_config: dotdict, field_value: Any, - field_name: Optional[str] = None, + field_name: str | None = None, ): - build_config[field_name] = field_value + build_config[field_name]["value"] = field_value return build_config @property def tree(self): - """ - Gets the code tree of the custom component. + """Gets the code tree of the custom component. Returns: dict: The code tree of the custom component. """ return self.get_code_tree(self._code or "") - def to_data(self, data: Any, keys: Optional[List[str]] = None, silent_errors: bool = False) -> List[Data]: - """ - Converts input data into a list of Data objects. + def to_data(self, data: Any, *, keys: list[str] | None = None, silent_errors: bool = False) -> list[Data]: + """Converts input data into a list of Data objects. Args: data (Any): The input data to be converted. It can be a single item or a sequence of items. - If the input data is a Langchain Document, text_key and data_key are ignored. + If the input data is a Langchain Document, text_key and data_key are ignored. keys (List[str], optional): The keys to access the text and data values in each item. - It should be a list of strings where the first element is the text key and the second element is the data key. + It should be a list of strings where the first element is the text key and the second element + is the data key. Defaults to None, in which case the default keys "text" and "data" are used. + silent_errors (bool, optional): Whether to suppress errors when the specified keys are not found + in the data. Returns: List[Data]: A list of Data objects. @@ -267,15 +281,17 @@ def to_data(self, data: Any, keys: Optional[List[str]] = None, silent_errors: bo else: try: data_dict[key] = model_dump[key] - except KeyError: - raise ValueError(f"Key {key} not found in {item}") + except KeyError as e: + msg = f"Key {key} not found in {item}" + raise ValueError(msg) from e elif isinstance(item, str): data_dict = {"text": item} elif isinstance(item, dict): data_dict = item.copy() else: - raise ValueError(f"Invalid data type: {type(item)}") + msg = f"Invalid data type: {type(item)}" + raise TypeError(msg) data_objects.append(Data(data=data_dict)) @@ -289,9 +305,8 @@ def get_method_return_type(self, method_name: str): return self._extract_return_type(return_type) - def create_references_from_data(self, data: List[Data], include_data: bool = False) -> str: - """ - Create references from a list of data. + def create_references_from_data(self, data: list[Data], *, include_data: bool = False) -> str: + """Create references from a list of data. Args: data (List[dict]): A list of data, where each record is a dictionary. @@ -312,8 +327,7 @@ def create_references_from_data(self, data: List[Data], include_data: bool = Fal @property def get_function_entrypoint_args(self) -> list: - """ - Gets the arguments of the function entrypoint for the custom component. + """Gets the arguments of the function entrypoint for the custom component. Returns: list: The arguments of the function entrypoint. 
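With this change, to_data takes keys and silent_errors as keyword-only arguments and raises TypeError (not ValueError) for unsupported item types. A sketch of a call under the new signature, assuming component is any CustomComponent instance:

# Strings are wrapped as {"text": ...}; dicts are shallow-copied into Data.
records = component.to_data(
    ["plain string", {"text": "already a dict", "source": "inline"}],
    silent_errors=False,
)
assert len(records) == 2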
@@ -330,8 +344,7 @@ def get_function_entrypoint_args(self) -> list: return args def get_method(self, method_name: str): - """ - Gets the build method for the custom component. + """Gets the build method for the custom component. Returns: dict: The build method for the custom component. @@ -352,22 +365,20 @@ def get_method(self, method_name: str): return build_methods[0] if build_methods else {} @property - def get_function_entrypoint_return_type(self) -> List[Any]: - """ - Gets the return type of the function entrypoint for the custom component. + def _get_function_entrypoint_return_type(self) -> list[Any]: + """Gets the return type of the function entrypoint for the custom component. Returns: List[Any]: The return type of the function entrypoint. """ return self.get_method_return_type(self._function_entrypoint_name) - def _extract_return_type(self, return_type: Any) -> List[Any]: + def _extract_return_type(self, return_type: Any) -> list[Any]: return post_process_type(return_type) @property def get_main_class_name(self): - """ - Gets the main class name of the custom component. + """Gets the main class name of the custom component. Returns: str: The main class name of the custom component. @@ -390,8 +401,7 @@ def get_main_class_name(self): @property def template_config(self): - """ - Gets the template configuration for the custom component. + """Gets the template configuration for the custom component. Returns: dict: The template configuration for the custom component. @@ -402,8 +412,7 @@ def template_config(self): @property def variables(self): - """ - Returns the variable for the current user with the specified name. + """Returns the variable for the current user with the specified name. Raises: ValueError: If the user id is not set. @@ -414,7 +423,8 @@ def variables(self): def get_variable(name: str, field: str): if hasattr(self, "_user_id") and not self.user_id: - raise ValueError(f"User id is not set for {self.__class__.__name__}") + msg = f"User id is not set for {self.__class__.__name__}" + raise ValueError(msg) variable_service = get_variable_service() # Get service instance # Retrieve and decrypt the variable by name for the current user with session_scope() as session: @@ -424,8 +434,7 @@ def get_variable(name: str, field: str): return get_variable def list_key_names(self): - """ - Lists the names of the variables for the current user. + """Lists the names of the variables for the current user. Raises: ValueError: If the user id is not set. @@ -434,15 +443,15 @@ def list_key_names(self): List[str]: The names of the variables for the current user. """ if hasattr(self, "_user_id") and not self.user_id: - raise ValueError(f"User id is not set for {self.__class__.__name__}") + msg = f"User id is not set for {self.__class__.__name__}" + raise ValueError(msg) variable_service = get_variable_service() with session_scope() as session: return variable_service.list_variables(user_id=self.user_id, session=session) def index(self, value: int = 0): - """ - Returns a function that returns the value at the given index in the iterable. + """Returns a function that returns the value at the given index in the iterable. Args: value (int): The index value. @@ -451,32 +460,32 @@ def index(self, value: int = 0): Callable: A function that returns the value at the given index. 
""" - def get_index(iterable: List[Any]): + def get_index(iterable: list[Any]): return iterable[value] if iterable else iterable return get_index def get_function(self): - """ - Gets the function associated with the custom component. + """Gets the function associated with the custom component. Returns: Callable: The function associated with the custom component. """ return validate.create_function(self._code, self._function_entrypoint_name) - async def load_flow(self, flow_id: str, tweaks: Optional[dict] = None) -> "Graph": + async def load_flow(self, flow_id: str, tweaks: dict | None = None) -> Graph: if not self.user_id: - raise ValueError("Session is invalid") + msg = "Session is invalid" + raise ValueError(msg) return await load_flow(user_id=str(self._user_id), flow_id=flow_id, tweaks=tweaks) async def run_flow( self, - inputs: Optional[Union[dict, List[dict]]] = None, - flow_id: Optional[str] = None, - flow_name: Optional[str] = None, - output_type: Optional[str] = "chat", - tweaks: Optional[dict] = None, + inputs: dict | list[dict] | None = None, + flow_id: str | None = None, + flow_name: str | None = None, + output_type: str | None = "chat", + tweaks: dict | None = None, ) -> Any: return await run_flow( inputs=inputs, @@ -485,19 +494,21 @@ async def run_flow( flow_name=flow_name, tweaks=tweaks, user_id=str(self._user_id), + run_id=self.graph.run_id, ) - def list_flows(self) -> List[Data]: + def list_flows(self) -> list[Data]: if not self.user_id: - raise ValueError("Session is invalid") + msg = "Session is invalid" + raise ValueError(msg) try: return list_flows(user_id=str(self._user_id)) except Exception as e: - raise ValueError(f"Error listing flows: {e}") + msg = f"Error listing flows: {e}" + raise ValueError(msg) from e def build(self, *args: Any, **kwargs: Any) -> Any: - """ - Builds the custom component. + """Builds the custom component. Args: *args: The positional arguments. @@ -508,30 +519,13 @@ def build(self, *args: Any, **kwargs: Any) -> Any: """ raise NotImplementedError - def log(self, message: LoggableType | list[LoggableType], name: Optional[str] = None): - """ - Logs a message. - - Args: - message (LoggableType | list[LoggableType]): The message to log. - """ - if name is None: - name = f"Log {len(self._logs) + 1}" - log = Log(message=message, type=get_artifact_type(message), name=name) - self._logs.append(log) - if self._tracing_service and self._vertex: - self._tracing_service.add_log(trace_name=self.trace_name, log=log) - def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict): - """ - This function is called after the code validation is done. 
- """ - frontend_node = update_frontend_node_with_template_values( + """This function is called after the code validation is done.""" + return update_frontend_node_with_template_values( frontend_node=new_frontend_node, raw_frontend_node=current_frontend_node ) - return frontend_node - def get_langchain_callbacks(self) -> List["BaseCallbackHandler"]: + def get_langchain_callbacks(self) -> list[BaseCallbackHandler]: if self._tracing_service: return self._tracing_service.get_langchain_callbacks() return [] diff --git a/src/backend/base/langflow/custom/directory_reader/directory_reader.py b/src/backend/base/langflow/custom/directory_reader/directory_reader.py index e9e74a9acd7a..30a0d1619d0a 100644 --- a/src/backend/base/langflow/custom/directory_reader/directory_reader.py +++ b/src/backend/base/langflow/custom/directory_reader/directory_reader.py @@ -1,6 +1,5 @@ import ast import asyncio -import os import zlib from pathlib import Path @@ -14,14 +13,12 @@ class CustomComponentPathValueError(ValueError): class StringCompressor: - def __init__(self, input_string): + def __init__(self, input_string) -> None: """Initialize StringCompressor with a string to compress.""" self.input_string = input_string def compress_string(self): - """ - Compress the initial string and return the compressed data. - """ + """Compress the initial string and return the compressed data.""" # Convert string to bytes byte_data = self.input_string.encode("utf-8") # Compress the bytes @@ -30,9 +27,7 @@ def compress_string(self): return self.compressed_data def decompress_string(self): - """ - Decompress the compressed data and return the original string. - """ + """Decompress the compressed data and return the original string.""" # Decompress the bytes decompressed_data = zlib.decompress(self.compressed_data) # Convert bytes back to string @@ -44,11 +39,8 @@ class DirectoryReader: # the custom components from this directory. base_path = "" - def __init__(self, directory_path, compress_code_field=False): - """ - Initialize DirectoryReader with a directory path - and a flag indicating whether to compress the code. - """ + def __init__(self, directory_path, *, compress_code_field=False) -> None: + """Initialize DirectoryReader with a directory path and a flag indicating whether to compress the code.""" self.directory_path = directory_path self.compress_code_field = compress_code_field @@ -58,16 +50,14 @@ def get_safe_path(self): def is_valid_path(self) -> bool: """Check if the directory path is valid by comparing it to the base path.""" - fullpath = os.path.normpath(os.path.join(self.directory_path)) - return fullpath.startswith(self.base_path) + fullpath = Path(self.directory_path).resolve() + return not self.base_path or fullpath.is_relative_to(self.base_path) def is_empty_file(self, file_content): - """ - Check if the file content is empty. 
- """ + """Check if the file content is empty.""" return len(file_content.strip()) == 0 - def filter_loaded_components(self, data: dict, with_errors: bool) -> dict: + def filter_loaded_components(self, data: dict, *, with_errors: bool) -> dict: from langflow.custom.utils import build_component items = [] @@ -78,53 +68,47 @@ def filter_loaded_components(self, data: dict, with_errors: bool) -> dict: if component["error"] if with_errors else not component["error"]: component_tuple = (*build_component(component), component) components.append(component_tuple) - except Exception as e: - logger.debug(f"Error while loading component { component['name']}") - logger.debug(e) + except Exception: # noqa: BLE001 + logger.debug(f"Error while loading component {component['name']} from {component['file']}") continue items.append({"name": menu["name"], "path": menu["path"], "components": components}) filtered = [menu for menu in items if menu["components"]] logger.debug(f'Filtered components {"with errors" if with_errors else ""}: {len(filtered)}') return {"menu": filtered} - def validate_code(self, file_content): - """ - Validate the Python code by trying to parse it with ast.parse. - """ + def validate_code(self, file_content) -> bool: + """Validate the Python code by trying to parse it with ast.parse.""" try: ast.parse(file_content) - return True except SyntaxError: return False + return True def validate_build(self, file_content): - """ - Check if the file content contains a function named 'build'. - """ + """Check if the file content contains a function named 'build'.""" return "def build" in file_content def read_file_content(self, file_path): - """ - Read and return the content of a file. - """ - if not os.path.isfile(file_path): + """Read and return the content of a file.""" + _file_path = Path(file_path) + if not _file_path.is_file(): return None - with open(file_path, "r", encoding="utf-8") as file: - # UnicodeDecodeError: 'charmap' codec can't decode byte 0x9d in position 3069: character maps to - try: + try: + with _file_path.open(encoding="utf-8") as file: + # UnicodeDecodeError: 'charmap' codec can't decode byte 0x9d in position 3069: + # character maps to return file.read() - except UnicodeDecodeError: - # This is happening in Windows, so we need to open the file in binary mode - # The file is always just a python file, so we can safely read it as utf-8 - with open(file_path, "rb") as file: - return file.read().decode("utf-8") + except UnicodeDecodeError: + # This is happening in Windows, so we need to open the file in binary mode + # The file is always just a python file, so we can safely read it as utf-8 + with _file_path.open("rb") as f: + return f.read().decode("utf-8") def get_files(self): - """ - Walk through the directory path and return a list of all .py files. - """ + """Walk through the directory path and return a list of all .py files.""" if not (safe_path := self.get_safe_path()): - raise CustomComponentPathValueError(f"The path needs to start with '{self.base_path}'.") + msg = f"The path needs to start with '{self.base_path}'." + raise CustomComponentPathValueError(msg) file_list = [] safe_path_obj = Path(safe_path) @@ -143,19 +127,14 @@ def get_files(self): return file_list def find_menu(self, response, menu_name): - """ - Find and return a menu by its name in the response. 
- """ + """Find and return a menu by its name in the response.""" return next( (menu for menu in response["menu"] if menu["name"] == menu_name), None, ) def _is_type_hint_imported(self, type_hint_name: str, code: str) -> bool: - """ - Check if a specific type hint is imported - from the typing module in the given code. - """ + """Check if a specific type hint is imported from the typing module in the given code.""" module = ast.parse(code) return any( @@ -166,10 +145,7 @@ def _is_type_hint_imported(self, type_hint_name: str, code: str) -> bool: ) def _is_type_hint_used_in_args(self, type_hint_name: str, code: str) -> bool: - """ - Check if a specific type hint is used in the - function definitions within the given code. - """ + """Check if a specific type hint is used in the function definitions within the given code.""" try: module = ast.parse(code) @@ -184,9 +160,7 @@ def _is_type_hint_used_in_args(self, type_hint_name: str, code: str) -> bool: return False def _is_type_hint_in_arg_annotation(self, annotation, type_hint_name: str) -> bool: - """ - Helper function to check if a type hint exists in an annotation. - """ + """Helper function to check if a type hint exists in an annotation.""" return ( annotation is not None and isinstance(annotation, ast.Subscript) @@ -195,9 +169,7 @@ def _is_type_hint_in_arg_annotation(self, annotation, type_hint_name: str) -> bo ) def is_type_hint_used_but_not_imported(self, type_hint_name: str, code: str) -> bool: - """ - Check if a type hint is used but not imported in the given code. - """ + """Check if a type hint is used but not imported in the given code.""" try: return self._is_type_hint_used_in_args(type_hint_name, code) and not self._is_type_hint_imported( type_hint_name, code @@ -208,53 +180,46 @@ def is_type_hint_used_but_not_imported(self, type_hint_name: str, code: str) -> return True def process_file(self, file_path): - """ - Process a file by validating its content and - returning the result and content/error message. - """ + """Process a file by validating its content and returning the result and content/error message.""" try: file_content = self.read_file_content(file_path) - except Exception as exc: - logger.exception(exc) - logger.error(f"Error while reading file {file_path}: {str(exc)}") + except Exception: # noqa: BLE001 + logger.exception(f"Error while reading file {file_path}") return False, f"Could not read {file_path}" if file_content is None: return False, f"Could not read {file_path}" - elif self.is_empty_file(file_content): + if self.is_empty_file(file_content): return False, "Empty file" - elif not self.validate_code(file_content): + if not self.validate_code(file_content): return False, "Syntax error" - elif self._is_type_hint_used_in_args("Optional", file_content) and not self._is_type_hint_imported( + if self._is_type_hint_used_in_args("Optional", file_content) and not self._is_type_hint_imported( "Optional", file_content ): return ( False, "Type hint 'Optional' is used but not imported in the code.", ) - else: - if self.compress_code_field: - file_content = str(StringCompressor(file_content).compress_string()) - return True, file_content + if self.compress_code_field: + file_content = str(StringCompressor(file_content).compress_string()) + return True, file_content def build_component_menu_list(self, file_paths): - """ - Build a list of menus with their components - from the .py files in the directory. 
- """ + """Build a list of menus with their components from the .py files in the directory.""" response = {"menu": []} logger.debug("-------------------- Building component menu list --------------------") for file_path in file_paths: - menu_name = os.path.basename(os.path.dirname(file_path)) - filename = os.path.basename(file_path) + _file_path = Path(file_path) + menu_name = _file_path.parent.name + filename = _file_path.name validation_result, result_content = self.process_file(file_path) if not validation_result: logger.error(f"Error while processing file {file_path}") menu_result = self.find_menu(response, menu_name) or { "name": menu_name, - "path": os.path.dirname(file_path), + "path": str(_file_path.parent), "components": [], } component_name = filename.split(".")[0] @@ -270,7 +235,8 @@ def build_component_menu_list(self, file_paths): if validation_result: try: output_types = self.get_output_types_from_code(result_content) - except Exception: + except Exception: # noqa: BLE001 + logger.opt(exception=True).debug("Error while getting output types from code") output_types = [component_name_camelcase] else: output_types = [component_name_camelcase] @@ -291,29 +257,27 @@ def build_component_menu_list(self, file_paths): async def process_file_async(self, file_path): try: - file_content = self.read_file_content(file_path) - except Exception as exc: - logger.exception(exc) - logger.error(f"Error while reading file {file_path}: {str(exc)}") + file_content = await asyncio.to_thread(self.read_file_content, file_path) + except Exception: # noqa: BLE001 + logger.exception(f"Error while reading file {file_path}") return False, f"Could not read {file_path}" if file_content is None: return False, f"Could not read {file_path}" - elif self.is_empty_file(file_content): + if self.is_empty_file(file_content): return False, "Empty file" - elif not self.validate_code(file_content): + if not self.validate_code(file_content): return False, "Syntax error" - elif self._is_type_hint_used_in_args("Optional", file_content) and not self._is_type_hint_imported( + if self._is_type_hint_used_in_args("Optional", file_content) and not self._is_type_hint_imported( "Optional", file_content ): return ( False, "Type hint 'Optional' is used but not imported in the code.", ) - else: - if self.compress_code_field: - file_content = str(StringCompressor(file_content).compress_string()) - return True, file_content + if self.compress_code_field: + file_content = str(StringCompressor(file_content).compress_string()) + return True, file_content async def get_output_types_from_code_async(self, code: str): return await asyncio.to_thread(self.get_output_types_from_code, code) @@ -325,16 +289,17 @@ async def abuild_component_menu_list(self, file_paths): tasks = [self.process_file_async(file_path) for file_path in file_paths] results = await asyncio.gather(*tasks) - for file_path, (validation_result, result_content) in zip(file_paths, results): - menu_name = os.path.basename(os.path.dirname(file_path)) - filename = os.path.basename(file_path) + for file_path, (validation_result, result_content) in zip(file_paths, results, strict=True): + _file_path = Path(file_path) + menu_name = _file_path.parent.name + filename = _file_path.name if not validation_result: logger.error(f"Error while processing file {file_path}") menu_result = self.find_menu(response, menu_name) or { "name": menu_name, - "path": os.path.dirname(file_path), + "path": str(_file_path.parent), "components": [], } component_name = filename.split(".")[0] @@ -347,8 +312,8 @@ 
async def abuild_component_menu_list(self, file_paths): if validation_result: try: output_types = await self.get_output_types_from_code_async(result_content) - except Exception as exc: - logger.error(f"Error while getting output types from code: {str(exc)}") + except Exception: # noqa: BLE001 + logger.exception("Error while getting output types from code") output_types = [component_name_camelcase] else: output_types = [component_name_camelcase] @@ -370,11 +335,9 @@ async def abuild_component_menu_list(self, file_paths): @staticmethod def get_output_types_from_code(code: str) -> list: - """ - Get the output types from the code. - """ + """Get the output types from the code.""" custom_component = Component(_code=code) - types_list = custom_component.get_function_entrypoint_return_type + types_list = custom_component._get_function_entrypoint_return_type # Get the name of types classes return [type_.__name__ for type_ in types_list if hasattr(type_, "__name__")] diff --git a/src/backend/base/langflow/custom/directory_reader/utils.py b/src/backend/base/langflow/custom/directory_reader/utils.py index 331b72d2a8d5..d982252a540d 100644 --- a/src/backend/base/langflow/custom/directory_reader/utils.py +++ b/src/backend/base/langflow/custom/directory_reader/utils.py @@ -42,7 +42,7 @@ def build_valid_menu(valid_components): def build_and_validate_all_files(reader: DirectoryReader, file_list): - """Build and validate all files""" + """Build and validate all files.""" data = reader.build_component_menu_list(file_list) valid_components = reader.filter_loaded_components(data=data, with_errors=False) @@ -52,7 +52,7 @@ def build_and_validate_all_files(reader: DirectoryReader, file_list): async def abuild_and_validate_all_files(reader: DirectoryReader, file_list): - """Build and validate all files""" + """Build and validate all files.""" data = await reader.abuild_component_menu_list(file_list) valid_components = reader.filter_loaded_components(data=data, with_errors=False) @@ -62,16 +62,16 @@ async def abuild_and_validate_all_files(reader: DirectoryReader, file_list): def load_files_from_path(path: str): - """Load all files from a given path""" - reader = DirectoryReader(path, False) + """Load all files from a given path.""" + reader = DirectoryReader(path, compress_code_field=False) return reader.get_files() def build_custom_component_list_from_path(path: str): - """Build a list of custom components for the langchain from a given path""" + """Build a list of custom components for the langchain from a given path.""" file_list = load_files_from_path(path) - reader = DirectoryReader(path, False) + reader = DirectoryReader(path, compress_code_field=False) valid_components, invalid_components = build_and_validate_all_files(reader, file_list) @@ -82,9 +82,9 @@ def build_custom_component_list_from_path(path: str): async def abuild_custom_component_list_from_path(path: str): - """Build a list of custom components for the langchain from a given path""" + """Build a list of custom components for the langchain from a given path.""" file_list = load_files_from_path(path) - reader = DirectoryReader(path, False) + reader = DirectoryReader(path, compress_code_field=False) valid_components, invalid_components = await abuild_and_validate_all_files(reader, file_list) @@ -109,7 +109,7 @@ def create_invalid_component_template(component, component_name): return component_frontend_node.model_dump(by_alias=True, exclude_none=True) -def log_invalid_component_details(component): +def log_invalid_component_details(component) -> 
None: """Log details of an invalid component.""" logger.debug(component) logger.debug(f"Component Path: {component.get('path', None)}") @@ -132,8 +132,8 @@ def build_invalid_menu_items(menu_item): component_name, component_template = build_invalid_component(component) menu_items[component_name] = component_template logger.debug(f"Added {component_name} to invalid menu.") - except Exception as exc: - logger.exception(f"Error while creating custom component [{component_name}]: {str(exc)}") + except Exception: # noqa: BLE001 + logger.exception(f"Error while creating custom component [{component_name}]") return menu_items @@ -165,7 +165,6 @@ def build_menu_items(menu_item): for component_name, component_template, component in menu_item["components"]: try: menu_items[component_name] = component_template - except Exception as exc: - logger.error(f"Error loading Component: {component['output_types']}") - logger.exception(f"Error while building custom component {component['output_types']}: {exc}") + except Exception: # noqa: BLE001 + logger.exception(f"Error while building custom component {component['output_types']}") return menu_items diff --git a/src/backend/base/langflow/custom/eval.py b/src/backend/base/langflow/custom/eval.py index baa2024026aa..b163e8ef7ea1 100644 --- a/src/backend/base/langflow/custom/eval.py +++ b/src/backend/base/langflow/custom/eval.py @@ -1,4 +1,4 @@ -from typing import TYPE_CHECKING, Type +from typing import TYPE_CHECKING from langflow.utils import validate @@ -6,7 +6,7 @@ from langflow.custom import CustomComponent -def eval_custom_component_code(code: str) -> Type["CustomComponent"]: - """Evaluate custom component code""" +def eval_custom_component_code(code: str) -> type["CustomComponent"]: + """Evaluate custom component code.""" class_name = validate.extract_class_name(code) return validate.create_class(code, class_name) diff --git a/src/backend/base/langflow/custom/schema.py b/src/backend/base/langflow/custom/schema.py index 1636882effda..5c90356b38cd 100644 --- a/src/backend/base/langflow/custom/schema.py +++ b/src/backend/base/langflow/custom/schema.py @@ -1,38 +1,32 @@ -from typing import Any, Optional +from typing import Any from pydantic import BaseModel, Field class ClassCodeDetails(BaseModel): - """ - A dataclass for storing details about a class. - """ + """A dataclass for storing details about a class.""" name: str - doc: Optional[str] = None + doc: str | None = None bases: list attributes: list methods: list - init: Optional[dict] = Field(default_factory=dict) + init: dict | None = Field(default_factory=dict) class CallableCodeDetails(BaseModel): - """ - A dataclass for storing details about a callable. - """ + """A dataclass for storing details about a callable.""" name: str - doc: Optional[str] = None + doc: str | None = None args: list body: list - return_type: Optional[Any] = None + return_type: Any | None = None has_return: bool = False class MissingDefault: - """ - A class to represent a missing default value. 
- """ + """A class to represent a missing default value.""" - def __repr__(self): + def __repr__(self) -> str: return "MISSING" diff --git a/src/backend/base/langflow/custom/tree_visitor.py b/src/backend/base/langflow/custom/tree_visitor.py new file mode 100644 index 000000000000..57579a52462b --- /dev/null +++ b/src/backend/base/langflow/custom/tree_visitor.py @@ -0,0 +1,21 @@ +import ast +from typing import Any + +from typing_extensions import override + + +class RequiredInputsVisitor(ast.NodeVisitor): + def __init__(self, inputs: dict[str, Any]): + self.inputs: dict[str, Any] = inputs + self.required_inputs: set[str] = set() + + @override + def visit_Attribute(self, node) -> None: + if ( + isinstance(node.value, ast.Name) + and node.value.id == "self" + and node.attr in self.inputs + and self.inputs[node.attr].required + ): + self.required_inputs.add(node.attr) + self.generic_visit(node) diff --git a/src/backend/base/langflow/custom/utils.py b/src/backend/base/langflow/custom/utils.py index c92d208c48dc..aaab88497b5f 100644 --- a/src/backend/base/langflow/custom/utils.py +++ b/src/backend/base/langflow/custom/utils.py @@ -2,8 +2,7 @@ import contextlib import re import traceback -import warnings -from typing import Any, Dict, List, Optional, Tuple, Union +from typing import Any from uuid import UUID from fastapi import HTTPException @@ -33,8 +32,8 @@ class UpdateBuildConfigError(Exception): pass -def add_output_types(frontend_node: CustomComponentFrontendNode, return_types: List[str]): - """Add output types to the frontend node""" +def add_output_types(frontend_node: CustomComponentFrontendNode, return_types: list[str]) -> None: + """Add output types to the frontend node.""" for return_type in return_types: if return_type is None: raise HTTPException( @@ -44,19 +43,19 @@ def add_output_types(frontend_node: CustomComponentFrontendNode, return_types: L "traceback": traceback.format_exc(), }, ) - if return_type == str: - return_type = "Text" + if return_type is str: + _return_type = "Text" elif hasattr(return_type, "__name__"): - return_type = return_type.__name__ + _return_type = return_type.__name__ elif hasattr(return_type, "__class__"): - return_type = return_type.__class__.__name__ + _return_type = return_type.__class__.__name__ else: - return_type = str(return_type) + _return_type = str(return_type) - frontend_node.add_output_type(return_type) + frontend_node.add_output_type(_return_type) -def reorder_fields(frontend_node: CustomComponentFrontendNode, field_order: List[str]): +def reorder_fields(frontend_node: CustomComponentFrontendNode, field_order: list[str]) -> None: """Reorder fields in the frontend node based on the specified field_order.""" if not field_order: return @@ -65,15 +64,13 @@ def reorder_fields(frontend_node: CustomComponentFrontendNode, field_order: List field_dict = {field.name: field for field in frontend_node.template.fields} reordered_fields = [field_dict[name] for name in field_order if name in field_dict] # Add any fields that are not in the field_order list - for field in frontend_node.template.fields: - if field.name not in field_order: - reordered_fields.append(field) + reordered_fields.extend(field for field in frontend_node.template.fields if field.name not in field_order) frontend_node.template.fields = reordered_fields frontend_node.field_order = field_order -def add_base_classes(frontend_node: CustomComponentFrontendNode, return_types: List[str]): - """Add base classes to the frontend node""" +def add_base_classes(frontend_node: 
CustomComponentFrontendNode, return_types: list[str]) -> None: + """Add base classes to the frontend node.""" for return_type_instance in return_types: if return_type_instance is None: raise HTTPException( @@ -85,7 +82,7 @@ def add_base_classes(frontend_node: CustomComponentFrontendNode, return_types: L ) base_classes = get_base_classes(return_type_instance) - if return_type_instance == str: + if return_type_instance is str: base_classes.append("Text") for base_class in base_classes: @@ -93,8 +90,7 @@ def add_base_classes(frontend_node: CustomComponentFrontendNode, return_types: L def extract_type_from_optional(field_type): - """ - Extract the type from a string formatted as "Optional[<type>]". + """Extract the type from a string formatted as "Optional[<type>]". Parameters: field_type (str): The string from which to extract the type. @@ -109,7 +105,7 @@ def extract_type_from_optional(field_type): def get_field_properties(extra_field): - """Get the properties of an extra field""" + """Get the properties of an extra field.""" field_name = extra_field["name"] field_type = extra_field.get("type", "str") field_value = extra_field.get("default", "") @@ -128,18 +124,19 @@ def get_field_properties(extra_field): def process_type(field_type: str): - if field_type.startswith("list") or field_type.startswith("List"): + if field_type.startswith(("list", "List")): return extract_inner_type(field_type) # field_type is a string can be Prompt or Code too # so we just need to lower if it is the case lowercase_type = field_type.lower() - if lowercase_type in ["prompt", "code"]: + if lowercase_type in {"prompt", "code"}: return lowercase_type return field_type def add_new_custom_field( + *, frontend_node: CustomComponentFrontendNode, field_name: str, field_type: str, @@ -177,7 +174,7 @@ def add_new_custom_field( field_config["is_list"] = is_list or field_config.get("list", False) or field_contains_list if "name" in field_config: - warnings.warn("The 'name' key in field_config is used to build the object and can't be changed.") + logger.warning("The 'name' key in field_config is used to build the object and can't be changed.") required = field_config.pop("required", field_required) placeholder = field_config.pop("placeholder", "") @@ -199,8 +196,8 @@ def add_new_custom_field( return frontend_node -def add_extra_fields(frontend_node, field_config, function_args): - """Add extra fields to the frontend node""" +def add_extra_fields(frontend_node, field_config, function_args) -> None: + """Add extra fields to the frontend node.""" if not function_args: return _field_config = field_config.copy() @@ -209,42 +206,41 @@ def add_extra_fields(frontend_node, field_config, function_args): # then we need to add the extra fields for extra_field in function_args: - if "name" not in extra_field or extra_field["name"] in [ + if "name" not in extra_field or extra_field["name"] in { "self", "kwargs", "args", - ]: + }: continue field_name, field_type, field_value, field_required = get_field_properties(extra_field) config = _field_config.pop(field_name, {}) frontend_node = add_new_custom_field( - frontend_node, - field_name, - field_type, - field_value, - field_required, - config, + frontend_node=frontend_node, + field_name=field_name, + field_type=field_type, + field_value=field_value, + field_required=field_required, + field_config=config, ) - if "kwargs" in function_args_names and not all(key in function_args_names for key in field_config.keys()): - for field_name, field_config in _field_config.copy().items(): - if "name" not in
field_config or field_name == "code": + if "kwargs" in function_args_names and not all(key in function_args_names for key in field_config): + for field_name, config in _field_config.items(): + if "name" not in config or field_name == "code": continue - config = _field_config.get(field_name, {}) - config = config.model_dump() if isinstance(config, BaseModel) else config - field_name, field_type, field_value, field_required = get_field_properties(extra_field=config) + _config = config.model_dump() if isinstance(config, BaseModel) else config + _field_name, field_type, field_value, field_required = get_field_properties(extra_field=_config) frontend_node = add_new_custom_field( - frontend_node, - field_name, - field_type, - field_value, - field_required, - config, + frontend_node=frontend_node, + field_name=_field_name, + field_type=field_type, + field_value=field_value, + field_required=field_required, + field_config=_config, ) -def get_field_dict(field: Union[Input, dict]): - """Get the field dictionary from a Input or a dict""" +def get_field_dict(field: Input | dict): + """Get the field dictionary from an Input or a dict.""" if isinstance(field, Input): return dotdict(field.model_dump(by_alias=True, exclude_none=True)) return field @@ -252,91 +248,99 @@ def run_build_inputs( custom_component: Component, - user_id: Optional[Union[str, UUID]] = None, ): """Run the build inputs of a custom component.""" try: - field_config = custom_component.build_inputs(user_id=user_id) + return custom_component.build_inputs() # add_extra_fields(frontend_node, field_config, field_config.values()) - return field_config except Exception as exc: - logger.error(f"Error running build inputs: {exc}") + logger.exception("Error running build inputs") raise HTTPException(status_code=500, detail=str(exc)) from exc -def get_component_instance(custom_component: CustomComponent, user_id: Optional[Union[str, UUID]] = None): - try: - if custom_component._code is None: - raise ValueError("Code is None") - elif isinstance(custom_component._code, str): +def get_component_instance(custom_component: CustomComponent, user_id: str | UUID | None = None): + if custom_component._code is None: + error = "Code is None" + elif not isinstance(custom_component._code, str): + error = "Invalid code type" + else: + try: custom_class = eval_custom_component_code(custom_component._code) - else: - raise ValueError("Invalid code type") - except Exception as exc: - logger.error(f"Error while evaluating custom component code: {str(exc)}") - raise HTTPException( - status_code=400, - detail={ - "error": ("Invalid type convertion. Please check your code and try again."), - "traceback": traceback.format_exc(), - }, - ) from exc - - try: - custom_instance = custom_class(_user_id=user_id, _code=custom_component._code) - return custom_instance - except Exception as exc: - logger.error(f"Error while instantiating custom component: {str(exc)}") - if hasattr(exc, "detail") and "traceback" in exc.detail: - logger.error(exc.detail["traceback"]) - - raise exc + except Exception as exc: + logger.exception("Error while evaluating custom component code") + raise HTTPException( + status_code=400, + detail={ + "error": ("Invalid type conversion.
Please check your code and try again."), + "traceback": traceback.format_exc(), + }, + ) from exc + + try: + return custom_class(_user_id=user_id, _code=custom_component._code) + except Exception as exc: + logger.exception("Error while instantiating custom component") + if hasattr(exc, "detail") and "traceback" in exc.detail: + logger.error(exc.detail["traceback"]) + + raise + + msg = f"Invalid type conversion: {error}. Please check your code and try again." + logger.error(msg) + raise HTTPException( + status_code=400, + detail={"error": msg}, + ) def run_build_config( custom_component: CustomComponent, - user_id: Optional[Union[str, UUID]] = None, -) -> Tuple[dict, CustomComponent]: - """Build the field configuration for a custom component""" - - try: - if custom_component._code is None: - raise ValueError("Code is None") - elif isinstance(custom_component._code, str): + user_id: str | UUID | None = None, +) -> tuple[dict, CustomComponent]: + """Build the field configuration for a custom component.""" + if custom_component._code is None: + error = "Code is None" + elif not isinstance(custom_component._code, str): + error = "Invalid code type" + else: + try: custom_class = eval_custom_component_code(custom_component._code) - else: - raise ValueError("Invalid code type") - except Exception as exc: - logger.error(f"Error while evaluating custom component code: {str(exc)}") - raise HTTPException( - status_code=400, - detail={ - "error": ("Invalid type convertion. Please check your code and try again."), - "traceback": traceback.format_exc(), - }, - ) from exc - - try: - custom_instance = custom_class(_user_id=user_id) - build_config: Dict = custom_instance.build_config() - - for field_name, field in build_config.copy().items(): - # Allow user to build Input as well - # as a dict with the same keys as Input - field_dict = get_field_dict(field) - # Let's check if "rangeSpec" is a RangeSpec object - if "rangeSpec" in field_dict and isinstance(field_dict["rangeSpec"], RangeSpec): - field_dict["rangeSpec"] = field_dict["rangeSpec"].model_dump() - build_config[field_name] = field_dict - + except Exception as exc: + logger.exception("Error while evaluating custom component code") + raise HTTPException( + status_code=400, + detail={ + "error": ("Invalid type conversion. Please check your code and try again."), + "traceback": traceback.format_exc(), + }, + ) from exc + + try: + custom_instance = custom_class(_user_id=user_id) + build_config: dict = custom_instance.build_config() + + for field_name, field in build_config.copy().items(): + # Allow user to build Input as well + # as a dict with the same keys as Input + field_dict = get_field_dict(field) + # Let's check if "rangeSpec" is a RangeSpec object + if "rangeSpec" in field_dict and isinstance(field_dict["rangeSpec"], RangeSpec): + field_dict["rangeSpec"] = field_dict["rangeSpec"].model_dump() + build_config[field_name] = field_dict + + except Exception as exc: + logger.exception("Error while building field config") + if hasattr(exc, "detail") and "traceback" in exc.detail: + logger.error(exc.detail["traceback"]) + raise return build_config, custom_instance - except Exception as exc: - logger.error(f"Error while building field config: {str(exc)}") - if hasattr(exc, "detail") and "traceback" in exc.detail: - logger.error(exc.detail["traceback"]) - - raise exc + msg = f"Invalid type conversion: {error}. Please check your code and try again." 
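The recurring rewrite in this file, building msg/error first and then raising with "from exc", keeps messages out of the raise statement (per Ruff's EM rules) and chains the original exception so the traceback shows both failures. The pattern in isolation:

try:
    value = int("not-a-number")
except ValueError as e:
    msg = f"Error parsing value: {e}"
    raise ValueError(msg) from e  # original error preserved as __cause__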
+ logger.error(msg) + raise HTTPException( + status_code=400, + detail={"error": msg}, + ) def add_code_field(frontend_node: CustomComponentFrontendNode, raw_code): @@ -358,7 +362,7 @@ def add_code_field(frontend_node: CustomComponentFrontendNode, raw_code): def build_custom_component_template_from_inputs( - custom_component: Union[Component, CustomComponent], user_id: Optional[Union[str, UUID]] = None + custom_component: Component | CustomComponent, user_id: str | UUID | None = None ): # The List of Inputs fills the role of the build_config and the entrypoint_args cc_instance = get_component_instance(custom_component, user_id=user_id) @@ -384,17 +388,27 @@ def build_custom_component_template_from_inputs( def build_custom_component_template( custom_component: CustomComponent, - user_id: Optional[Union[str, UUID]] = None, -) -> Tuple[Dict[str, Any], CustomComponent | Component]: - """Build a custom component template""" + user_id: str | UUID | None = None, +) -> tuple[dict[str, Any], CustomComponent | Component]: + """Build a custom component template.""" + try: + has_template_config = hasattr(custom_component, "template_config") + except Exception as exc: + raise HTTPException( + status_code=400, + detail={ + "error": (f"Error building Component: {exc}"), + "traceback": traceback.format_exc(), + }, + ) from exc + if not has_template_config: + raise HTTPException( + status_code=400, + detail={ + "error": ("Error building Component. Please check if you are importing Component correctly."), + }, + ) try: - if not hasattr(custom_component, "template_config"): - raise HTTPException( - status_code=400, - detail={ - "error": ("Please check if you are importing Component correctly."), - }, - ) if "inputs" in custom_component.template_config: return build_custom_component_template_from_inputs(custom_component, user_id=user_id) frontend_node = CustomComponentFrontendNode(**custom_component.template_config) @@ -410,19 +424,19 @@ def build_custom_component_template( frontend_node = add_code_field(frontend_node, custom_component._code) - add_base_classes(frontend_node, custom_component.get_function_entrypoint_return_type) - add_output_types(frontend_node, custom_component.get_function_entrypoint_return_type) + add_base_classes(frontend_node, custom_component._get_function_entrypoint_return_type) + add_output_types(frontend_node, custom_component._get_function_entrypoint_return_type) reorder_fields(frontend_node, custom_instance._get_field_order()) return frontend_node.to_dict(keep_name=False), custom_instance except Exception as exc: if isinstance(exc, HTTPException): - raise exc + raise raise HTTPException( status_code=400, detail={ - "error": (f"Error building Component: {str(exc)}"), + "error": (f"Error building Component: {exc}"), "traceback": traceback.format_exc(), }, ) from exc @@ -442,7 +456,7 @@ def create_component_template(component): return component_template, component_instance -def build_custom_components(components_paths: List[str]): +def build_custom_components(components_paths: list[str]): """Build custom components from the specified paths.""" if not components_paths: return {} @@ -467,7 +481,7 @@ def build_custom_components(components_paths: List[str]): return custom_components_from_file -async def abuild_custom_components(components_paths: List[str]): +async def abuild_custom_components(components_paths: list[str]): """Build custom components from the specified paths.""" if not components_paths: return {} @@ -494,41 +508,43 @@ async def abuild_custom_components(components_paths: 
List[str]): def update_field_dict( custom_component_instance: "CustomComponent", - field_dict: Dict, - build_config: Dict, - update_field: Optional[str] = None, - update_field_value: Optional[Any] = None, + field_dict: dict, + build_config: dict, + *, + update_field: str | None = None, + update_field_value: Any | None = None, call: bool = False, ): - """Update the field dictionary by calling options() or value() if they are callable""" - if ("real_time_refresh" in field_dict or "refresh_button" in field_dict) and any( - ( - field_dict.get("real_time_refresh", False), - field_dict.get("refresh_button", False), + """Update the field dictionary by calling options() or value() if they are callable.""" + if ( + ("real_time_refresh" in field_dict or "refresh_button" in field_dict) + and any( + ( + field_dict.get("real_time_refresh", False), + field_dict.get("refresh_button", False), + ) ) + and call ): - if call: - try: - dd_build_config = dotdict(build_config) - custom_component_instance.update_build_config( - build_config=dd_build_config, - field_value=update_field, - field_name=update_field_value, - ) - build_config = dd_build_config - except Exception as exc: - logger.error(f"Error while running update_build_config: {str(exc)}") - raise UpdateBuildConfigError(f"Error while running update_build_config: {str(exc)}") from exc + try: + dd_build_config = dotdict(build_config) + custom_component_instance.update_build_config( + build_config=dd_build_config, + field_value=update_field, + field_name=update_field_value, + ) + build_config = dd_build_config + except Exception as exc: + msg = f"Error while running update_build_config: {exc}" + logger.exception(msg) + raise UpdateBuildConfigError(msg) from exc return build_config -def sanitize_field_config(field_config: Union[Dict, Input]): +def sanitize_field_config(field_config: dict | Input): # If any of the already existing keys are in field_config, remove them - if isinstance(field_config, Input): - field_dict = field_config.to_dict() - else: - field_dict = field_config + field_dict = field_config.to_dict() if isinstance(field_config, Input) else field_config for key in [ "name", "field_type", @@ -556,7 +572,7 @@ def build_component(component): def get_function(code): - """Get the function""" + """Get the function.""" function_name = validate.extract_function_name(code) return validate.create_function(code, function_name) diff --git a/src/backend/base/langflow/events/__init__.py b/src/backend/base/langflow/events/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/src/backend/base/langflow/events/event_manager.py b/src/backend/base/langflow/events/event_manager.py new file mode 100644 index 000000000000..2400a36c3099 --- /dev/null +++ b/src/backend/base/langflow/events/event_manager.py @@ -0,0 +1,95 @@ +import asyncio +import inspect +import json +import time +import uuid +from functools import partial +from typing import Literal + +from fastapi.encoders import jsonable_encoder +from loguru import logger +from typing_extensions import Protocol + +from langflow.schema.log import LoggableType +from langflow.schema.playground_events import create_event_by_type + + +class EventCallback(Protocol): + def __call__(self, *, manager: "EventManager", event_type: str, data: LoggableType): ... + + +class PartialEventCallback(Protocol): + def __call__(self, *, data: LoggableType): ... 
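The two protocols above type the callbacks consumed by the EventManager defined just below. A sketch of driving the manager once it exists; the payload keys are illustrative, not a documented schema:

import asyncio


async def main() -> None:
    queue: asyncio.Queue = asyncio.Queue()
    manager = create_default_event_manager(queue)
    # "on_error" is registered below, so this enqueues an "error" event;
    # an unregistered name would resolve to noop via __getattr__.
    manager.on_error(data={"error": "something failed"})  # hypothetical payload
    event_id, payload, timestamp = await queue.get()
    print(event_id, payload.decode("utf-8"))


asyncio.run(main())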
+ + +class EventManager: + def __init__(self, queue: asyncio.Queue): + self.queue = queue + self.events: dict[str, PartialEventCallback] = {} + + @staticmethod + def _validate_callback(callback: EventCallback) -> None: + if not callable(callback): + msg = "Callback must be callable" + raise TypeError(msg) + # Check that it accepts `manager`, `event_type`, and `data` + sig = inspect.signature(callback) + parameters = ["manager", "event_type", "data"] + if len(sig.parameters) != len(parameters): + msg = "Callback must have exactly 3 parameters" + raise ValueError(msg) + if not all(param.name in parameters for param in sig.parameters.values()): + msg = "Callback must have exactly 3 parameters: manager, event_type, and data" + raise ValueError(msg) + + def register_event( + self, + name: str, + event_type: Literal["message", "error", "warning", "info", "token"], + callback: EventCallback | None = None, + ) -> None: + if not name: + msg = "Event name cannot be empty" + raise ValueError(msg) + if not name.startswith("on_"): + msg = "Event name must start with 'on_'" + raise ValueError(msg) + if callback is None: + _callback = partial(self.send_event, event_type=event_type) + else: + _callback = partial(callback, manager=self, event_type=event_type) + self.events[name] = _callback + + def send_event(self, *, event_type: Literal["message", "error", "warning", "info", "token"], data: LoggableType): + try: + if isinstance(data, dict) and event_type in ["message", "error", "warning", "info", "token"]: + data = create_event_by_type(event_type, **data) + except TypeError as e: + logger.debug(f"Error creating playground event: {e}") + except Exception: + raise + jsonable_data = jsonable_encoder(data) + json_data = {"event": event_type, "data": jsonable_data} + event_id = f"{event_type}-{uuid.uuid4()}" + str_data = json.dumps(json_data) + "\n\n" + self.queue.put_nowait((event_id, str_data.encode("utf-8"), time.time())) + + def noop(self, *, data: LoggableType) -> None: + pass + + def __getattr__(self, name: str) -> PartialEventCallback: + return self.events.get(name, self.noop) + + +def create_default_event_manager(queue): + manager = EventManager(queue) + manager.register_event("on_token", "token") + manager.register_event("on_vertices_sorted", "vertices_sorted") + manager.register_event("on_error", "error") + manager.register_event("on_end", "end") + manager.register_event("on_message", "add_message") + manager.register_event("on_remove_message", "remove_message") + manager.register_event("on_end_vertex", "end_vertex") + manager.register_event("on_build_start", "build_start") + manager.register_event("on_build_end", "build_end") + return manager diff --git a/src/backend/base/langflow/exceptions/api.py b/src/backend/base/langflow/exceptions/api.py index 70003b89b9c3..1997ad1577fc 100644 --- a/src/backend/base/langflow/exceptions/api.py +++ b/src/backend/base/langflow/exceptions/api.py @@ -1,11 +1,12 @@ from fastapi import HTTPException +from pydantic import BaseModel + from langflow.api.utils import get_suggestion_message from langflow.services.database.models.flow.model import Flow from langflow.services.database.models.flow.utils import get_outdated_components -from pydantic import BaseModel -class InvalidChatInputException(Exception): +class InvalidChatInputError(Exception): pass @@ -30,5 +31,4 @@ def build_exception_body(exc: str | list[str] | Exception, flow: Flow | None) -> outdated_components = get_outdated_components(flow) if outdated_components: body["suggestion"] =
get_suggestion_message(outdated_components) - excep = ExceptionBody(**body) - return excep + return ExceptionBody(**body) diff --git a/src/backend/base/langflow/exceptions/component.py b/src/backend/base/langflow/exceptions/component.py index a4a5575651e1..1f61ab91039a 100644 --- a/src/backend/base/langflow/exceptions/component.py +++ b/src/backend/base/langflow/exceptions/component.py @@ -1,6 +1,17 @@ # Create an exception class that receives the message and the formatted traceback -class ComponentBuildException(Exception): + +from langflow.schema.properties import Source + + +class ComponentBuildError(Exception): def __init__(self, message: str, formatted_traceback: str): self.message = message self.formatted_traceback = formatted_traceback super().__init__(message) + + +class StreamingError(Exception): + def __init__(self, cause: Exception, source: Source): + self.cause = cause + self.source = source + super().__init__(cause) diff --git a/src/backend/base/langflow/field_typing/__init__.py b/src/backend/base/langflow/field_typing/__init__.py index e387c4c8ddd5..5ac0502ad3b5 100644 --- a/src/backend/base/langflow/field_typing/__init__.py +++ b/src/backend/base/langflow/field_typing/__init__.py @@ -18,6 +18,7 @@ Data, Document, Embeddings, + LanguageModel, NestedDict, Object, PromptTemplate, @@ -26,7 +27,6 @@ TextSplitter, Tool, VectorStore, - LanguageModel, ) from .range_spec import RangeSpec @@ -48,7 +48,7 @@ def __getattr__(name: str) -> Any: if name == "Input": return _import_input_class() return RangeSpec - elif name == "Output": + if name == "Output": return _import_output_class() # The other names should work as if they were imported from constants # Import the constants module langflow.field_typing.constants @@ -60,8 +60,9 @@ def __getattr__(name: str) -> Any: __all__ = [ "AgentExecutor", "BaseChatMemory", - "BaseLanguageModel", + "BaseChatModel", "BaseLLM", + "BaseLanguageModel", "BaseLoader", "BaseMemory", "BaseOutputParser", @@ -75,15 +76,14 @@ def __getattr__(name: str) -> Any: "Document", "Embeddings", "Input", + "LanguageModel", "NestedDict", "Object", "PromptTemplate", "RangeSpec", + "Retriever", + "Text", "TextSplitter", "Tool", "VectorStore", - "BaseChatModel", - "Retriever", - "Text", - "LanguageModel", ] diff --git a/src/backend/base/langflow/field_typing/constants.py b/src/backend/base/langflow/field_typing/constants.py index dfa8309e71cd..35bb20a227b7 100644 --- a/src/backend/base/langflow/field_typing/constants.py +++ b/src/backend/base/langflow/field_typing/constants.py @@ -1,4 +1,5 @@ -from typing import Callable, Dict, Text, TypeAlias, TypeVar, Union +from collections.abc import Callable +from typing import Text, TypeAlias, TypeVar from langchain.agents.agent import AgentExecutor from langchain.chains.base import Chain @@ -10,7 +11,7 @@ from langchain_core.language_models import BaseLanguageModel, BaseLLM from langchain_core.language_models.chat_models import BaseChatModel from langchain_core.memory import BaseMemory -from langchain_core.output_parsers import BaseOutputParser +from langchain_core.output_parsers import BaseLLMOutputParser, BaseOutputParser from langchain_core.prompts import BasePromptTemplate, ChatPromptTemplate, PromptTemplate from langchain_core.retrievers import BaseRetriever from langchain_core.tools import BaseTool, Tool @@ -20,7 +21,7 @@ from langflow.schema.data import Data from langflow.schema.message import Message -NestedDict: TypeAlias = Dict[str, Union[str, Dict]] +NestedDict: TypeAlias = dict[str, str | dict] LanguageModel = 
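For context, one plausible way the renamed ComponentBuildError might be raised with a formatted traceback; the surrounding build step is hypothetical:

import traceback

from langflow.exceptions.component import ComponentBuildError


def build_step() -> None:
    try:
        raise RuntimeError("boom")  # stand-in for a real component failure
    except RuntimeError as exc:
        # The error carries both the message and the formatted traceback
        raise ComponentBuildError(str(exc), traceback.format_exc()) from exc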
TypeVar("LanguageModel", BaseLanguageModel, BaseLLM, BaseChatModel) ToolEnabledLanguageModel = TypeVar("ToolEnabledLanguageModel", BaseLanguageModel, BaseLLM, BaseChatModel) Retriever = TypeVar( @@ -28,6 +29,11 @@ BaseRetriever, VectorStoreRetriever, ) +OutputParser = TypeVar( + "OutputParser", + BaseOutputParser, + BaseLLMOutputParser, +) class Object: @@ -66,7 +72,7 @@ class Code: "NestedDict": NestedDict, "Data": Data, "Message": Message, - "Text": Text, + "Text": Text, # noqa: UP019 "Object": Object, "Callable": Callable, "LanguageModel": LanguageModel, diff --git a/src/backend/base/langflow/field_typing/range_spec.py b/src/backend/base/langflow/field_typing/range_spec.py index 78ed2e4353bf..eabd1c07a163 100644 --- a/src/backend/base/langflow/field_typing/range_spec.py +++ b/src/backend/base/langflow/field_typing/range_spec.py @@ -11,18 +11,21 @@ class RangeSpec(BaseModel): @field_validator("max") @classmethod - def max_must_be_greater_than_min(cls, v, values, **kwargs): + def max_must_be_greater_than_min(cls, v, values): if "min" in values.data and v <= values.data["min"]: - raise ValueError("Max must be greater than min") + msg = "Max must be greater than min" + raise ValueError(msg) return v @field_validator("step") @classmethod - def step_must_be_positive(cls, v, values, **kwargs): + def step_must_be_positive(cls, v, values): if v <= 0: - raise ValueError("Step must be positive") + msg = "Step must be positive" + raise ValueError(msg) if values.data["step_type"] == "int" and isinstance(v, float) and not v.is_integer(): - raise ValueError("When step_type is int, step must be an integer") + msg = "When step_type is int, step must be an integer" + raise ValueError(msg) return v @classmethod diff --git a/src/backend/base/langflow/graph/__init__.py b/src/backend/base/langflow/graph/__init__.py index bb93f92cfbdd..741f7468cf2c 100644 --- a/src/backend/base/langflow/graph/__init__.py +++ b/src/backend/base/langflow/graph/__init__.py @@ -3,4 +3,4 @@ from langflow.graph.vertex.base import Vertex from langflow.graph.vertex.types import CustomComponentVertex, InterfaceVertex, StateVertex -__all__ = ["Edge", "Graph", "Vertex", "CustomComponentVertex", "InterfaceVertex", "StateVertex"] +__all__ = ["CustomComponentVertex", "Edge", "Graph", "InterfaceVertex", "StateVertex", "Vertex"] diff --git a/src/backend/base/langflow/graph/edge/base.py b/src/backend/base/langflow/graph/edge/base.py index 9a1e8c7965ed..13f257cba582 100644 --- a/src/backend/base/langflow/graph/edge/base.py +++ b/src/backend/base/langflow/graph/edge/base.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import TYPE_CHECKING, Any, cast from loguru import logger @@ -10,7 +12,7 @@ class Edge: - def __init__(self, source: "Vertex", target: "Vertex", edge: EdgeData): + def __init__(self, source: Vertex, target: Vertex, edge: EdgeData): self.source_id: str = source.id if source else "" self.target_id: str = target.id if target else "" self.valid_handles: bool = False @@ -28,28 +30,31 @@ def __init__(self, source: "Vertex", target: "Vertex", edge: EdgeData): except Exception as e: if "inputTypes" in self._target_handle and self._target_handle["inputTypes"] is None: # Check if self._target_handle['fieldName'] - if hasattr(target, "_custom_component"): - display_name = getattr(target._custom_component, "display_name", "") - raise ValueError( - f"Component {display_name} field '{self._target_handle['fieldName']}' might not be a valid input." 
- ) from e - else: - raise ValueError( - f"Field '{self._target_handle['fieldName']}' on {target.display_name} might not be a valid input." - ) from e - else: - raise e + if hasattr(target, "custom_component"): + display_name = getattr(target.custom_component, "display_name", "") + msg = ( + f"Component {display_name} field '{self._target_handle['fieldName']}' " + "might not be a valid input." + ) + raise ValueError(msg) from e + msg = ( + f"Field '{self._target_handle['fieldName']}' on {target.display_name} " + "might not be a valid input." + ) + raise ValueError(msg) from e + raise else: - raise ValueError("Target handle is not a dictionary") + msg = "Target handle is not a dictionary" + raise ValueError(msg) self.target_param = self.target_handle.field_name # validate handles self.validate_handles(source, target) else: # Logging here because this is a breaking change logger.error("Edge data is empty") - self._source_handle = edge.get("sourceHandle", "") # type: ignore - self._target_handle = edge.get("targetHandle", "") # type: ignore + self._source_handle = edge.get("sourceHandle", "") # type: ignore[assignment] + self._target_handle = edge.get("targetHandle", "") # type: ignore[assignment] # 'BaseLoader;BaseOutputParser|documents|PromptTemplate-zmTlD' # target_param is documents if isinstance(self._target_handle, str): @@ -57,7 +62,8 @@ def __init__(self, source: "Vertex", target: "Vertex", edge: EdgeData): self.source_handle = None self.target_handle = None else: - raise ValueError("Target handle is not a string") + msg = "Target handle is not a string" + raise ValueError(msg) # Validate in __init__ to fail fast self.validate_edge(source, target) @@ -83,7 +89,8 @@ def _validate_handles(self, source, target) -> None: if not self.valid_handles: logger.debug(self.source_handle) logger.debug(self.target_handle) - raise ValueError(f"Edge between {source.vertex_type} and {target.vertex_type} " f"has invalid handles") + msg = f"Edge between {source.display_name} and {target.display_name} has invalid handles" + raise ValueError(msg) def _legacy_validate_handles(self, source, target) -> None: if self.target_handle.input_types is None: @@ -96,7 +103,8 @@ def _legacy_validate_handles(self, source, target) -> None: if not self.valid_handles: logger.debug(self.source_handle) logger.debug(self.target_handle) - raise ValueError(f"Edge between {source.vertex_type} and {target.vertex_type} " f"has invalid handles") + msg = f"Edge between {source.vertex_type} and {target.vertex_type} has invalid handles" + raise ValueError(msg) def __setstate__(self, state): self.source_id = state["source_id"] @@ -152,7 +160,8 @@ def _validate_edge(self, source, target) -> None: if no_matched_type: logger.debug(self.source_types) logger.debug(self.target_reqs) - raise ValueError(f"Edge between {source.vertex_type} and {target.vertex_type} " f"has no matched type. ") + msg = f"Edge between {source.vertex_type} and {target.vertex_type} has no matched type." 
+ raise ValueError(msg) def _legacy_validate_edge(self, source, target) -> None: # Validate that the outputs of the source node are valid inputs @@ -173,7 +182,8 @@ def _legacy_validate_edge(self, source, target) -> None: if no_matched_type: logger.debug(self.source_types) logger.debug(self.target_reqs) - raise ValueError(f"Edge between {source.vertex_type} and {target.vertex_type} " f"has no matched type") + msg = f"Edge between {source.vertex_type} and {target.vertex_type} has no matched type" + raise ValueError(msg) def __repr__(self) -> str: if (hasattr(self, "source_handle") and self.source_handle) and ( @@ -199,15 +209,17 @@ def __str__(self) -> str: class CycleEdge(Edge): - def __init__(self, source: "Vertex", target: "Vertex", raw_edge: EdgeData): + def __init__(self, source: Vertex, target: Vertex, raw_edge: EdgeData): super().__init__(source, target, raw_edge) self.is_fulfilled = False # Whether the contract has been fulfilled. self.result: Any = None self.is_cycle = True + source.has_cycle_edges = True + target.has_cycle_edges = True + + async def honor(self, source: Vertex, target: Vertex) -> None: + """Fulfills the contract by setting the result of the source vertex to the target vertex's parameter. - async def honor(self, source: "Vertex", target: "Vertex") -> None: - """ - Fulfills the contract by setting the result of the source vertex to the target vertex's parameter. If the edge is runnable, the source vertex is run with the message text and the target vertex's root_field param is set to the result. If the edge is not runnable, the target vertex's parameter is set to the result. @@ -216,29 +228,35 @@ async def honor(self, source: "Vertex", target: "Vertex") -> None: if self.is_fulfilled: return - if not source._built: + if not source.built: # The system should be read-only, so we should not be building vertices # that are not already built. - raise ValueError(f"Source vertex {source.id} is not built.") + msg = f"Source vertex {source.id} is not built." + raise ValueError(msg) if self.matched_type == "Text": - self.result = source._built_result + self.result = source.built_result else: - self.result = source._built_object + self.result = source.built_object target.params[self.target_param] = self.result self.is_fulfilled = True - async def get_result_from_source(self, source: "Vertex", target: "Vertex"): + async def get_result_from_source(self, source: Vertex, target: Vertex): # Fulfill the contract if it has not been fulfilled. 
if not self.is_fulfilled: await self.honor(source, target) # If the target vertex is a power component we log messages - if target.vertex_type == "ChatOutput" and ( - isinstance(target.params.get(INPUT_FIELD_NAME), str) - or isinstance(target.params.get(INPUT_FIELD_NAME), dict) + if ( + target.vertex_type == "ChatOutput" + and isinstance(target.params.get(INPUT_FIELD_NAME), str | dict) + and target.params.get("message") == "" ): - if target.params.get("message") == "": - return self.result + return self.result return self.result + + def __repr__(self) -> str: + str_repr = super().__repr__() + # Add a symbol to show this is a cycle edge + return f"{str_repr} 🔄" diff --git a/src/backend/base/langflow/graph/edge/schema.py b/src/backend/base/langflow/graph/edge/schema.py index 7e0f04108ac1..3973841eaa2e 100644 --- a/src/backend/base/langflow/graph/edge/schema.py +++ b/src/backend/base/langflow/graph/edge/schema.py @@ -1,4 +1,4 @@ -from typing import Any, List, Optional +from typing import Any from pydantic import ConfigDict, Field, field_validator from typing_extensions import TypedDict @@ -12,12 +12,12 @@ class ResultPair(BaseModel): class Payload(BaseModel): - result_pairs: List[ResultPair] = [] + result_pairs: list[ResultPair] = [] def __iter__(self): return iter(self.result_pairs) - def add_result_pair(self, result: Any, extra: Optional[Any] = None) -> None: + def add_result_pair(self, result: Any, extra: Any | None = None) -> None: self.result_pairs.append(ResultPair(result=result, extra=extra)) def get_last_result_pair(self) -> ResultPair: @@ -42,7 +42,7 @@ class TargetHandle(BaseModel): model_config = ConfigDict(populate_by_name=True) field_name: str = Field(..., alias="fieldName", description="Field name for the target handle.") id: str = Field(..., description="Unique identifier for the target handle.") - input_types: List[str] = Field( + input_types: list[str] = Field( default_factory=list, alias="inputTypes", description="List of input types for the target handle." 
) type: str = Field(..., description="Type of the target handle.") @@ -55,8 +55,8 @@ class SourceHandle(BaseModel): ) data_type: str = Field(..., alias="dataType", description="Data type for the source handle.") id: str = Field(..., description="Unique identifier for the source handle.") - name: Optional[str] = Field(None, description="Name of the source handle.") - output_types: List[str] = Field(default_factory=list, description="List of output types for the source handle.") + name: str | None = Field(None, description="Name of the source handle.") + output_types: list[str] = Field(default_factory=list, description="List of output types for the source handle.") @field_validator("name", mode="before") @classmethod @@ -64,8 +64,9 @@ def validate_name(cls, v, _info): if _info.data["data_type"] == "GroupNode": # 'OpenAIModel-u4iGV_text_output' splits = v.split("_", 1) - if len(splits) != 2: - raise ValueError(f"Invalid source handle name {v}") + if len(splits) != 2: # noqa: PLR2004 + msg = f"Invalid source handle name {v}" + raise ValueError(msg) v = splits[1] return v @@ -74,14 +75,14 @@ class SourceHandleDict(TypedDict, total=False): baseClasses: list[str] dataType: str id: str - name: Optional[str] - output_types: List[str] + name: str | None + output_types: list[str] class TargetHandleDict(TypedDict): fieldName: str id: str - inputTypes: Optional[List[str]] + inputTypes: list[str] | None type: str diff --git a/src/backend/base/langflow/graph/graph/ascii.py b/src/backend/base/langflow/graph/graph/ascii.py index 5444c87ef9e5..93ebce8ec2cf 100644 --- a/src/backend/base/langflow/graph/graph/ascii.py +++ b/src/backend/base/langflow/graph/graph/ascii.py @@ -1,5 +1,4 @@ -""" -This code is adapted from the DVC project. +"""This code is adapted from the DVC project. 
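To make the GroupNode name handling concrete, a hedged construction sketch; populate_by_name=True lets the aliased fields be passed directly, the id value is illustrative, and any fields not shown in the hunk are assumed optional:

from langflow.graph.edge.schema import SourceHandle

handle = SourceHandle(
    dataType="GroupNode",
    id="OpenAIModel-u4iGV",
    name="OpenAIModel-u4iGV_text_output",
)
# validate_name strips the node prefix for GroupNode handles
print(handle.name)  # -> "text_output"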
Original source: https://github.com/iterative/dvc/blob/c5bac1c8cfdb2c0f54d52ac61ff754e6f583822a/dvc/dagascii.py @@ -19,13 +18,15 @@ from grandalf.layouts import SugiyamaLayout from grandalf.routing import EdgeViewer, route_with_lines +MINIMUM_EDGE_VIEW_POINTS = 2 + class VertexViewer: """Class to define vertex box boundaries that will be accounted for during graph building by grandalf.""" HEIGHT = 3 # top and bottom box edges + text - def __init__(self, name): + def __init__(self, name) -> None: self._h = self.HEIGHT # top and bottom box edges + text self._w = len(name) + 2 # right and left bottom edges + text @@ -41,9 +42,13 @@ def w(self): class AsciiCanvas: """Class for drawing in ASCII.""" - def __init__(self, cols, lines): - assert cols > 1 - assert lines > 1 + def __init__(self, cols, lines) -> None: + if cols <= 1: + msg = "cols must be greater than 1" + raise ValueError(msg) + if lines <= 1: + msg = "lines must be greater than 1" + raise ValueError(msg) self.cols = cols self.lines = lines self.canvas = [[" "] * cols for _ in range(lines)] @@ -54,19 +59,25 @@ def get_lines(self): def draws(self): return "\n".join(self.get_lines()) - def draw(self): + def draw(self) -> None: """Draws ASCII canvas on the screen.""" lines = self.get_lines() - print("\n".join(lines)) + print("\n".join(lines)) # noqa: T201 - def point(self, x, y, char): + def point(self, x, y, char) -> None: """Create a point on ASCII canvas.""" - assert len(char) == 1 - assert 0 <= x < self.cols - assert 0 <= y < self.lines + if len(char) != 1: + msg = "char must be a single character" + raise ValueError(msg) + if x < 0 or x >= self.cols: + msg = "x is out of bounds" + raise ValueError(msg) + if y < 0 or y >= self.lines: + msg = "y is out of bounds" + raise ValueError(msg) self.canvas[y][x] = char - def line(self, x0, y0, x1, y1, char): + def line(self, x0, y0, x1, y1, char) -> None: """Create a line on ASCII canvas.""" if x0 > x1: x1, x0 = x0, x1 @@ -86,15 +97,19 @@ def line(self, x0, y0, x1, y1, char): x = x0 + int(round((y - y0) * dx / float(dy))) if dy else x0 self.point(x, y, char) - def text(self, x, y, text): + def text(self, x, y, text) -> None: """Print a text on ASCII canvas.""" for i, char in enumerate(text): self.point(x + i, y, char) - def box(self, x0, y0, width, height): + def box(self, x0, y0, width, height) -> None: """Create a box on ASCII canvas.""" - assert width > 1 - assert height > 1 + if width <= 1: + msg = "width must be greater than 1" + raise ValueError(msg) + if height <= 1: + msg = "height must be greater than 1" + raise ValueError(msg) width -= 1 height -= 1 @@ -118,7 +133,7 @@ def build_sugiyama_layout(vertexes, edges): for vertex in vertexes.values(): vertex.view = VertexViewer(vertex.data) - minw = min([v.view.w for v in vertexes.values()]) + minw = min(v.view.w for v in vertexes.values()) for edge in edges: edge.view = EdgeViewer() @@ -135,26 +150,26 @@ def build_sugiyama_layout(vertexes, edges): return sug -def draw_graph(vertexes, edges, return_ascii=True): +def draw_graph(vertexes, edges, *, return_ascii=True): """Build a DAG and draw it in ASCII.""" sug = build_sugiyama_layout(vertexes, edges) - Xs = [] - Ys = [] + xlist = [] + ylist = [] for vertex in sug.g.sV: - Xs.extend([vertex.view.xy[0] - vertex.view.w / 2.0, vertex.view.xy[0] + vertex.view.w / 2.0]) - Ys.extend([vertex.view.xy[1], vertex.view.xy[1] + vertex.view.h]) + xlist.extend([vertex.view.xy[0] - vertex.view.w / 2.0, vertex.view.xy[0] + vertex.view.w / 2.0]) + ylist.extend([vertex.view.xy[1], vertex.view.xy[1] + 
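The assertion-to-exception rewrite in AsciiCanvas leaves its drawing primitives unchanged; a quick sketch of them in isolation:

from langflow.graph.graph.ascii import AsciiCanvas

canvas = AsciiCanvas(cols=20, lines=5)
canvas.box(0, 0, 10, 3)    # box must be wider and taller than 1
canvas.text(2, 1, "node")  # text is drawn point by point
print(canvas.draws())      # joins the canvas rows into one string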
vertex.view.h]) for edge in sug.g.sE: for x, y in edge.view._pts: - Xs.append(x) - Ys.append(y) + xlist.append(x) + ylist.append(y) - minx = min(Xs) - miny = min(Ys) - maxx = max(Xs) - maxy = max(Ys) + minx = min(xlist) + miny = min(ylist) + maxx = max(xlist) + maxy = max(ylist) canvas_cols = int(math.ceil(maxx - minx)) + 1 canvas_lines = int(round(maxy - miny)) @@ -162,7 +177,9 @@ def draw_graph(vertexes, edges, return_ascii=True): canvas = AsciiCanvas(canvas_cols, canvas_lines) for edge in sug.g.sE: - assert len(edge.view._pts) > 1 + if len(edge.view._pts) < MINIMUM_EDGE_VIEW_POINTS: + msg = "edge.view._pts must have at least 2 points" + raise ValueError(msg) for index in range(1, len(edge.view._pts)): start = edge.view._pts[index - 1] end = edge.view._pts[index] @@ -181,5 +198,5 @@ def draw_graph(vertexes, edges, return_ascii=True): canvas.text(int(round(x - minx)) + 1, int(round(y - miny)) + 1, vertex.data) if return_ascii: return canvas.draws() - else: - canvas.draw() + canvas.draw() + return None diff --git a/src/backend/base/langflow/graph/graph/base.py b/src/backend/base/langflow/graph/graph/base.py index 20f74d41b971..f064e65beb5c 100644 --- a/src/backend/base/langflow/graph/graph/base.py +++ b/src/backend/base/langflow/graph/graph/base.py @@ -1,41 +1,54 @@ +from __future__ import annotations + import asyncio +import contextlib import copy import json +import queue +import threading import uuid -import warnings from collections import defaultdict, deque +from collections.abc import Generator, Iterable from datetime import datetime, timezone from functools import partial from itertools import chain -from typing import TYPE_CHECKING, Any, Dict, Generator, List, Optional, Tuple, Type, Union +from typing import TYPE_CHECKING, Any, cast -import nest_asyncio from loguru import logger -from langflow.exceptions.component import ComponentBuildException -from langflow.graph.edge.base import CycleEdge -from langflow.graph.edge.schema import EdgeData +from langflow.exceptions.component import ComponentBuildError +from langflow.graph.edge.base import CycleEdge, Edge from langflow.graph.graph.constants import Finish, lazy_load_vertex_dict from langflow.graph.graph.runnable_vertices_manager import RunnableVerticesManager from langflow.graph.graph.schema import GraphData, GraphDump, StartConfigDict, VertexBuildResult from langflow.graph.graph.state_manager import GraphStateManager from langflow.graph.graph.state_model import create_state_model_from_graph -from langflow.graph.graph.utils import find_start_component_id, process_flow, should_continue, sort_up_to_vertex +from langflow.graph.graph.utils import ( + find_all_cycle_edges, + find_cycle_vertices, + find_start_component_id, + process_flow, + should_continue, + sort_up_to_vertex, +) from langflow.graph.schema import InterfaceComponentTypes, RunOutputs from langflow.graph.vertex.base import Vertex, VertexStates -from langflow.graph.vertex.schema import NodeData +from langflow.graph.vertex.schema import NodeData, NodeTypeEnum from langflow.graph.vertex.types import ComponentVertex, InterfaceVertex, StateVertex from langflow.logging.logger import LogConfig, configure -from langflow.schema import Data +from langflow.schema.dotdict import dotdict from langflow.schema.schema import INPUT_FIELD_NAME, InputType from langflow.services.cache.utils import CacheMiss -from langflow.services.chat.schema import GetCache, SetCache from langflow.services.deps import get_chat_service, get_tracing_service if TYPE_CHECKING: from langflow.api.v1.schemas import 
InputValueRequest from langflow.custom.custom_component.component import Component + from langflow.events.event_manager import EventManager + from langflow.graph.edge.schema import EdgeData from langflow.graph.schema import ResultData + from langflow.schema import Data + from langflow.services.chat.schema import GetCache, SetCache from langflow.services.tracing.service import TracingService @@ -44,24 +57,30 @@ class Graph: def __init__( self, - start: Optional["Component"] = None, - end: Optional["Component"] = None, - flow_id: Optional[str] = None, - flow_name: Optional[str] = None, - description: Optional[str] = None, - user_id: Optional[str] = None, - log_config: Optional[LogConfig] = None, + start: Component | None = None, + end: Component | None = None, + flow_id: str | None = None, + flow_name: str | None = None, + description: str | None = None, + user_id: str | None = None, + log_config: LogConfig | None = None, + context: dict[str, Any] | None = None, ) -> None: - """ - Initializes a new instance of the Graph class. + """Initializes a new instance of the Graph class. Args: - nodes (List[Dict]): A list of dictionaries representing the vertices of the graph. - edges (List[Dict[str, str]]): A list of dictionaries representing the edges of the graph. - flow_id (Optional[str], optional): The ID of the flow. Defaults to None. + start: The start component. + end: The end component. + flow_id: The ID of the flow. Defaults to None. + flow_name: The flow name. + description: The graph description. + user_id: The user ID. + log_config: The log configuration. + context: Additional context for the graph. Defaults to None. """ if log_config: configure(**log_config) + self._start = start self._state_model = None self._end = end @@ -72,49 +91,82 @@ def __init__( self.flow_name = flow_name self.description = description self.user_id = user_id - self._is_input_vertices: List[str] = [] - self._is_output_vertices: List[str] = [] - self._is_state_vertices: List[str] = [] - self._has_session_id_vertices: List[str] = [] - self._sorted_vertices_layers: List[List[str]] = [] + self._is_input_vertices: list[str] = [] + self._is_output_vertices: list[str] = [] + self._is_state_vertices: list[str] = [] + self.has_session_id_vertices: list[str] = [] + self._sorted_vertices_layers: list[list[str]] = [] self._run_id = "" + self._session_id = "" self._start_time = datetime.now(timezone.utc) self.inactivated_vertices: set = set() - self.activated_vertices: List[str] = [] - self.vertices_layers: List[List[str]] = [] + self.activated_vertices: list[str] = [] + self.vertices_layers: list[list[str]] = [] self.vertices_to_run: set[str] = set() - self.stop_vertex: Optional[str] = None + self.stop_vertex: str | None = None self.inactive_vertices: set = set() - self.edges: List[CycleEdge] = [] - self.vertices: List[Vertex] = [] + self.edges: list[CycleEdge] = [] + self.vertices: list[Vertex] = [] self.run_manager = RunnableVerticesManager() self.state_manager = GraphStateManager() - self._vertices: List[NodeData] = [] - self._edges: List[EdgeData] = [] - self.top_level_vertices: List[str] = [] - self.vertex_map: Dict[str, Vertex] = {} - self.predecessor_map: Dict[str, List[str]] = defaultdict(list) - self.successor_map: Dict[str, List[str]] = defaultdict(list) - self.in_degree_map: Dict[str, int] = defaultdict(int) - self.parent_child_map: Dict[str, List[str]] = defaultdict(list) + self._vertices: list[NodeData] = [] + self._edges: list[EdgeData] = [] + + self.top_level_vertices: list[str] = [] + self.vertex_map: 
dict[str, Vertex] = {} + self.predecessor_map: dict[str, list[str]] = defaultdict(list) + self.successor_map: dict[str, list[str]] = defaultdict(list) + self.in_degree_map: dict[str, int] = defaultdict(int) + self.parent_child_map: dict[str, list[str]] = defaultdict(list) self._run_queue: deque[str] = deque() - self._first_layer: List[str] = [] + self._first_layer: list[str] = [] self._lock = asyncio.Lock() self.raw_graph_data: GraphData = {"nodes": [], "edges": []} - self._is_cyclic: Optional[bool] = None - self._cycles: Optional[List[tuple[str, str]]] = None - self._call_order: List[str] = [] - self._snapshots: List[Dict[str, Any]] = [] + self._is_cyclic: bool | None = None + self._cycles: list[tuple[str, str]] | None = None + self._cycle_vertices: set[str] | None = None + self._call_order: list[str] = [] + self._snapshots: list[dict[str, Any]] = [] + self._end_trace_tasks: set[asyncio.Task] = set() + + if context and not isinstance(context, dict): + msg = "Context must be a dictionary" + raise TypeError(msg) + self._context = dotdict(context or {}) try: - self.tracing_service: "TracingService" | None = get_tracing_service() - except Exception as exc: - logger.error(f"Error getting tracing service: {exc}") + self.tracing_service: TracingService | None = get_tracing_service() + except Exception: # noqa: BLE001 + logger.exception("Error getting tracing service") self.tracing_service = None if start is not None and end is not None: self._set_start_and_end(start, end) self.prepare(start_component_id=start._id) if (start is not None and end is None) or (start is None and end is not None): - raise ValueError("You must provide both input and output components") + msg = "You must provide both input and output components" + raise ValueError(msg) + + @property + def context(self) -> dotdict: + if isinstance(self._context, dotdict): + return self._context + return dotdict(self._context) + + @context.setter + def context(self, value: dict[str, Any]): + if not isinstance(value, dict): + msg = "Context must be a dictionary" + raise TypeError(msg) + if isinstance(value, dict): + value = dotdict(value) + self._context = value + + @property + def session_id(self): + return self._session_id + + @session_id.setter + def session_id(self, value: str): + self._session_id = value @property def state_model(self): @@ -124,7 +176,8 @@ def state_model(self): def __add__(self, other): if not isinstance(other, Graph): - raise TypeError("Can only add Graph objects") + msg = "Can only add Graph objects" + raise TypeError(msg) # Add the vertices and edges from the other graph to this graph new_instance = copy.deepcopy(self) for vertex in other.vertices: @@ -136,7 +189,8 @@ def __add__(self, other): def __iadd__(self, other): if not isinstance(other, Graph): - raise TypeError("Can only add Graph objects") + msg = "Can only add Graph objects" + raise TypeError(msg) # Add the vertices and edges from the other graph to this graph for vertex in other.vertices: # This updates the edges as well @@ -147,15 +201,15 @@ def __iadd__(self, other): def dumps( self, - name: Optional[str] = None, - description: Optional[str] = None, - endpoint_name: Optional[str] = None, + name: str | None = None, + description: str | None = None, + endpoint_name: str | None = None, ) -> str: graph_dict = self.dump(name, description, endpoint_name) return json.dumps(graph_dict, indent=4, sort_keys=True) def dump( - self, name: Optional[str] = None, description: Optional[str] = None, endpoint_name: Optional[str] = None + self, name: str | None = None, 
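The new context plumbing is easiest to see in isolation; a minimal sketch, assuming a bare Graph() is valid outside the start/end component flow:

from langflow.graph.graph.base import Graph

graph = Graph(context={"tenant": "acme"})
# context is coerced to dotdict, so key and attribute access both work
print(graph.context["tenant"], graph.context.tenant)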
description: str | None = None, endpoint_name: str | None = None ) -> GraphDump: if self.raw_graph_data != {"nodes": [], "edges": []}: data_dict = self.raw_graph_data @@ -180,7 +234,7 @@ def dump( graph_dict["endpoint_name"] = str(endpoint_name) return graph_dict - def add_nodes_and_edges(self, nodes: List[NodeData], edges: List[EdgeData]): + def add_nodes_and_edges(self, nodes: list[NodeData], edges: list[EdgeData]) -> None: self._vertices = nodes self._edges = edges self.raw_graph_data = {"nodes": nodes, "edges": edges} @@ -188,59 +242,83 @@ def add_nodes_and_edges(self, nodes: List[NodeData], edges: List[EdgeData]): for vertex in self._vertices: if vertex_id := vertex.get("id"): self.top_level_vertices.append(vertex_id) + if vertex_id in self.cycle_vertices: + self.run_manager.add_to_cycle_vertices(vertex_id) self._graph_data = process_flow(self.raw_graph_data) self._vertices = self._graph_data["nodes"] self._edges = self._graph_data["edges"] self.initialize() - def add_component(self, _id: str, component: "Component"): - if _id in self.vertex_map: - return + def add_component(self, component: Component, component_id: str | None = None) -> str: + component_id = component_id or component._id + if component_id in self.vertex_map: + return component_id + component._id = component_id + if component_id in self.vertex_map: + msg = f"Component ID {component_id} already exists" + raise ValueError(msg) frontend_node = component.to_frontend_node() - frontend_node["data"]["id"] = _id - frontend_node["id"] = _id self._vertices.append(frontend_node) vertex = self._create_vertex(frontend_node) vertex.add_component_instance(component) - self.vertices.append(vertex) - self.vertex_map[_id] = vertex - + self._add_vertex(vertex) if component._edges: for edge in component._edges: self._add_edge(edge) if component._components: for _component in component._components: - self.add_component(_component._id, _component) + self.add_component(_component) - def _set_start_and_end(self, start: "Component", end: "Component"): + return component_id + + def _set_start_and_end(self, start: Component, end: Component) -> None: if not hasattr(start, "to_frontend_node"): - raise TypeError(f"start must be a Component. Got {type(start)}") + msg = f"start must be a Component. Got {type(start)}" + raise TypeError(msg) if not hasattr(end, "to_frontend_node"): - raise TypeError(f"end must be a Component. Got {type(end)}") - self.add_component(start._id, start) - self.add_component(end._id, end) + msg = f"end must be a Component. Got {type(end)}" + raise TypeError(msg) + self.add_component(start, start._id) + self.add_component(end, end._id) - def add_component_edge(self, source_id: str, output_input_tuple: Tuple[str, str], target_id: str): + def add_component_edge(self, source_id: str, output_input_tuple: tuple[str, str], target_id: str) -> None: source_vertex = self.get_vertex(source_id) if not isinstance(source_vertex, ComponentVertex): - raise ValueError(f"Source vertex {source_id} is not a component vertex.") + msg = f"Source vertex {source_id} is not a component vertex." + raise TypeError(msg) target_vertex = self.get_vertex(target_id) if not isinstance(target_vertex, ComponentVertex): - raise ValueError(f"Target vertex {target_id} is not a component vertex.") + msg = f"Target vertex {target_id} is not a component vertex." 
+ raise TypeError(msg) output_name, input_name = output_input_tuple - if source_vertex._custom_component is None: - raise ValueError(f"Source vertex {source_id} does not have a custom component.") - if target_vertex._custom_component is None: - raise ValueError(f"Target vertex {target_id} does not have a custom component.") + if source_vertex.custom_component is None: + msg = f"Source vertex {source_id} does not have a custom component." + raise ValueError(msg) + if target_vertex.custom_component is None: + msg = f"Target vertex {target_id} does not have a custom component." + raise ValueError(msg) + + try: + input_field = target_vertex.get_input(input_name) + input_types = input_field.input_types + input_field_type = str(input_field.field_type) + except ValueError as e: + input_field = target_vertex.data.get("node", {}).get("template", {}).get(input_name) + if not input_field: + msg = f"Input field {input_name} not found in target vertex {target_id}" + raise ValueError(msg) from e + input_types = input_field.get("input_types", []) + input_field_type = input_field.get("type", "") + edge_data: EdgeData = { "source": source_id, "target": target_id, "data": { "sourceHandle": { - "dataType": source_vertex._custom_component.name - or source_vertex._custom_component.__class__.__name__, + "dataType": source_vertex.custom_component.name + or source_vertex.custom_component.__class__.__name__, "id": source_vertex.id, "name": output_name, "output_types": source_vertex.get_output(output_name).types, @@ -248,16 +326,22 @@ def add_component_edge(self, source_id: str, output_input_tuple: Tuple[str, str] "targetHandle": { "fieldName": input_name, "id": target_vertex.id, - "inputTypes": target_vertex.get_input(input_name).input_types, - "type": str(target_vertex.get_input(input_name).field_type), + "inputTypes": input_types, + "type": input_field_type, }, }, } self._add_edge(edge_data) - async def async_start(self, inputs: Optional[List[dict]] = None, max_iterations: Optional[int] = None): + async def async_start( + self, + inputs: list[dict] | None = None, + max_iterations: int | None = None, + event_manager: EventManager | None = None, + ): if not self._prepared: - raise ValueError("Graph not prepared. Call prepare() first.") + msg = "Graph not prepared. Call prepare() first." 
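For orientation, the reworked add_component / add_component_edge API might be driven like this; the component imports and the ("message", "input_value") pairing are assumptions drawn from the wider codebase, not from this diff:

from langflow.components.inputs import ChatInput
from langflow.components.outputs import ChatOutput
from langflow.graph.graph.base import Graph

graph = Graph()
source_id = graph.add_component(ChatInput())   # returns the component id
target_id = graph.add_component(ChatOutput())
# the (output name, input name) tuple wires the two vertices together
graph.add_component_edge(source_id, ("message", "input_value"), target_id)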
+ raise ValueError(msg) # The idea is for this to return a generator that yields the result of # each step call and raise StopIteration when the graph is done for _input in inputs or []: @@ -269,48 +353,117 @@ async def async_start(self, inputs: Optional[List[dict]] = None, max_iterations: yielded_counts: dict[str, int] = defaultdict(int) while should_continue(yielded_counts, max_iterations): - result = await self.astep() + result = await self.astep(event_manager=event_manager) yield result if hasattr(result, "vertex"): yielded_counts[result.vertex.id] += 1 if isinstance(result, Finish): return - raise ValueError("Max iterations reached") + msg = "Max iterations reached" + raise ValueError(msg) + + def _snapshot(self): + return { + "_run_queue": self._run_queue.copy(), + "_first_layer": self._first_layer.copy(), + "vertices_layers": copy.deepcopy(self.vertices_layers), + "vertices_to_run": copy.deepcopy(self.vertices_to_run), + "run_manager": copy.deepcopy(self.run_manager.to_dict()), + } - def __apply_config(self, config: StartConfigDict): + def __apply_config(self, config: StartConfigDict) -> None: for vertex in self.vertices: - if vertex._custom_component is None: + if vertex.custom_component is None: continue - for output in vertex._custom_component.outputs: + for output in vertex.custom_component._outputs_map.values(): for key, value in config["output"].items(): setattr(output, key, value) def start( self, - inputs: Optional[List[dict]] = None, - max_iterations: Optional[int] = None, - config: Optional[StartConfigDict] = None, + inputs: list[dict] | None = None, + max_iterations: int | None = None, + config: StartConfigDict | None = None, + event_manager: EventManager | None = None, ) -> Generator: + """Starts the graph execution synchronously by creating a new event loop in a separate thread. + + Args: + inputs: Optional list of input dictionaries + max_iterations: Optional maximum number of iterations + config: Optional configuration dictionary + event_manager: Optional event manager + + Returns: + Generator yielding results from graph execution + """ + if self.is_cyclic and max_iterations is None: + msg = "You must specify a max_iterations if the graph is cyclic" + raise ValueError(msg) + if config is not None: self.__apply_config(config) - #! 
Change this ASAP - nest_asyncio.apply() - loop = asyncio.get_event_loop() - async_gen = self.async_start(inputs, max_iterations) - async_gen_task = asyncio.ensure_future(async_gen.__anext__()) - while True: + # Create a queue for passing results and errors between threads + result_queue: queue.Queue[VertexBuildResult | Exception | None] = queue.Queue() + + # Function to run async code in separate thread + def run_async_code(): + # Create new event loop for this thread + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) + try: - result = loop.run_until_complete(async_gen_task) - yield result - if isinstance(result, Finish): - return - async_gen_task = asyncio.ensure_future(async_gen.__anext__()) - except StopAsyncIteration: + # Run the async generator + async_gen = self.async_start(inputs, max_iterations, event_manager) + + while True: + try: + # Get next result from async generator + result = loop.run_until_complete(anext(async_gen)) + result_queue.put(result) + + if isinstance(result, Finish): + break + + except StopAsyncIteration: + break + except ValueError as e: + # Put the exception in the queue + result_queue.put(e) + break + + finally: + # Ensure all pending tasks are completed + pending = asyncio.all_tasks(loop) + if pending: + # Create a future to gather all pending tasks + cleanup_future = asyncio.gather(*pending, return_exceptions=True) + loop.run_until_complete(cleanup_future) + + # Close the loop + loop.close() + # Signal completion + result_queue.put(None) + + # Start thread for async execution + thread = threading.Thread(target=run_async_code) + thread.start() + + # Yield results from queue + while True: + result = result_queue.get() + if result is None: break + if isinstance(result, Exception): + raise result + yield result + + # Wait for thread to complete + thread.join() - def _add_edge(self, edge: EdgeData): + def _add_edge(self, edge: EdgeData) -> None: self.add_edge(edge) source_id = edge["data"]["sourceHandle"]["id"] target_id = edge["data"]["targetHandle"]["id"] @@ -319,23 +472,22 @@ def _add_edge(self, edge: EdgeData): self.in_degree_map[target_id] += 1 self.parent_child_map[source_id].append(target_id) - def add_node(self, node: NodeData): + def add_node(self, node: NodeData) -> None: self._vertices.append(node) - def add_edge(self, edge: EdgeData): + def add_edge(self, edge: EdgeData) -> None: # Check if the edge already exists if edge in self._edges: return self._edges.append(edge) - def initialize(self): + def initialize(self) -> None: self._build_graph() self.build_graph_maps(self.edges) self.define_vertices_lists() - def get_state(self, name: str) -> Optional[Data]: - """ - Returns the state of the graph with the given name. + def get_state(self, name: str) -> Data | None: + """Returns the state of the graph with the given name. Args: name (str): The name of the state. @@ -345,9 +497,8 @@ def get_state(self, name: str) -> Optional[Data]: """ return self.state_manager.get_state(name, run_id=self._run_id) - def update_state(self, name: str, record: Union[str, Data], caller: Optional[str] = None) -> None: - """ - Updates the state of the graph with the given name. + def update_state(self, name: str, record: str | Data, caller: str | None = None) -> None: + """Updates the state of the graph with the given name. Args: name (str): The name of the state. 
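Putting the thread-backed start() to use might look like the sketch below; the flow file, its loading, and the "input_value" key are assumptions:

import json
from pathlib import Path

from langflow.graph.graph.base import Graph

payload = json.loads(Path("flow.json").read_text())  # hypothetical exported flow
graph = Graph.from_payload(payload)
graph.prepare()
# start() drives async_start() on a fresh event loop in a worker thread,
# so plain synchronous iteration works even when a loop is already running
for result in graph.start(inputs=[{"input_value": "Hi"}], max_iterations=20):
    print(result)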
@@ -363,9 +514,8 @@ def update_state(self, name: str, record: Union[str, Data], caller: Optional[str self.state_manager.update_state(name, record, run_id=self._run_id) - def activate_state_vertices(self, name: str, caller: str): - """ - Activates the state vertices in the graph with the given name and caller. + def activate_state_vertices(self, name: str, caller: str) -> None: + """Activates the state vertices in the graph with the given name and caller. Args: name (str): The name of the state. @@ -379,8 +529,8 @@ def activate_state_vertices(self, name: str, caller: str): if vertex_id == caller or vertex.display_name == caller_vertex.display_name: continue if ( - isinstance(vertex._raw_params["name"], str) - and name in vertex._raw_params["name"] + isinstance(vertex.raw_params["name"], str) + and name in vertex.raw_params["name"] and vertex_id != caller and isinstance(vertex, StateVertex) ): @@ -393,7 +543,7 @@ def activate_state_vertices(self, name: str, caller: str): # and run self.build_adjacency_maps(edges) to get the new predecessor map # that is not complete but we can use to update the run_predecessors edges_set = set() - for _vertex in [vertex] + successors: + for _vertex in [vertex, *successors]: edges_set.update(_vertex.edges) if _vertex.state == VertexStates.INACTIVE: _vertex.set_state("ACTIVE") @@ -413,15 +563,12 @@ def activate_state_vertices(self, name: str, caller: str): vertices_to_run=self.vertices_to_run, ) - def reset_activated_vertices(self): - """ - Resets the activated vertices in the graph. - """ + def reset_activated_vertices(self) -> None: + """Resets the activated vertices in the graph.""" self.activated_vertices = [] - def append_state(self, name: str, record: Union[str, Data], caller: Optional[str] = None) -> None: - """ - Appends the state of the graph with the given name. + def append_state(self, name: str, record: str | Data, caller: str | None = None) -> None: + """Appends the state of the graph with the given name. Args: name (str): The name of the state. @@ -433,9 +580,8 @@ def append_state(self, name: str, record: Union[str, Data], caller: Optional[str self.state_manager.append_state(name, record, run_id=self._run_id) - def validate_stream(self): - """ - Validates the stream configuration of the graph. + def validate_stream(self) -> None: + """Validates the stream configuration of the graph. If there are two vertices in the same graph (connected by edges) that have `stream=True` or `streaming=True`, raises a `ValueError`. @@ -448,21 +594,33 @@ def validate_stream(self): successors = self.get_all_successors(vertex) for successor in successors: if successor.params.get("stream") or successor.params.get("streaming"): - raise ValueError( + msg = ( f"Components {vertex.display_name} and {successor.display_name} " "are connected and both have stream or streaming set to True" ) + raise ValueError(msg) @property def first_layer(self): if self._first_layer is None: - raise ValueError("Graph not prepared. Call prepare() first.") + msg = "Graph not prepared. Call prepare() first." + raise ValueError(msg) return self._first_layer @property - def run_id(self): + def is_cyclic(self): + """Check if the graph has any cycles. + + Returns: + bool: True if the graph has any cycles, False otherwise. """ - The ID of the current run. + if self._is_cyclic is None: + self._is_cyclic = bool(self.cycle_vertices) + return self._is_cyclic + + @property + def run_id(self): + """The ID of the current run. Returns: str: The run ID. 
@@ -471,12 +629,12 @@ def run_id(self): ValueError: If the run ID is not set. """ if not self._run_id: - raise ValueError("Run ID not set") + msg = "Run ID not set" + raise ValueError(msg) return self._run_id - def set_run_id(self, run_id: uuid.UUID | None = None): - """ - Sets the ID of the current run. + def set_run_id(self, run_id: uuid.UUID | None = None) -> None: + """Sets the ID of the current run. Args: run_id (str): The run ID. @@ -491,7 +649,7 @@ def set_run_id(self, run_id: uuid.UUID | None = None): if self.tracing_service: self.tracing_service.set_run_id(run_id) - def set_run_name(self): + def set_run_name(self) -> None: # Given a flow name, flow_id if not self.tracing_service: return @@ -500,10 +658,16 @@ def set_run_name(self): self.set_run_id() self.tracing_service.set_run_name(name) - async def initialize_run(self): - await self.tracing_service.initialize_tracers() + async def initialize_run(self) -> None: + if self.tracing_service: + await self.tracing_service.initialize_tracers() - async def end_all_traces(self, outputs: dict[str, Any] | None = None, error: Exception | None = None): + def _end_all_traces_async(self, outputs: dict[str, Any] | None = None, error: Exception | None = None) -> None: + task = asyncio.create_task(self.end_all_traces(outputs, error)) + self._end_trace_tasks.add(task) + task.add_done_callback(self._end_trace_tasks.discard) + + async def end_all_traces(self, outputs: dict[str, Any] | None = None, error: Exception | None = None) -> None: if not self.tracing_service: return self._end_time = datetime.now(timezone.utc) @@ -513,9 +677,8 @@ async def end_all_traces(self, outputs: dict[str, Any] | None = None, error: Exc await self.tracing_service.end(outputs, error) @property - def sorted_vertices_layers(self) -> List[List[str]]: - """ - The sorted layers of vertices in the graph. + def sorted_vertices_layers(self) -> list[list[str]]: + """The sorted layers of vertices in the graph. Returns: List[List[str]]: The sorted layers of vertices. @@ -524,17 +687,19 @@ def sorted_vertices_layers(self) -> List[List[str]]: self.sort_vertices() return self._sorted_vertices_layers - def define_vertices_lists(self): - """ - Defines the lists of vertices that are inputs, outputs, and have session_id. 
- """ - attributes = ["is_input", "is_output", "has_session_id", "is_state"] + def define_vertices_lists(self) -> None: + """Defines the lists of vertices that are inputs, outputs, and have session_id.""" for vertex in self.vertices: - for attribute in attributes: - if getattr(vertex, attribute): - getattr(self, f"_{attribute}_vertices").append(vertex.id) - - def _set_inputs(self, input_components: list[str], inputs: Dict[str, str], input_type: InputType | None): + if vertex.is_input: + self._is_input_vertices.append(vertex.id) + if vertex.is_output: + self._is_output_vertices.append(vertex.id) + if vertex.has_session_id: + self.has_session_id_vertices.append(vertex.id) + if vertex.is_state: + self._is_state_vertices.append(vertex.id) + + def _set_inputs(self, input_components: list[str], inputs: dict[str, str], input_type: InputType | None) -> None: for vertex_id in self._is_input_vertices: vertex = self.get_vertex(vertex_id) # If the vertex is not in the input_components list @@ -542,58 +707,63 @@ def _set_inputs(self, input_components: list[str], inputs: Dict[str, str], input continue # If the input_type is not any and the input_type is not in the vertex id # Example: input_type = "chat" and vertex.id = "OpenAI-19ddn" - elif input_type is not None and input_type != "any" and input_type not in vertex.id.lower(): + if input_type is not None and input_type != "any" and input_type not in vertex.id.lower(): continue if vertex is None: - raise ValueError(f"Vertex {vertex_id} not found") + msg = f"Vertex {vertex_id} not found" + raise ValueError(msg) vertex.update_raw_params(inputs, overwrite=True) async def _run( self, - inputs: Dict[str, str], + *, + inputs: dict[str, str], input_components: list[str], input_type: InputType | None, outputs: list[str], stream: bool, session_id: str, fallback_to_env_vars: bool, - ) -> List[Optional["ResultData"]]: - """ - Runs the graph with the given inputs. + ) -> list[ResultData | None]: + """Runs the graph with the given inputs. Args: inputs (Dict[str, str]): The input values for the graph. input_components (list[str]): The components to run for the inputs. + input_type: (Optional[InputType]): The input type. outputs (list[str]): The outputs to retrieve from the graph. stream (bool): Whether to stream the results or not. session_id (str): The session ID for the graph. + fallback_to_env_vars (bool): Whether to fallback to environment variables. Returns: List[Optional["ResultData"]]: The outputs of the graph. """ - if input_components and not isinstance(input_components, list): - raise ValueError(f"Invalid components value: {input_components}. Expected list") - elif input_components is None: + msg = f"Invalid components value: {input_components}. Expected list" + raise ValueError(msg) + if input_components is None: input_components = [] if not isinstance(inputs.get(INPUT_FIELD_NAME, ""), str): - raise ValueError(f"Invalid input value: {inputs.get(INPUT_FIELD_NAME)}. Expected string") + msg = f"Invalid input value: {inputs.get(INPUT_FIELD_NAME)}. 
Expected string" + raise TypeError(msg) if inputs: self._set_inputs(input_components, inputs, input_type) # Update all the vertices with the session_id - for vertex_id in self._has_session_id_vertices: + for vertex_id in self.has_session_id_vertices: vertex = self.get_vertex(vertex_id) if vertex is None: - raise ValueError(f"Vertex {vertex_id} not found") + msg = f"Vertex {vertex_id} not found" + raise ValueError(msg) vertex.update_raw_params({"session_id": session_id}) # Process the graph try: cache_service = get_chat_service() if self.flow_id: await cache_service.set_cache(self.flow_id, self) - except Exception as exc: - logger.exception(exc) + except Exception: # noqa: BLE001 + logger.exception("Error setting cache") try: # Prioritize the webhook component if it exists @@ -601,17 +771,19 @@ async def _run( await self.process(start_component_id=start_component_id, fallback_to_env_vars=fallback_to_env_vars) self.increment_run_count() except Exception as exc: - asyncio.create_task(self.end_all_traces(error=exc)) - raise ValueError(f"Error running graph: {exc}") from exc - finally: - asyncio.create_task(self.end_all_traces()) + self._end_all_traces_async(error=exc) + msg = f"Error running graph: {exc}" + raise ValueError(msg) from exc + + self._end_all_traces_async() # Get the outputs vertex_outputs = [] for vertex in self.vertices: - if not vertex._built: + if not vertex.built: continue if vertex is None: - raise ValueError(f"Vertex {vertex_id} not found") + msg = f"Vertex {vertex_id} not found" + raise ValueError(msg) if not vertex.result and not stream and hasattr(vertex, "consume_async_generator"): await vertex.consume_async_generator() @@ -620,80 +792,33 @@ async def _run( return vertex_outputs - def run( - self, - inputs: list[Dict[str, str]], - input_components: Optional[list[list[str]]] = None, - types: Optional[list[InputType | None]] = None, - outputs: Optional[list[str]] = None, - session_id: Optional[str] = None, - stream: bool = False, - fallback_to_env_vars: bool = False, - ) -> List[RunOutputs]: - """ - Run the graph with the given inputs and return the outputs. - - Args: - inputs (Dict[str, str]): A dictionary of input values. - input_components (Optional[list[str]]): A list of input components. - types (Optional[list[str]]): A list of types. - outputs (Optional[list[str]]): A list of output components. - session_id (Optional[str]): The session ID. - stream (bool): Whether to stream the outputs. - - Returns: - List[RunOutputs]: A list of RunOutputs objects representing the outputs. 
- """ - # run the async function in a sync way - # this could be used in a FastAPI endpoint - # so we should take care of the event loop - coro = self.arun( - inputs=inputs, - inputs_components=input_components, - types=types, - outputs=outputs, - session_id=session_id, - stream=stream, - fallback_to_env_vars=fallback_to_env_vars, - ) - - try: - # Attempt to get the running event loop; if none, an exception is raised - loop = asyncio.get_running_loop() - if loop.is_closed(): - raise RuntimeError("The running event loop is closed.") - except RuntimeError: - # If there's no running event loop or it's closed, use asyncio.run - return asyncio.run(coro) - - # If there's an existing, open event loop, use it to run the async function - return loop.run_until_complete(coro) - async def arun( self, - inputs: list[Dict[str, str]], - inputs_components: Optional[list[list[str]]] = None, - types: Optional[list[InputType | None]] = None, - outputs: Optional[list[str]] = None, - session_id: Optional[str] = None, + inputs: list[dict[str, str]], + *, + inputs_components: list[list[str]] | None = None, + types: list[InputType | None] | None = None, + outputs: list[str] | None = None, + session_id: str | None = None, stream: bool = False, fallback_to_env_vars: bool = False, - ) -> List[RunOutputs]: - """ - Runs the graph with the given inputs. + ) -> list[RunOutputs]: + """Runs the graph with the given inputs. Args: inputs (list[Dict[str, str]]): The input values for the graph. inputs_components (Optional[list[list[str]]], optional): Components to run for the inputs. Defaults to None. + types (Optional[list[Optional[InputType]]], optional): The types of the inputs. Defaults to None. outputs (Optional[list[str]], optional): The outputs to retrieve from the graph. Defaults to None. session_id (Optional[str], optional): The session ID for the graph. Defaults to None. stream (bool, optional): Whether to stream the results or not. Defaults to False. + fallback_to_env_vars (bool, optional): Whether to fallback to environment variables. Defaults to False. Returns: List[RunOutputs]: The outputs of the graph. """ # inputs is {"message": "Hello, world!"} - # we need to go through self.inputs and update the self._raw_params + # we need to go through self.inputs and update the self.raw_params # of the vertices that are inputs # if the value is a list, we need to run multiple times vertex_outputs = [] @@ -711,7 +836,7 @@ async def arun( types = [] for _ in range(len(inputs) - len(types)): types.append("chat") # default to chat - for run_inputs, components, input_type in zip(inputs, inputs_components, types): + for run_inputs, components, input_type in zip(inputs, inputs_components, types, strict=True): run_outputs = await self._run( inputs=run_inputs, input_components=components, @@ -727,8 +852,7 @@ async def arun( return vertex_outputs def next_vertex_to_build(self): - """ - Returns the next vertex to be built. + """Returns the next vertex to be built. Yields: str: The ID of the next vertex to be built. @@ -737,13 +861,12 @@ def next_vertex_to_build(self): @property def metadata(self): - """ - The metadata of the graph. + """The metadata of the graph. Returns: dict: The metadata of the graph. 
""" - time_format = "%Y-%m-%d %H:%M:%S" + time_format = "%Y-%m-%d %H:%M:%S %Z" return { "start_time": self._start_time.strftime(time_format), "end_time": self._end_time.strftime(time_format), @@ -752,10 +875,8 @@ def metadata(self): "flow_name": self.flow_name, } - def build_graph_maps(self, edges: Optional[List[CycleEdge]] = None, vertices: Optional[List["Vertex"]] = None): - """ - Builds the adjacency maps for the graph. - """ + def build_graph_maps(self, edges: list[CycleEdge] | None = None, vertices: list[Vertex] | None = None) -> None: + """Builds the adjacency maps for the graph.""" if edges is None: edges = self.edges @@ -767,21 +888,19 @@ def build_graph_maps(self, edges: Optional[List[CycleEdge]] = None, vertices: Op self.in_degree_map = self.build_in_degree(edges) self.parent_child_map = self.build_parent_child_map(vertices) - def reset_inactivated_vertices(self): - """ - Resets the inactivated vertices in the graph. - """ + def reset_inactivated_vertices(self) -> None: + """Resets the inactivated vertices in the graph.""" for vertex_id in self.inactivated_vertices.copy(): self.mark_vertex(vertex_id, "ACTIVE") - self.inactivated_vertices = [] + self.inactivated_vertices = set() self.inactivated_vertices = set() - def mark_all_vertices(self, state: str): + def mark_all_vertices(self, state: str) -> None: """Marks all vertices in the graph.""" for vertex in self.vertices: vertex.set_state(state) - def mark_vertex(self, vertex_id: str, state: str): + def mark_vertex(self, vertex_id: str, state: str) -> None: """Marks a vertex in the graph.""" vertex = self.get_vertex(vertex_id) vertex.set_state(state) @@ -789,8 +908,8 @@ def mark_vertex(self, vertex_id: str, state: str): self.run_manager.remove_from_predecessors(vertex_id) def _mark_branch( - self, vertex_id: str, state: str, visited: Optional[set] = None, output_name: Optional[str] = None - ): + self, vertex_id: str, state: str, visited: set | None = None, output_name: str | None = None + ) -> None: """Marks a branch of the graph.""" if visited is None: visited = set() @@ -809,7 +928,7 @@ def _mark_branch( continue self._mark_branch(child_id, state, visited) - def mark_branch(self, vertex_id: str, state: str, output_name: Optional[str] = None): + def mark_branch(self, vertex_id: str, state: str, output_name: str | None = None) -> None: self._mark_branch(vertex_id=vertex_id, state=state, output_name=output_name) new_predecessor_map, _ = self.build_adjacency_maps(self.edges) self.run_manager.update_run_state( @@ -817,23 +936,23 @@ def mark_branch(self, vertex_id: str, state: str, output_name: Optional[str] = N vertices_to_run=self.vertices_to_run, ) - def get_edge(self, source_id: str, target_id: str) -> Optional[CycleEdge]: + def get_edge(self, source_id: str, target_id: str) -> CycleEdge | None: """Returns the edge between two vertices.""" for edge in self.edges: if edge.source_id == source_id and edge.target_id == target_id: return edge return None - def build_parent_child_map(self, vertices: List["Vertex"]): + def build_parent_child_map(self, vertices: list[Vertex]): parent_child_map = defaultdict(list) for vertex in vertices: parent_child_map[vertex.id] = [child.id for child in self.get_successors(vertex)] return parent_child_map - def increment_run_count(self): + def increment_run_count(self) -> None: self._runs += 1 - def increment_update_count(self): + def increment_update_count(self) -> None: self._updates += 1 def __getstate__(self): @@ -866,7 +985,7 @@ def __getstate__(self): "_edges": self._edges, "_is_input_vertices": 
self._is_input_vertices, "_is_output_vertices": self._is_output_vertices, - "_has_session_id_vertices": self._has_session_id_vertices, + "has_session_id_vertices": self.has_session_id_vertices, "_sorted_vertices_layers": self._sorted_vertices_layers, } @@ -919,16 +1038,18 @@ def __setstate__(self, state): @classmethod def from_payload( cls, - payload: Dict, - flow_id: Optional[str] = None, - flow_name: Optional[str] = None, - user_id: Optional[str] = None, - ) -> "Graph": - """ - Creates a graph from a payload. + payload: dict, + flow_id: str | None = None, + flow_name: str | None = None, + user_id: str | None = None, + ) -> Graph: + """Creates a graph from a payload. Args: - payload (Dict): The payload to create the graph from.˜` + payload: The payload to create the graph from. + flow_id: The ID of the flow. + flow_name: The flow name. + user_id: The user ID. Returns: Graph: The created graph. @@ -940,16 +1061,16 @@ def from_payload( edges = payload["edges"] graph = cls(flow_id=flow_id, flow_name=flow_name, user_id=user_id) graph.add_nodes_and_edges(vertices, edges) - return graph except KeyError as exc: logger.exception(exc) if "nodes" not in payload and "edges" not in payload: - logger.exception(exc) - raise ValueError( - f"Invalid payload. Expected keys 'nodes' and 'edges'. Found {list(payload.keys())}" - ) from exc + msg = f"Invalid payload. Expected keys 'nodes' and 'edges'. Found {list(payload.keys())}" + raise ValueError(msg) from exc - raise ValueError(f"Error while creating graph from payload: {exc}") from exc + msg = f"Error while creating graph from payload: {exc}" + raise ValueError(msg) from exc + else: + return graph def __eq__(self, other: object) -> bool: if not isinstance(other, Graph): @@ -962,34 +1083,31 @@ def __eq__(self, other: object) -> bool: # both graphs have the same vertices and edges # but the data of the vertices might be different - def update_edges_from_vertex(self, vertex: "Vertex", other_vertex: "Vertex") -> None: + def update_edges_from_vertex(self, other_vertex: Vertex) -> None: """Updates the edges of a vertex in the Graph.""" new_edges = [] for edge in self.edges: - if edge.source_id == other_vertex.id or edge.target_id == other_vertex.id: + if other_vertex.id in {edge.source_id, edge.target_id}: continue new_edges.append(edge) new_edges += other_vertex.edges self.edges = new_edges - def vertex_data_is_identical(self, vertex: "Vertex", other_vertex: "Vertex") -> bool: + def vertex_data_is_identical(self, vertex: Vertex, other_vertex: Vertex) -> bool: data_is_equivalent = vertex == other_vertex if not data_is_equivalent: return False return self.vertex_edges_are_identical(vertex, other_vertex) - def vertex_edges_are_identical(self, vertex: "Vertex", other_vertex: "Vertex") -> bool: + def vertex_edges_are_identical(self, vertex: Vertex, other_vertex: Vertex) -> bool: same_length = len(vertex.edges) == len(other_vertex.edges) if not same_length: return False - for edge in vertex.edges: - if edge not in other_vertex.edges: - return False - return True + return all(edge in other_vertex.edges for edge in vertex.edges) - def update(self, other: "Graph") -> "Graph": + def update(self, other: Graph) -> Graph: # Existing vertices in self graph - existing_vertex_ids = set(vertex.id for vertex in self.vertices) + existing_vertex_ids = {vertex.id for vertex in self.vertices} # Vertex IDs in the other graph other_vertex_ids = set(other.vertex_map.keys()) @@ -1001,10 +1119,8 @@ def update(self, other: "Graph") -> "Graph": # Remove vertices that are not in the other 
graph for vertex_id in removed_vertex_ids: - try: + with contextlib.suppress(ValueError): self.remove_vertex(vertex_id) - except ValueError: - pass # The order here matters because adding the vertex is required # if any of them have edges that point to any of the new vertices @@ -1037,25 +1153,24 @@ def update(self, other: Graph) -> Graph: self.increment_update_count() return self - def update_vertex_from_another(self, vertex: "Vertex", other_vertex: "Vertex") -> None: - """ - Updates a vertex from another vertex. + def update_vertex_from_another(self, vertex: Vertex, other_vertex: Vertex) -> None: + """Updates a vertex from another vertex. Args: vertex (Vertex): The vertex to be updated. other_vertex (Vertex): The vertex to update from. """ - vertex._data = other_vertex._data - vertex._parse_data() + vertex.full_data = other_vertex.full_data + vertex.parse_data() # Now we update the edges of the vertex - self.update_edges_from_vertex(vertex, other_vertex) + self.update_edges_from_vertex(other_vertex) vertex.params = {} - vertex._build_params() + vertex.build_params() vertex.graph = self # If the vertex is frozen, we don't want - # to reset the results nor the _built attribute + # to reset the results nor the built attribute if not vertex.frozen: - vertex._built = False + vertex.built = False vertex.result = None vertex.artifacts = {} vertex.set_top_level(self.top_level_vertices) @@ -1068,19 +1183,19 @@ def reset_all_edges_of_vertex(self, vertex: Vertex) -> None: if vid in self.vertex_map: _vertex = self.vertex_map[vid] if not _vertex.frozen: - _vertex._build_params() + _vertex.build_params() def _add_vertex(self, vertex: Vertex) -> None: """Adds a vertex to the graph.""" self.vertices.append(vertex) self.vertex_map[vertex.id] = vertex - def add_vertex(self, vertex: "Vertex") -> None: + def add_vertex(self, vertex: Vertex) -> None: """Adds a new vertex to the graph.""" self._add_vertex(vertex) self._update_edges(vertex) - def _update_edges(self, vertex: "Vertex") -> None: + def _update_edges(self, vertex: Vertex) -> None: """Updates the edges of a vertex.""" # Vertex has edges, so we need to update the edges for edge in vertex.edges: @@ -1096,10 +1211,28 @@ def _build_graph(self) -> None: # This is a hack to make sure that the LLM vertex is sent to # the toolkit vertex self._build_vertex_params() + self._instantiate_components_in_vertices() + self._set_cache_to_vertices_in_cycle() + for vertex in self.vertices: + if vertex.id in self.cycle_vertices: + self.run_manager.add_to_cycle_vertices(vertex.id) + + def _get_edges_as_list_of_tuples(self) -> list[tuple[str, str]]: + """Returns the edges of the graph as a list of tuples.""" + return [(e["data"]["sourceHandle"]["id"], e["data"]["targetHandle"]["id"]) for e in self._edges] - # Now that we have the vertices and edges - # We need to map the vertices that are connected to - # to ChatVertex instances + def _set_cache_to_vertices_in_cycle(self) -> None: + """Disables output caching for the vertices that are part of a cycle.""" + edges = self._get_edges_as_list_of_tuples() + cycle_vertices = set(find_cycle_vertices(edges)) + for vertex in self.vertices: + if vertex.id in cycle_vertices: + vertex.apply_on_outputs(lambda output_object: setattr(output_object, "cache", False)) + + def _instantiate_components_in_vertices(self) -> None: + """Instantiates the components in the vertices.""" + for vertex in self.vertices: + vertex.instantiate_component(self.user_id) def remove_vertex(self, vertex_id: str) -> None: """Removes a vertex from the graph.""" @@ -1108,26 +1241,27
@@ def remove_vertex(self, vertex_id: str) -> None: return self.vertices.remove(vertex) self.vertex_map.pop(vertex_id) - self.edges = [edge for edge in self.edges if edge.source_id != vertex_id and edge.target_id != vertex_id] + self.edges = [edge for edge in self.edges if vertex_id not in {edge.source_id, edge.target_id}] def _build_vertex_params(self) -> None: """Identifies and handles the LLM vertex within the graph.""" for vertex in self.vertices: - vertex._build_params() + vertex.build_params() - def _validate_vertex(self, vertex: "Vertex") -> bool: + def _validate_vertex(self, vertex: Vertex) -> bool: """Validates a vertex.""" # All vertices that do not have edges are invalid return len(self.get_vertex_edges(vertex.id)) > 0 - def get_vertex(self, vertex_id: str, silent: bool = False) -> "Vertex": + def get_vertex(self, vertex_id: str) -> Vertex: """Returns a vertex by id.""" try: return self.vertex_map[vertex_id] - except KeyError: - raise ValueError(f"Vertex {vertex_id} not found") + except KeyError as e: + msg = f"Vertex {vertex_id} not found" + raise ValueError(msg) from e - def get_root_of_group_node(self, vertex_id: str) -> "Vertex": + def get_root_of_group_node(self, vertex_id: str) -> Vertex: """Returns the root of a group node.""" if vertex_id in self.top_level_vertices: # Get all vertices with vertex_id as .parent_node_id @@ -1139,26 +1273,29 @@ def get_root_of_group_node(self, vertex_id: str) -> "Vertex": successors = self.get_all_successors(vertex, recursive=False) if not any(successor in vertices for successor in successors): return vertex - raise ValueError(f"Vertex {vertex_id} is not a top level vertex or no root vertex found") + msg = f"Vertex {vertex_id} is not a top level vertex or no root vertex found" + raise ValueError(msg) def get_next_in_queue(self): if not self._run_queue: return None return self._run_queue.popleft() - def extend_run_queue(self, vertices: List[str]): + def extend_run_queue(self, vertices: list[str]) -> None: self._run_queue.extend(vertices) async def astep( self, - inputs: Optional["InputValueRequest"] = None, - files: Optional[list[str]] = None, - user_id: Optional[str] = None, + inputs: InputValueRequest | None = None, + files: list[str] | None = None, + user_id: str | None = None, + event_manager: EventManager | None = None, ): if not self._prepared: - raise ValueError("Graph not prepared. Call prepare() first.") + msg = "Graph not prepared. Call prepare() first." 
+ raise ValueError(msg) if not self._run_queue: - asyncio.create_task(self.end_all_traces()) + self._end_all_traces_async() return Finish() vertex_id = self.get_next_in_queue() chat_service = get_chat_service() @@ -1169,6 +1306,7 @@ async def astep( files=files, get_cache=chat_service.get_cache, set_cache=chat_service.set_cache, + event_manager=event_manager, ) next_runnable_vertices = await self.get_next_runnable_vertices( @@ -1196,16 +1334,16 @@ def get_snapshot(self): } ) - def _record_snapshot(self, vertex_id: str | None = None, start: bool = False): + def _record_snapshot(self, vertex_id: str | None = None) -> None: self._snapshots.append(self.get_snapshot()) if vertex_id: self._call_order.append(vertex_id) def step( self, - inputs: Optional["InputValueRequest"] = None, - files: Optional[list[str]] = None, - user_id: Optional[str] = None, + inputs: InputValueRequest | None = None, + files: list[str] | None = None, + user_id: str | None = None, ): # Call astep but synchronously loop = asyncio.get_event_loop() @@ -1214,22 +1352,26 @@ def step( async def build_vertex( self, vertex_id: str, + *, get_cache: GetCache | None = None, set_cache: SetCache | None = None, - inputs_dict: Optional[Dict[str, str]] = None, - files: Optional[list[str]] = None, - user_id: Optional[str] = None, + inputs_dict: dict[str, str] | None = None, + files: list[str] | None = None, + user_id: str | None = None, fallback_to_env_vars: bool = False, + event_manager: EventManager | None = None, ) -> VertexBuildResult: - """ - Builds a vertex in the graph. + """Builds a vertex in the graph. Args: - lock (asyncio.Lock): A lock to synchronize access to the graph. - set_cache_coro (Coroutine): A coroutine to set the cache. vertex_id (str): The ID of the vertex to build. - inputs (Optional[Dict[str, str]]): Optional dictionary of inputs for the vertex. Defaults to None. + get_cache (GetCache): A coroutine to get the cache. + set_cache (SetCache): A coroutine to set the cache. + inputs_dict (Optional[Dict[str, str]]): Optional dictionary of inputs for the vertex. Defaults to None. + files (Optional[List[str]]): Optional list of files. Defaults to None. user_id (Optional[str]): Optional user ID. Defaults to None. + fallback_to_env_vars (bool): Whether to fallback to environment variables. Defaults to False. + event_manager (Optional[EventManager]): Optional event manager. Defaults to None.
Returns: Tuple: A tuple containing the next runnable vertices, top level vertices, result dictionary, @@ -1250,67 +1392,74 @@ async def build_vertex( if get_cache is not None: cached_result = await get_cache(key=vertex.id) else: - cached_result = None + cached_result = CacheMiss() if isinstance(cached_result, CacheMiss): should_build = True else: try: cached_vertex_dict = cached_result["result"] # Now set update the vertex with the cached vertex - vertex._built = cached_vertex_dict["_built"] + vertex.built = cached_vertex_dict["built"] vertex.artifacts = cached_vertex_dict["artifacts"] - vertex._built_object = cached_vertex_dict["_built_object"] - vertex._built_result = cached_vertex_dict["_built_result"] - vertex._data = cached_vertex_dict["_data"] + vertex.built_object = cached_vertex_dict["built_object"] + vertex.built_result = cached_vertex_dict["built_result"] + vertex.full_data = cached_vertex_dict["full_data"] vertex.results = cached_vertex_dict["results"] try: - vertex._finalize_build() + vertex.finalize_build() if vertex.result is not None: vertex.result.used_frozen_result = True - except Exception: + except Exception: # noqa: BLE001 + logger.opt(exception=True).debug("Error finalizing build") should_build = True except KeyError: should_build = True if should_build: await vertex.build( - user_id=user_id, inputs=inputs_dict, fallback_to_env_vars=fallback_to_env_vars, files=files + user_id=user_id, + inputs=inputs_dict, + fallback_to_env_vars=fallback_to_env_vars, + files=files, + event_manager=event_manager, ) if set_cache is not None: vertex_dict = { - "_built": vertex._built, + "built": vertex.built, "results": vertex.results, "artifacts": vertex.artifacts, - "_built_object": vertex._built_object, - "_built_result": vertex._built_result, - "_data": vertex._data, + "built_object": vertex.built_object, + "built_result": vertex.built_result, + "full_data": vertex.full_data, } await set_cache(key=vertex.id, data=vertex_dict) - if vertex.result is not None: - params = f"{vertex._built_object_repr()}{params}" - valid = True - result_dict = vertex.result - artifacts = vertex.artifacts - else: - raise ValueError(f"No result found for vertex {vertex_id}") - - vertex_build_result = VertexBuildResult( - result_dict=result_dict, params=params, valid=valid, artifacts=artifacts, vertex=vertex - ) - return vertex_build_result except Exception as exc: - if not isinstance(exc, ComponentBuildException): - logger.exception(f"Error building Component: \n\n{exc}") - raise exc + if not isinstance(exc, ComponentBuildError): + logger.exception("Error building Component") + raise + + if vertex.result is not None: + params = f"{vertex.built_object_repr()}{params}" + valid = True + result_dict = vertex.result + artifacts = vertex.artifacts + else: + msg = f"Error building Component: no result found for vertex {vertex_id}" + raise ValueError(msg) + + return VertexBuildResult( + result_dict=result_dict, params=params, valid=valid, artifacts=artifacts, vertex=vertex + ) def get_vertex_edges( self, vertex_id: str, - is_target: Optional[bool] = None, - is_source: Optional[bool] = None, - ) -> List[CycleEdge]: + *, + is_target: bool | None = None, + is_source: bool | None = None, + ) -> list[CycleEdge]: """Returns a list of edges for a given vertex.""" # The idea here is to return the edges that have the vertex_id as source or target # or both @@ -1321,9 +1470,9 @@ def get_vertex_edges( or (edge.target_id == vertex_id and is_target is not False) ] - def get_vertices_with_target(self, vertex_id: str) -> 
List["Vertex"]: + def get_vertices_with_target(self, vertex_id: str) -> list[Vertex]: """Returns the vertices connected to a vertex.""" - vertices: List["Vertex"] = [] + vertices: list[Vertex] = [] for edge in self.edges: if edge.target_id == vertex_id: vertex = self.get_vertex(edge.source_id) @@ -1332,11 +1481,10 @@ def get_vertices_with_target(self, vertex_id: str) -> List["Vertex"]: vertices.append(vertex) return vertices - async def process(self, fallback_to_env_vars: bool, start_component_id: Optional[str] = None) -> "Graph": + async def process(self, *, fallback_to_env_vars: bool, start_component_id: str | None = None) -> Graph: """Processes the graph with vertices in each layer run in parallel.""" - first_layer = self.sort_vertices(start_component_id=start_component_id) - vertex_task_run_count: Dict[str, int] = {} + vertex_task_run_count: dict[str, int] = {} to_process = deque(first_layer) layer_index = 0 chat_service = get_chat_service() @@ -1344,7 +1492,7 @@ async def process(self, fallback_to_env_vars: bool, start_component_id: Optional self.set_run_id(run_id) self.set_run_name() await self.initialize_run() - lock = chat_service._async_cache_locks[self.run_id] + lock = chat_service.async_cache_locks[self.run_id] while to_process: current_batch = list(to_process) # Copy current deque items to a list to_process.clear() # Clear the deque for new items @@ -1365,12 +1513,12 @@ async def process(self, fallback_to_env_vars: bool, start_component_id: Optional tasks.append(task) vertex_task_run_count[vertex_id] = vertex_task_run_count.get(vertex_id, 0) + 1 - logger.debug(f"Running layer {layer_index} with {len(tasks)} tasks") + logger.debug(f"Running layer {layer_index} with {len(tasks)} tasks, {current_batch}") try: next_runnable_vertices = await self._execute_tasks(tasks, lock=lock) - except Exception as e: - logger.error(f"Error executing tasks in layer {layer_index}: {e}") - raise e + except Exception: + logger.exception(f"Error executing tasks in layer {layer_index}") + raise if not next_runnable_vertices: break to_process.extend(next_runnable_vertices) @@ -1379,22 +1527,22 @@ async def process(self, fallback_to_env_vars: bool, start_component_id: Optional logger.debug("Graph processing complete") return self - def find_next_runnable_vertices(self, vertex_id: str, vertex_successors_ids: List[str]) -> List[str]: + def find_next_runnable_vertices(self, vertex_successors_ids: list[str]) -> list[str]: next_runnable_vertices = set() - for v_id in vertex_successors_ids: + for v_id in sorted(vertex_successors_ids): if not self.is_vertex_runnable(v_id): next_runnable_vertices.update(self.find_runnable_predecessors_for_successor(v_id)) else: next_runnable_vertices.add(v_id) - return list(next_runnable_vertices) + return sorted(next_runnable_vertices) - async def get_next_runnable_vertices(self, lock: asyncio.Lock, vertex: "Vertex", cache: bool = True) -> List[str]: + async def get_next_runnable_vertices(self, lock: asyncio.Lock, vertex: Vertex, *, cache: bool = True) -> list[str]: v_id = vertex.id v_successors_ids = vertex.successors_ids async with lock: self.run_manager.remove_vertex_from_runnables(v_id) - next_runnable_vertices = self.find_next_runnable_vertices(v_id, v_successors_ids) + next_runnable_vertices = self.find_next_runnable_vertices(v_successors_ids) for next_v_id in set(next_runnable_vertices): # Use set to avoid duplicates if next_v_id == v_id: @@ -1406,11 +1554,11 @@ async def get_next_runnable_vertices(self, lock: asyncio.Lock, vertex: "Vertex", await 
set_cache_coro(data=self, lock=lock) return next_runnable_vertices - async def _execute_tasks(self, tasks: List[asyncio.Task], lock: asyncio.Lock) -> List[str]: + async def _execute_tasks(self, tasks: list[asyncio.Task], lock: asyncio.Lock) -> list[str]: """Executes tasks in parallel, handling exceptions for each task.""" results = [] completed_tasks = await asyncio.gather(*tasks, return_exceptions=True) - vertices: List["Vertex"] = [] + vertices: list[Vertex] = [] for i, result in enumerate(completed_tasks): task_name = tasks[i].get_name() @@ -1420,10 +1568,11 @@ async def _execute_tasks(self, tasks: List[asyncio.Task], lock: asyncio.Lock) -> for t in tasks[i + 1 :]: t.cancel() raise result - elif isinstance(result, tuple) and len(result) == 5: - vertices.append(result[4]) + if isinstance(result, VertexBuildResult): + vertices.append(result.vertex) else: - raise ValueError(f"Invalid result from task {task_name}: {result}") + msg = f"Invalid result from task {task_name}: {result}" + raise TypeError(msg) for v in vertices: # set all executed vertices as non-runnable to not run them again. @@ -1431,15 +1580,15 @@ async def _execute_tasks(self, tasks: List[asyncio.Task], lock: asyncio.Lock) -> # This could usually happen with input vertices like ChatInput self.run_manager.remove_vertex_from_runnables(v.id) + logger.debug(f"Vertex {v.id}, result: {v.built_result}, object: {v.built_object}") + for v in vertices: next_runnable_vertices = await self.get_next_runnable_vertices(lock, vertex=v, cache=False) results.extend(next_runnable_vertices) - no_duplicate_results = list(set(results)) - return no_duplicate_results + return list(set(results)) - def topological_sort(self) -> List["Vertex"]: - """ - Performs a topological sort of the vertices in the graph. + def topological_sort(self) -> list[Vertex]: + """Performs a topological sort of the vertices in the graph. Returns: List[Vertex]: A list of vertices in topological order. @@ -1448,13 +1597,14 @@ def topological_sort(self) -> List["Vertex"]: ValueError: If the graph contains a cycle. 
""" # States: 0 = unvisited, 1 = visiting, 2 = visited - state = {vertex: 0 for vertex in self.vertices} + state = dict.fromkeys(self.vertices, 0) sorted_vertices = [] - def dfs(vertex): + def dfs(vertex) -> None: if state[vertex] == 1: # We have a cycle - raise ValueError("Graph contains a cycle, cannot perform topological sort") + msg = "Graph contains a cycle, cannot perform topological sort" + raise ValueError(msg) if state[vertex] == 0: state[vertex] = 1 for edge in vertex.edges: @@ -1470,7 +1620,7 @@ def dfs(vertex): return list(reversed(sorted_vertices)) - def generator_build(self) -> Generator["Vertex", None, None]: + def generator_build(self) -> Generator[Vertex, None, None]: """Builds each vertex in the graph and yields it.""" sorted_vertices = self.topological_sort() logger.debug("There are %s vertices in the graph", len(sorted_vertices)) @@ -1480,7 +1630,7 @@ def get_predecessors(self, vertex): """Returns the predecessors of a vertex.""" return [self.get_vertex(source_id) for source_id in self.predecessor_map.get(vertex.id, [])] - def get_all_successors(self, vertex: "Vertex", recursive=True, flat=True, visited=None): + def get_all_successors(self, vertex: Vertex, *, recursive=True, flat=True, visited=None): if visited is None: visited = set() @@ -1509,17 +1659,17 @@ def get_all_successors(self, vertex: "Vertex", recursive=True, flat=True, visite successors_result.append([successor]) if not flat and successors_result: - return [successors] + successors_result + return [successors, *successors_result] return successors_result - def get_successors(self, vertex: "Vertex") -> List["Vertex"]: + def get_successors(self, vertex: Vertex) -> list[Vertex]: """Returns the successors of a vertex.""" return [self.get_vertex(target_id) for target_id in self.successor_map.get(vertex.id, [])] - def get_vertex_neighbors(self, vertex: "Vertex") -> Dict["Vertex", int]: + def get_vertex_neighbors(self, vertex: Vertex) -> dict[Vertex, int]: """Returns the neighbors of a vertex.""" - neighbors: Dict["Vertex", int] = {} + neighbors: dict[Vertex, int] = {} for edge in self.edges: if edge.source_id == vertex.id: neighbor = self.get_vertex(edge.target_id) @@ -1537,52 +1687,76 @@ def get_vertex_neighbors(self, vertex: "Vertex") -> Dict["Vertex", int]: neighbors[neighbor] += 1 return neighbors - def _build_edges(self) -> List[CycleEdge]: + @property + def cycles(self): + if self._cycles is None: + if self._start is None: + self._cycles = [] + else: + entry_vertex = self._start._id + edges = [(e["data"]["sourceHandle"]["id"], e["data"]["targetHandle"]["id"]) for e in self._edges] + self._cycles = find_all_cycle_edges(entry_vertex, edges) + return self._cycles + + @property + def cycle_vertices(self): + if self._cycle_vertices is None: + edges = self._get_edges_as_list_of_tuples() + self._cycle_vertices = set(find_cycle_vertices(edges)) + return self._cycle_vertices + + def _build_edges(self) -> list[CycleEdge]: """Builds the edges of the graph.""" # Edge takes two vertices as arguments, so we need to build the vertices first # and then build the edges # if we can't find a vertex, we raise an error - - edges: set[CycleEdge] = set() + edges: set[CycleEdge | Edge] = set() for edge in self._edges: new_edge = self.build_edge(edge) edges.add(new_edge) if self.vertices and not edges: - warnings.warn("Graph has vertices but no edges") - return list(edges) + logger.warning("Graph has vertices but no edges") + return list(cast(Iterable[CycleEdge], edges)) - def build_edge(self, edge: EdgeData) -> CycleEdge: + def 
build_edge(self, edge: EdgeData) -> CycleEdge | Edge: source = self.get_vertex(edge["source"]) target = self.get_vertex(edge["target"]) if source is None: - raise ValueError(f"Source vertex {edge['source']} not found") + msg = f"Source vertex {edge['source']} not found" + raise ValueError(msg) if target is None: - raise ValueError(f"Target vertex {edge['target']} not found") - new_edge = CycleEdge(source, target, edge) + msg = f"Target vertex {edge['target']} not found" + raise ValueError(msg) + if any(v in self.cycle_vertices for v in [source.id, target.id]): + new_edge: CycleEdge | Edge = CycleEdge(source, target, edge) + else: + new_edge = Edge(source, target, edge) return new_edge - def _get_vertex_class(self, node_type: str, node_base_type: str, node_id: str) -> Type["Vertex"]: + def _get_vertex_class(self, node_type: str, node_base_type: str, node_id: str) -> type[Vertex]: """Returns the node class based on the node type.""" # First we check for the node_base_type node_name = node_id.split("-")[0] if node_name in InterfaceComponentTypes: return InterfaceVertex - elif node_name in ["SharedState", "Notify", "Listen"]: + if node_name in {"SharedState", "Notify", "Listen"}: return StateVertex - elif node_base_type in lazy_load_vertex_dict.VERTEX_TYPE_MAP: - return lazy_load_vertex_dict.VERTEX_TYPE_MAP[node_base_type] - elif node_name in lazy_load_vertex_dict.VERTEX_TYPE_MAP: - return lazy_load_vertex_dict.VERTEX_TYPE_MAP[node_name] + if node_base_type in lazy_load_vertex_dict.vertex_type_map: + return lazy_load_vertex_dict.vertex_type_map[node_base_type] + if node_name in lazy_load_vertex_dict.vertex_type_map: + return lazy_load_vertex_dict.vertex_type_map[node_name] - if node_type in lazy_load_vertex_dict.VERTEX_TYPE_MAP: - return lazy_load_vertex_dict.VERTEX_TYPE_MAP[node_type] + if node_type in lazy_load_vertex_dict.vertex_type_map: + return lazy_load_vertex_dict.vertex_type_map[node_type] return Vertex - def _build_vertices(self) -> List["Vertex"]: + def _build_vertices(self) -> list[Vertex]: """Builds the vertices of the graph.""" - vertices: List["Vertex"] = [] + vertices: list[Vertex] = [] for frontend_data in self._vertices: + if frontend_data.get("type") == NodeTypeEnum.NoteNode: + continue try: vertex_instance = self.get_vertex(frontend_data["id"]) except ValueError: @@ -1593,42 +1767,45 @@ def _build_vertices(self) -> List["Vertex"]: def _create_vertex(self, frontend_data: NodeData): vertex_data = frontend_data["data"] - vertex_type: str = vertex_data["type"] # type: ignore - vertex_base_type: str = vertex_data["node"]["template"]["_type"] # type: ignore + vertex_type: str = vertex_data["type"] + vertex_base_type: str = vertex_data["node"]["template"]["_type"] if "id" not in vertex_data: - raise ValueError(f"Vertex data for {vertex_data['display_name']} does not contain an id") + msg = f"Vertex data for {vertex_data['display_name']} does not contain an id" + raise ValueError(msg) - VertexClass = self._get_vertex_class(vertex_type, vertex_base_type, vertex_data["id"]) + vertex_class = self._get_vertex_class(vertex_type, vertex_base_type, vertex_data["id"]) - vertex_instance = VertexClass(frontend_data, graph=self) + vertex_instance = vertex_class(frontend_data, graph=self) vertex_instance.set_top_level(self.top_level_vertices) return vertex_instance - def prepare(self, stop_component_id: Optional[str] = None, start_component_id: Optional[str] = None): + def prepare(self, stop_component_id: str | None = None, start_component_id: str | None = None): self.initialize() if 
stop_component_id and start_component_id: - raise ValueError("You can only provide one of stop_component_id or start_component_id") + msg = "You can only provide one of stop_component_id or start_component_id" + raise ValueError(msg) self.validate_stream() - self.edges = self._build_edges() if stop_component_id or start_component_id: try: first_layer = self.sort_vertices(stop_component_id, start_component_id) - except Exception as exc: - logger.error(exc) + except Exception: # noqa: BLE001 + logger.exception("Error sorting vertices") first_layer = self.sort_vertices() else: first_layer = self.sort_vertices() for vertex_id in first_layer: self.run_manager.add_to_vertices_being_run(vertex_id) + if vertex_id in self.cycle_vertices: + self.run_manager.add_to_cycle_vertices(vertex_id) self._first_layer = sorted(first_layer) self._run_queue = deque(self._first_layer) self._prepared = True self._record_snapshot() return self - def get_children_by_vertex_type(self, vertex: Vertex, vertex_type: str) -> List[Vertex]: + def get_children_by_vertex_type(self, vertex: Vertex, vertex_type: str) -> list[Vertex]: """Returns the children of a vertex based on the vertex type.""" children = [] vertex_types = [vertex.data["type"]] @@ -1638,7 +1815,7 @@ def get_children_by_vertex_type(self, vertex: Vertex, vertex_type: str) -> List[ children.append(vertex) return children - def __repr__(self): + def __repr__(self) -> str: vertex_ids = [vertex.id for vertex in self.vertices] edges_repr = "\n".join([f" {edge.source_id} --> {edge.target_id}" for edge in self.edges]) @@ -1653,19 +1830,34 @@ def __repr__(self): def layered_topological_sort( self, - vertices: List["Vertex"], + vertices: list[Vertex], + *, filter_graphs: bool = False, - ) -> List[List[str]]: + ) -> list[list[str]]: """Performs a layered topological sort of the vertices in the graph.""" vertices_ids = {vertex.id for vertex in vertices} # Queue for vertices with no incoming edges - queue = deque( - vertex.id - for vertex in vertices - # if filter_graphs then only vertex.is_input will be considered - if self.in_degree_map[vertex.id] == 0 and (not filter_graphs or vertex.is_input) - ) - layers: List[List[str]] = [] + in_degree_map = self.in_degree_map.copy() + if self.is_cyclic and all(in_degree_map.values()): + # This means we have a cycle because all vertices have in_degree_map > 0 + # because of this we set the queue to start at ._start if it exists + if self._start is not None: + queue = deque([self._start._id]) + else: + # Find the chat input component + chat_input = find_start_component_id(vertices_ids) + if chat_input is None: + msg = "No input component found and no start component provided" + raise ValueError(msg) + queue = deque([chat_input]) + else: + queue = deque( + vertex.id + for vertex in vertices + # if filter_graphs then only vertex.is_input will be considered + if in_degree_map[vertex.id] == 0 and (not filter_graphs or vertex.is_input) + ) + layers: list[list[str]] = [] visited = set(queue) current_layer = 0 @@ -1685,20 +1877,19 @@ def layered_topological_sort( if neighbor not in vertices_ids: continue - self.in_degree_map[neighbor] -= 1 # 'remove' edge - if self.in_degree_map[neighbor] == 0 and neighbor not in visited: + in_degree_map[neighbor] -= 1 # 'remove' edge + if in_degree_map[neighbor] == 0 and neighbor not in visited: queue.append(neighbor) # if > 0 it might mean not all predecessors have added to the queue # so we should process the neighbors predecessors - elif self.in_degree_map[neighbor] > 0: + elif
in_degree_map[neighbor] > 0: for predecessor in self.predecessor_map[neighbor]: if predecessor not in queue and predecessor not in visited: queue.append(predecessor) current_layer += 1 # Next layer - new_layers = self.refine_layers(layers) - return new_layers + return self.refine_layers(layers) def refine_layers(self, initial_layers): # Map each vertex to its current layer @@ -1734,26 +1925,22 @@ def refine_layers(self, initial_layers): refined_layers[layer_index].append(vertex_id) # Remove empty layers if any - refined_layers = [layer for layer in refined_layers if layer] - - return refined_layers + return [layer for layer in refined_layers if layer] - def sort_chat_inputs_first(self, vertices_layers: List[List[str]]) -> List[List[str]]: + def sort_chat_inputs_first(self, vertices_layers: list[list[str]]) -> list[list[str]]: chat_inputs_first = [] for layer in vertices_layers: - for vertex_id in layer: - if "ChatInput" in vertex_id: - # Remove the ChatInput from the layer - layer.remove(vertex_id) - chat_inputs_first.append(vertex_id) + layer_chat_inputs_first = [vertex_id for vertex_id in layer if "ChatInput" in vertex_id] + chat_inputs_first.extend(layer_chat_inputs_first) + for vertex_id in layer_chat_inputs_first: + # Remove the ChatInput from the layer + layer.remove(vertex_id) if not chat_inputs_first: return vertices_layers - vertices_layers = [chat_inputs_first] + vertices_layers - - return vertices_layers + return [chat_inputs_first, *vertices_layers] - def sort_layer_by_dependency(self, vertices_layers: List[List[str]]) -> List[List[str]]: + def sort_layer_by_dependency(self, vertices_layers: list[list[str]]) -> list[list[str]]: """Sorts the vertices in each layer by dependency, ensuring no vertex depends on a subsequent vertex.""" sorted_layers = [] @@ -1763,16 +1950,14 @@ def sort_layer_by_dependency(self, vertices_layers: List[List[str]]) -> List[Lis return sorted_layers - def _sort_single_layer_by_dependency(self, layer: List[str]) -> List[str]: + def _sort_single_layer_by_dependency(self, layer: list[str]) -> list[str]: """Sorts a single layer by dependency using a stable sorting method.""" # Build a map of each vertex to its index in the layer for quick lookup. index_map = {vertex: index for index, vertex in enumerate(layer)} # Create a sorted copy of the layer based on dependency order. 
- sorted_layer = sorted(layer, key=lambda vertex: self._max_dependency_index(vertex, index_map), reverse=True) + return sorted(layer, key=lambda vertex: self._max_dependency_index(vertex, index_map), reverse=True) - return sorted_layer - - def _max_dependency_index(self, vertex_id: str, index_map: Dict[str, int]) -> int: + def _max_dependency_index(self, vertex_id: str, index_map: dict[str, int]) -> int: """Finds the highest index a given vertex's dependencies occupy in the same layer.""" vertex = self.get_vertex(vertex_id) max_index = -1 @@ -1781,9 +1966,9 @@ def _max_dependency_index(self, vertex_id: str, index_map: Dict[str, int]) -> in max_index = max(max_index, index_map[successor.id]) return max_index - def __to_dict(self) -> Dict[str, Dict[str, List[str]]]: + def __to_dict(self) -> dict[str, dict[str, list[str]]]: """Converts the graph to a dictionary.""" - result: Dict = dict() + result: dict = {} for vertex in self.vertices: vertex_id = vertex.id sucessors = [i.id for i in self.get_all_successors(vertex)] @@ -1791,16 +1976,19 @@ def __to_dict(self) -> Dict[str, Dict[str, List[str]]]: result |= {vertex_id: {"successors": sucessors, "predecessors": predecessors}} return result - def __filter_vertices(self, vertex_id: str, is_start: bool = False): + def __filter_vertices(self, vertex_id: str, *, is_start: bool = False): dictionaryized_graph = self.__to_dict() - vertex_ids = sort_up_to_vertex(dictionaryized_graph, vertex_id, is_start) + parent_node_map = {vertex.id: vertex.parent_node_id for vertex in self.vertices} + vertex_ids = sort_up_to_vertex( + graph=dictionaryized_graph, vertex_id=vertex_id, parent_node_map=parent_node_map, is_start=is_start + ) return [self.get_vertex(vertex_id) for vertex_id in vertex_ids] def sort_vertices( self, - stop_component_id: Optional[str] = None, - start_component_id: Optional[str] = None, - ) -> List[str]: + stop_component_id: str | None = None, + start_component_id: str | None = None, + ) -> list[str]: """Sorts the vertices in the graph.""" self.mark_all_vertices("ACTIVE") if stop_component_id is not None: @@ -1818,7 +2006,8 @@ def sort_vertices( vertices_layers = self.layered_topological_sort(vertices) vertices_layers = self.sort_by_avg_build_time(vertices_layers) - # vertices_layers = self.sort_chat_inputs_first(vertices_layers) + # Sort the chat inputs first to speed up sending the User message to the UI + vertices_layers = self.sort_chat_inputs_first(vertices_layers) # Now we should sort each layer in a way that we make sure # vertex V does not depend on vertex V+1 vertices_layers = self.sort_layer_by_dependency(vertices_layers) @@ -1827,32 +2016,31 @@ def sort_vertices( first_layer = vertices_layers[0] # save the only the rest self.vertices_layers = vertices_layers[1:] - self.vertices_to_run = {vertex_id for vertex_id in chain.from_iterable(vertices_layers)} + self.vertices_to_run = set(chain.from_iterable(vertices_layers)) self.build_run_map() # Return just the first layer self._first_layer = first_layer return first_layer - def sort_interface_components_first(self, vertices_layers: List[List[str]]) -> List[List[str]]: + def sort_interface_components_first(self, vertices_layers: list[list[str]]) -> list[list[str]]: """Sorts the vertices in the graph so that vertices containing ChatInput or ChatOutput come first.""" def contains_interface_component(vertex): return any(component.value in vertex for component in InterfaceComponentTypes) # Sort each inner list so that vertices containing ChatInput or ChatOutput come first - sorted_vertices = [ 
+ return [ sorted( inner_list, key=lambda vertex: not contains_interface_component(vertex), ) for inner_list in vertices_layers ] - return sorted_vertices - def sort_by_avg_build_time(self, vertices_layers: List[List[str]]) -> List[List[str]]: + def sort_by_avg_build_time(self, vertices_layers: list[list[str]]) -> list[list[str]]: """Sorts the vertices in the graph so that vertices with the lowest average build time come first.""" - def sort_layer_by_avg_build_time(vertices_ids: List[str]) -> List[str]: + def sort_layer_by_avg_build_time(vertices_ids: list[str]) -> list[str]: """Sorts the vertices in the graph so that vertices with the lowest average build time come first.""" if len(vertices_ids) == 1: return vertices_ids @@ -1860,17 +2048,15 @@ def sort_layer_by_avg_build_time(vertices_ids: List[str]) -> List[str]: return vertices_ids - sorted_vertices = [sort_layer_by_avg_build_time(layer) for layer in vertices_layers] - return sorted_vertices + return [sort_layer_by_avg_build_time(layer) for layer in vertices_layers] def is_vertex_runnable(self, vertex_id: str) -> bool: """Returns whether a vertex is runnable.""" is_active = self.get_vertex(vertex_id).is_active() - return self.run_manager.is_vertex_runnable(vertex_id, is_active) + return self.run_manager.is_vertex_runnable(vertex_id, is_active=is_active) - def build_run_map(self): - """ - Builds the run map for the graph. + def build_run_map(self) -> None: + """Builds the run map for the graph. This method is responsible for building the run map for the graph, which maps each node in the graph to its corresponding run function. @@ -1880,9 +2066,9 @@ def build_run_map(self): """ self.run_manager.build_run_map(predecessor_map=self.predecessor_map, vertices_to_run=self.vertices_to_run) - def find_runnable_predecessors_for_successors(self, vertex_id: str) -> List[str]: - """ - For each successor of the current vertex, find runnable predecessors if any. + def find_runnable_predecessors_for_successors(self, vertex_id: str) -> list[str]: + """For each successor of the current vertex, find runnable predecessors if any. + This checks the direct predecessors of each successor to identify any that are immediately runnable, expanding the search to ensure progress can be made. 
""" @@ -1890,19 +2076,19 @@ def find_runnable_predecessors_for_successors(self, vertex_id: str) -> List[str] for successor_id in self.run_manager.run_map.get(vertex_id, []): runnable_vertices.extend(self.find_runnable_predecessors_for_successor(successor_id)) - return runnable_vertices + return sorted(runnable_vertices) - def find_runnable_predecessors_for_successor(self, vertex_id: str) -> List[str]: + def find_runnable_predecessors_for_successor(self, vertex_id: str) -> list[str]: runnable_vertices = [] visited = set() - def find_runnable_predecessors(predecessor: "Vertex"): + def find_runnable_predecessors(predecessor: Vertex) -> None: predecessor_id = predecessor.id if predecessor_id in visited: return visited.add(predecessor_id) is_active = self.get_vertex(predecessor_id).is_active() - if self.run_manager.is_vertex_runnable(predecessor_id, is_active): + if self.run_manager.is_vertex_runnable(predecessor_id, is_active=is_active): runnable_vertices.append(predecessor_id) else: for pred_pred_id in self.run_manager.run_predecessors.get(predecessor_id, []): @@ -1912,15 +2098,14 @@ def find_runnable_predecessors(predecessor: "Vertex"): find_runnable_predecessors(self.get_vertex(predecessor_id)) return runnable_vertices - def remove_from_predecessors(self, vertex_id: str): + def remove_from_predecessors(self, vertex_id: str) -> None: self.run_manager.remove_from_predecessors(vertex_id) - def remove_vertex_from_runnables(self, vertex_id: str): + def remove_vertex_from_runnables(self, vertex_id: str) -> None: self.run_manager.remove_vertex_from_runnables(vertex_id) def get_top_level_vertices(self, vertices_ids): - """ - Retrieves the top-level vertices from the given graph based on the provided vertex IDs. + """Retrieves the top-level vertices from the given graph based on the provided vertex IDs. Args: vertices_ids (list): A list of vertex IDs. 
@@ -1938,8 +2123,8 @@ def get_top_level_vertices(self, vertices_ids): top_level_vertices.append(vertex_id) return top_level_vertices - def build_in_degree(self, edges: List[CycleEdge]) -> Dict[str, int]: - in_degree: Dict[str, int] = defaultdict(int) + def build_in_degree(self, edges: list[CycleEdge]) -> dict[str, int]: + in_degree: dict[str, int] = defaultdict(int) for edge in edges: in_degree[edge.target_id] += 1 for vertex in self.vertices: @@ -1947,7 +2132,7 @@ def build_in_degree(self, edges: List[CycleEdge]) -> Dict[str, int]: in_degree[vertex.id] = 0 return in_degree - def build_adjacency_maps(self, edges: List[CycleEdge]) -> Tuple[Dict[str, List[str]], Dict[str, List[str]]]: + def build_adjacency_maps(self, edges: list[CycleEdge]) -> tuple[dict[str, list[str]], dict[str, list[str]]]: """Returns the adjacency maps for the graph.""" predecessor_map: dict[str, list[str]] = defaultdict(list) successor_map: dict[str, list[str]] = defaultdict(list) diff --git a/src/backend/base/langflow/graph/graph/constants.py b/src/backend/base/langflow/graph/graph/constants.py index 51da1182a0ce..b5a1e411d583 100644 --- a/src/backend/base/langflow/graph/graph/constants.py +++ b/src/backend/base/langflow/graph/graph/constants.py @@ -3,7 +3,7 @@ class Finish: - def __bool__(self): + def __bool__(self) -> bool: return True def __eq__(self, other): @@ -17,12 +17,12 @@ def _import_vertex_types(): class VertexTypesDict(LazyLoadDictBase): - def __init__(self): + def __init__(self) -> None: self._all_types_dict = None self._types = _import_vertex_types @property - def VERTEX_TYPE_MAP(self): + def vertex_type_map(self): return self.all_types_dict def _build_dict(self): @@ -35,9 +35,9 @@ def _build_dict(self): def get_type_dict(self): types = self._types() return { - **{t: types.CustomComponentVertex for t in ["CustomComponent"]}, - **{t: types.ComponentVertex for t in ["Component"]}, - **{t: types.InterfaceVertex for t in CHAT_COMPONENTS}, + "CustomComponent": types.CustomComponentVertex, + "Component": types.ComponentVertex, + **dict.fromkeys(CHAT_COMPONENTS, types.InterfaceVertex), } def get_custom_component_vertex_type(self): diff --git a/src/backend/base/langflow/graph/graph/runnable_vertices_manager.py b/src/backend/base/langflow/graph/graph/runnable_vertices_manager.py index 7b16684386ff..9abc227e9db3 100644 --- a/src/backend/base/langflow/graph/graph/runnable_vertices_manager.py +++ b/src/backend/base/langflow/graph/graph/runnable_vertices_manager.py @@ -3,10 +3,11 @@ class RunnableVerticesManager: def __init__(self): - self.run_map = defaultdict(list) # Tracks successors of each vertex - self.run_predecessors = defaultdict(set) # Tracks predecessors for each vertex - self.vertices_to_run = set() # Set of vertices that are ready to run - self.vertices_being_run = set() # Set of vertices that are currently running + self.run_map: dict[str, list[str]] = defaultdict(list) # Tracks successors of each vertex + self.run_predecessors: dict[str, set[str]] = defaultdict(set) # Tracks predecessors for each vertex + self.vertices_to_run: set[str] = set() # Set of vertices that are ready to run + self.vertices_being_run: set[str] = set() # Set of vertices that are currently running + self.cycle_vertices: set[str] = set() # Set of vertices that are in a cycle def to_dict(self) -> dict: return { @@ -42,12 +43,12 @@ def __setstate__(self, state: dict) -> None: def all_predecessors_are_fulfilled(self) -> bool: return all(not value for value in self.run_predecessors.values()) - def update_run_state(self, 
run_predecessors: dict, vertices_to_run: set): + def update_run_state(self, run_predecessors: dict, vertices_to_run: set) -> None: self.run_predecessors.update(run_predecessors) self.vertices_to_run.update(vertices_to_run) self.build_run_map(self.run_predecessors, self.vertices_to_run) - def is_vertex_runnable(self, vertex_id: str, is_active: bool) -> bool: + def is_vertex_runnable(self, vertex_id: str, *, is_active: bool) -> bool: """Determines if a vertex is runnable.""" if not is_active: return False @@ -55,21 +56,19 @@ def is_vertex_runnable(self, vertex_id: str, is_active: bool) -> bool: return False if vertex_id not in self.vertices_to_run: return False - if not self.are_all_predecessors_fulfilled(vertex_id): - return False - return True + return self.are_all_predecessors_fulfilled(vertex_id) or vertex_id in self.cycle_vertices def are_all_predecessors_fulfilled(self, vertex_id: str) -> bool: return not any(self.run_predecessors.get(vertex_id, [])) - def remove_from_predecessors(self, vertex_id: str): + def remove_from_predecessors(self, vertex_id: str) -> None: """Removes a vertex from the predecessor list of its successors.""" predecessors = self.run_map.get(vertex_id, []) for predecessor in predecessors: if vertex_id in self.run_predecessors[predecessor]: self.run_predecessors[predecessor].remove(vertex_id) - def build_run_map(self, predecessor_map, vertices_to_run): + def build_run_map(self, predecessor_map, vertices_to_run) -> None: """Builds a map of vertices and their runnable successors.""" self.run_map = defaultdict(list) for vertex_id, predecessors in predecessor_map.items(): @@ -78,16 +77,19 @@ def build_run_map(self, predecessor_map, vertices_to_run): self.run_predecessors = predecessor_map.copy() self.vertices_to_run = vertices_to_run - def update_vertex_run_state(self, vertex_id: str, is_runnable: bool): + def update_vertex_run_state(self, vertex_id: str, *, is_runnable: bool) -> None: """Updates the runnable state of a vertex.""" if is_runnable: self.vertices_to_run.add(vertex_id) else: self.vertices_being_run.discard(vertex_id) - def remove_vertex_from_runnables(self, v_id): + def remove_vertex_from_runnables(self, v_id) -> None: self.update_vertex_run_state(v_id, is_runnable=False) self.remove_from_predecessors(v_id) - def add_to_vertices_being_run(self, v_id): + def add_to_vertices_being_run(self, v_id) -> None: self.vertices_being_run.add(v_id) + + def add_to_cycle_vertices(self, v_id): + self.cycle_vertices.add(v_id) diff --git a/src/backend/base/langflow/graph/graph/schema.py b/src/backend/base/langflow/graph/graph/schema.py index 4a1dcc3a3a3e..4777abef622b 100644 --- a/src/backend/base/langflow/graph/graph/schema.py +++ b/src/backend/base/langflow/graph/graph/schema.py @@ -1,4 +1,6 @@ -from typing import TYPE_CHECKING, NamedTuple +from __future__ import annotations + +from typing import TYPE_CHECKING, NamedTuple, Protocol from typing_extensions import NotRequired, TypedDict @@ -8,6 +10,7 @@ if TYPE_CHECKING: from langflow.graph.schema import ResultData from langflow.graph.vertex.base import Vertex + from langflow.schema.log import LoggableType class ViewPort(TypedDict): @@ -31,11 +34,11 @@ class GraphDump(TypedDict, total=False): class VertexBuildResult(NamedTuple): - result_dict: "ResultData" + result_dict: ResultData params: str valid: bool artifacts: dict - vertex: "Vertex" + vertex: Vertex class OutputConfigDict(TypedDict): @@ -44,3 +47,7 @@ class OutputConfigDict(TypedDict): class StartConfigDict(TypedDict): output: OutputConfigDict + + +class 
LogCallbackFunction(Protocol): + def __call__(self, event_name: str, log: LoggableType) -> None: ... diff --git a/src/backend/base/langflow/graph/graph/state_manager.py b/src/backend/base/langflow/graph/graph/state_manager.py index f04667a3d855..38aed4b90740 100644 --- a/src/backend/base/langflow/graph/graph/state_manager.py +++ b/src/backend/base/langflow/graph/graph/state_manager.py @@ -1,43 +1,35 @@ -from typing import TYPE_CHECKING, Callable +from __future__ import annotations + +from typing import TYPE_CHECKING from loguru import logger from langflow.services.deps import get_settings_service, get_state_service if TYPE_CHECKING: + from collections.abc import Callable + from langflow.services.state.service import StateService class GraphStateManager: - def __init__(self): + def __init__(self) -> None: try: - self.state_service: "StateService" = get_state_service() - except Exception as e: - logger.debug(f"Error getting state service. Defaulting to InMemoryStateService: {e}") + self.state_service: StateService = get_state_service() + except Exception: # noqa: BLE001 + logger.opt(exception=True).debug("Error getting state service. Defaulting to InMemoryStateService") from langflow.services.state.service import InMemoryStateService self.state_service = InMemoryStateService(get_settings_service()) - def append_state(self, key, new_state, run_id: str): + def append_state(self, key, new_state, run_id: str) -> None: self.state_service.append_state(key, new_state, run_id) - def update_state(self, key, new_state, run_id: str): + def update_state(self, key, new_state, run_id: str) -> None: self.state_service.update_state(key, new_state, run_id) def get_state(self, key, run_id: str): return self.state_service.get_state(key, run_id) - def subscribe(self, key, observer: Callable): + def subscribe(self, key, observer: Callable) -> None: self.state_service.subscribe(key, observer) - - def notify_observers(self, key, new_state): - for callback in self.observers[key]: - callback(key, new_state, append=False) - - def notify_append_observers(self, key, new_state): - for callback in self.observers[key]: - try: - callback(key, new_state, append=True) - except Exception as e: - logger.error(f"Error in observer {callback} for key {key}: {e}") - logger.warning("Callbacks not implemented yet") diff --git a/src/backend/base/langflow/graph/graph/state_model.py b/src/backend/base/langflow/graph/graph/state_model.py index e747f079a79f..5792fbf379a2 100644 --- a/src/backend/base/langflow/graph/graph/state_model.py +++ b/src/backend/base/langflow/graph/graph/state_model.py @@ -5,13 +5,11 @@ def camel_to_snake(camel_str: str) -> str: - snake_str = re.sub(r"(?<!^)(?=[A-Z])", "_", camel_str).lower() - return snake_str + return re.sub(r"(?<!^)(?=[A-Z])", "_", camel_str).lower() def create_state_model_from_graph(graph: BaseModel) -> type[BaseModel]: - """ - Create a Pydantic state model from a graph representation. + """Create a Pydantic state model from a graph representation. This function generates a Pydantic model that represents the state of an entire graph. It creates getter methods for each vertex in the graph, allowing access to the state @@ -30,18 +28,18 @@ def create_state_model_from_graph(graph: BaseModel) -> type[BaseModel]: Raises: ValueError: If any vertex in the graph does not have a properly initialized - component instance (i.e., if vertex._custom_component is None). + component instance (i.e., if vertex.custom_component is None). Notes: - - Each vertex in the graph must have a '_custom_component' attribute. - - The '_custom_component' must have a 'get_state_model_instance_getter' method.
+ - The 'custom_component' must have a 'get_state_model_instance_getter' method. - Vertex IDs are converted from camel case to snake case for the resulting model's field names. - The resulting model uses the 'create_state_model' function with validation disabled. Example: >>> class Vertex(BaseModel): ... id: str - ... _custom_component: Any + ... custom_component: Any >>> class Graph(BaseModel): ... vertices: List[Vertex] >>> # Assume proper setup of vertices and components @@ -52,16 +50,17 @@ def create_state_model_from_graph(graph: BaseModel) -> type[BaseModel]: >>> print(graph_state.some_component_name) """ for vertex in graph.vertices: - if hasattr(vertex, "_custom_component") and vertex._custom_component is None: - raise ValueError(f"Vertex {vertex.id} does not have a component instance.") + if hasattr(vertex, "custom_component") and vertex.custom_component is None: + msg = f"Vertex {vertex.id} does not have a component instance." + raise ValueError(msg) state_model_getters = [ - vertex._custom_component.get_state_model_instance_getter() + vertex.custom_component.get_state_model_instance_getter() for vertex in graph.vertices - if hasattr(vertex, "_custom_component") and hasattr(vertex._custom_component, "get_state_model_instance_getter") + if hasattr(vertex, "custom_component") and hasattr(vertex.custom_component, "get_state_model_instance_getter") ] fields = { camel_to_snake(vertex.id): state_model_getter - for vertex, state_model_getter in zip(graph.vertices, state_model_getters) + for vertex, state_model_getter in zip(graph.vertices, state_model_getters, strict=False) } return create_state_model(model_name="GraphStateModel", validate=False, **fields) diff --git a/src/backend/base/langflow/graph/graph/utils.py b/src/backend/base/langflow/graph/graph/utils.py index 21e00f696df4..02ea8e0b7b09 100644 --- a/src/backend/base/langflow/graph/graph/utils.py +++ b/src/backend/base/langflow/graph/graph/utils.py @@ -1,13 +1,13 @@ import copy from collections import defaultdict, deque -from typing import Dict, List + +import networkx as nx PRIORITY_LIST_OF_INPUTS = ["webhook", "chat"] def find_start_component_id(vertices): - """ - Finds the component ID from a list of vertices based on a priority list of input types. + """Finds the component ID from a list of vertices based on a priority list of input types. Args: vertices (list): A list of vertex IDs. @@ -23,24 +23,18 @@ def find_start_component_id(vertices): def find_last_node(nodes, edges): - """ - This function receives a flow and returns the last node. - """ + """This function receives a flow and returns the last node.""" return next((n for n in nodes if all(e["source"] != n["id"] for e in edges)), None) -def add_parent_node_id(nodes, parent_node_id): - """ - This function receives a list of nodes and adds a parent_node_id to each node. - """ +def add_parent_node_id(nodes, parent_node_id) -> None: + """This function receives a list of nodes and adds a parent_node_id to each node.""" for node in nodes: node["parent_node_id"] = parent_node_id -def add_frozen(nodes, frozen): - """ - This function receives a list of nodes and adds a frozen to each node. 
- """ +def add_frozen(nodes, frozen) -> None: + """This function receives a list of nodes and adds a frozen to each node.""" for node in nodes: node["data"]["node"]["frozen"] = frozen @@ -81,7 +75,7 @@ def process_flow(flow_object): cloned_flow = copy.deepcopy(flow_object) processed_nodes = set() # To keep track of processed nodes - def process_node(node): + def process_node(node) -> None: node_id = node.get("id") # If node already processed, skip @@ -106,9 +100,8 @@ def process_node(node): return cloned_flow -def update_template(template, g_nodes): - """ - Updates the template of a node in a graph with the given template. +def update_template(template, g_nodes) -> None: + """Updates the template of a node in a graph with the given template. Args: template (dict): The new template to update the node with. @@ -117,7 +110,7 @@ def update_template(template, g_nodes): Returns: None """ - for _, value in template.items(): + for value in template.values(): if not value.get("proxy"): continue proxy_dict = value["proxy"] @@ -138,14 +131,12 @@ def update_template(template, g_nodes): g_nodes[node_index]["data"]["node"]["template"][field]["display_name"] = display_name -def update_target_handle(new_edge, g_nodes, group_node_id): - """ - Updates the target handle of a given edge if it is a proxy node. +def update_target_handle(new_edge, g_nodes): + """Updates the target handle of a given edge if it is a proxy node. Args: new_edge (dict): The edge to update. g_nodes (list): The list of nodes in the graph. - group_node_id (str): The ID of the group node. Returns: dict: The updated edge. @@ -158,9 +149,8 @@ def update_target_handle(new_edge, g_nodes, group_node_id): return new_edge -def set_new_target_handle(proxy_id, new_edge, target_handle, node): - """ - Sets a new target handle for a given edge. +def set_new_target_handle(proxy_id, new_edge, target_handle, node) -> None: + """Sets a new target handle for a given edge. Args: proxy_id (str): The ID of the proxy. @@ -174,7 +164,8 @@ def set_new_target_handle(proxy_id, new_edge, target_handle, node): new_edge["target"] = proxy_id _type = target_handle.get("type") if _type is None: - raise KeyError("The 'type' key must be present in target_handle.") + msg = "The 'type' key must be present in target_handle." + raise KeyError(msg) field = target_handle["proxy"]["field"] new_target_handle = { @@ -193,12 +184,12 @@ def set_new_target_handle(proxy_id, new_edge, target_handle, node): def update_source_handle(new_edge, g_nodes, g_edges): - """ - Updates the source handle of a given edge to the last node in the flow data. + """Updates the source handle of a given edge to the last node in the flow data. Args: new_edge (dict): The edge to update. - flow_data (dict): The flow data containing the nodes and edges. + g_nodes: The graph nodes. + g_edges: The graph edges. Returns: dict: The updated edge with the new source handle. @@ -212,13 +203,15 @@ def update_source_handle(new_edge, g_nodes, g_edges): def get_updated_edges(base_flow, g_nodes, g_edges, group_node_id): - """ + """Get updated edges. + Given a base flow, a list of graph nodes and a group node id, returns a list of updated edges. An updated edge is an edge that has its target or source handle updated based on the group node id. Args: base_flow (dict): The base flow containing a list of edges. g_nodes (list): A list of graph nodes. + g_edges (list): A list of graph edges. group_node_id (str): The id of the group node. 
Returns: @@ -228,17 +221,17 @@ def get_updated_edges(base_flow, g_nodes, g_edges, group_node_id): for edge in base_flow["edges"]: new_edge = copy.deepcopy(edge) if new_edge["target"] == group_node_id: - new_edge = update_target_handle(new_edge, g_nodes, group_node_id) + new_edge = update_target_handle(new_edge, g_nodes) if new_edge["source"] == group_node_id: new_edge = update_source_handle(new_edge, g_nodes, g_edges) - if edge["target"] == group_node_id or edge["source"] == group_node_id: + if group_node_id in {edge["target"], edge["source"]}: updated_edges.append(new_edge) return updated_edges -def get_successors(graph: Dict[str, Dict[str, List[str]]], vertex_id: str) -> List[str]: +def get_successors(graph: dict[str, dict[str, list[str]]], vertex_id: str) -> list[str]: successors_result = [] stack = [vertex_id] visited = set() @@ -247,17 +240,50 @@ def get_successors(graph: Dict[str, Dict[str, List[str]]], vertex_id: str) -> Li if current_id in visited: continue visited.add(current_id) - successors_result.append(current_id) + if current_id != vertex_id: + successors_result.append(current_id) stack.extend(graph[current_id]["successors"]) return successors_result -def sort_up_to_vertex(graph: Dict[str, Dict[str, List[str]]], vertex_id: str, is_start: bool = False) -> List[str]: +def get_root_of_group_node( + graph: dict[str, dict[str, list[str]]], vertex_id: str, parent_node_map: dict[str, str | None] +) -> str: + """Returns the root of a group node.""" + if vertex_id in parent_node_map.values(): + # Get all vertices with vertex_id as their parent node + child_vertices = [v_id for v_id, parent_id in parent_node_map.items() if parent_id == vertex_id] + + # Now go through successors of the child vertices + # and get the one that has no successors in child_vertices + for child_id in child_vertices: + successors = get_successors(graph, child_id) + if not any(successor in child_vertices for successor in successors): + return child_id + + msg = f"Vertex {vertex_id} is not a top level vertex or no root vertex found" + raise ValueError(msg) + + +def sort_up_to_vertex( + graph: dict[str, dict[str, list[str]]], + vertex_id: str, + *, + parent_node_map: dict[str, str | None] | None = None, + is_start: bool = False, +) -> list[str]: """Cuts the graph up to a given vertex and sorts the resulting subgraph.""" try: stop_or_start_vertex = graph[vertex_id] - except KeyError: - raise ValueError(f"Vertex {vertex_id} not found into graph") + except KeyError as e: + if parent_node_map is None: + msg = "Parent node map is required to find the root of a group node" + raise ValueError(msg) from e + vertex_id = get_root_of_group_node(graph=graph, vertex_id=vertex_id, parent_node_map=parent_node_map) + if vertex_id not in graph: + msg = f"Vertex {vertex_id} not found in the graph" + raise ValueError(msg) from e + stop_or_start_vertex = graph[vertex_id] visited, excluded = set(), set() stack = [vertex_id] @@ -289,8 +315,7 @@ def sort_up_to_vertex(graph: Dict[str, Dict[str, List[str]]], vertex_id: str, is def has_cycle(vertex_ids: list[str], edges: list[tuple[str, str]]) -> bool: - """ - Determines whether a directed graph represented by a list of vertices and edges contains a cycle. + """Determines whether a directed graph represented by a list of vertices and edges contains a cycle. Args: vertex_ids (list[str]): A list of vertex IDs. 
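To make the get_successors change above concrete — the start vertex is no longer included in its own successor list — here is a hedged sketch using the {"successors": [...]} adjacency shape the function expects, assuming get_successors is imported from langflow.graph.graph.utils (toy IDs invented):

    graph = {
        "a": {"successors": ["b", "c"]},
        "b": {"successors": ["c"]},
        "c": {"successors": []},
    }
    # Iterative DFS over the successor map; the start vertex "a" is excluded.
    assert sorted(get_successors(graph, "a")) == ["b", "c"]
    assert get_successors(graph, "c") == []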
@@ -305,7 +330,7 @@ def has_cycle(vertex_ids: list[str], edges: list[tuple[str, str]]) -> bool: graph[u].append(v) # Utility function to perform DFS - def dfs(v, visited, rec_stack): + def dfs(v, visited, rec_stack) -> bool: visited.add(v) rec_stack.add(v) @@ -322,17 +347,11 @@ def dfs(v, visited, rec_stack): visited: set[str] = set() rec_stack: set[str] = set() - for vertex in vertex_ids: - if vertex not in visited: - if dfs(vertex, visited, rec_stack): - return True - - return False + return any(vertex not in visited and dfs(vertex, visited, rec_stack) for vertex in vertex_ids) def find_cycle_edge(entry_point: str, edges: list[tuple[str, str]]) -> tuple[str, str]: - """ - Find the edge that causes a cycle in a directed graph starting from a given entry point. + """Find the edge that causes a cycle in a directed graph starting from a given entry point. Args: entry_point (str): The vertex ID from which to start the search. @@ -369,8 +388,7 @@ def dfs(v, visited, rec_stack): def find_all_cycle_edges(entry_point: str, edges: list[tuple[str, str]]) -> list[tuple[str, str]]: - """ - Find all edges that cause cycles in a directed graph starting from a given entry point. + """Find all edges that cause cycles in a directed graph starting from a given entry point. Args: entry_point (str): The vertex ID from which to start the search. @@ -410,3 +428,16 @@ def should_continue(yielded_counts: dict[str, int], max_iterations: int | None) if max_iterations is None: return True return max(yielded_counts.values(), default=0) <= max_iterations + + +def find_cycle_vertices(edges): + # Create a directed graph from the edges + graph = nx.DiGraph(edges) + + # Find all simple cycles in the graph + cycles = list(nx.simple_cycles(graph)) + + # Flatten the list of cycles and remove duplicates + cycle_vertices = {vertex for cycle in cycles for vertex in cycle} + + return sorted(cycle_vertices) diff --git a/src/backend/base/langflow/graph/schema.py b/src/backend/base/langflow/graph/schema.py index fdabcdaaa64a..dc643d647b5a 100644 --- a/src/backend/base/langflow/graph/schema.py +++ b/src/backend/base/langflow/graph/schema.py @@ -1,5 +1,5 @@ from enum import Enum -from typing import Any, List, Optional +from typing import Any from pydantic import BaseModel, Field, field_serializer, model_validator @@ -9,16 +9,16 @@ class ResultData(BaseModel): - results: Optional[Any] = Field(default_factory=dict) - artifacts: Optional[Any] = Field(default_factory=dict) - outputs: Optional[dict] = Field(default_factory=dict) - logs: Optional[dict] = Field(default_factory=dict) - messages: Optional[list[ChatOutputResponse]] = Field(default_factory=list) - timedelta: Optional[float] = None - duration: Optional[str] = None - component_display_name: Optional[str] = None - component_id: Optional[str] = None - used_frozen_result: Optional[bool] = False + results: Any | None = Field(default_factory=dict) + artifacts: Any | None = Field(default_factory=dict) + outputs: dict | None = Field(default_factory=dict) + logs: dict | None = Field(default_factory=dict) + messages: list[ChatOutputResponse] | None = Field(default_factory=list) + timedelta: float | None = None + duration: str | None = None + component_display_name: str | None = None + component_id: str | None = None + used_frozen_result: bool | None = False @field_serializer("results") def serialize_results(self, value): @@ -48,8 +48,6 @@ def validate_model(cls, values): class InterfaceComponentTypes(str, Enum, metaclass=ContainsEnumMeta): - # ChatInput and ChatOutput are the only ones 
that are - # power components ChatInput = "ChatInput" ChatOutput = "ChatOutput" TextInput = "TextInput" @@ -57,14 +55,6 @@ class InterfaceComponentTypes(str, Enum, metaclass=ContainsEnumMeta): DataOutput = "DataOutput" WebhookInput = "Webhook" - def __contains__(cls, item): - try: - cls(item) - except ValueError: - return False - else: - return True - CHAT_COMPONENTS = [InterfaceComponentTypes.ChatInput, InterfaceComponentTypes.ChatOutput] RECORDS_COMPONENTS = [InterfaceComponentTypes.DataOutput] @@ -82,4 +72,4 @@ def __contains__(cls, item): class RunOutputs(BaseModel): inputs: dict = Field(default_factory=dict) - outputs: List[Optional[ResultData]] = Field(default_factory=list) + outputs: list[ResultData | None] = Field(default_factory=list) diff --git a/src/backend/base/langflow/graph/state/model.py b/src/backend/base/langflow/graph/state/model.py index a3bac48a89dc..b56e886389ba 100644 --- a/src/backend/base/langflow/graph/state/model.py +++ b/src/backend/base/langflow/graph/state/model.py @@ -1,12 +1,12 @@ -from typing import Any, Callable, get_type_hints +from collections.abc import Callable +from typing import Any, get_type_hints from pydantic import ConfigDict, computed_field, create_model from pydantic.fields import FieldInfo def __validate_method(method: Callable) -> None: - """ - Validates a method by checking if it has the required attributes. + """Validates a method by checking if it has the required attributes. This function ensures that the given method belongs to a class with the necessary structure for output handling. It checks for the presence of a __self__ attribute @@ -29,14 +29,15 @@ class does not have a get_output_by_method attribute. >>> __validate_method(lambda x: x) # This will raise a ValueError """ if not hasattr(method, "__self__"): - raise ValueError(f"Method {method} does not have a __self__ attribute.") + msg = f"Method {method} does not have a __self__ attribute." + raise ValueError(msg) if not hasattr(method.__self__, "get_output_by_method"): - raise ValueError(f"Method's class {method.__self__} must have a get_output_by_method attribute.") + msg = f"Method's class {method.__self__} must have a get_output_by_method attribute." + raise ValueError(msg) -def build_output_getter(method: Callable, validate: bool = True) -> Callable: - """ - Builds an output getter function for a given method in a graph component. +def build_output_getter(method: Callable, *, validate: bool = True) -> Callable: + """Builds an output getter function for a given method in a graph component. This function creates a new callable that, when invoked, retrieves the output of the specified method using the get_output_by_method of the method's class. @@ -81,14 +82,14 @@ def output_getter(_): return_type = get_type_hints(method).get("return", None) if return_type is None: - raise ValueError(f"Method {method.__name__} has no return type annotation.") + msg = f"Method {method.__name__} has no return type annotation." + raise ValueError(msg) output_getter.__annotations__["return"] = return_type return output_getter -def build_output_setter(method: Callable, validate: bool = True) -> Callable: - """ - Build an output setter function for a given method in a graph component. +def build_output_setter(method: Callable, *, validate: bool = True) -> Callable: + """Build an output setter function for a given method in a graph component. This function creates a new callable that, when invoked, sets the output of the specified method using the get_output_by_method of the method's class. 
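A hedged sketch of the contract __validate_method enforces: the callable must be a bound method (it has __self__) whose class exposes get_output_by_method. FakeComponent below is invented purely for illustration, and the module-private helper is called directly as one could from inside model.py:

    class FakeComponent:
        def get_output_by_method(self, method):
            ...  # would resolve the Output object bound to `method`

        def build(self) -> str:
            return "ok"

    comp = FakeComponent()
    __validate_method(comp.build)  # passes: bound method + get_output_by_method

    try:
        __validate_method(lambda x: x)  # fails: a lambda has no __self__
    except ValueError as err:
        print(err)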
@@ -125,19 +126,18 @@ def build_output_setter(method: Callable, validate: bool = True) -> Callable: >>> print(component.get_output_by_method(component.set_message).value) # Prints "New message" """ - def output_setter(self, value): + def output_setter(self, value) -> None: # noqa: ARG001 if validate: __validate_method(method) - methods_class = method.__self__ + methods_class = method.__self__ # type: ignore[attr-defined] output = methods_class.get_output_by_method(method) output.value = value return output_setter -def create_state_model(model_name: str = "State", validate: bool = True, **kwargs) -> type: - """ - Create a dynamic Pydantic state model based on the provided keyword arguments. +def create_state_model(model_name: str = "State", *, validate: bool = True, **kwargs) -> type: + """Create a dynamic Pydantic state model based on the provided keyword arguments. This function generates a Pydantic model class with fields corresponding to the provided keyword arguments. It can handle various types of field definitions, @@ -206,32 +206,32 @@ def create_state_model(model_name: str = "State", validate: bool = True, **kwarg # Define the field with the return type try: __validate_method(value) - getter = build_output_getter(value, validate) - setter = build_output_setter(value, validate) + getter = build_output_getter(value, validate=validate) + setter = build_output_setter(value, validate=validate) property_method = property(getter, setter) except ValueError as e: # If the method is not valid, assume it is already a getter - if "get_output_by_method" not in str(e) and "__self__" not in str(e) or validate: - raise e + if ("get_output_by_method" not in str(e) and "__self__" not in str(e)) or validate: + raise property_method = value fields[name] = computed_field(property_method) elif isinstance(value, FieldInfo): field_tuple = (value.annotation or Any, value) fields[name] = field_tuple - elif isinstance(value, tuple) and len(value) == 2: + elif isinstance(value, tuple) and len(value) == 2: # noqa: PLR2004 # Fields are defined by one of the following tuple forms: # (<type>, <default value>) # (<type>, Field(...)) # typing.Annotated[<type>, Field(...)] if not isinstance(value[0], type): - raise ValueError(f"Invalid type for field {name}: {type(value[0])}") + msg = f"Invalid type for field {name}: {type(value[0])}" + raise TypeError(msg) fields[name] = (value[0], value[1]) else: - raise ValueError(f"Invalid value type {type(value)} for field {name}") + msg = f"Invalid value type {type(value)} for field {name}" + raise ValueError(msg) # Create the model dynamically config_dict = ConfigDict(arbitrary_types_allowed=True, validate_assignment=True) - model = create_model(model_name, __config__=config_dict, **fields) - - return model + return create_model(model_name, __config__=config_dict, **fields) diff --git a/src/backend/base/langflow/graph/utils.py b/src/backend/base/langflow/graph/utils.py index bcc784a10f44..5ceee698ba9c 100644 --- a/src/backend/base/langflow/graph/utils.py +++ b/src/backend/base/langflow/graph/utils.py @@ -1,6 +1,9 @@ +from __future__ import annotations + import json +from collections.abc import Generator from enum import Enum -from typing import TYPE_CHECKING, Any, Generator, Optional, Union +from typing import TYPE_CHECKING, Any from uuid import UUID from langchain_core.documents import Document @@ -54,7 +57,7 @@ def fix_prompt(prompt: str): return prompt + " {input}" -def flatten_list(list_of_lists: list[Union[list, Any]]) -> list: +def flatten_list(list_of_lists: list[list | Any]) -> list: """Flatten list 
of lists.""" new_list = [] for item in list_of_lists: @@ -66,21 +69,22 @@ def flatten_list(list_of_lists: list[Union[list, Any]]) -> list: def serialize_field(value): - """Unified serialization function for handling both BaseModel and Document types, - including handling lists of these types.""" + """Serialize field. - if isinstance(value, (list, tuple)): + Unified serialization function for handling both BaseModel and Document types, + including handling lists of these types. + """ + if isinstance(value, list | tuple): return [serialize_field(v) for v in value] - elif isinstance(value, Document): + if isinstance(value, Document): return value.to_json() - elif isinstance(value, BaseModel): + if isinstance(value, BaseModel): return value.model_dump() - elif isinstance(value, V1BaseModel): + if isinstance(value, V1BaseModel): if hasattr(value, "to_json"): return value.to_json() - else: - return value.dict() - elif isinstance(value, str): + return value.dict() + if isinstance(value, str): return {"result": value} return value @@ -103,11 +107,10 @@ def get_artifact_type(value, build_result) -> str: case Message(): result = ArtifactType.MESSAGE - if result == ArtifactType.UNKNOWN: - if isinstance(build_result, Generator): - result = ArtifactType.STREAM - elif isinstance(value, Message) and isinstance(value.text, Generator): - result = ArtifactType.STREAM + if result == ArtifactType.UNKNOWN and ( + isinstance(build_result, Generator) or (isinstance(value, Message) and isinstance(value.text, Generator)) + ): + result = ArtifactType.STREAM return result.value @@ -119,27 +122,30 @@ def post_process_raw(raw, artifact_type: str): return raw -def _vertex_to_primitive_dict(target: "Vertex") -> dict: - """ - Cleans the parameters of the target vertex. - """ +def _vertex_to_primitive_dict(target: Vertex) -> dict: + """Cleans the parameters of the target vertex.""" # Removes all keys that the values aren't python types like str, int, bool, etc. 
params = { - key: value for key, value in target.params.items() if isinstance(value, (str, int, bool, float, list, dict)) + key: value for key, value in target.params.items() if isinstance(value, str | int | bool | float | list | dict) } # if it is a list we need to check if the contents are python types for key, value in params.items(): if isinstance(value, list): - params[key] = [item for item in value if isinstance(item, (str, int, bool, float, list, dict))] + params[key] = [item for item in value if isinstance(item, str | int | bool | float | list | dict)] return params async def log_transaction( - flow_id: Union[str, UUID], source: "Vertex", status, target: Optional["Vertex"] = None, error=None + flow_id: str | UUID, source: Vertex, status, target: Vertex | None = None, error=None ) -> None: try: if not get_settings_service().settings.transactions_storage_enabled: return + if not flow_id: + if source.graph.flow_id: + flow_id = source.graph.flow_id + else: + return inputs = _vertex_to_primitive_dict(source) transaction = TransactionBase( vertex_id=source.id, @@ -154,18 +160,19 @@ async def log_transaction( with session_getter(get_db_service()) as session: inserted = crud_log_transaction(session, transaction) logger.debug(f"Logged transaction: {inserted.id}") - except Exception as e: - logger.error(f"Error logging transaction: {e}") + except Exception: # noqa: BLE001 + logger.exception("Error logging transaction") def log_vertex_build( + *, flow_id: str, vertex_id: str, valid: bool, params: Any, - data: "ResultDataResponse", - artifacts: Optional[dict] = None, -): + data: ResultDataResponse, + artifacts: dict | None = None, +) -> None: try: if not get_settings_service().settings.vertex_builds_storage_enabled: return @@ -182,5 +189,21 @@ def log_vertex_build( with session_getter(get_db_service()) as session: inserted = crud_log_vertex_build(session, vertex_build) logger.debug(f"Logged vertex build: {inserted.build_id}") - except Exception as e: - logger.exception(f"Error logging vertex build: {e}") + except Exception: # noqa: BLE001 + logger.exception("Error logging vertex build") + + +def rewrite_file_path(file_path: str): + file_path = file_path.replace("\\", "/") + + if ":" in file_path: + file_path = file_path.split(":", 1)[-1] + + file_path_split = [part for part in file_path.split("/") if part] + + if len(file_path_split) > 1: + consistent_file_path = f"{file_path_split[-2]}/{file_path_split[-1]}" + else: + consistent_file_path = "/".join(file_path_split) + + return [consistent_file_path] diff --git a/src/backend/base/langflow/graph/vertex/base.py b/src/backend/base/langflow/graph/vertex/base.py index d6cad40bb054..7e53e84249a3 100644 --- a/src/backend/base/langflow/graph/vertex/base.py +++ b/src/backend/base/langflow/graph/vertex/base.py @@ -1,20 +1,21 @@ +from __future__ import annotations + import ast import asyncio import inspect import os import traceback import types -import json +from collections.abc import AsyncIterator, Callable, Iterator, Mapping from enum import Enum -from typing import TYPE_CHECKING, Any, AsyncIterator, Callable, Dict, Iterator, List, Mapping, Optional, Set +from typing import TYPE_CHECKING, Any import pandas as pd from loguru import logger -from langflow.exceptions.component import ComponentBuildException +from langflow.exceptions.component import ComponentBuildError from langflow.graph.schema import INPUT_COMPONENTS, OUTPUT_COMPONENTS, InterfaceComponentTypes, ResultData from langflow.graph.utils import UnbuiltObject, UnbuiltResult, log_transaction 
-from langflow.graph.vertex.schema import NodeData from langflow.interface import initialize from langflow.interface.listing import lazy_load_dict from langflow.schema.artifact import ArtifactType @@ -22,33 +23,38 @@ from langflow.schema.message import Message from langflow.schema.schema import INPUT_FIELD_NAME, OutputValue, build_output_logs from langflow.services.deps import get_storage_service -from langflow.services.tracing.schema import Log from langflow.utils.constants import DIRECT_TYPES from langflow.utils.schemas import ChatOutputResponse from langflow.utils.util import sync_to_async, unescape_string if TYPE_CHECKING: + from uuid import UUID + from langflow.custom import Component + from langflow.events.event_manager import EventManager from langflow.graph.edge.base import CycleEdge, Edge from langflow.graph.graph.base import Graph + from langflow.graph.vertex.schema import NodeData + from langflow.services.tracing.schema import Log class VertexStates(str, Enum): """Vertex are related to it being active, inactive, or in an error state.""" - ACTIVE = "active" - INACTIVE = "inactive" - ERROR = "error" + ACTIVE = "ACTIVE" + INACTIVE = "INACTIVE" + ERROR = "ERROR" class Vertex: def __init__( self, data: NodeData, - graph: "Graph", - base_type: Optional[str] = None, + graph: Graph, + *, + base_type: str | None = None, is_task: bool = False, - params: Optional[Dict] = None, + params: dict | None = None, ) -> None: # is_external means that the Vertex send or receives data from # an external source (e.g the chat) @@ -61,72 +67,70 @@ def __init__( self.is_input = any(input_component_name in self.id for input_component_name in INPUT_COMPONENTS) self.is_output = any(output_component_name in self.id for output_component_name in OUTPUT_COMPONENTS) self.has_session_id = None - self._custom_component = None + self.custom_component = None self.has_external_input = False self.has_external_output = False self.graph = graph - self._data = data.copy() - self.base_type: Optional[str] = base_type - self.outputs: List[Dict] = [] - self._parse_data() - self._built_object = UnbuiltObject() - self._built_result = None - self._built = False - self._successors_ids: Optional[List[str]] = None - self.artifacts: Dict[str, Any] = {} - self.artifacts_raw: Dict[str, Any] = {} - self.artifacts_type: Dict[str, str] = {} - self.steps: List[Callable] = [self._build] - self.steps_ran: List[Callable] = [] - self.task_id: Optional[str] = None + self.full_data = data.copy() + self.base_type: str | None = base_type + self.outputs: list[dict] = [] + self.parse_data() + self.built_object: Any = UnbuiltObject() + self.built_result: Any = None + self.built = False + self._successors_ids: list[str] | None = None + self.artifacts: dict[str, Any] = {} + self.artifacts_raw: dict[str, Any] = {} + self.artifacts_type: dict[str, str] = {} + self.steps: list[Callable] = [self._build] + self.steps_ran: list[Callable] = [] + self.task_id: str | None = None self.is_task = is_task self.params = params or {} - self.parent_node_id: Optional[str] = self._data.get("parent_node_id") - self.load_from_db_fields: List[str] = [] + self.parent_node_id: str | None = self.full_data.get("parent_node_id") + self.load_from_db_fields: list[str] = [] self.parent_is_top_level = False self.layer = None - self.result: Optional[ResultData] = None - self.results: Dict[str, Any] = {} - self.outputs_logs: Dict[str, OutputValue] = {} - self.logs: Dict[str, Log] = {} + self.result: ResultData | None = None + self.results: dict[str, Any] = {} + self.outputs_logs: 
dict[str, OutputValue] = {} + self.logs: dict[str, list[Log]] = {} + self.has_cycle_edges = False try: self.is_interface_component = self.vertex_type in InterfaceComponentTypes except ValueError: self.is_interface_component = False self.use_result = False - self.build_times: List[float] = [] + self.build_times: list[float] = [] self.state = VertexStates.ACTIVE + self.log_transaction_tasks: set[asyncio.Task] = set() - def set_input_value(self, name: str, value: Any): - if self._custom_component is None: - raise ValueError(f"Vertex {self.id} does not have a component instance.") - self._custom_component._set_input_value(name, value) + def set_input_value(self, name: str, value: Any) -> None: + if self.custom_component is None: + msg = f"Vertex {self.id} does not have a component instance." + raise ValueError(msg) + self.custom_component._set_input_value(name, value) def to_data(self): - try: - data = json.loads(json.dumps(self._data, default=str)) - except TypeError: - data = self._data - - return data + return self.full_data - def add_component_instance(self, component_instance: "Component"): + def add_component_instance(self, component_instance: Component) -> None: component_instance.set_vertex(self) - self._custom_component = component_instance + self.custom_component = component_instance - def add_result(self, name: str, result: Any): + def add_result(self, name: str, result: Any) -> None: self.results[name] = result - def update_graph_state(self, key, new_state, append: bool): + def update_graph_state(self, key, new_state, *, append: bool) -> None: if append: self.graph.append_state(key, new_state, caller=self.id) else: self.graph.update_state(key, new_state, caller=self.id) - def set_state(self, state: str): + def set_state(self, state: str) -> None: self.state = VertexStates[state] - if self.state == VertexStates.INACTIVE and self.graph.in_degree_map[self.id] < 2: + if self.state == VertexStates.INACTIVE and self.graph.in_degree_map[self.id] <= 1: # If the vertex is inactive and has only one in degree # it means that it is not a merge point in the graph self.graph.inactivated_vertices.add(self.id) @@ -140,7 +144,7 @@ def is_active(self): def avg_build_time(self): return sum(self.build_times) / len(self.build_times) if self.build_times else 0 - def add_build_time(self, time): + def add_build_time(self, time) -> None: self.build_times.append(time) def set_result(self, result: ResultData) -> None: @@ -150,78 +154,80 @@ def get_built_result(self): # If the Vertex.type is a power component # then we need to return the built object # instead of the result dict - if self.is_interface_component and not isinstance(self._built_object, UnbuiltObject): - result = self._built_object + if self.is_interface_component and not isinstance(self.built_object, UnbuiltObject): + result = self.built_object # if it is not a dict or a string and hasattr model_dump then # return the model_dump - if not isinstance(result, (dict, str)) and hasattr(result, "content"): + if not isinstance(result, dict | str) and hasattr(result, "content"): return result.content return result - if isinstance(self._built_object, str): - self._built_result = self._built_object + if isinstance(self.built_object, str): + self.built_result = self.built_object - if isinstance(self._built_result, UnbuiltResult): + if isinstance(self.built_result, UnbuiltResult): return {} - return self._built_result if isinstance(self._built_result, dict) else {"result": self._built_result} + return self.built_result if isinstance(self.built_result, dict) 
else {"result": self.built_result} def set_artifacts(self) -> None: pass @property - def edges(self) -> List["CycleEdge"]: + def edges(self) -> list[CycleEdge]: return self.graph.get_vertex_edges(self.id) @property - def outgoing_edges(self) -> List["CycleEdge"]: + def outgoing_edges(self) -> list[CycleEdge]: return [edge for edge in self.edges if edge.source_id == self.id] @property - def incoming_edges(self) -> List["CycleEdge"]: + def incoming_edges(self) -> list[CycleEdge]: return [edge for edge in self.edges if edge.target_id == self.id] @property - def edges_source_names(self) -> Set[str | None]: + def edges_source_names(self) -> set[str | None]: return {edge.source_handle.name for edge in self.edges} @property - def predecessors(self) -> List["Vertex"]: + def predecessors(self) -> list[Vertex]: return self.graph.get_predecessors(self) @property - def successors(self) -> List["Vertex"]: + def successors(self) -> list[Vertex]: return self.graph.get_successors(self) @property - def successors_ids(self) -> List[str]: + def successors_ids(self) -> list[str]: return self.graph.successor_map.get(self.id, []) def __getstate__(self): state = self.__dict__.copy() state["_lock"] = None # Locks are not serializable - state["_built_object"] = None if isinstance(self._built_object, UnbuiltObject) else self._built_object - state["_built_result"] = None if isinstance(self._built_result, UnbuiltResult) else self._built_result + state["built_object"] = None if isinstance(self.built_object, UnbuiltObject) else self.built_object + state["built_result"] = None if isinstance(self.built_result, UnbuiltResult) else self.built_result return state def __setstate__(self, state): self.__dict__.update(state) self._lock = asyncio.Lock() # Reinitialize the lock - self._built_object = state.get("_built_object") or UnbuiltObject() - self._built_result = state.get("_built_result") or UnbuiltResult() + self.built_object = state.get("built_object") or UnbuiltObject() + self.built_result = state.get("built_result") or UnbuiltResult() - def set_top_level(self, top_level_vertices: List[str]) -> None: + def set_top_level(self, top_level_vertices: list[str]) -> None: self.parent_is_top_level = self.parent_node_id in top_level_vertices - def _parse_data(self) -> None: - self.data = self._data["data"] + def parse_data(self) -> None: + self.data = self.full_data["data"] if self.data["node"]["template"]["_type"] == "Component": if "outputs" not in self.data["node"]: - raise ValueError(f"Outputs not found for {self.display_name}") + msg = f"Outputs not found for {self.display_name}" + raise ValueError(msg) self.outputs = self.data["node"]["outputs"] else: self.outputs = self.data["node"].get("outputs", []) self.output = self.data["node"]["base_classes"] self.display_name: str = self.data["node"].get("display_name", self.id.split("-")[0]) + self.icon: str = self.data["node"].get("icon", self.id.split("-")[0]) self.description: str = self.data["node"].get("description", "") self.frozen: bool = self.data["node"].get("frozen", False) @@ -251,7 +257,7 @@ def _parse_data(self) -> None: ) if self.base_type is None: - for base_type, value in lazy_load_dict.ALL_TYPES_DICT.items(): + for base_type, value in lazy_load_dict.all_types_dict.items(): if self.vertex_type in value: self.base_type = base_type break @@ -259,17 +265,18 @@ def _parse_data(self) -> None: def get_value_from_template_dict(self, key: str): template_dict = self.data.get("node", {}).get("template", {}) if key not in template_dict: - raise ValueError(f"Key {key} not found 
in template dict") + msg = f"Key {key} not found in template dict" + raise ValueError(msg) return template_dict.get(key, {}).get("value") def get_task(self): # using the task_id, get the task from celery # and return it - from celery.result import AsyncResult # type: ignore + from celery.result import AsyncResult return AsyncResult(self.task_id) - def _set_params_from_normal_edge(self, params: dict, edge: "Edge", template_dict: dict): + def _set_params_from_normal_edge(self, params: dict, edge: Edge, template_dict: dict): param_key = edge.target_param # If the param_key is in the template_dict and the edge.target_id is the current node @@ -288,13 +295,13 @@ def _set_params_from_normal_edge(self, params: dict, edge: "Edge", template_dict if not param_dict or len(param_dict) != 1: params[param_key] = self.graph.get_vertex(edge.source_id) else: - params[param_key] = {key: self.graph.get_vertex(edge.source_id) for key in param_dict.keys()} + params[param_key] = {key: self.graph.get_vertex(edge.source_id) for key in param_dict} else: params[param_key] = self.graph.get_vertex(edge.source_id) return params - def _build_params(self): + def build_params(self) -> None: # sourcery skip: merge-list-append, remove-redundant-if # Some params are required, some are optional # but most importantly, some params are python base classes @@ -312,14 +319,15 @@ def _build_params(self): # and use that as the value for the param if self.graph is None: - raise ValueError("Graph not found") + msg = "Graph not found" + raise ValueError(msg) if self.updated_raw_params: self.updated_raw_params = False return template_dict = {key: value for key, value in self.data["node"]["template"].items() if isinstance(value, dict)} - params = {} + params: dict = {} for edge in self.edges: if not hasattr(edge, "target_param"): @@ -349,7 +357,7 @@ def _build_params(self): if "too many values to unpack" in str(e): full_path = file_path else: - raise e + raise params[field_name] = full_path elif field.get("required"): field_display_name = field.get("display_name") @@ -358,20 +366,23 @@ def _build_params(self): "Setting to None." 
) params[field_name] = None + elif field["list"]: + params[field_name] = [] else: - if field["list"]: - params[field_name] = [] - else: - params[field_name] = None + params[field_name] = None elif field.get("type") in DIRECT_TYPES and params.get(field_name) is None: val = field.get("value") if field.get("type") == "code": try: - params[field_name] = ast.literal_eval(val) if val else None - except Exception: + if field_name == "code": + params[field_name] = val + else: + params[field_name] = ast.literal_eval(val) if val else None + except Exception: # noqa: BLE001 + logger.debug(f"Error evaluating code for {field_name}") params[field_name] = val - elif field.get("type") in ["dict", "NestedDict"]: + elif field.get("type") in {"dict", "NestedDict"}: # When dict comes from the frontend it comes as a # list of dicts, so we need to convert it to a dict # before passing it to the build method @@ -403,19 +414,18 @@ def _build_params(self): if isinstance(val, bool): params[field_name] = val elif isinstance(val, str): - params[field_name] = val != "" + params[field_name] = bool(val) elif field.get("type") == "table" and val is not None: # check if the value is a list of dicts # if it is, create a pandas dataframe from it if isinstance(val, list) and all(isinstance(item, dict) for item in val): params[field_name] = pd.DataFrame(val) else: - raise ValueError(f"Invalid value type {type(val)} for field {field_name}") + msg = f"Invalid value type {type(val)} for field {field_name}" + raise ValueError(msg) elif val is not None and val != "": params[field_name] = val - elif val is not None and val != "": - params[field_name] = val if field.get("load_from_db"): load_from_db_fields.append(field_name) @@ -427,60 +437,75 @@ def _build_params(self): # Add _type to params self.params = params self.load_from_db_fields = load_from_db_fields - self._raw_params = params.copy() + self.raw_params = params.copy() - def update_raw_params(self, new_params: Mapping[str, str | list[str]], overwrite: bool = False): - """ - Update the raw parameters of the vertex with the given new parameters. + def update_raw_params(self, new_params: Mapping[str, str | list[str]], *, overwrite: bool = False) -> None: + """Update the raw parameters of the vertex with the given new parameters. Args: new_params (Dict[str, Any]): The new parameters to update. + overwrite (bool, optional): Whether to overwrite the existing parameters. + Defaults to False. Raises: - ValueError: If any key in new_params is not found in self._raw_params. + ValueError: If any key in new_params is not found in self.raw_params. 
""" - # First check if the input_value in _raw_params is not a vertex + # First check if the input_value in raw_params is not a vertex if not new_params: return - if any(isinstance(self._raw_params.get(key), Vertex) for key in new_params): + if any(isinstance(self.raw_params.get(key), Vertex) for key in new_params): return if not overwrite: - for key in new_params.copy(): # type: ignore - if key not in self._raw_params: - new_params.pop(key) # type: ignore - self._raw_params.update(new_params) - self.params = self._raw_params.copy() + for key in new_params.copy(): # type: ignore[attr-defined] + if key not in self.raw_params: + new_params.pop(key) # type: ignore[attr-defined] + self.raw_params.update(new_params) + self.params = self.raw_params.copy() self.updated_raw_params = True + def instantiate_component(self, user_id=None) -> None: + if not self.custom_component: + self.custom_component, _ = initialize.loading.instantiate_class( + user_id=user_id, + vertex=self, + ) + async def _build( self, fallback_to_env_vars, user_id=None, - ): - """ - Initiate the build process. - """ + event_manager: EventManager | None = None, + ) -> None: + """Initiate the build process.""" logger.debug(f"Building {self.display_name}") - await self._build_each_vertex_in_params_dict(user_id) + await self._build_each_vertex_in_params_dict() if self.base_type is None: - raise ValueError(f"Base type for vertex {self.display_name} not found") + msg = f"Base type for vertex {self.display_name} not found" + raise ValueError(msg) - if not self._custom_component: - custom_component, custom_params = await initialize.loading.instantiate_class(user_id=user_id, vertex=self) + if not self.custom_component: + custom_component, custom_params = initialize.loading.instantiate_class( + user_id=user_id, vertex=self, event_manager=event_manager + ) else: - custom_component = self._custom_component + custom_component = self.custom_component + self.custom_component.set_event_manager(event_manager) custom_params = initialize.loading.get_params(self.params) - await self._build_results(custom_component, custom_params, fallback_to_env_vars) + await self._build_results( + custom_component=custom_component, + custom_params=custom_params, + fallback_to_env_vars=fallback_to_env_vars, + base_type=self.base_type, + ) self._validate_built_object() - self._built = True + self.built = True - def extract_messages_from_artifacts(self, artifacts: Dict[str, Any]) -> List[dict]: - """ - Extracts messages from the artifacts. + def extract_messages_from_artifacts(self, artifacts: dict[str, Any]) -> list[dict]: + """Extracts messages from the artifacts. Args: artifacts (Dict[str, Any]): The artifacts to extract messages from. 
@@ -498,7 +523,7 @@ def extract_messages_from_artifacts(self, artifacts: Dict[str, Any]) -> List[dic component_id = self.id _type = self.artifacts_type - if isinstance(sender_name, (Data, Message)): + if isinstance(sender_name, Data | Message): sender_name = sender_name.get_text() messages = [ @@ -518,16 +543,13 @@ def extract_messages_from_artifacts(self, artifacts: Dict[str, Any]) -> List[dic return messages - def _finalize_build(self): + def finalize_build(self) -> None: result_dict = self.get_built_result() # We need to set the artifacts to pass information # to the frontend self.set_artifacts() artifacts = self.artifacts_raw - if isinstance(artifacts, dict): - messages = self.extract_messages_from_artifacts(artifacts) - else: - messages = [] + messages = self.extract_messages_from_artifacts(artifacts) if isinstance(artifacts, dict) else [] result_dict = ResultData( results=result_dict, artifacts=artifacts, @@ -539,11 +561,9 @@ def _finalize_build(self): ) self.set_result(result_dict) - async def _build_each_vertex_in_params_dict(self, user_id=None): - """ - Iterates over each vertex in the params dictionary and builds it. - """ - for key, value in self._raw_params.items(): + async def _build_each_vertex_in_params_dict(self) -> None: + """Iterates over each vertex in the params dictionary and builds it.""" + for key, value in self.raw_params.items(): if self._is_vertex(value): if value == self: del self.params[key] @@ -565,11 +585,9 @@ async def _build_each_vertex_in_params_dict(self, user_id=None): async def _build_dict_and_update_params( self, key, - vertices_dict: Dict[str, "Vertex"], - ): - """ - Iterates over a dictionary of vertices, builds each and updates the params dictionary. - """ + vertices_dict: dict[str, Vertex], + ) -> None: + """Iterates over a dictionary of vertices, builds each and updates the params dictionary.""" for sub_key, value in vertices_dict.items(): if not self._is_vertex(value): self.params[key][sub_key] = value @@ -578,20 +596,15 @@ async def _build_dict_and_update_params( self.params[key][sub_key] = result def _is_vertex(self, value): - """ - Checks if the provided value is an instance of Vertex. - """ + """Checks if the provided value is an instance of Vertex.""" return isinstance(value, Vertex) def _is_list_of_vertices(self, value): - """ - Checks if the provided value is a list of Vertex instances. - """ + """Checks if the provided value is a list of Vertex instances.""" return all(self._is_vertex(vertex) for vertex in value) - async def get_result(self, requester: "Vertex", target_handle_name: Optional[str] = None) -> Any: - """ - Retrieves the result of the vertex. + async def get_result(self, requester: Vertex, target_handle_name: str | None = None) -> Any: + """Retrieves the result of the vertex. This is a read-only method so it raises an error if the vertex has not been built yet. @@ -601,9 +614,19 @@ async def get_result(self, requester: "Vertex", target_handle_name: Optional[str async with self._lock: return await self._get_result(requester, target_handle_name) - async def _get_result(self, requester: "Vertex", target_handle_name: Optional[str] = None) -> Any: - """ - Retrieves the result of the built component. 
+ def _log_transaction_async( + self, flow_id: str | UUID, source: Vertex, status, target: Vertex | None = None, error=None + ) -> None: + task = asyncio.create_task(log_transaction(flow_id, source, status, target, error)) + self.log_transaction_tasks.add(task) + task.add_done_callback(self.log_transaction_tasks.discard) + + async def _get_result( + self, + requester: Vertex, + target_handle_name: str | None = None, # noqa: ARG002 + ) -> Any: + """Retrieves the result of the built component. If the component has not been built yet, a ValueError is raised. @@ -611,21 +634,19 @@ async def _get_result(self, requester: "Vertex", target_handle_name: Optional[st The built result if use_result is True, else the built object. """ flow_id = self.graph.flow_id - if not self._built: + if not self.built: if flow_id: - asyncio.create_task(log_transaction(str(flow_id), source=self, target=requester, status="error")) - raise ValueError(f"Component {self.display_name} has not been built yet") + self._log_transaction_async(str(flow_id), source=self, target=requester, status="error") + msg = f"Component {self.display_name} has not been built yet" + raise ValueError(msg) - result = self._built_result if self.use_result else self._built_object + result = self.built_result if self.use_result else self.built_object if flow_id: - asyncio.create_task(log_transaction(str(flow_id), source=self, target=requester, status="success")) + self._log_transaction_async(str(flow_id), source=self, target=requester, status="success") return result - async def _build_vertex_and_update_params(self, key, vertex: "Vertex"): - """ - Builds a given vertex and updates the params dictionary accordingly. - """ - + async def _build_vertex_and_update_params(self, key, vertex: Vertex) -> None: + """Builds a given vertex and updates the params dictionary accordingly.""" result = await vertex.get_result(self, target_handle_name=key) self._handle_func(key, result) if isinstance(result, list): @@ -635,11 +656,9 @@ async def _build_vertex_and_update_params(self, key, vertex: "Vertex"): async def _build_list_of_vertices_and_update_params( self, key, - vertices: List["Vertex"], - ): - """ - Iterates over a list of vertices, builds each and updates the params dictionary. - """ + vertices: list[Vertex], + ) -> None: + """Iterates over a list of vertices, builds each and updates the params dictionary.""" self.params[key] = [] for vertex in vertices: result = await vertex.get_result(self, target_handle_name=key) @@ -658,41 +677,40 @@ async def _build_list_of_vertices_and_update_params( self.params[key].append(result) except AttributeError as e: logger.exception(e) - raise ValueError( + msg = ( f"Params {key} ({self.params[key]}) is not a list and cannot be extended with {result}. " - f"Error building Component {self.display_name}: \n\n{str(e)}" - ) from e + f"Error building Component {self.display_name}: \n\n{e}" + ) + raise ValueError(msg) from e - def _handle_func(self, key, result): - """ - Handles 'func' key by checking if the result is a function and setting it as coroutine. 
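The _log_transaction_async helper above replaces bare asyncio.create_task(...) calls with the keep-a-reference pattern recommended by the asyncio docs, so pending log tasks cannot be garbage-collected mid-flight. A minimal standalone sketch of the same pattern:

    import asyncio

    background_tasks: set[asyncio.Task] = set()

    def fire_and_forget(coro) -> None:
        task = asyncio.create_task(coro)
        background_tasks.add(task)  # keep a strong reference while running
        task.add_done_callback(background_tasks.discard)  # drop it when done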
- """ + def _handle_func(self, key, result) -> None: + """Handles 'func' key by checking if the result is a function and setting it as coroutine.""" if key == "func": if not isinstance(result, types.FunctionType): if hasattr(result, "run"): - result = result.run # type: ignore + result = result.run elif hasattr(result, "get_function"): - result = result.get_function() # type: ignore + result = result.get_function() elif inspect.iscoroutinefunction(result): self.params["coroutine"] = result else: self.params["coroutine"] = sync_to_async(result) - def _extend_params_list_with_result(self, key, result): - """ - Extends a list in the params dictionary with the given result if it exists. - """ + def _extend_params_list_with_result(self, key, result) -> None: + """Extends a list in the params dictionary with the given result if it exists.""" if isinstance(self.params[key], list): self.params[key].extend(result) - async def _build_results(self, custom_component, custom_params, fallback_to_env_vars=False): + async def _build_results( + self, custom_component, custom_params, base_type: str, *, fallback_to_env_vars=False + ) -> None: try: result = await initialize.loading.get_instance_results( custom_component=custom_component, custom_params=custom_params, vertex=self, fallback_to_env_vars=fallback_to_env_vars, - base_type=self.base_type, + base_type=base_type, ) self.outputs_logs = build_output_logs(self, result) @@ -701,84 +719,93 @@ async def _build_results(self, custom_component, custom_params, fallback_to_env_ except Exception as exc: tb = traceback.format_exc() logger.exception(exc) - raise ComponentBuildException(f"Error building Component {self.display_name}: \n\n{exc}", tb) from exc + msg = f"Error building Component {self.display_name}: \n\n{exc}" + raise ComponentBuildError(msg, tb) from exc - def _update_built_object_and_artifacts(self, result: Any | tuple[Any, dict] | tuple["Component", Any, dict]): - """ - Updates the built object and its artifacts. - """ + def _update_built_object_and_artifacts(self, result: Any | tuple[Any, dict] | tuple[Component, Any, dict]) -> None: + """Updates the built object and its artifacts.""" if isinstance(result, tuple): - if len(result) == 2: - self._built_object, self.artifacts = result - elif len(result) == 3: - self._custom_component, self._built_object, self.artifacts = result - self.logs = self._custom_component._output_logs + if len(result) == 2: # noqa: PLR2004 + self.built_object, self.artifacts = result + elif len(result) == 3: # noqa: PLR2004 + self.custom_component, self.built_object, self.artifacts = result + self.logs = self.custom_component._output_logs self.artifacts_raw = self.artifacts.get("raw", None) self.artifacts_type = { self.outputs[0]["name"]: self.artifacts.get("type", None) or ArtifactType.UNKNOWN.value } self.artifacts = {self.outputs[0]["name"]: self.artifacts} else: - self._built_object = result - - def _validate_built_object(self): - """ - Checks if the built object is None and raises a ValueError if so. - """ - if isinstance(self._built_object, UnbuiltObject): - raise ValueError(f"{self.display_name}: {self._built_object_repr()}") - elif self._built_object is None: + self.built_object = result + + def _validate_built_object(self) -> None: + """Checks if the built object is None and raises a ValueError if so.""" + if isinstance(self.built_object, UnbuiltObject): + msg = f"{self.display_name}: {self.built_object_repr()}" + raise TypeError(msg) + if self.built_object is None: message = f"{self.display_name} returned None." 
if self.base_type == "custom_components": message += " Make sure your build method returns a component." logger.warning(message) - elif isinstance(self._built_object, (Iterator, AsyncIterator)): - if self.display_name in ["Text Output"]: - raise ValueError(f"You are trying to stream to a {self.display_name}. Try using a Chat Output instead.") - - def _reset(self, params_update: Optional[Dict[str, Any]] = None): - self._built = False - self._built_object = UnbuiltObject() - self._built_result = UnbuiltResult() + elif isinstance(self.built_object, Iterator | AsyncIterator): + if self.display_name == "Text Output": + msg = f"You are trying to stream to a {self.display_name}. Try using a Chat Output instead." + raise ValueError(msg) + + def _reset(self) -> None: + self.built = False + self.built_object = UnbuiltObject() + self.built_result = UnbuiltResult() self.artifacts = {} self.steps_ran = [] - self._build_params() + self.build_params() - def _is_chat_input(self): + def _is_chat_input(self) -> bool: return False - def build_inactive(self): + def build_inactive(self) -> None: # Just set the results to None - self._built = True - self._built_object = None - self._built_result = None + self.built = True + self.built_object = None + self.built_result = None async def build( self, user_id=None, - inputs: Optional[Dict[str, Any]] = None, - files: Optional[list[str]] = None, - requester: Optional["Vertex"] = None, + inputs: dict[str, Any] | None = None, + files: list[str] | None = None, + requester: Vertex | None = None, + event_manager: EventManager | None = None, **kwargs, ) -> Any: async with self._lock: if self.state == VertexStates.INACTIVE: # If the vertex is inactive, return None self.build_inactive() - return + return None - if self.frozen and self._built: + if self.frozen and self.built: return await self.get_requester_result(requester) - elif self._built and requester is not None: + if self.built and requester is not None: # This means that the vertex has already been built # and we are just getting the result for the requester return await self.get_requester_result(requester) self._reset() - + # inject session_id if it is not None + if inputs is not None and "session" in inputs and inputs["session"] is not None and self.has_session_id: + session_id_value = self.get_value_from_template_dict("session_id") + if session_id_value == "": + self.update_raw_params({"session_id": inputs["session"]}, overwrite=True) if self._is_chat_input() and (inputs or files): chat_input = {} - if inputs: + if ( + inputs + and isinstance(inputs, dict) + and "input_value" in inputs + and inputs.get("input_value") is not None + ): chat_input.update({"input_value": inputs.get(INPUT_FIELD_NAME, "")}) if files: chat_input.update({"files": files}) @@ -788,22 +815,18 @@ async def build( # Run steps for step in self.steps: if step not in self.steps_ran: - if inspect.iscoroutinefunction(step): - await step(user_id=user_id, **kwargs) - else: - step(user_id=user_id, **kwargs) + await step(user_id=user_id, event_manager=event_manager, **kwargs) self.steps_ran.append(step) - self._finalize_build() + self.finalize_build() - result = await self.get_requester_result(requester) - return result + return await self.get_requester_result(requester) - async def get_requester_result(self, requester: Optional["Vertex"]): + async def get_requester_result(self, requester: Vertex | None): # If the requester is None, this means that # the Vertex is the root of the graph if requester is None: - return self._built_object + return 
self.built_object # Get the requester edge requester_edge = next((edge for edge in self.edges if edge.target_id == requester.id), None) @@ -814,7 +837,7 @@ async def get_requester_result(self, requester: Optional["Vertex"]): else await requester_edge.get_result_from_source(source=self, target=requester) ) - def add_edge(self, edge: "CycleEdge") -> None: + def add_edge(self, edge: CycleEdge) -> None: if edge not in self.edges: self.edges.append(edge) @@ -828,16 +851,24 @@ def __eq__(self, __o: object) -> bool: # We should create a more robust comparison # for the Vertex class ids_are_equal = self.id == __o.id - # self._data is a dict and we need to compare them + # self.data is a dict and we need to compare them # to check if they are equal data_are_equal = self.data == __o.data - return ids_are_equal and data_are_equal except AttributeError: return False + else: + return ids_are_equal and data_are_equal def __hash__(self) -> int: return id(self) - def _built_object_repr(self): - # Add a message with an emoji, stars for sucess, - return "Built successfully ✨" if self._built_object is not None else "Failed to build 😵‍💫" + def built_object_repr(self) -> str: + # Add a message with an emoji, stars for success, + return "Built successfully ✨" if self.built_object is not None else "Failed to build 😵‍💫" + + def apply_on_outputs(self, func: Callable[[Any], Any]) -> None: + """Applies a function to the outputs of the vertex.""" + if not self.custom_component or not self.custom_component.outputs: + return + # Apply the function to each output + [func(output) for output in self.custom_component._outputs_map.values()] diff --git a/src/backend/base/langflow/graph/vertex/constants.py b/src/backend/base/langflow/graph/vertex/constants.py index 8b137891791f..e69de29bb2d1 100644 --- a/src/backend/base/langflow/graph/vertex/constants.py +++ b/src/backend/base/langflow/graph/vertex/constants.py @@ -1 +0,0 @@ - diff --git a/src/backend/base/langflow/graph/vertex/exceptions.py b/src/backend/base/langflow/graph/vertex/exceptions.py index 2d610f027cb1..5b773bad145c 100644 --- a/src/backend/base/langflow/graph/vertex/exceptions.py +++ b/src/backend/base/langflow/graph/vertex/exceptions.py @@ -1,4 +1,4 @@ -class NoComponentInstance(Exception): +class NoComponentInstanceError(Exception): def __init__(self, vertex_id: str): message = f"Vertex {vertex_id} does not have a component instance." 
super().__init__(message) diff --git a/src/backend/base/langflow/graph/vertex/schema.py b/src/backend/base/langflow/graph/vertex/schema.py index f7ffd32b689f..5a52cbd80e41 100644 --- a/src/backend/base/langflow/graph/vertex/schema.py +++ b/src/backend/base/langflow/graph/vertex/schema.py @@ -1,8 +1,13 @@ -from typing import Dict +from enum import Enum from typing_extensions import NotRequired, TypedDict +class NodeTypeEnum(str, Enum): + NoteNode = "noteNode" + GenericNode = "genericNode" + + class Position(TypedDict): x: float y: float @@ -10,7 +15,7 @@ class Position(TypedDict): class NodeData(TypedDict): id: str - data: Dict + data: dict dragging: NotRequired[bool] height: NotRequired[int] width: NotRequired[int] @@ -18,3 +23,4 @@ class NodeData(TypedDict): positionAbsolute: NotRequired[Position] selected: NotRequired[bool] parent_node_id: NotRequired[str] + type: NotRequired[NodeTypeEnum] diff --git a/src/backend/base/langflow/graph/vertex/types.py b/src/backend/base/langflow/graph/vertex/types.py index 1c60b54618dc..696d47ed736c 100644 --- a/src/backend/base/langflow/graph/vertex/types.py +++ b/src/backend/base/langflow/graph/vertex/types.py @@ -1,17 +1,18 @@ -import asyncio +from __future__ import annotations + +import contextlib import json -from typing import TYPE_CHECKING, Any, AsyncIterator, Dict, Generator, Iterator, List, cast +from collections.abc import AsyncIterator, Generator, Iterator +from typing import TYPE_CHECKING, Any, cast import yaml from langchain_core.messages import AIMessage, AIMessageChunk from loguru import logger from langflow.graph.schema import CHAT_COMPONENTS, RECORDS_COMPONENTS, InterfaceComponentTypes, ResultData -from langflow.graph.utils import UnbuiltObject, log_transaction, log_vertex_build, serialize_field +from langflow.graph.utils import UnbuiltObject, log_vertex_build, rewrite_file_path, serialize_field from langflow.graph.vertex.base import Vertex -from langflow.graph.vertex.exceptions import NoComponentInstance -from langflow.graph.vertex.schema import NodeData -from langflow.inputs.inputs import InputTypes +from langflow.graph.vertex.exceptions import NoComponentInstanceError from langflow.schema import Data from langflow.schema.artifact import ArtifactType from langflow.schema.message import Message @@ -22,15 +23,18 @@ if TYPE_CHECKING: from langflow.graph.edge.base import CycleEdge + from langflow.graph.vertex.schema import NodeData + from langflow.inputs.inputs import InputTypes class CustomComponentVertex(Vertex): def __init__(self, data: NodeData, graph): super().__init__(data, graph=graph, base_type="custom_components") - def _built_object_repr(self): + def built_object_repr(self): if self.artifacts and "repr" in self.artifacts: - return self.artifacts["repr"] or super()._built_object_repr() + return self.artifacts["repr"] or super().built_object_repr() + return None class ComponentVertex(Vertex): @@ -38,41 +42,40 @@ def __init__(self, data: NodeData, graph): super().__init__(data, graph=graph, base_type="component") def get_input(self, name: str) -> InputTypes: - if self._custom_component is None: - raise ValueError(f"Vertex {self.id} does not have a component instance.") - return self._custom_component.get_input(name) + if self.custom_component is None: + msg = f"Vertex {self.id} does not have a component instance." 
+ raise ValueError(msg) + return self.custom_component.get_input(name) def get_output(self, name: str) -> Output: - if self._custom_component is None: - raise NoComponentInstance(self.id) - return self._custom_component.get_output(name) + if self.custom_component is None: + raise NoComponentInstanceError(self.id) + return self.custom_component.get_output(name) - def _built_object_repr(self): + def built_object_repr(self): if self.artifacts and "repr" in self.artifacts: - return self.artifacts["repr"] or super()._built_object_repr() + return self.artifacts["repr"] or super().built_object_repr() + return None - def _update_built_object_and_artifacts(self, result): - """ - Updates the built object and its artifacts. - """ + def _update_built_object_and_artifacts(self, result) -> None: + """Updates the built object and its artifacts.""" if isinstance(result, tuple): - if len(result) == 2: - self._built_object, self.artifacts = result - elif len(result) == 3: - self._custom_component, self._built_object, self.artifacts = result - self.logs = self._custom_component._output_logs + if len(result) == 2: # noqa: PLR2004 + self.built_object, self.artifacts = result + elif len(result) == 3: # noqa: PLR2004 + self.custom_component, self.built_object, self.artifacts = result + self.logs = self.custom_component._output_logs for key in self.artifacts: self.artifacts_raw[key] = self.artifacts[key].get("raw", None) self.artifacts_type[key] = self.artifacts[key].get("type", None) or ArtifactType.UNKNOWN.value else: - self._built_object = result + self.built_object = result - for key, value in self._built_object.items(): + for key, value in self.built_object.items(): self.add_result(key, value) - def get_edge_with_target(self, target_id: str) -> Generator["CycleEdge", None, None]: - """ - Get the edge with the target id. + def get_edge_with_target(self, target_id: str) -> Generator[CycleEdge, None, None]: + """Get the edge with the target id. Args: target_id: The target id of the edge. @@ -84,9 +87,8 @@ def get_edge_with_target(self, target_id: str) -> Generator["CycleEdge", None, N if edge.target_id == target_id: yield edge - async def _get_result(self, requester: "Vertex", target_handle_name: str | None = None) -> Any: - """ - Retrieves the result of the built component. + async def _get_result(self, requester: Vertex, target_handle_name: str | None = None) -> Any: + """Retrieves the result of the built component. If the component has not been built yet, a ValueError is raised. @@ -94,22 +96,23 @@ async def _get_result(self, requester: "Vertex", target_handle_name: str | None The built result if use_result is True, else the built object. 
""" flow_id = self.graph.flow_id - if not self._built: - if flow_id: - asyncio.create_task( - log_transaction(source=self, target=requester, flow_id=str(flow_id), status="error") - ) + if not self.built: + default_value = UNDEFINED for edge in self.get_edge_with_target(requester.id): # We need to check if the edge is a normal edge - # or a contract edge - if edge.is_cycle and edge.target_param: - return requester.get_value_from_template_dict(edge.target_param) + default_value = requester.get_value_from_template_dict(edge.target_param) - raise ValueError(f"Component {self.display_name} has not been built yet") + if flow_id: + self._log_transaction_async(source=self, target=requester, flow_id=str(flow_id), status="error") + if default_value is not UNDEFINED: + return default_value + msg = f"Component {self.display_name} has not been built yet" + raise ValueError(msg) if requester is None: - raise ValueError("Requester Vertex is None") + msg = "Requester Vertex is None" + raise ValueError(msg) edges = self.get_edge_with_target(requester.id) result = UNDEFINED @@ -127,23 +130,24 @@ async def _get_result(self, requester: "Vertex", target_handle_name: str | None result = self.results[edge.source_handle.name] else: result = cast(Any, output.value) - except NoComponentInstance: + except NoComponentInstanceError: result = self.results[edge.source_handle.name] break if result is UNDEFINED: if edge is None: - raise ValueError(f"Edge not found between {self.display_name} and {requester.display_name}") - elif edge.source_handle.name not in self.results: - raise ValueError(f"Result not found for {edge.source_handle.name}. Results: {self.results}") - else: - raise ValueError(f"Result not found for {edge.source_handle.name} in {edge}") + msg = f"Edge not found between {self.display_name} and {requester.display_name}" + raise ValueError(msg) + if edge.source_handle.name not in self.results: + msg = f"Result not found for {edge.source_handle.name}. Results: {self.results}" + raise ValueError(msg) + msg = f"Result not found for {edge.source_handle.name} in {edge}" + raise ValueError(msg) if flow_id: - asyncio.create_task(log_transaction(source=self, target=requester, flow_id=str(flow_id), status="success")) + self._log_transaction_async(source=self, target=requester, flow_id=str(flow_id), status="success") return result - def extract_messages_from_artifacts(self, artifacts: Dict[str, Any]) -> List[dict]: - """ - Extracts messages from the artifacts. + def extract_messages_from_artifacts(self, artifacts: dict[str, Any]) -> list[dict]: + """Extracts messages from the artifacts. Args: artifacts (Dict[str, Any]): The artifacts to extract messages from. @@ -152,16 +156,15 @@ def extract_messages_from_artifacts(self, artifacts: Dict[str, Any]) -> List[dic List[str]: The extracted messages. 
""" messages = [] - for key in artifacts: - artifact = artifacts[key] + for key, artifact in artifacts.items(): if any( - key not in artifact for key in ["text", "sender", "sender_name", "session_id", "stream_url"] + k not in artifact for k in ["text", "sender", "sender_name", "session_id", "stream_url"] ) and not isinstance(artifact, Message): continue message_dict = artifact if isinstance(artifact, dict) else artifact.model_dump() if not message_dict.get("text"): continue - try: + with contextlib.suppress(KeyError): messages.append( ChatOutputResponse( message=message_dict["text"], @@ -176,11 +179,9 @@ def extract_messages_from_artifacts(self, artifacts: Dict[str, Any]) -> List[dic type=self.artifacts_type[key], ).model_dump(exclude_none=True) ) - except KeyError: - pass return messages - def _finalize_build(self): + def finalize_build(self) -> None: result_dict = self.get_built_result() # We need to set the artifacts to pass information # to the frontend @@ -200,17 +201,18 @@ def _finalize_build(self): class InterfaceVertex(ComponentVertex): def __init__(self, data: NodeData, graph): super().__init__(data, graph=graph) + self.added_message = None self.steps = [self._build, self._run] + self.is_interface_component = True - def build_stream_url(self): + def build_stream_url(self) -> str: return f"/api/v1/build/{self.graph.flow_id}/{self.id}/stream" - def _built_object_repr(self): + def built_object_repr(self): if self.task_id and self.is_task: if task := self.get_task(): return str(task.info) - else: - return f"Task {self.task_id} is not running" + return f"Task {self.task_id} is not running" if self.artifacts: # dump as a yaml string if isinstance(self.artifacts, dict): @@ -222,27 +224,25 @@ def _built_object_repr(self): artifacts = [] for artifact in _artifacts: # artifacts = {k.title().replace("_", " "): v for k, v in self.artifacts.items() if v is not None} - artifact = {k.title().replace("_", " "): v for k, v in artifact.items() if v is not None} - artifacts.append(artifact) - yaml_str = yaml.dump(artifacts, default_flow_style=False, allow_unicode=True) - return yaml_str - return super()._built_object_repr() + _artifact = {k.title().replace("_", " "): v for k, v in artifact.items() if v is not None} + artifacts.append(_artifact) + return yaml.dump(artifacts, default_flow_style=False, allow_unicode=True) + return super().built_object_repr() def _process_chat_component(self): - """ - Process the chat component and return the message. + """Process the chat component and return the message. This method processes the chat component by extracting the necessary parameters such as sender, sender_name, and message from the `params` dictionary. It then - performs additional operations based on the type of the `_built_object` attribute. - If `_built_object` is an instance of `AIMessage`, it creates a `ChatOutputResponse` - object using the `from_message` method. If `_built_object` is not an instance of - `UnbuiltObject`, it checks the type of `_built_object` and performs specific - operations accordingly. If `_built_object` is a dictionary, it converts it into a - code block. If `_built_object` is an instance of `Data`, it assigns the `text` + performs additional operations based on the type of the `built_object` attribute. + If `built_object` is an instance of `AIMessage`, it creates a `ChatOutputResponse` + object using the `from_message` method. If `built_object` is not an instance of + `UnbuiltObject`, it checks the type of `built_object` and performs specific + operations accordingly. 
If `built_object` is a dictionary, it converts it into a + code block. If `built_object` is an instance of `Data`, it assigns the `text` attribute to the `message` variable. If `message` is an instance of `AsyncIterator` or `Iterator`, it builds a stream URL and sets `message` to an empty string. If - `_built_object` is not a string, it converts it to a string. If `message` is a + `built_object` is not a string, it converts it to a string. If `message` is a generator or iterator, it assigns it to the `message` variable. Finally, it creates a `ChatOutputResponse` object using the extracted parameters and assigns it to the `artifacts` attribute. If `artifacts` is not None, it calls the `model_dump` method @@ -256,6 +256,10 @@ def _process_chat_component(self): sender = self.params.get("sender", None) sender_name = self.params.get("sender_name", None) message = self.params.get(INPUT_FIELD_NAME, None) + files = self.params.get("files", []) + treat_file_path = files is not None and not isinstance(files, list) and isinstance(files, str) + if treat_file_path: + self.params["files"] = rewrite_file_path(files) files = [{"path": file} if isinstance(file, str) else file for file in self.params.get("files", [])] if isinstance(message, str): message = unescape_string(message) @@ -266,7 +270,7 @@ def _process_chat_component(self): text_output = self.results["message"].text else: text_output = message - if isinstance(text_output, (AIMessage, AIMessageChunk)): + if isinstance(text_output, AIMessage | AIMessageChunk): artifacts = ChatOutputResponse.from_message( text_output, sender=sender, @@ -279,12 +283,12 @@ def _process_chat_component(self): message = dict_to_codeblock(text_output) elif isinstance(text_output, Data): message = text_output.text - elif isinstance(message, (AsyncIterator, Iterator)): + elif isinstance(message, AsyncIterator | Iterator): stream_url = self.build_stream_url() message = "" self.results["text"] = message self.results["message"].text = message - self._built_object = self.results + self.built_object = self.results elif not isinstance(text_output, str): message = str(text_output) # if the message is a generator or iterator @@ -313,8 +317,7 @@ def _process_chat_component(self): return message def _process_data_component(self): - """ - Process the record component of the vertex. + """Process the record component of the vertex. If the built object is an instance of `Data`, it calls the `model_dump` method and assigns the result to the `artifacts` attribute. @@ -332,55 +335,59 @@ def _process_data_component(self): ValueError: If an element in the list is not an instance of `Data` and `ignore_errors` is set to `False`. 
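`isinstance(x, (A, B))` is rewritten as `isinstance(x, A | B)` here and in the hunks below. PEP 604 union objects are accepted by `isinstance()` and `issubclass()` at runtime from Python 3.10 onward, which is presumably why the sweep is safe for this codebase. A quick demonstration:

```python
from collections.abc import AsyncIterator, Iterator


def is_stream(value: object) -> bool:
    # PEP 604: `X | Y` builds a types.UnionType that isinstance() accepts
    # on Python 3.10+; equivalent to isinstance(value, (AsyncIterator, Iterator)).
    return isinstance(value, AsyncIterator | Iterator)


print(is_stream(iter([1, 2, 3])))  # True  — a plain iterator
print(is_stream([1, 2, 3]))        # False — a list is iterable, not an iterator
```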
""" - if isinstance(self._built_object, Data): - artifacts = [self._built_object.data] - elif isinstance(self._built_object, list): + if isinstance(self.built_object, Data): + artifacts = [self.built_object.data] + elif isinstance(self.built_object, list): artifacts = [] ignore_errors = self.params.get("ignore_errors", False) - for value in self._built_object: + for value in self.built_object: if isinstance(value, Data): artifacts.append(value.data) elif ignore_errors: logger.error(f"Data expected, but got {value} of type {type(value)}") else: - raise ValueError(f"Data expected, but got {value} of type {type(value)}") + msg = f"Data expected, but got {value} of type {type(value)}" + raise ValueError(msg) self.artifacts = DataOutputResponse(data=artifacts) - return self._built_object - - async def _run(self, *args, **kwargs): - if self.is_interface_component: - if self.vertex_type in CHAT_COMPONENTS: - message = self._process_chat_component() - elif self.vertex_type in RECORDS_COMPONENTS: - message = self._process_data_component() - if isinstance(self._built_object, (AsyncIterator, Iterator)): - if self.params.get("return_data", False): - self._built_object = Data(text=message, data=self.artifacts) - else: - self._built_object = message - self._built_result = self._built_object - - else: - await super()._run(*args, **kwargs) + return self.built_object + + async def _run(self, *args, **kwargs) -> None: # noqa: ARG002 + if self.vertex_type in CHAT_COMPONENTS: + message = self._process_chat_component() + elif self.vertex_type in RECORDS_COMPONENTS: + message = self._process_data_component() + if isinstance(self.built_object, AsyncIterator | Iterator): + if self.params.get("return_data", False): + self.built_object = Data(text=message, data=self.artifacts) + else: + self.built_object = message + self.built_result = self.built_object async def stream(self): iterator = self.params.get(INPUT_FIELD_NAME, None) - if not isinstance(iterator, (AsyncIterator, Iterator)): - raise ValueError("The message must be an iterator or an async iterator.") + if not isinstance(iterator, AsyncIterator | Iterator): + msg = "The message must be an iterator or an async iterator." 
+ raise TypeError(msg) is_async = isinstance(iterator, AsyncIterator) complete_message = "" if is_async: async for message in iterator: - message = message.content if hasattr(message, "content") else message - message = message.text if hasattr(message, "text") else message - yield message - complete_message += message + _message = message.content if hasattr(message, "content") else message + _message = _message.text if hasattr(_message, "text") else _message + yield _message + complete_message += _message else: for message in iterator: - message = message.content if hasattr(message, "content") else message - message = message.text if hasattr(message, "text") else message - yield message - complete_message += message + _message = message.content if hasattr(message, "content") else message + _message = _message.text if hasattr(_message, "text") else _message + yield _message + complete_message += _message + + files = self.params.get("files", []) + + treat_file_path = files is not None and not isinstance(files, list) and isinstance(files, str) + if treat_file_path: + self.params["files"] = rewrite_file_path(files) if hasattr(self.params.get("sender_name"), "get_text"): sender_name = self.params.get("sender_name").get_text() @@ -403,43 +410,44 @@ async def stream(self): session_id=self.params.get("session_id", ""), ) self.params[INPUT_FIELD_NAME] = complete_message - if isinstance(self._built_object, dict): - for key, value in self._built_object.items(): - if hasattr(value, "text") and (isinstance(value.text, (AsyncIterator, Iterator)) or value.text == ""): - self._built_object[key] = message + if isinstance(self.built_object, dict): + for key, value in self.built_object.items(): + if hasattr(value, "text") and (isinstance(value.text, AsyncIterator | Iterator) or value.text == ""): + self.built_object[key] = message else: - self._built_object = message + self.built_object = message self.artifacts_type = ArtifactType.MESSAGE # Update artifacts with the message # and remove the stream_url - self._finalize_build() + self.finalize_build() logger.debug(f"Streamed message: {complete_message}") # Set the result in the vertex of origin edges = self.get_edge_with_target(self.id) for edge in edges: origin_vertex = self.graph.get_vertex(edge.source_id) for key, value in origin_vertex.results.items(): - if isinstance(value, (AsyncIterator, Iterator)): + if isinstance(value, AsyncIterator | Iterator): origin_vertex.results[key] = complete_message - if self._custom_component: - if hasattr(self._custom_component, "should_store_message") and hasattr( - self._custom_component, "store_message" - ): - self._custom_component.store_message(message) + if ( + self.custom_component + and hasattr(self.custom_component, "should_store_message") + and hasattr(self.custom_component, "store_message") + ): + self.custom_component.store_message(message) log_vertex_build( flow_id=self.graph.flow_id, vertex_id=self.id, valid=True, - params=self._built_object_repr(), + params=self.built_object_repr(), data=self.result, artifacts=self.artifacts, ) self._validate_built_object() - self._built = True + self.built = True - async def consume_async_generator(self): + async def consume_async_generator(self) -> None: async for _ in self.stream(): pass @@ -454,15 +462,16 @@ def __init__(self, data: NodeData, graph): self.is_state = False @property - def successors_ids(self) -> List[str]: + def successors_ids(self) -> list[str]: if self._successors_ids is None: self.is_state = False return super().successors_ids return 
self._successors_ids - def _built_object_repr(self): + def built_object_repr(self): if self.artifacts and "repr" in self.artifacts: - return self.artifacts["repr"] or super()._built_object_repr() + return self.artifacts["repr"] or super().built_object_repr() + return None def dict_to_codeblock(d: dict) -> str: diff --git a/src/backend/base/langflow/graph/vertex/utils.py b/src/backend/base/langflow/graph/vertex/utils.py index 0f69e4b2d495..4844ad8eb60f 100644 --- a/src/backend/base/langflow/graph/vertex/utils.py +++ b/src/backend/base/langflow/graph/vertex/utils.py @@ -1,19 +1,19 @@ +from __future__ import annotations + from typing import TYPE_CHECKING if TYPE_CHECKING: from langflow.graph.vertex.base import Vertex -def build_clean_params(target: "Vertex") -> dict: - """ - Cleans the parameters of the target vertex. - """ +def build_clean_params(target: Vertex) -> dict: + """Cleans the parameters of the target vertex.""" # Removes all keys that the values aren't python types like str, int, bool, etc. params = { - key: value for key, value in target.params.items() if isinstance(value, (str, int, bool, float, list, dict)) + key: value for key, value in target.params.items() if isinstance(value, str | int | bool | float | list | dict) } # if it is a list we need to check if the contents are python types for key, value in params.items(): if isinstance(value, list): - params[key] = [item for item in value if isinstance(item, (str, int, bool, float, list, dict))] + params[key] = [item for item in value if isinstance(item, str | int | bool | float | list | dict)] return params diff --git a/src/backend/base/langflow/helpers/__init__.py b/src/backend/base/langflow/helpers/__init__.py index 70c5733f6d15..02f9d73c69f6 100644 --- a/src/backend/base/langflow/helpers/__init__.py +++ b/src/backend/base/langflow/helpers/__init__.py @@ -1,3 +1,3 @@ from .data import data_to_text, docs_to_data, messages_to_text -__all__ = ["docs_to_data", "data_to_text", "messages_to_text"] +__all__ = ["data_to_text", "docs_to_data", "messages_to_text"] diff --git a/src/backend/base/langflow/helpers/base_model.py b/src/backend/base/langflow/helpers/base_model.py index c81fd99d2c61..87b4d64cece1 100644 --- a/src/backend/base/langflow/helpers/base_model.py +++ b/src/backend/base/langflow/helpers/base_model.py @@ -1,6 +1,71 @@ +from typing import Any, TypedDict + from pydantic import BaseModel as PydanticBaseModel -from pydantic import ConfigDict +from pydantic import ConfigDict, Field, create_model + +TRUE_VALUES = ["true", "1", "t", "y", "yes"] + + +class SchemaField(TypedDict): + name: str + type: str + description: str + multiple: bool class BaseModel(PydanticBaseModel): model_config = ConfigDict(populate_by_name=True) + + +def _get_type_annotation(type_str: str, *, multiple: bool) -> type: + type_mapping = { + "str": str, + "int": int, + "float": float, + "bool": bool, + "boolean": bool, + "list": list[Any], + "dict": dict[str, Any], + "number": float, + "text": str, + } + try: + base_type = type_mapping[type_str] + except KeyError as e: + msg = f"Invalid type: {type_str}" + raise ValueError(msg) from e + if multiple: + return list[base_type] # type: ignore[valid-type] + return base_type # type: ignore[return-value] + + +def build_model_from_schema(schema: list[SchemaField]) -> type[PydanticBaseModel]: + fields = {} + for field in schema: + field_name = field["name"] + field_type_str = field["type"] + description = field.get("description", "") + multiple = field.get("multiple", False) + multiple = coalesce_bool(multiple) + 
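`consume_async_generator` (above) drains `self.stream()` purely for its side effects — accumulating the complete message and finalizing the build — while discarding each yielded chunk. The same shape reduced to a runnable toy:

```python
import asyncio


async def stream():
    # Stand-in for InterfaceVertex.stream(): an async generator of chunks.
    for chunk in ("a", "b", "c"):
        yield chunk


async def consume_async_generator() -> None:
    # Drain the stream for its side effects, ignoring the yielded values —
    # the same `async for _ in self.stream(): pass` shape as in the diff.
    async for _ in stream():
        pass


asyncio.run(consume_async_generator())
```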
field_type_annotation = _get_type_annotation(field_type_str, multiple=multiple) + fields[field_name] = (field_type_annotation, Field(description=description)) + return create_model("OutputModel", **fields) + + +def coalesce_bool(value: Any) -> bool: + """Coalesces the given value into a boolean. + + Args: + value (Any): The value to be coalesced. + + Returns: + bool: The coalesced boolean value. + + """ + if isinstance(value, bool): + return value + if isinstance(value, str): + return value.lower() in TRUE_VALUES + if isinstance(value, int): + return bool(value) + return False diff --git a/src/backend/base/langflow/helpers/custom.py b/src/backend/base/langflow/helpers/custom.py index bdbb128f4c2a..225c124dcfb4 100644 --- a/src/backend/base/langflow/helpers/custom.py +++ b/src/backend/base/langflow/helpers/custom.py @@ -2,7 +2,7 @@ def format_type(type_: Any) -> str: - if type_ == str: + if type_ is str: type_ = "Text" elif hasattr(type_, "__name__"): type_ = type_.__name__ diff --git a/src/backend/base/langflow/helpers/data.py b/src/backend/base/langflow/helpers/data.py index 38103707869a..d2cb88ba7007 100644 --- a/src/backend/base/langflow/helpers/data.py +++ b/src/backend/base/langflow/helpers/data.py @@ -1,5 +1,3 @@ -from typing import Union - from langchain_core.documents import Document from langflow.schema import Data @@ -7,8 +5,7 @@ def docs_to_data(documents: list[Document]) -> list[Data]: - """ - Converts a list of Documents to a list of Data. + """Converts a list of Documents to a list of Data. Args: documents (list[Document]): The list of Documents to convert. @@ -19,12 +16,13 @@ def docs_to_data(documents: list[Document]) -> list[Data]: return [Data.from_document(document) for document in documents] -def data_to_text(template: str, data: Union[Data, list[Data]], sep: str = "\n") -> str: - """ - Converts a list of Data to a list of texts. +def data_to_text(template: str, data: Data | list[Data], sep: str = "\n") -> str: + """Converts a list of Data to a list of texts. Args: + template (str): The template to use for the conversion. data (list[Data]): The list of Data to convert. + sep (str): The separator used to join the data. Returns: list[str]: The converted list of texts. @@ -32,22 +30,21 @@ def data_to_text(template: str, data: Union[Data, list[Data]], sep: str = "\n") if isinstance(data, (Data)): data = [data] # Check if there are any format strings in the template - _data = [] - for value in data: + _data = [ # If it is not a record, create one with the key "text" - if not isinstance(value, Data): - value = Data(text=value) - _data.append(value) + Data(text=value) if not isinstance(value, Data) else value + for value in data + ] formated_data = [template.format(data=value.data, **value.data) for value in _data] return sep.join(formated_data) -def messages_to_text(template: str, messages: Union[Message, list[Message]]) -> str: - """ - Converts a list of Messages to a list of texts. +def messages_to_text(template: str, messages: Message | list[Message]) -> str: + """Converts a list of Messages to a list of texts. Args: + template (str): The template to use for the conversion. messages (list[Message]): The list of Messages to convert. 
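Taken together, `_get_type_annotation`, `coalesce_bool`, and `build_model_from_schema` turn a declarative field list into a concrete pydantic model via `create_model`. A hedged usage sketch, assuming the helpers above are importable from the new module:

```python
from langflow.helpers.base_model import build_model_from_schema  # module added above

schema = [
    {"name": "title", "type": "str", "description": "Headline", "multiple": "false"},
    {"name": "tags", "type": "str", "description": "Labels", "multiple": "yes"},
]

Model = build_model_from_schema(schema)
instance = Model(title="hello", tags=["a", "b"])
print(instance.model_dump())  # {'title': 'hello', 'tags': ['a', 'b']}
# "yes" is coalesced to True, so tags becomes list[str]; "false" leaves title a plain str.
```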
Returns: @@ -60,7 +57,8 @@ def messages_to_text(template: str, messages: Union[Message, list[Message]]) -> for message in messages: # If it is not a message, create one with the key "text" if not isinstance(message, Message): - raise ValueError("All elements in the list must be of type Message.") + msg = "All elements in the list must be of type Message." + raise TypeError(msg) _messages.append(message) formated_messages = [template.format(data=message.model_dump(), **message.model_dump()) for message in _messages] diff --git a/src/backend/base/langflow/helpers/flow.py b/src/backend/base/langflow/helpers/flow.py index b753620c5de5..675e14c5c1a1 100644 --- a/src/backend/base/langflow/helpers/flow.py +++ b/src/backend/base/langflow/helpers/flow.py @@ -1,20 +1,24 @@ -from typing import TYPE_CHECKING, Any, Awaitable, Callable, List, Optional, Tuple, Type, Union, cast +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, cast from uuid import UUID from fastapi import HTTPException from pydantic.v1 import BaseModel, Field, create_model from sqlmodel import select -from langflow.graph.schema import RunOutputs -from langflow.schema import Data from langflow.schema.schema import INPUT_FIELD_NAME from langflow.services.database.models.flow import Flow from langflow.services.database.models.flow.model import FlowRead from langflow.services.deps import get_settings_service, session_scope if TYPE_CHECKING: + from collections.abc import Awaitable, Callable + from langflow.graph.graph.base import Graph + from langflow.graph.schema import RunOutputs from langflow.graph.vertex.base import Vertex + from langflow.schema import Data INPUT_TYPE_MAP = { "ChatInput": {"type_hint": "Optional[str]", "default": '""'}, @@ -23,61 +27,75 @@ } -def list_flows(*, user_id: Optional[str] = None) -> List[Data]: +def list_flows(*, user_id: str | None = None) -> list[Data]: if not user_id: - raise ValueError("Session is invalid") + msg = "Session is invalid" + raise ValueError(msg) try: with session_scope() as session: flows = session.exec( - select(Flow).where(Flow.user_id == user_id).where(Flow.is_component == False) # noqa + select(Flow).where(Flow.user_id == user_id).where(Flow.is_component == False) # noqa: E712 ).all() - flows_data = [flow.to_data() for flow in flows] - return flows_data + return [flow.to_data() for flow in flows] except Exception as e: - raise ValueError(f"Error listing flows: {e}") + msg = f"Error listing flows: {e}" + raise ValueError(msg) from e async def load_flow( - user_id: str, flow_id: Optional[str] = None, flow_name: Optional[str] = None, tweaks: Optional[dict] = None -) -> "Graph": + user_id: str, flow_id: str | None = None, flow_name: str | None = None, tweaks: dict | None = None +) -> Graph: from langflow.graph.graph.base import Graph from langflow.processing.process import process_tweaks if not flow_id and not flow_name: - raise ValueError("Flow ID or Flow Name is required") + msg = "Flow ID or Flow Name is required" + raise ValueError(msg) if not flow_id and flow_name: flow_id = find_flow(flow_name, user_id) if not flow_id: - raise ValueError(f"Flow {flow_name} not found") + msg = f"Flow {flow_name} not found" + raise ValueError(msg) with session_scope() as session: graph_data = flow.data if (flow := session.get(Flow, flow_id)) else None if not graph_data: - raise ValueError(f"Flow {flow_id} not found") + msg = f"Flow {flow_id} not found" + raise ValueError(msg) if tweaks: graph_data = process_tweaks(graph_data=graph_data, tweaks=tweaks) - graph = 
Graph.from_payload(graph_data, flow_id=flow_id, user_id=user_id) - return graph + return Graph.from_payload(graph_data, flow_id=flow_id, user_id=user_id) -def find_flow(flow_name: str, user_id: str) -> Optional[str]: +def find_flow(flow_name: str, user_id: str) -> str | None: with session_scope() as session: flow = session.exec(select(Flow).where(Flow.name == flow_name).where(Flow.user_id == user_id)).first() return flow.id if flow else None async def run_flow( - inputs: Optional[Union[dict, List[dict]]] = None, - tweaks: Optional[dict] = None, - flow_id: Optional[str] = None, - flow_name: Optional[str] = None, - output_type: Optional[str] = "chat", - user_id: Optional[str] = None, -) -> List[RunOutputs]: + inputs: dict | list[dict] | None = None, + tweaks: dict | None = None, + flow_id: str | None = None, + flow_name: str | None = None, + output_type: str | None = "chat", + user_id: str | None = None, + run_id: str | None = None, + session_id: str | None = None, + graph: Graph | None = None, +) -> list[RunOutputs]: if user_id is None: - raise ValueError("Session is invalid") - graph = await load_flow(user_id, flow_id, flow_name, tweaks) + msg = "Session is invalid" + raise ValueError(msg) + if graph is None: + graph = await load_flow(user_id, flow_id, flow_name, tweaks) + if run_id: + graph.set_run_id(UUID(run_id)) + if session_id: + graph.session_id = session_id + if user_id: + graph.user_id = user_id if inputs is None: inputs = [] @@ -96,7 +114,7 @@ async def run_flow( for vertex in graph.vertices if output_type == "debug" or ( - vertex.is_output and (output_type == "any" or output_type in vertex.id.lower()) # type: ignore + vertex.is_output and (output_type == "any" or output_type in vertex.id.lower()) # type: ignore[operator] ) ] @@ -112,14 +130,14 @@ async def run_flow( def generate_function_for_flow( - inputs: List["Vertex"], flow_id: str, user_id: str | UUID | None + inputs: list[Vertex], flow_id: str, user_id: str | UUID | None ) -> Callable[..., Awaitable[Any]]: - """ - Generate a dynamic flow function based on the given inputs and flow ID. + """Generate a dynamic flow function based on the given inputs and flow ID. Args: inputs (List[Vertex]): The list of input vertices for the flow. flow_id (str): The ID of the flow. + user_id (str | UUID | None): The user ID associated with the flow. Returns: Coroutine: The dynamic flow function. 
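`run_flow` gains `run_id`, `session_id`, and an optional preloaded `graph`, so callers that already hold a `Graph` in memory skip the `load_flow` round-trip entirely. The lazy-load-unless-provided shape, reduced to a runnable toy (hypothetical names, no database involved):

```python
import asyncio


async def load_resource(name: str) -> dict:
    # Stand-in for load_flow(): pretend to fetch something expensive.
    await asyncio.sleep(0)
    return {"name": name}


async def run(name: str | None = None, resource: dict | None = None) -> dict:
    # Same shape as run_flow(): only load when the caller didn't pass one in.
    if resource is None:
        resource = await load_resource(name or "default")
    return resource


print(asyncio.run(run(name="My Flow")))            # loads on demand
print(asyncio.run(run(resource={"name": "warm"})))  # reuses the preloaded object
```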
@@ -135,7 +153,10 @@ def generate_function_for_flow( """ # Prepare function arguments with type hints and default values args = [ - f"{input_.display_name.lower().replace(' ', '_')}: {INPUT_TYPE_MAP[input_.base_name]['type_hint']} = {INPUT_TYPE_MAP[input_.base_name]['default']}" + ( + f"{input_.display_name.lower().replace(' ', '_')}: {INPUT_TYPE_MAP[input_.base_name]['type_hint']} = " + f"{INPUT_TYPE_MAP[input_.base_name]['default']}" + ) for input_ in inputs ] @@ -148,7 +169,7 @@ def generate_function_for_flow( # Map original argument names to their corresponding Pythonic variable names in the function arg_mappings = ", ".join( f'"{original_name}": {name}' - for original_name, name in zip(original_arg_names, [arg.split(":")[0] for arg in args]) + for original_name, name in zip(original_arg_names, [arg.split(":")[0] for arg in args], strict=True) ) func_body = f""" @@ -172,7 +193,7 @@ async def flow_function({func_args}): if run_output is not None: for output in run_output.outputs: if output: - data.extend(build_data_from_result_data(output, get_final_results_only=True)) + data.extend(build_data_from_result_data(output)) return format_flow_output_data(data) except Exception as e: raise ToolException(f'Error running flow: ' + e) @@ -180,19 +201,19 @@ async def flow_function({func_args}): compiled_func = compile(func_body, "", "exec") local_scope: dict = {} - exec(compiled_func, globals(), local_scope) + exec(compiled_func, globals(), local_scope) # noqa: S102 return local_scope["flow_function"] def build_function_and_schema( - flow_data: Data, graph: "Graph", user_id: str | UUID | None -) -> Tuple[Callable[..., Awaitable[Any]], Type[BaseModel]]: - """ - Builds a dynamic function and schema for a given flow. + flow_data: Data, graph: Graph, user_id: str | UUID | None +) -> tuple[Callable[..., Awaitable[Any]], type[BaseModel]]: + """Builds a dynamic function and schema for a given flow. Args: flow_data (Data): The flow record containing information about the flow. graph (Graph): The graph representing the flow. + user_id (str): The user ID associated with the flow. Returns: Tuple[Callable, BaseModel]: A tuple containing the dynamic function and the schema. @@ -204,9 +225,8 @@ def build_function_and_schema( return dynamic_flow_function, schema -def get_flow_inputs(graph: "Graph") -> List["Vertex"]: - """ - Retrieves the flow inputs from the given graph. +def get_flow_inputs(graph: Graph) -> list[Vertex]: + """Retrieves the flow inputs from the given graph. Args: graph (Graph): The graph object representing the flow. @@ -214,16 +234,11 @@ def get_flow_inputs(graph: "Graph") -> List["Vertex"]: Returns: List[Data]: A list of input data, where each record contains the ID, name, and description of the input vertex. """ - inputs = [] - for vertex in graph.vertices: - if vertex.is_input: - inputs.append(vertex) - return inputs + return [vertex for vertex in graph.vertices if vertex.is_input] -def build_schema_from_inputs(name: str, inputs: List["Vertex"]) -> Type[BaseModel]: - """ - Builds a schema from the given inputs. +def build_schema_from_inputs(name: str, inputs: list[Vertex]) -> type[BaseModel]: + """Builds a schema from the given inputs. Args: name (str): The name of the schema. 
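The `zip(..., strict=True)` change in the hunk below opts into Python 3.10's length checking: if the generated argument list ever drifts out of sync with the original display names, the mapping now fails loudly instead of silently truncating (Ruff flags the unchecked form as B905). For example:

```python
original = ["Chat Input", "Text Input"]
pythonic = ["chat_input", "text_input"]

print(dict(zip(original, pythonic, strict=True)))
# {'Chat Input': 'chat_input', 'Text Input': 'text_input'}

try:
    dict(zip(original, ["chat_input"], strict=True))
except ValueError as e:
    print("caught:", e)  # zip() argument 2 is shorter than argument 1
```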
@@ -239,18 +254,18 @@ def build_schema_from_inputs(name: str, inputs: List["Vertex"]) -> Type[BaseMode field_name = input_.display_name.lower().replace(" ", "_") description = input_.description fields[field_name] = (str, Field(default="", description=description)) - return create_model(name, **fields) # type: ignore + return create_model(name, **fields) -def get_arg_names(inputs: List["Vertex"]) -> List[dict[str, str]]: - """ - Returns a list of dictionaries containing the component name and its corresponding argument name. +def get_arg_names(inputs: list[Vertex]) -> list[dict[str, str]]: + """Returns a list of dictionaries containing the component name and its corresponding argument name. Args: inputs (List[Vertex]): A list of Vertex objects representing the inputs. Returns: - List[dict[str, str]]: A list of dictionaries, where each dictionary contains the component name and its argument name. + List[dict[str, str]]: A list of dictionaries, where each dictionary contains the component name and its + argument name. """ return [ {"component_name": input_.display_name, "arg_name": input_.display_name.lower().replace(" ", "_")} @@ -258,8 +273,7 @@ def get_arg_names(inputs: List["Vertex"]) -> List[dict[str, str]]: ] -def get_flow_by_id_or_endpoint_name(flow_id_or_name: str, user_id: Optional[UUID] = None) -> FlowRead | None: - flow_read = None +def get_flow_by_id_or_endpoint_name(flow_id_or_name: str, user_id: UUID | None = None) -> FlowRead | None: with session_scope() as session: endpoint_name = None try: @@ -273,19 +287,20 @@ def get_flow_by_id_or_endpoint_name(flow_id_or_name: str, user_id: Optional[UUID flow = session.exec(stmt).first() if flow is None: raise HTTPException(status_code=404, detail=f"Flow identifier {flow_id_or_name} not found") - flow_read = FlowRead.model_validate(flow, from_attributes=True) - return flow_read + return FlowRead.model_validate(flow, from_attributes=True) -def generate_unique_flow_name(flow_name, user_id, session): +async def generate_unique_flow_name(flow_name, user_id, session): original_name = flow_name n = 1 while True: # Check if a flow with the given name exists - existing_flow = session.exec( - select(Flow).where( - Flow.name == flow_name, - Flow.user_id == user_id, + existing_flow = ( + await session.exec( + select(Flow).where( + Flow.name == flow_name, + Flow.user_id == user_id, + ) ) ).first() diff --git a/src/backend/base/langflow/helpers/folders.py b/src/backend/base/langflow/helpers/folders.py index c3d7567b51cc..be13b0d39a65 100644 --- a/src/backend/base/langflow/helpers/folders.py +++ b/src/backend/base/langflow/helpers/folders.py @@ -1,16 +1,19 @@ -from langflow.services.database.models.folder.model import Folder from sqlalchemy import select +from langflow.services.database.models.folder.model import Folder + -def generate_unique_folder_name(folder_name, user_id, session): +async def generate_unique_folder_name(folder_name, user_id, session): original_name = folder_name n = 1 while True: # Check if a folder with the given name exists - existing_folder = session.exec( - select(Folder).where( - Folder.name == folder_name, - Folder.user_id == user_id, + existing_folder = ( + await session.exec( + select(Folder).where( + Folder.name == folder_name, + Folder.user_id == user_id, + ) ) ).first() diff --git a/src/backend/base/langflow/helpers/user.py b/src/backend/base/langflow/helpers/user.py new file mode 100644 index 000000000000..9f3488033185 --- /dev/null +++ b/src/backend/base/langflow/helpers/user.py @@ -0,0 +1,27 @@ +from uuid import UUID 
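`build_schema_from_inputs` (below) derives a tool schema by snake-casing each input vertex's display name — note it still targets the `pydantic.v1` compatibility shim rather than pydantic 2. A sketch with plain dicts standing in for `Vertex` objects (an assumption for illustration; real vertices carry far more state):

```python
from pydantic.v1 import Field, create_model  # the flow tooling still uses the v1 shim

inputs = [
    {"display_name": "Chat Input", "description": "User message"},
    {"display_name": "Max Tokens", "description": "Generation cap"},
]

fields = {
    i["display_name"].lower().replace(" ", "_"): (str, Field(default="", description=i["description"]))
    for i in inputs
}
Schema = create_model("FlowToolSchema", **fields)
print(Schema().dict())  # {'chat_input': '', 'max_tokens': ''}
```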
+ +from fastapi import HTTPException +from sqlmodel import select + +from langflow.services.database.models.flow.model import Flow +from langflow.services.database.models.user.model import User, UserRead +from langflow.services.deps import get_db_service + + +def get_user_by_flow_id_or_endpoint_name(flow_id_or_name: str) -> UserRead | None: + with get_db_service().with_session() as session: + try: + flow_id = UUID(flow_id_or_name) + flow = session.get(Flow, flow_id) + except ValueError: + stmt = select(Flow).where(Flow.endpoint_name == flow_id_or_name) + flow = session.exec(stmt).first() + + if flow is None: + raise HTTPException(status_code=404, detail=f"Flow identifier {flow_id_or_name} not found") + + user = session.get(User, flow.user_id) + if user is None: + raise HTTPException(status_code=404, detail=f"User for flow {flow_id_or_name} not found") + + return UserRead.model_validate(user, from_attributes=True) diff --git a/src/backend/base/langflow/initial_setup/setup.py b/src/backend/base/langflow/initial_setup/setup.py index dfedeec7e2a6..594b9046cbf8 100644 --- a/src/backend/base/langflow/initial_setup/setup.py +++ b/src/backend/base/langflow/initial_setup/setup.py @@ -1,28 +1,38 @@ import copy import json -import os import shutil import time from collections import defaultdict from copy import deepcopy from datetime import datetime, timezone from pathlib import Path -from typing import Awaitable from uuid import UUID import orjson -from emoji import demojize, purely_emoji # type: ignore +from emoji import demojize, purely_emoji from loguru import logger +from sqlalchemy.exc import NoResultFound from sqlmodel import select -from langflow.base.constants import FIELD_FORMAT_ATTRIBUTES, NODE_FORMAT_ATTRIBUTES, ORJSON_OPTIONS -from langflow.graph.graph.base import Graph +from langflow.base.constants import ( + FIELD_FORMAT_ATTRIBUTES, + NODE_FORMAT_ATTRIBUTES, + ORJSON_OPTIONS, +) from langflow.services.auth.utils import create_super_user from langflow.services.database.models.flow.model import Flow, FlowCreate from langflow.services.database.models.folder.model import Folder, FolderCreate -from langflow.services.database.models.folder.utils import create_default_folder_if_it_doesnt_exist +from langflow.services.database.models.folder.utils import ( + create_default_folder_if_it_doesnt_exist, + get_default_folder_id, +) from langflow.services.database.models.user.crud import get_user_by_username -from langflow.services.deps import get_settings_service, get_storage_service, get_variable_service, session_scope +from langflow.services.deps import ( + get_settings_service, + get_storage_service, + get_variable_service, + session_scope, +) from langflow.template.field.prompt import DEFAULT_PROMPT_INTUT_TYPES from langflow.utils.util import escape_json_dump @@ -41,14 +51,15 @@ def update_projects_components_with_latest_component_versions(project_data, all_ # and update it all all_types_dict_flat = {} for category in all_types_dict.values(): - for component in category.values(): - all_types_dict_flat[component["display_name"]] = component + for key, component in category.items(): + all_types_dict_flat[key] = component # noqa: PERF403 node_changes_log = defaultdict(list) project_data_copy = deepcopy(project_data) for node in project_data_copy.get("nodes", []): node_data = node.get("data").get("node") - if node_data.get("display_name") in all_types_dict_flat: - latest_node = all_types_dict_flat.get(node_data.get("display_name")) + node_type = node.get("data").get("type") + if node_type in 
all_types_dict_flat: + latest_node = all_types_dict_flat.get(node_type) latest_template = latest_node.get("template") node_data["template"]["code"] = latest_template["code"] @@ -56,12 +67,12 @@ def update_projects_components_with_latest_component_versions(project_data, all_ node_data["outputs"] = latest_node["outputs"] if node_data["template"]["_type"] != latest_template["_type"]: node_data["template"]["_type"] = latest_template["_type"] - if node_data.get("display_name") != "Prompt": + if node_type != "Prompt": node_data["template"] = latest_template else: for key, value in latest_template.items(): if key not in node_data["template"]: - node_changes_log[node_data["display_name"]].append( + node_changes_log[node_type].append( { "attr": key, "old_value": None, @@ -70,7 +81,7 @@ def update_projects_components_with_latest_component_versions(project_data, all_ ) node_data["template"][key] = value elif isinstance(value, dict) and value.get("value"): - node_changes_log[node_data["display_name"]].append( + node_changes_log[node_type].append( { "attr": key, "old_value": node_data["template"][key], @@ -78,10 +89,10 @@ def update_projects_components_with_latest_component_versions(project_data, all_ } ) node_data["template"][key]["value"] = value["value"] - for key, value in node_data["template"].items(): + for key in node_data["template"]: if key not in latest_template: node_data["template"][key]["input_types"] = DEFAULT_PROMPT_INTUT_TYPES - node_changes_log[node_data["display_name"]].append( + node_changes_log[node_type].append( { "attr": "_type", "old_value": node_data["template"]["_type"], @@ -90,17 +101,19 @@ def update_projects_components_with_latest_component_versions(project_data, all_ ) else: for attr in NODE_FORMAT_ATTRIBUTES: - if attr in latest_node: + if ( + attr in latest_node # Check if it needs to be updated - if latest_node[attr] != node_data.get(attr): - node_changes_log[node_data["display_name"]].append( - { - "attr": attr, - "old_value": node_data.get(attr), - "new_value": latest_node[attr], - } - ) - node_data[attr] = latest_node[attr] + and latest_node[attr] != node_data.get(attr) + ): + node_changes_log[node_type].append( + { + "attr": attr, + "old_value": node_data.get(attr), + "new_value": latest_node[attr], + } + ) + node_data[attr] = latest_node[attr] for field_name, field_dict in latest_template.items(): if field_name not in node_data["template"]: @@ -109,19 +122,22 @@ def update_projects_components_with_latest_component_versions(project_data, all_ # The idea here is to update some attributes of the field to_check_attributes = FIELD_FORMAT_ATTRIBUTES for attr in to_check_attributes: - if attr in field_dict and attr in node_data["template"].get(field_name): + if ( + attr in field_dict + and attr in node_data["template"].get(field_name) # Check if it needs to be updated - if field_dict[attr] != node_data["template"][field_name][attr]: - node_changes_log[node_data["display_name"]].append( - { - "attr": f"{field_name}.{attr}", - "old_value": node_data["template"][field_name][attr], - "new_value": field_dict[attr], - } - ) - node_data["template"][field_name][attr] = field_dict[attr] + and field_dict[attr] != node_data["template"][field_name][attr] + ): + node_changes_log[node_type].append( + { + "attr": f"{field_name}.{attr}", + "old_value": node_data["template"][field_name][attr], + "new_value": field_dict[attr], + } + ) + node_data["template"][field_name][attr] = field_dict[attr] # Remove fields that are not in the latest template - if node_data.get("display_name") != 
"Prompt": + if node_type != "Prompt": for field_name in list(node_data["template"].keys()): if field_name not in latest_template: node_data["template"].pop(field_name) @@ -156,7 +172,7 @@ def update_new_output(data): new_source_handle["output_types"] = new_source_handle["baseClasses"] del new_source_handle["baseClasses"] - if "inputTypes" in new_target_handle and new_target_handle["inputTypes"]: + if new_target_handle.get("inputTypes"): intersection = [ type_ for type_ in new_source_handle["output_types"] if type_ in new_target_handle["inputTypes"] ] @@ -230,10 +246,12 @@ def update_edges_with_latest_component_versions(project_data): target_handle = scape_json_parse(target_handle) # Now find the source and target nodes in the nodes list source_node = next( - (node for node in project_data.get("nodes", []) if node.get("id") == edge.get("source")), None + (node for node in project_data.get("nodes", []) if node.get("id") == edge.get("source")), + None, ) target_node = next( - (node for node in project_data.get("nodes", []) if node.get("id") == edge.get("target")), None + (node for node in project_data.get("nodes", []) if node.get("id") == edge.get("target")), + None, ) if source_node and target_node: source_node_data = source_node.get("data").get("node") @@ -274,16 +292,17 @@ def update_edges_with_latest_component_versions(project_data): source_handle["output_types"] = new_output_types field_name = target_handle.get("fieldName") - if field_name in target_node_data.get("template"): - if target_handle["inputTypes"] != target_node_data.get("template").get(field_name).get("input_types"): - edge_changes_log[target_node_data["display_name"]].append( - { - "attr": "inputTypes", - "old_value": target_handle["inputTypes"], - "new_value": target_node_data.get("template").get(field_name).get("input_types"), - } - ) - target_handle["inputTypes"] = target_node_data.get("template").get(field_name).get("input_types") + if field_name in target_node_data.get("template") and target_handle["inputTypes"] != target_node_data.get( + "template" + ).get(field_name).get("input_types"): + edge_changes_log[target_node_data["display_name"]].append( + { + "attr": "inputTypes", + "old_value": target_handle["inputTypes"], + "new_value": target_node_data.get("template").get(field_name).get("input_types"), + } + ) + target_handle["inputTypes"] = target_node_data.get("template").get(field_name).get("input_types") escaped_source_handle = escape_json_dump(source_handle) escaped_target_handle = escape_json_dump(target_handle) try: @@ -321,7 +340,7 @@ def update_edges_with_latest_component_versions(project_data): return project_data_copy -def log_node_changes(node_changes_log): +def log_node_changes(node_changes_log) -> None: # The idea here is to log the changes that were made to the nodes in debug # Something like: # Node: "Node Name" was updated with the following changes: @@ -343,37 +362,42 @@ def load_starter_projects(retries=3, delay=1) -> list[tuple[Path, dict]]: for file in folder.glob("*.json"): attempt = 0 while attempt < retries: - with open(file, "r", encoding="utf-8") as f: - try: - project = orjson.loads(f.read()) - starter_projects.append((file, project)) - logger.info(f"Loaded starter project {file}") - break # Break if load is successful - except orjson.JSONDecodeError as e: - attempt += 1 - if attempt >= retries: - raise ValueError(f"Error loading starter project {file}: {e}") - time.sleep(delay) # Wait before retrying + content = file.read_text(encoding="utf-8") + try: + project = orjson.loads(content) + 
starter_projects.append((file, project)) + logger.info(f"Loaded starter project {file}") + break # Break if load is successful + except orjson.JSONDecodeError as e: + attempt += 1 + if attempt >= retries: + msg = f"Error loading starter project {file}: {e}" + raise ValueError(msg) from e + time.sleep(delay) # Wait before retrying return starter_projects -def copy_profile_pictures(): +def copy_profile_pictures() -> None: config_dir = get_storage_service().settings_service.settings.config_dir + if config_dir is None: + msg = "Config dir is not set in the settings" + raise ValueError(msg) origin = Path(__file__).parent / "profile_pictures" target = Path(config_dir) / "profile_pictures" - if not os.path.exists(origin): - raise ValueError(f"The source folder '{origin}' does not exist.") + if not origin.exists(): + msg = f"The source folder '{origin}' does not exist." + raise ValueError(msg) - if not os.path.exists(target): - os.makedirs(target) + if not target.exists(): + target.mkdir(parents=True) try: shutil.copytree(origin, target, dirs_exist_ok=True) logger.debug(f"Folder copied from '{origin}' to '{target}'") - except Exception as e: - logger.error(f"Error copying the folder: {e}") + except Exception: # noqa: BLE001 + logger.exception("Error copying the folder") def get_project_data(project): @@ -382,17 +406,15 @@ def get_project_data(project): project_is_component = project.get("is_component") project_updated_at = project.get("updated_at") if not project_updated_at: - project_updated_at = datetime.now(tz=timezone.utc).isoformat() - updated_at_datetime = datetime.strptime(project_updated_at, "%Y-%m-%dT%H:%M:%S.%f%z") + updated_at_datetime = datetime.now(tz=timezone.utc) else: - updated_at_datetime = datetime.strptime(project_updated_at, "%Y-%m-%dT%H:%M:%S.%f") + updated_at_datetime = datetime.fromisoformat(project_updated_at) project_data = project.get("data") project_icon = project.get("icon") - if project_icon and purely_emoji(project_icon): - project_icon = demojize(project_icon) - else: - project_icon = "" + project_icon = demojize(project_icon) if project_icon and purely_emoji(project_icon) else project_icon project_icon_bg_color = project.get("icon_bg_color") + project_gradient = project.get("gradient") + project_tags = project.get("tags") return ( project_name, project_description, @@ -401,13 +423,14 @@ def get_project_data(project): project_data, project_icon, project_icon_bg_color, + project_gradient, + project_tags, ) -def update_project_file(project_path, project, updated_project_data): +def update_project_file(project_path: Path, project: dict, updated_project_data) -> None: project["data"] = updated_project_data - with open(project_path, "w", encoding="utf-8") as f: - f.write(orjson.dumps(project, option=ORJSON_OPTIONS).decode()) + project_path.write_text(orjson.dumps(project, option=ORJSON_OPTIONS).decode(), encoding="utf-8") logger.info(f"Updated starter project {project['name']} file") @@ -420,7 +443,7 @@ def update_existing_project( project_data, project_icon, project_icon_bg_color, -): +) -> None: logger.info(f"Updating starter project {project_name}") existing_project.data = project_data existing_project.folder = STARTER_FOLDER_NAME @@ -438,10 +461,12 @@ def create_new_project( project_is_component, updated_at_datetime, project_data, + project_gradient, + project_tags, project_icon, project_icon_bg_color, new_folder_id, -): +) -> None: logger.debug(f"Creating starter project {project_name}") new_project = FlowCreate( name=project_name, @@ -452,17 +477,18 @@ def 
create_new_project( is_component=project_is_component, updated_at=updated_at_datetime, folder_id=new_folder_id, + gradient=project_gradient, + tags=project_tags, ) db_flow = Flow.model_validate(new_project, from_attributes=True) session.add(db_flow) def get_all_flows_similar_to_project(session, folder_id): - flows = session.exec(select(Folder).where(Folder.id == folder_id)).first().flows - return flows + return session.exec(select(Folder).where(Folder.id == folder_id)).first().flows -def delete_start_projects(session, folder_id): +def delete_start_projects(session, folder_id) -> None: flows = session.exec(select(Folder).where(Folder.id == folder_id)).first().flows for flow in flows: session.delete(flow) @@ -482,8 +508,7 @@ def create_starter_folder(session): session.commit() session.refresh(db_folder) return db_folder - else: - return session.exec(select(Folder).where(Folder.name == STARTER_FOLDER_NAME)).first() + return session.exec(select(Folder).where(Folder.name == STARTER_FOLDER_NAME)).first() def _is_valid_uuid(val): @@ -494,7 +519,12 @@ def _is_valid_uuid(val): return str(uuid_obj) == val -def load_flows_from_directory(): +def load_flows_from_directory() -> None: + """On langflow startup, this loads all flows from the directory specified in the settings. + + All flows are uploaded into the default folder for the superuser. + Note that this feature currently only works if AUTO_LOGIN is enabled in the settings. + """ settings_service = get_settings_service() flows_path = settings_service.settings.load_flows_path if not flows_path: @@ -504,57 +534,72 @@ def load_flows_from_directory(): return with session_scope() as session: - user_id = get_user_by_username(session, settings_service.auth_settings.SUPERUSER).id - files = [f for f in os.listdir(flows_path) if os.path.isfile(os.path.join(flows_path, f))] - for filename in files: - if not filename.endswith(".json"): + user = get_user_by_username(session, settings_service.auth_settings.SUPERUSER) + if user is None: + msg = "Superuser not found in the database" + raise NoResultFound(msg) + user_id = user.id + _flows_path = Path(flows_path) + files = [f for f in _flows_path.iterdir() if f.is_file()] + for f in files: + if f.suffix != ".json": continue - logger.info(f"Loading flow from file: {filename}") - with open(os.path.join(flows_path, filename), "r", encoding="utf-8") as file: - flow = orjson.loads(file.read()) - no_json_name = filename.replace(".json", "") - flow_endpoint_name = flow.get("endpoint_name") - if _is_valid_uuid(no_json_name): - flow["id"] = no_json_name - flow_id = flow.get("id") - - existing = find_existing_flow(session, flow_id, flow_endpoint_name) - if existing: - logger.info(f"Updating existing flow: {flow_id} with endpoint name {flow_endpoint_name}") - for key, value in flow.items(): - if hasattr(existing, key): - # flow dict from json and db representation are not 100% the same - setattr(existing, key, value) - existing.updated_at = datetime.utcnow() - existing.user_id = user_id - session.add(existing) - session.commit() - else: - logger.info(f"Creating new flow: {flow_id} with endpoint name {flow_endpoint_name}") - flow["user_id"] = user_id - flow = Flow.model_validate(flow, from_attributes=True) - flow.updated_at = datetime.utcnow() - session.add(flow) - session.commit() + logger.info(f"Loading flow from file: {f.name}") + content = f.read_text(encoding="utf-8") + flow = orjson.loads(content) + no_json_name = f.stem + flow_endpoint_name = flow.get("endpoint_name") + if _is_valid_uuid(no_json_name): + flow["id"] = 
no_json_name + flow_id = flow.get("id") + + existing = find_existing_flow(session, flow_id, flow_endpoint_name) + if existing: + logger.debug(f"Found existing flow: {existing.name}") + logger.info(f"Updating existing flow: {flow_id} with endpoint name {flow_endpoint_name}") + for key, value in flow.items(): + if hasattr(existing, key): + # flow dict from json and db representation are not 100% the same + setattr(existing, key, value) + existing.updated_at = datetime.now(tz=timezone.utc).astimezone() + existing.user_id = user_id + + # Generally, folder_id should not be None, but we must check this due to the previous + # behavior where flows could be added and folder_id was None, orphaning + # them within Langflow. + if existing.folder_id is None: + folder_id = get_default_folder_id(session, user_id) + existing.folder_id = folder_id + + session.add(existing) + else: + logger.info(f"Creating new flow: {flow_id} with endpoint name {flow_endpoint_name}") + + # Current behavior loads all new flows into default folder + folder_id = get_default_folder_id(session, user_id) + + flow["user_id"] = user_id + flow["folder_id"] = folder_id + flow = Flow.model_validate(flow, from_attributes=True) + flow.updated_at = datetime.now(tz=timezone.utc).astimezone() + session.add(flow) def find_existing_flow(session, flow_id, flow_endpoint_name): if flow_endpoint_name: + logger.debug(f"flow_endpoint_name: {flow_endpoint_name}") stmt = select(Flow).where(Flow.endpoint_name == flow_endpoint_name) if existing := session.exec(stmt).first(): + logger.debug(f"Found existing flow by endpoint name: {existing.name}") return existing stmt = select(Flow).where(Flow.id == flow_id) if existing := session.exec(stmt).first(): + logger.debug(f"Found existing flow by id: {flow_id}") return existing return None -async def create_or_update_starter_projects(get_all_components_coro: Awaitable[dict]): - try: - all_types_dict = await get_all_components_coro - except Exception as e: - logger.exception(f"Error loading components: {e}") - raise e +def create_or_update_starter_projects(all_types_dict: dict) -> None: with session_scope() as session: new_folder = create_starter_folder(session) starter_projects = load_starter_projects() @@ -569,15 +614,13 @@ async def create_or_update_starter_projects(get_all_components_coro: Awaitable[d project_data, project_icon, project_icon_bg_color, + project_gradient, + project_tags, ) = get_project_data(project) updated_project_data = update_projects_components_with_latest_component_versions( project_data.copy(), all_types_dict ) updated_project_data = update_edges_with_latest_component_versions(updated_project_data) - try: - Graph.from_payload(updated_project_data) - except Exception as e: - logger.error(e) if updated_project_data != project_data: project_data = updated_project_data # We also need to update the project data in the file @@ -588,30 +631,32 @@ async def create_or_update_starter_projects(get_all_components_coro: Awaitable[d session.delete(existing_project) create_new_project( - session, - project_name, - project_description, - project_is_component, - updated_at_datetime, - project_data, - project_icon, - project_icon_bg_color, - new_folder.id, + session=session, + project_name=project_name, + project_description=project_description, + project_is_component=project_is_component, + updated_at_datetime=updated_at_datetime, + project_data=project_data, + project_icon=project_icon, + project_icon_bg_color=project_icon_bg_color, + project_gradient=project_gradient, + project_tags=project_tags, 
+ new_folder_id=new_folder.id, ) -def initialize_super_user_if_needed(): +def initialize_super_user_if_needed() -> None: settings_service = get_settings_service() if not settings_service.auth_settings.AUTO_LOGIN: return username = settings_service.auth_settings.SUPERUSER password = settings_service.auth_settings.SUPERUSER_PASSWORD if not username or not password: - raise ValueError("SUPERUSER and SUPERUSER_PASSWORD must be set in the settings if AUTO_LOGIN is true.") + msg = "SUPERUSER and SUPERUSER_PASSWORD must be set in the settings if AUTO_LOGIN is true." + raise ValueError(msg) with session_scope() as session: super_user = create_super_user(db=session, username=username, password=password) get_variable_service().initialize_user_variables(super_user.id, session) create_default_folder_if_it_doesnt_exist(session, super_user.id) - session.commit() logger.info("Super user initialized") diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Basic Prompt Chaining.json b/src/backend/base/langflow/initial_setup/starter_projects/Basic Prompt Chaining.json new file mode 100644 index 000000000000..5294252573f2 --- /dev/null +++ b/src/backend/base/langflow/initial_setup/starter_projects/Basic Prompt Chaining.json @@ -0,0 +1,2053 @@ +{ + "data": { + "edges": [ + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "OpenAIModel", + "id": "OpenAIModel-jhEkE", + "name": "text_output", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "input_value", + "id": "ChatOutput-FlJ4C", + "inputTypes": ["Message"], + "type": "str" + } + }, + "id": "reactflow__edge-OpenAIModel-jhEkE{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-jhEkEœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-FlJ4C{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-FlJ4Cœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "selected": false, + "source": "OpenAIModel-jhEkE", + "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-jhEkEœ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}", + "target": "ChatOutput-FlJ4C", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-FlJ4Cœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "Prompt", + "id": "Prompt-Lmlx2", + "name": "prompt", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "system_message", + "id": "OpenAIModel-cn7cK", + "inputTypes": ["Message"], + "type": "str" + } + }, + "id": "reactflow__edge-Prompt-Lmlx2{œdataTypeœ:œPromptœ,œidœ:œPrompt-Lmlx2œ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-cn7cK{œfieldNameœ:œsystem_messageœ,œidœ:œOpenAIModel-cn7cKœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "Prompt-Lmlx2", + "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-Lmlx2œ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", + "target": "OpenAIModel-cn7cK", + "targetHandle": "{œfieldNameœ: œsystem_messageœ, œidœ: œOpenAIModel-cn7cKœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "ChatInput", + "id": "ChatInput-MbPd9", + "name": "message", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "input_value", + "id": "OpenAIModel-cn7cK", + "inputTypes": ["Message"], + "type": "str" + } + }, + "id": 
"reactflow__edge-ChatInput-MbPd9{œdataTypeœ:œChatInputœ,œidœ:œChatInput-MbPd9œ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-cn7cK{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-cn7cKœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "ChatInput-MbPd9", + "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-MbPd9œ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}", + "target": "OpenAIModel-cn7cK", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œOpenAIModel-cn7cKœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "Prompt", + "id": "Prompt-TbFnC", + "name": "prompt", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "system_message", + "id": "OpenAIModel-xWhtK", + "inputTypes": ["Message"], + "type": "str" + } + }, + "id": "reactflow__edge-Prompt-TbFnC{œdataTypeœ:œPromptœ,œidœ:œPrompt-TbFnCœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-xWhtK{œfieldNameœ:œsystem_messageœ,œidœ:œOpenAIModel-xWhtKœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "Prompt-TbFnC", + "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-TbFnCœ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", + "target": "OpenAIModel-xWhtK", + "targetHandle": "{œfieldNameœ: œsystem_messageœ, œidœ: œOpenAIModel-xWhtKœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "OpenAIModel", + "id": "OpenAIModel-cn7cK", + "name": "text_output", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "input_value", + "id": "OpenAIModel-xWhtK", + "inputTypes": ["Message"], + "type": "str" + } + }, + "id": "reactflow__edge-OpenAIModel-cn7cK{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-cn7cKœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-xWhtK{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-xWhtKœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "OpenAIModel-cn7cK", + "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-cn7cKœ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}", + "target": "OpenAIModel-xWhtK", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œOpenAIModel-xWhtKœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "Prompt", + "id": "Prompt-3CcjN", + "name": "prompt", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "system_message", + "id": "OpenAIModel-jhEkE", + "inputTypes": ["Message"], + "type": "str" + } + }, + "id": "reactflow__edge-Prompt-3CcjN{œdataTypeœ:œPromptœ,œidœ:œPrompt-3CcjNœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-jhEkE{œfieldNameœ:œsystem_messageœ,œidœ:œOpenAIModel-jhEkEœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "Prompt-3CcjN", + "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-3CcjNœ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", + "target": "OpenAIModel-jhEkE", + "targetHandle": "{œfieldNameœ: œsystem_messageœ, œidœ: œOpenAIModel-jhEkEœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "OpenAIModel", + "id": "OpenAIModel-xWhtK", + "name": "text_output", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "input_value", + "id": "OpenAIModel-jhEkE", + "inputTypes": ["Message"], + "type": "str" + } + }, + "id": 
"reactflow__edge-OpenAIModel-xWhtK{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-xWhtKœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-jhEkE{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-jhEkEœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "OpenAIModel-xWhtK", + "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-xWhtKœ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}", + "target": "OpenAIModel-jhEkE", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œOpenAIModel-jhEkEœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + } + ], + "nodes": [ + { + "data": { + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt", + "id": "Prompt-TbFnC", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": { + "template": [] + }, + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt", + "documentation": "", + "edited": false, + "field_order": ["template"], + "frozen": false, + "icon": "prompts", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Prompt Message", + "method": "build_prompt", + "name": "prompt", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.inputs.inputs import DefaultPromptField\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(self) -> Message:\n prompt = Message.from_template(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def _update_template(self, frontend_node: dict):\n prompt_template = frontend_node[\"template\"][\"template\"][\"value\"]\n custom_fields = frontend_node[\"custom_fields\"]\n frontend_node_template = frontend_node[\"template\"]\n _ = process_prompt_template(\n template=prompt_template,\n name=\"template\",\n custom_fields=custom_fields,\n frontend_node_template=frontend_node_template,\n )\n return frontend_node\n\n def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"This function is called after the code validation is done.\"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n # Kept it duplicated for backwards compatibility\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now 
that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n\n def _get_fallback_input(self, **kwargs):\n return DefaultPromptField(**kwargs)\n" + }, + "template": { + "_input_type": "PromptInput", + "advanced": false, + "display_name": "Template", + "dynamic": false, + "info": "", + "list": false, + "load_from_db": false, + "name": "template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "type": "prompt", + "value": "You are a seasoned business analyst with a strong background in tech product development and market research. Your analytical skills are unparalleled, allowing you to dissect product concepts and evaluate their market viability with precision. You have a keen eye for identifying potential challenges and opportunities that others might overlook. Your insights have been crucial in shaping successful product strategies for numerous tech companies.\n\nYour task is to:\n\n1. Evaluate the concept in terms of market potential and technical feasibility\n2. Identify two potential challenges for developing this product\n3. Suggest one improvement or expansion to the concept\n\n\nPlease structure your response as follows:\n\nConcept Evaluation:\n[concept_evaluation]\n\nPotential Challenges:\n1. [challenge_1]\n2. [challenge_2]\n...\n\nImprovement Suggestion:\n[improvement_suggestion]\n\nProvide an objective and well-founded analysis, considering market and technological factors in your evaluation.\n" + } + }, + "tool_mode": false + }, + "type": "Prompt" + }, + "dragging": false, + "height": 260, + "id": "Prompt-TbFnC", + "position": { + "x": 1921.9168573384, + "y": 1162.4082184281983 + }, + "positionAbsolute": { + "x": 1921.9168573384, + "y": 1162.4082184281983 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "id": "ChatInput-MbPd9", + "node": { + "base_classes": ["Message"], + "beta": false, + "category": "inputs", + "conditional_paths": [], + "custom_fields": {}, + "description": "Get chat inputs from the Playground.", + "display_name": "Chat Input", + "documentation": "", + "edited": false, + "field_order": [ + "input_value", + "should_store_message", + "sender", + "sender_name", + "session_id", + "files", + "background_color", + "chat_icon", + "text_color" + ], + "frozen": false, + "icon": "MessagesSquare", + "key": "ChatInput", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Message", + "method": "message_response", + "name": "message", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "background_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Background Color", + "dynamic": false, + "info": "The background color of the icon.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "background_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "chat_icon": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Icon", + "dynamic": false, + 
"info": "The icon of the message.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "chat_icon", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_USER, MESSAGE_SENDER_USER\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n _background_color = self.background_color\n _text_color = self.text_color\n _icon = self.chat_icon\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\"background_color\": _background_color, \"text_color\": _text_color, \"icon\": _icon},\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n" + }, + "files": { + "_input_type": "FileInput", + "advanced": true, + "display_name": "Files", + "dynamic": false, + "fileTypes": [ + "txt", + "md", + "mdx", + "csv", + "json", + "yaml", + "yml", + "xml", + "html", + "htm", + "pdf", + "docx", + "py", + "sh", + "sql", + "js", + "ts", + "tsx", + "jpg", + "jpeg", + "png", + "bmp", + "image" + ], + "file_path": "", + "info": "Files to be sent with the message.", + "list": true, + "name": "files", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "file", + "value": "" + }, + "input_value": { + "_input_type": "MultilineInput", + "advanced": false, + "display_name": "Text", + "dynamic": false, + "info": "Message to be passed as input.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "The growing demand for personalized, AI-driven mental health support tools that can provide real-time interventions and track long-term emotional well-being." 
+ }, + "sender": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Sender Type", + "dynamic": false, + "info": "Type of sender.", + "name": "sender", + "options": ["Machine", "User"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "str", + "value": "User" + }, + "sender_name": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Sender Name", + "dynamic": false, + "info": "Name of the sender.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "sender_name", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "User" + }, + "session_id": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Session ID", + "dynamic": false, + "info": "The session ID of the chat. If empty, the current session ID parameter will be used.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "session_id", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "should_store_message": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Store Messages", + "dynamic": false, + "info": "Store the message in the history.", + "list": false, + "name": "should_store_message", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "text_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Text Color", + "dynamic": false, + "info": "The text color of the name", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "text_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + } + } + }, + "type": "ChatInput" + }, + "dragging": false, + "height": 234, + "id": "ChatInput-MbPd9", + "position": { + "x": 1178.0239685549568, + "y": 879.9087836229152 + }, + "positionAbsolute": { + "x": 1178.0239685549568, + "y": 879.9087836229152 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "description": "Display a chat message in the Playground.", + "display_name": "Chat Output", + "id": "ChatOutput-FlJ4C", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Display a chat message in the Playground.", + "display_name": "Chat Output", + "documentation": "", + "edited": false, + "field_order": [ + "input_value", + "should_store_message", + "sender", + "sender_name", + "session_id", + "data_template", + "background_color", + "chat_icon", + "text_color" + ], + "frozen": false, + "icon": "MessagesSquare", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Message", + "method": "message_response", + "name": "message", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "background_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Background Color", + "dynamic": false, + 
"info": "The background color of the icon.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "background_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "chat_icon": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Icon", + "dynamic": false, + "info": "The icon of the message.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "chat_icon", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageInput, MessageTextInput, Output\nfrom langflow.schema.message import Message\nfrom langflow.schema.properties import Source\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.\",\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(\n display_name=\"Message\",\n name=\"message\",\n method=\"message_response\",\n ),\n ]\n\n def _build_source(self, _id: str | None, display_name: str | None, source: str | None) -> Source:\n source_dict = {}\n if _id:\n source_dict[\"id\"] = _id\n if display_name:\n source_dict[\"display_name\"] = display_name\n if source:\n source_dict[\"source\"] = source\n return Source(**source_dict)\n\n def message_response(self) -> Message:\n _source, _icon, _display_name, _source_id = self.get_properties_from_source_component()\n _background_color = self.background_color\n _text_color = self.text_color\n if self.chat_icon:\n _icon = self.chat_icon\n message = self.input_value if isinstance(self.input_value, Message) else Message(text=self.input_value)\n message.sender = self.sender\n message.sender_name = self.sender_name\n message.session_id = self.session_id\n message.flow_id = self.graph.flow_id if hasattr(self, \"graph\") else None\n message.properties.source = self._build_source(_source_id, _display_name, _source)\n message.properties.icon = _icon\n message.properties.background_color = _background_color\n message.properties.text_color = _text_color\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n" + }, + "data_template": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Data Template", + "dynamic": false, + "info": "Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "data_template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "{text}" + }, + "input_value": { + "_input_type": "MessageInput", + "advanced": false, + "display_name": "Text", + "dynamic": false, + "info": "Message to be passed as output.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "sender": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Sender Type", + "dynamic": false, + "info": "Type of sender.", + "name": "sender", + "options": ["Machine", "User"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "Machine" + }, + "sender_name": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Sender Name", + "dynamic": false, + "info": "Name of the sender.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "sender_name", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "AI" + }, + "session_id": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Session ID", + "dynamic": false, + "info": "The session ID of the chat. 
If empty, the current session ID parameter will be used.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "session_id", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "should_store_message": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Store Messages", + "dynamic": false, + "info": "Store the message in the history.", + "list": false, + "name": "should_store_message", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "text_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Text Color", + "dynamic": false, + "info": "The text color of the name", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "text_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + } + }, + "tool_mode": false + }, + "type": "ChatOutput" + }, + "dragging": false, + "height": 234, + "id": "ChatOutput-FlJ4C", + "position": { + "x": 3363.868906129255, + "y": 1189.5351768654318 + }, + "positionAbsolute": { + "x": 3363.868906129255, + "y": 1189.5351768654318 + }, + "selected": true, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt", + "id": "Prompt-3CcjN", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": { + "template": [] + }, + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt", + "documentation": "", + "edited": false, + "field_order": ["template"], + "frozen": false, + "icon": "prompts", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Prompt Message", + "method": "build_prompt", + "name": "prompt", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.inputs.inputs import DefaultPromptField\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(self) -> Message:\n prompt = Message.from_template(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def 
_update_template(self, frontend_node: dict):\n prompt_template = frontend_node[\"template\"][\"template\"][\"value\"]\n custom_fields = frontend_node[\"custom_fields\"]\n frontend_node_template = frontend_node[\"template\"]\n _ = process_prompt_template(\n template=prompt_template,\n name=\"template\",\n custom_fields=custom_fields,\n frontend_node_template=frontend_node_template,\n )\n return frontend_node\n\n def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"This function is called after the code validation is done.\"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n # Kept it duplicated for backwards compatibility\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n\n def _get_fallback_input(self, **kwargs):\n return DefaultPromptField(**kwargs)\n" + }, + "template": { + "_input_type": "PromptInput", + "advanced": false, + "display_name": "Template", + "dynamic": false, + "info": "", + "list": false, + "load_from_db": false, + "name": "template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "type": "prompt", + "value": "You are an accomplished product manager with a track record of bringing innovative tech products from concept to market. Your strategic thinking and ability to balance technical feasibility with market demands have resulted in several successful product launches. You excel at distilling complex ideas into clear, actionable plans and have a talent for identifying the most critical features that will drive product adoption and success.\n\nBased on the analysis of the innovative product, create a simplified development plan that includes:\n\n1. Product overview (1-2 sentences)\n2. Three main features to be developed\n3. A basic market launch strategy\n\n\nPlease structure your plan as follows:\n\nProduct Overview:\n[product_overview]\n\nMain Features:\n1. [feature_1]\n2. [feature_2]\n3. 
[feature_3]\n...\n\nLaunch Strategy:\n[launch_strategy]\n\nYour plan should be concise, realistic, and aligned with the information provided in the previous steps.\n" + } + }, + "tool_mode": false + }, + "type": "Prompt" + }, + "dragging": false, + "height": 260, + "id": "Prompt-3CcjN", + "position": { + "x": 2647.8305106628454, + "y": 1161.2328062686402 + }, + "positionAbsolute": { + "x": 2647.8305106628454, + "y": 1161.2328062686402 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "description": "Generates text using OpenAI LLMs.", + "display_name": "OpenAI", + "id": "OpenAIModel-cn7cK", + "node": { + "base_classes": ["LanguageModel", "Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Generates text using OpenAI LLMs.", + "display_name": "OpenAI", + "documentation": "", + "edited": false, + "field_order": [ + "input_value", + "system_message", + "stream", + "max_tokens", + "model_kwargs", + "json_mode", + "output_schema", + "model_name", + "openai_api_base", + "api_key", + "temperature", + "seed", + "output_parser" + ], + "frozen": false, + "icon": "OpenAI", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Text", + "method": "text_response", + "name": "text_output", + "required_inputs": [], + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + }, + { + "cache": true, + "display_name": "Language Model", + "method": "build_model", + "name": "model_output", + "required_inputs": [], + "selected": "LanguageModel", + "types": ["LanguageModel"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "api_key": { + "_input_type": "SecretStrInput", + "advanced": false, + "display_name": "OpenAI API Key", + "dynamic": false, + "info": "The OpenAI API Key to use for the OpenAI model.", + "input_types": ["Message"], + "load_from_db": true, + "name": "api_key", + "password": true, + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "OPENAI_API_KEY" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "import operator\nfrom functools import reduce\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langflow.inputs import BoolInput, DictInput, DropdownInput, FloatInput, IntInput, SecretStrInput, StrInput\nfrom langflow.inputs.inputs import HandleInput\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = [\n *LCModelComponent._base_inputs,\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(\n name=\"model_kwargs\",\n display_name=\"Model Kwargs\",\n advanced=True,\n info=\"Additional keyword arguments to pass to the model.\",\n ),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. \"\n \"You must pass the word JSON in the prompt. \"\n \"If left blank, JSON mode will be disabled. [DEPRECATED]\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. \"\n \"Defaults to https://api.openai.com/v1. \"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n HandleInput(\n name=\"output_parser\",\n display_name=\"Output Parser\",\n info=\"The parser to use to parse the output of the model\",\n advanced=True,\n input_types=[\"OutputParser\"],\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n api_key = SecretStr(openai_api_key).get_secret_value() if openai_api_key else None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature if temperature is not None else 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\")\n else:\n output = output.bind(response_format={\"type\": \"json_object\"})\n\n return output\n\n def _get_exception_message(self, e: Exception):\n \"\"\"Get a message from an OpenAI exception.\n\n Args:\n e (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n try:\n from openai import BadRequestError\n except ImportError:\n return None\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\")\n if message:\n return message\n return None\n" + }, + "input_value": { + "_input_type": "MessageInput", + "advanced": false, + "display_name": "Input", + "dynamic": false, + "info": "", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + 
"trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "json_mode": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "JSON Mode", + "dynamic": false, + "info": "If True, it will output JSON regardless of passing a schema.", + "list": false, + "name": "json_mode", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + }, + "max_tokens": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Max Tokens", + "dynamic": false, + "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.", + "list": false, + "name": "max_tokens", + "placeholder": "", + "range_spec": { + "max": 128000, + "min": 0, + "step": 0.1, + "step_type": "float" + }, + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": "" + }, + "model_kwargs": { + "_input_type": "DictInput", + "advanced": true, + "display_name": "Model Kwargs", + "dynamic": false, + "info": "Additional keyword arguments to pass to the model.", + "list": false, + "name": "model_kwargs", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "model_name": { + "_input_type": "DropdownInput", + "advanced": false, + "combobox": false, + "display_name": "Model Name", + "dynamic": false, + "info": "", + "name": "model_name", + "options": [ + "gpt-4o-mini", + "gpt-4o", + "gpt-4-turbo", + "gpt-4-turbo-preview", + "gpt-4", + "gpt-3.5-turbo", + "gpt-3.5-turbo-0125" + ], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "gpt-4o-mini" + }, + "openai_api_base": { + "_input_type": "StrInput", + "advanced": true, + "display_name": "OpenAI API Base", + "dynamic": false, + "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.", + "list": false, + "load_from_db": false, + "name": "openai_api_base", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "output_parser": { + "_input_type": "HandleInput", + "advanced": true, + "display_name": "Output Parser", + "dynamic": false, + "info": "The parser to use to parse the output of the model", + "input_types": ["OutputParser"], + "list": false, + "name": "output_parser", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "output_schema": { + "_input_type": "DictInput", + "advanced": true, + "display_name": "Schema", + "dynamic": false, + "info": "The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled. 
[DEPRECATED]", + "list": true, + "name": "output_schema", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "seed": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Seed", + "dynamic": false, + "info": "The seed controls the reproducibility of the job.", + "list": false, + "name": "seed", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 1 + }, + "stream": { + "_input_type": "BoolInput", + "advanced": false, + "display_name": "Stream", + "dynamic": false, + "info": "Stream the response from the model. Streaming works only in Chat.", + "list": false, + "name": "stream", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + }, + "system_message": { + "_input_type": "MessageTextInput", + "advanced": false, + "display_name": "System Message", + "dynamic": false, + "info": "System message to pass to the model.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "system_message", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "temperature": { + "_input_type": "FloatInput", + "advanced": false, + "display_name": "Temperature", + "dynamic": false, + "info": "", + "list": false, + "name": "temperature", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "float", + "value": 0.1 + } + }, + "tool_mode": false + }, + "type": "OpenAIModel" + }, + "dragging": false, + "height": 630, + "id": "OpenAIModel-cn7cK", + "position": { + "x": 1561.5122766985614, + "y": 805.4323582784689 + }, + "positionAbsolute": { + "x": 1561.5122766985614, + "y": 805.4323582784689 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "description": "Generates text using OpenAI LLMs.", + "display_name": "OpenAI", + "id": "OpenAIModel-jhEkE", + "node": { + "base_classes": ["LanguageModel", "Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Generates text using OpenAI LLMs.", + "display_name": "OpenAI", + "documentation": "", + "edited": false, + "field_order": [ + "input_value", + "system_message", + "stream", + "max_tokens", + "model_kwargs", + "json_mode", + "output_schema", + "model_name", + "openai_api_base", + "api_key", + "temperature", + "seed", + "output_parser" + ], + "frozen": false, + "icon": "OpenAI", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Text", + "method": "text_response", + "name": "text_output", + "required_inputs": [], + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + }, + { + "cache": true, + "display_name": "Language Model", + "method": "build_model", + "name": "model_output", + "required_inputs": [], + "selected": "LanguageModel", + "types": ["LanguageModel"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "api_key": { + "_input_type": "SecretStrInput", + "advanced": false, + "display_name": "OpenAI API Key", + "dynamic": false, + "info": "The OpenAI API Key to use for the OpenAI model.", + "input_types": 
["Message"], + "load_from_db": true, + "name": "api_key", + "password": true, + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "OPENAI_API_KEY" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "import operator\nfrom functools import reduce\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langflow.inputs import BoolInput, DictInput, DropdownInput, FloatInput, IntInput, SecretStrInput, StrInput\nfrom langflow.inputs.inputs import HandleInput\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = [\n *LCModelComponent._base_inputs,\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(\n name=\"model_kwargs\",\n display_name=\"Model Kwargs\",\n advanced=True,\n info=\"Additional keyword arguments to pass to the model.\",\n ),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. \"\n \"You must pass the word JSON in the prompt. \"\n \"If left blank, JSON mode will be disabled. [DEPRECATED]\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. \"\n \"Defaults to https://api.openai.com/v1. 
\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n HandleInput(\n name=\"output_parser\",\n display_name=\"Output Parser\",\n info=\"The parser to use to parse the output of the model\",\n advanced=True,\n input_types=[\"OutputParser\"],\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n api_key = SecretStr(openai_api_key).get_secret_value() if openai_api_key else None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature if temperature is not None else 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\")\n else:\n output = output.bind(response_format={\"type\": \"json_object\"})\n\n return output\n\n def _get_exception_message(self, e: Exception):\n \"\"\"Get a message from an OpenAI exception.\n\n Args:\n e (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n try:\n from openai import BadRequestError\n except ImportError:\n return None\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\")\n if message:\n return message\n return None\n" + }, + "input_value": { + "_input_type": "MessageInput", + "advanced": false, + "display_name": "Input", + "dynamic": false, + "info": "", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "json_mode": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "JSON Mode", + "dynamic": false, + "info": "If True, it will output JSON regardless of passing a schema.", + "list": false, + "name": "json_mode", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + }, + "max_tokens": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Max Tokens", + "dynamic": false, + "info": "The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.", + "list": false, + "name": "max_tokens", + "placeholder": "", + "range_spec": { + "max": 128000, + "min": 0, + "step": 0.1, + "step_type": "float" + }, + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": "" + }, + "model_kwargs": { + "_input_type": "DictInput", + "advanced": true, + "display_name": "Model Kwargs", + "dynamic": false, + "info": "Additional keyword arguments to pass to the model.", + "list": false, + "name": "model_kwargs", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "model_name": { + "_input_type": "DropdownInput", + "advanced": false, + "combobox": false, + "display_name": "Model Name", + "dynamic": false, + "info": "", + "name": "model_name", + "options": [ + "gpt-4o-mini", + "gpt-4o", + "gpt-4-turbo", + "gpt-4-turbo-preview", + "gpt-4", + "gpt-3.5-turbo", + "gpt-3.5-turbo-0125" + ], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "gpt-4o-mini" + }, + "openai_api_base": { + "_input_type": "StrInput", + "advanced": true, + "display_name": "OpenAI API Base", + "dynamic": false, + "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.", + "list": false, + "load_from_db": false, + "name": "openai_api_base", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "output_parser": { + "_input_type": "HandleInput", + "advanced": true, + "display_name": "Output Parser", + "dynamic": false, + "info": "The parser to use to parse the output of the model", + "input_types": ["OutputParser"], + "list": false, + "name": "output_parser", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "output_schema": { + "_input_type": "DictInput", + "advanced": true, + "display_name": "Schema", + "dynamic": false, + "info": "The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled. [DEPRECATED]", + "list": true, + "name": "output_schema", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "seed": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Seed", + "dynamic": false, + "info": "The seed controls the reproducibility of the job.", + "list": false, + "name": "seed", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 1 + }, + "stream": { + "_input_type": "BoolInput", + "advanced": false, + "display_name": "Stream", + "dynamic": false, + "info": "Stream the response from the model. 
Streaming works only in Chat.", + "list": false, + "name": "stream", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + }, + "system_message": { + "_input_type": "MessageTextInput", + "advanced": false, + "display_name": "System Message", + "dynamic": false, + "info": "System message to pass to the model.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "system_message", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "temperature": { + "_input_type": "FloatInput", + "advanced": false, + "display_name": "Temperature", + "dynamic": false, + "info": "", + "list": false, + "name": "temperature", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "float", + "value": 0.1 + } + }, + "tool_mode": false + }, + "type": "OpenAIModel" + }, + "dragging": false, + "height": 630, + "id": "OpenAIModel-jhEkE", + "position": { + "x": 3013.354174710099, + "y": 800.0470124871745 + }, + "positionAbsolute": { + "x": 3013.354174710099, + "y": 800.0470124871745 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt", + "id": "Prompt-Lmlx2", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": { + "template": [] + }, + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt", + "documentation": "", + "edited": false, + "field_order": ["template"], + "frozen": false, + "icon": "prompts", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Prompt Message", + "method": "build_prompt", + "name": "prompt", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.inputs.inputs import DefaultPromptField\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(self) -> Message:\n prompt = Message.from_template(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def _update_template(self, frontend_node: dict):\n prompt_template = frontend_node[\"template\"][\"template\"][\"value\"]\n custom_fields = frontend_node[\"custom_fields\"]\n 
frontend_node_template = frontend_node[\"template\"]\n _ = process_prompt_template(\n template=prompt_template,\n name=\"template\",\n custom_fields=custom_fields,\n frontend_node_template=frontend_node_template,\n )\n return frontend_node\n\n def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"This function is called after the code validation is done.\"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n # Kept it duplicated for backwards compatibility\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n\n def _get_fallback_input(self, **kwargs):\n return DefaultPromptField(**kwargs)\n" + }, + "template": { + "_input_type": "PromptInput", + "advanced": false, + "display_name": "Template", + "dynamic": false, + "info": "", + "list": false, + "load_from_db": false, + "name": "template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "type": "prompt", + "value": "You are a visionary product innovator at a cutting-edge tech startup. Your expertise lies in identifying emerging market trends and translating them into groundbreaking product concepts. Your creative thinking and deep understanding of technology allow you to envision products that not only meet current needs but also anticipate future demands. Your ideas often challenge conventional thinking and push the boundaries of what's possible with current technology.\n\nPlease create a product concept, providing:\n\n1. Product name\n2. Brief description (2-3 sentences)\n3. Main innovative feature\n4. Target audience\n\nStructure your response like this:\n\nProduct Name: [product_name]\n\nDescription: [product_description]\n\nMain Innovation: [main_innovation]\n\nTarget Audience: [target_audience]\n\nBe creative and bold in your idea, but keep it realistic and aligned with the provided market trend." + } + }, + "tool_mode": false + }, + "type": "Prompt" + }, + "dragging": false, + "height": 260, + "id": "Prompt-Lmlx2", + "position": { + "x": 1178.7099500302636, + "y": 1167.8586867404465 + }, + "positionAbsolute": { + "x": 1178.7099500302636, + "y": 1167.8586867404465 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "id": "note-tSZsX", + "node": { + "description": "### Input Examples\n1.\n \"The growing demand for personalized, AI-driven mental health support tools that can provide real-time interventions and track long-term emotional well-being.\"\n\n\n2. \n \"The increasing need for secure and user-friendly decentralized finance (DeFi) platforms that make cryptocurrency investments accessible to non-tech-savvy users.\"\n \n\n3. \n \"The rising popularity of immersive, augmented reality (AR) experiences for remote collaboration and virtual team-building in distributed workforces.\"\n\n\n4. \n \"The expanding market for smart, IoT-enabled urban farming solutions that allow city dwellers to grow their own food efficiently in small spaces.\"\n\n\n5. 
\n \"The emerging demand for AI-powered personal styling and shopping assistants that consider sustainability, body positivity, and individual style preferences.\"\n\n", + "display_name": "", + "documentation": "", + "template": { + "backgroundColor": "emerald" + } + }, + "type": "note" + }, + "dragging": false, + "height": 430, + "id": "note-tSZsX", + "position": { + "x": 528.0392006831054, + "y": 973.781986567496 + }, + "positionAbsolute": { + "x": 528.0392006831054, + "y": 973.781986567496 + }, + "resizing": false, + "selected": false, + "style": { + "height": 430, + "width": 600 + }, + "type": "noteNode", + "width": 600 + }, + { + "data": { + "id": "note-FCbTW", + "node": { + "description": "### Prompt Chaining\n\nThis flow demonstrates fundamental prompt chaining principles:\n\n1. **Chain Structure**\n • User input → First Prompt → LLM\n • First output → Second Prompt → LLM\n • Second output → Final Prompt → LLM\n • Final output\n\n2. **Key Technique Elements**\n • Each prompt is specifically designed to process previous output\n • Output formatting ensures clean handoff between stages\n • Context flows naturally through the chain\n • Each LLM call builds upon previous results\n\n3. **Technical Implementation**\n • Multiple prompt templates working in sequence\n • Strategic input/output connections\n • Consistent message handling between stages\n • Progressive refinement through the chain\n\nThis pattern can be adapted for any use case by modifying the prompt templates while keeping the same chaining structure.", + "display_name": "", + "documentation": "", + "template": { + "backgroundColor": "blue" + } + }, + "type": "note" + }, + "dragging": false, + "height": 451, + "id": "note-FCbTW", + "position": { + "x": 892.4280059782889, + "y": 406.2411111617474 + }, + "positionAbsolute": { + "x": 892.4280059782889, + "y": 406.2411111617474 + }, + "resizing": false, + "selected": false, + "style": { + "height": 451, + "width": 600 + }, + "type": "noteNode", + "width": 600 + }, + { + "data": { + "description": "Generates text using OpenAI LLMs.", + "display_name": "OpenAI", + "id": "OpenAIModel-xWhtK", + "node": { + "base_classes": ["LanguageModel", "Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Generates text using OpenAI LLMs.", + "display_name": "OpenAI", + "documentation": "", + "edited": false, + "field_order": [ + "input_value", + "system_message", + "stream", + "max_tokens", + "model_kwargs", + "json_mode", + "output_schema", + "model_name", + "openai_api_base", + "api_key", + "temperature", + "seed", + "output_parser" + ], + "frozen": false, + "icon": "OpenAI", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Text", + "method": "text_response", + "name": "text_output", + "required_inputs": [], + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + }, + { + "cache": true, + "display_name": "Language Model", + "method": "build_model", + "name": "model_output", + "required_inputs": [], + "selected": "LanguageModel", + "types": ["LanguageModel"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "api_key": { + "_input_type": "SecretStrInput", + "advanced": false, + "display_name": "OpenAI API Key", + "dynamic": false, + "info": "The OpenAI API Key to use for the OpenAI model.", + "input_types": ["Message"], + "load_from_db": true, + "name": "api_key", + "password": true, + 
"placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "OPENAI_API_KEY" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "import operator\nfrom functools import reduce\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langflow.inputs import BoolInput, DictInput, DropdownInput, FloatInput, IntInput, SecretStrInput, StrInput\nfrom langflow.inputs.inputs import HandleInput\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = [\n *LCModelComponent._base_inputs,\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(\n name=\"model_kwargs\",\n display_name=\"Model Kwargs\",\n advanced=True,\n info=\"Additional keyword arguments to pass to the model.\",\n ),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. \"\n \"You must pass the word JSON in the prompt. \"\n \"If left blank, JSON mode will be disabled. [DEPRECATED]\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. \"\n \"Defaults to https://api.openai.com/v1. 
\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n HandleInput(\n name=\"output_parser\",\n display_name=\"Output Parser\",\n info=\"The parser to use to parse the output of the model\",\n advanced=True,\n input_types=[\"OutputParser\"],\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n api_key = SecretStr(openai_api_key).get_secret_value() if openai_api_key else None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature if temperature is not None else 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\")\n else:\n output = output.bind(response_format={\"type\": \"json_object\"})\n\n return output\n\n def _get_exception_message(self, e: Exception):\n \"\"\"Get a message from an OpenAI exception.\n\n Args:\n e (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n try:\n from openai import BadRequestError\n except ImportError:\n return None\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\")\n if message:\n return message\n return None\n" + }, + "input_value": { + "_input_type": "MessageInput", + "advanced": false, + "display_name": "Input", + "dynamic": false, + "info": "", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "json_mode": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "JSON Mode", + "dynamic": false, + "info": "If True, it will output JSON regardless of passing a schema.", + "list": false, + "name": "json_mode", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + }, + "max_tokens": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Max Tokens", + "dynamic": false, + "info": "The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.", + "list": false, + "name": "max_tokens", + "placeholder": "", + "range_spec": { + "max": 128000, + "min": 0, + "step": 0.1, + "step_type": "float" + }, + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": "" + }, + "model_kwargs": { + "_input_type": "DictInput", + "advanced": true, + "display_name": "Model Kwargs", + "dynamic": false, + "info": "Additional keyword arguments to pass to the model.", + "list": false, + "name": "model_kwargs", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "model_name": { + "_input_type": "DropdownInput", + "advanced": false, + "combobox": false, + "display_name": "Model Name", + "dynamic": false, + "info": "", + "name": "model_name", + "options": [ + "gpt-4o-mini", + "gpt-4o", + "gpt-4-turbo", + "gpt-4-turbo-preview", + "gpt-4", + "gpt-3.5-turbo", + "gpt-3.5-turbo-0125" + ], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "gpt-4o-mini" + }, + "openai_api_base": { + "_input_type": "StrInput", + "advanced": true, + "display_name": "OpenAI API Base", + "dynamic": false, + "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.", + "list": false, + "load_from_db": false, + "name": "openai_api_base", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "output_parser": { + "_input_type": "HandleInput", + "advanced": true, + "display_name": "Output Parser", + "dynamic": false, + "info": "The parser to use to parse the output of the model", + "input_types": ["OutputParser"], + "list": false, + "name": "output_parser", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "output_schema": { + "_input_type": "DictInput", + "advanced": true, + "display_name": "Schema", + "dynamic": false, + "info": "The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled. [DEPRECATED]", + "list": true, + "name": "output_schema", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "seed": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Seed", + "dynamic": false, + "info": "The seed controls the reproducibility of the job.", + "list": false, + "name": "seed", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 1 + }, + "stream": { + "_input_type": "BoolInput", + "advanced": false, + "display_name": "Stream", + "dynamic": false, + "info": "Stream the response from the model. 
Streaming works only in Chat.", + "list": false, + "name": "stream", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + }, + "system_message": { + "_input_type": "MessageTextInput", + "advanced": false, + "display_name": "System Message", + "dynamic": false, + "info": "System message to pass to the model.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "system_message", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "temperature": { + "_input_type": "FloatInput", + "advanced": false, + "display_name": "Temperature", + "dynamic": false, + "info": "", + "list": false, + "name": "temperature", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "float", + "value": 0.1 + } + }, + "tool_mode": false + }, + "type": "OpenAIModel" + }, + "dragging": false, + "height": 630, + "id": "OpenAIModel-xWhtK", + "position": { + "x": 2294.5699142425065, + "y": 796.3787911368381 + }, + "positionAbsolute": { + "x": 2294.5699142425065, + "y": 796.3787911368381 + }, + "selected": false, + "type": "genericNode", + "width": 320 + } + ], + "viewport": { + "x": -648.2477908002534, + "y": -146.07166399594246, + "zoom": 0.5557618545546786 + } + }, + "description": "Connect multiple prompts in sequence where each output becomes the next stage's input, enabling step-by-step text processing.", + "endpoint_name": null, + "icon": "Link", + "id": "b5b0c252-95ae-4b07-8211-67c8b12ea60e", + "gradient": "0", + "is_component": false, + "last_tested_version": "1.0.19.post2", + "name": "Prompt Chaining", + "tags": ["chatbots"] +} diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Basic Prompting (Hello, World).json b/src/backend/base/langflow/initial_setup/starter_projects/Basic Prompting (Hello, World).json deleted file mode 100644 index c79bdb1070d1..000000000000 --- a/src/backend/base/langflow/initial_setup/starter_projects/Basic Prompting (Hello, World).json +++ /dev/null @@ -1,924 +0,0 @@ -{ - "data": { - "edges": [ - { - "className": "", - "data": { - "sourceHandle": { - "dataType": "ChatInput", - "id": "ChatInput-jbtaD", - "name": "message", - "output_types": [ - "Message" - ] - }, - "targetHandle": { - "fieldName": "user_input", - "id": "Prompt-0SBd6", - "inputTypes": [ - "Message", - "Text" - ], - "type": "str" - } - }, - "id": "reactflow__edge-ChatInput-jbtaD{œdataTypeœ:œChatInputœ,œidœ:œChatInput-jbtaDœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Prompt-0SBd6{œfieldNameœ:œuser_inputœ,œidœ:œPrompt-0SBd6œ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", - "source": "ChatInput-jbtaD", - "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-jbtaDœ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}", - "target": "Prompt-0SBd6", - "targetHandle": "{œfieldNameœ: œuser_inputœ, œidœ: œPrompt-0SBd6œ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" - }, - { - "className": "", - "data": { - "sourceHandle": { - "dataType": "Prompt", - "id": "Prompt-0SBd6", - "name": "prompt", - "output_types": [ - "Message" - ] - }, - "targetHandle": { - "fieldName": "input_value", - "id": "OpenAIModel-HBuxy", - "inputTypes": [ - "Message" - ], - "type": "str" - } - }, - "id": 
"reactflow__edge-Prompt-0SBd6{œdataTypeœ:œPromptœ,œidœ:œPrompt-0SBd6œ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-HBuxy{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-HBuxyœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", - "source": "Prompt-0SBd6", - "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-0SBd6œ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", - "target": "OpenAIModel-HBuxy", - "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œOpenAIModel-HBuxyœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" - }, - { - "className": "", - "data": { - "sourceHandle": { - "dataType": "OpenAIModel", - "id": "OpenAIModel-HBuxy", - "name": "text_output", - "output_types": [ - "Message" - ] - }, - "targetHandle": { - "fieldName": "input_value", - "id": "ChatOutput-WG5tg", - "inputTypes": [ - "Message" - ], - "type": "str" - } - }, - "id": "reactflow__edge-OpenAIModel-HBuxy{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-HBuxyœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-WG5tg{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-WG5tgœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", - "source": "OpenAIModel-HBuxy", - "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-HBuxyœ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}", - "target": "ChatOutput-WG5tg", - "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-WG5tgœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" - } - ], - "nodes": [ - { - "data": { - "description": "Get chat inputs from the Playground.", - "display_name": "Chat Input", - "id": "ChatInput-jbtaD", - "node": { - "base_classes": [ - "Message" - ], - "beta": false, - "conditional_paths": [], - "custom_fields": {}, - "description": "Get chat inputs from the Playground.", - "display_name": "Chat Input", - "documentation": "", - "edited": false, - "field_order": [ - "input_value", - "store_message", - "sender", - "sender_name", - "session_id", - "files" - ], - "frozen": false, - "icon": "ChatInput", - "output_types": [], - "outputs": [ - { - "cache": true, - "display_name": "Message", - "method": "message_response", - "name": "message", - "selected": "Message", - "types": [ - "Message" - ], - "value": "__UNDEFINED__" - } - ], - "pinned": false, - "template": { - "_type": "Component", - "code": { - "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", - "info": "", - "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, - "placeholder": "", - "required": true, - "show": true, - "title_case": false, - "type": "code", - "value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_USER, MESSAGE_SENDER_NAME_USER\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n 
options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n )\n\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n" - }, - "files": { - "advanced": true, - "display_name": "Files", - "dynamic": false, - "fileTypes": [ - "txt", - "md", - "mdx", - "csv", - "json", - "yaml", - "yml", - "xml", - "html", - "htm", - "pdf", - "docx", - "py", - "sh", - "sql", - "js", - "ts", - "tsx", - "jpg", - "jpeg", - "png", - "bmp", - "image" - ], - "file_path": "", - "info": "Files to be sent with the message.", - "list": true, - "name": "files", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "file", - "value": "" - }, - "input_value": { - "advanced": false, - "display_name": "Text", - "dynamic": false, - "info": "Message to be passed as input.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "multiline": true, - "name": "input_value", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "sender": { - "advanced": true, - "display_name": "Sender Type", - "dynamic": false, - "info": "Type of sender.", - "name": "sender", - "options": [ - "Machine", - "User" - ], - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "str", - "value": "User" - }, - "sender_name": { - "advanced": true, - "display_name": "Sender Name", - "dynamic": false, - "info": "Name of the sender.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "name": "sender_name", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "User" - }, - "session_id": { - "advanced": true, - "display_name": "Session ID", - "dynamic": false, - "info": "The session ID of the chat. 
If empty, the current session ID parameter will be used.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "name": "session_id", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "should_store_message": { - "_input_type": "BoolInput", - "advanced": true, - "display_name": "Store Messages", - "dynamic": false, - "info": "Store the message in the history.", - "list": false, - "name": "should_store_message", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": true - } - } - }, - "type": "ChatInput" - }, - "dragging": false, - "height": 309, - "id": "ChatInput-jbtaD", - "position": { - "x": -493.6459512396177, - "y": 1083.200545525551 - }, - "positionAbsolute": { - "x": -493.6459512396177, - "y": 1083.200545525551 - }, - "selected": false, - "type": "genericNode", - "width": 384 - }, - { - "data": { - "description": "Create a prompt template with dynamic variables.", - "display_name": "Prompt", - "id": "Prompt-0SBd6", - "node": { - "base_classes": [ - "Message" - ], - "beta": false, - "conditional_paths": [], - "custom_fields": { - "template": [ - "user_input" - ] - }, - "description": "Create a prompt template with dynamic variables.", - "display_name": "Prompt", - "documentation": "", - "edited": false, - "field_order": [ - "template" - ], - "frozen": false, - "icon": "prompts", - "output_types": [], - "outputs": [ - { - "cache": true, - "display_name": "Prompt Message", - "method": "build_prompt", - "name": "prompt", - "selected": "Message", - "types": [ - "Message" - ], - "value": "__UNDEFINED__" - } - ], - "pinned": false, - "template": { - "_type": "Component", - "code": { - "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", - "info": "", - "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, - "placeholder": "", - "required": true, - "show": true, - "title_case": false, - "type": "code", - "value": "from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.inputs.inputs import DefaultPromptField\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(\n self,\n ) -> Message:\n prompt = await Message.from_template_and_variables(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def _update_template(self, frontend_node: dict):\n prompt_template = frontend_node[\"template\"][\"template\"][\"value\"]\n custom_fields = frontend_node[\"custom_fields\"]\n frontend_node_template = frontend_node[\"template\"]\n _ = process_prompt_template(\n template=prompt_template,\n name=\"template\",\n custom_fields=custom_fields,\n frontend_node_template=frontend_node_template,\n )\n return frontend_node\n\n def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"\n This 
function is called after the code validation is done.\n \"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n # Kept it duplicated for backwards compatibility\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n\n def _get_fallback_input(self, **kwargs):\n return DefaultPromptField(**kwargs)\n" - }, - "template": { - "advanced": false, - "display_name": "Template", - "dynamic": false, - "info": "", - "list": false, - "load_from_db": false, - "name": "template", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "type": "prompt", - "value": "Answer the user as if you were a pirate.\n\nUser: {user_input}\n\nAnswer: " - }, - "user_input": { - "advanced": false, - "display_name": "user_input", - "dynamic": false, - "field_type": "str", - "fileTypes": [], - "file_path": "", - "info": "", - "input_types": [ - "Message", - "Text" - ], - "list": false, - "load_from_db": false, - "multiline": true, - "name": "user_input", - "password": false, - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "type": "str", - "value": "" - } - } - }, - "type": "Prompt" - }, - "dragging": false, - "height": 423, - "id": "Prompt-0SBd6", - "position": { - "x": 56.354011530798516, - "y": 1157.2005405164796 - }, - "positionAbsolute": { - "x": 56.354011530798516, - "y": 1157.2005405164796 - }, - "selected": false, - "type": "genericNode", - "width": 384 - }, - { - "data": { - "description": "Display a chat message in the Playground.", - "display_name": "Chat Output", - "id": "ChatOutput-WG5tg", - "node": { - "base_classes": [ - "Message" - ], - "beta": false, - "conditional_paths": [], - "custom_fields": {}, - "description": "Display a chat message in the Playground.", - "display_name": "Chat Output", - "documentation": "", - "edited": false, - "field_order": [ - "input_value", - "store_message", - "sender", - "sender_name", - "session_id", - "data_template" - ], - "frozen": false, - "icon": "ChatOutput", - "output_types": [], - "outputs": [ - { - "cache": true, - "display_name": "Message", - "method": "message_response", - "name": "message", - "selected": "Message", - "types": [ - "Message" - ], - "value": "__UNDEFINED__" - } - ], - "pinned": false, - "template": { - "_type": "Component", - "code": { - "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", - "info": "", - "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, - "placeholder": "", - "required": true, - "show": true, - "title_case": false, - "type": "code", - "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER, MESSAGE_SENDER_AI\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description 
= \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n" - }, - "data_template": { - "advanced": true, - "display_name": "Data Template", - "dynamic": false, - "info": "Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "name": "data_template", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "{text}" - }, - "input_value": { - "advanced": false, - "display_name": "Text", - "dynamic": false, - "info": "Message to be passed as output.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "name": "input_value", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "sender": { - "advanced": true, - "display_name": "Sender Type", - "dynamic": false, - "info": "Type of sender.", - "name": "sender", - "options": [ - "Machine", - "User" - ], - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "str", - "value": "Machine" - }, - "sender_name": { - "advanced": true, - "display_name": "Sender Name", - "dynamic": false, - "info": "Name of the sender.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "name": "sender_name", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "AI" - }, - "session_id": { - "advanced": true, - "display_name": "Session ID", - "dynamic": false, - "info": "The session ID of the chat. 
If empty, the current session ID parameter will be used.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "name": "session_id", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "should_store_message": { - "_input_type": "BoolInput", - "advanced": true, - "display_name": "Store Messages", - "dynamic": false, - "info": "Store the message in the history.", - "list": false, - "name": "should_store_message", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": true - } - } - }, - "type": "ChatOutput" - }, - "dragging": false, - "height": 309, - "id": "ChatOutput-WG5tg", - "position": { - "x": 1219.477374823274, - "y": 1200.950216973985 - }, - "positionAbsolute": { - "x": 1219.477374823274, - "y": 1200.950216973985 - }, - "selected": false, - "type": "genericNode", - "width": 384 - }, - { - "data": { - "description": "Generates text using OpenAI LLMs.", - "display_name": "OpenAI", - "id": "OpenAIModel-HBuxy", - "node": { - "base_classes": [ - "LanguageModel", - "Message" - ], - "beta": false, - "conditional_paths": [], - "custom_fields": {}, - "description": "Generates text using OpenAI LLMs.", - "display_name": "OpenAI", - "documentation": "", - "edited": false, - "field_order": [ - "input_value", - "max_tokens", - "model_kwargs", - "json_mode", - "output_schema", - "model_name", - "openai_api_base", - "openai_api_key", - "temperature", - "stream", - "system_message", - "seed" - ], - "frozen": false, - "icon": "OpenAI", - "output_types": [], - "outputs": [ - { - "cache": true, - "display_name": "Text", - "method": "text_response", - "name": "text_output", - "selected": "Message", - "types": [ - "Message" - ], - "value": "__UNDEFINED__" - }, - { - "cache": true, - "display_name": "Language Model", - "method": "build_model", - "name": "model_output", - "selected": "LanguageModel", - "types": [ - "LanguageModel" - ], - "value": "__UNDEFINED__" - } - ], - "pinned": false, - "template": { - "_type": "Component", - "api_key": { - "_input_type": "SecretStrInput", - "advanced": false, - "display_name": "OpenAI API Key", - "dynamic": false, - "info": "The OpenAI API Key to use for the OpenAI model.", - "input_types": [ - "Message" - ], - "load_from_db": true, - "name": "api_key", - "password": true, - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "type": "str", - "value": "OPENAI_API_KEY" - }, - "code": { - "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", - "info": "", - "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, - "placeholder": "", - "required": true, - "show": true, - "title_case": false, - "type": "code", - "value": "import operator\nfrom functools import reduce\n\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = 
\"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature if temperature is not None else 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output # type: ignore\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" - }, - "input_value": { - "advanced": false, - "display_name": "Input", - "dynamic": false, - "info": "", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "name": "input_value", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - 
"type": "str", - "value": "" - }, - "json_mode": { - "advanced": true, - "display_name": "JSON Mode", - "dynamic": false, - "info": "If True, it will output JSON regardless of passing a schema.", - "list": false, - "name": "json_mode", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": false - }, - "max_tokens": { - "advanced": true, - "display_name": "Max Tokens", - "dynamic": false, - "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.", - "list": false, - "name": "max_tokens", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "int", - "value": "" - }, - "model_kwargs": { - "advanced": true, - "display_name": "Model Kwargs", - "dynamic": false, - "info": "", - "list": false, - "name": "model_kwargs", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "type": "dict", - "value": {} - }, - "model_name": { - "advanced": false, - "display_name": "Model Name", - "dynamic": false, - "info": "", - "name": "model_name", - "options": [ - "gpt-4o-mini", - "gpt-4o", - "gpt-4-turbo", - "gpt-4-turbo-preview", - "gpt-4", - "gpt-3.5-turbo", - "gpt-3.5-turbo-0125" - ], - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "str", - "value": "gpt-4o" - }, - "openai_api_base": { - "advanced": true, - "display_name": "OpenAI API Base", - "dynamic": false, - "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.", - "list": false, - "load_from_db": false, - "name": "openai_api_base", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "output_schema": { - "advanced": true, - "display_name": "Schema", - "dynamic": false, - "info": "The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.", - "list": true, - "name": "output_schema", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "type": "dict", - "value": {} - }, - "seed": { - "advanced": true, - "display_name": "Seed", - "dynamic": false, - "info": "The seed controls the reproducibility of the job.", - "list": false, - "name": "seed", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "int", - "value": 1 - }, - "stream": { - "advanced": true, - "display_name": "Stream", - "dynamic": false, - "info": "Stream the response from the model. 
Streaming works only in Chat.", - "list": false, - "name": "stream", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": false - }, - "system_message": { - "advanced": true, - "display_name": "System Message", - "dynamic": false, - "info": "System message to pass to the model.", - "list": false, - "load_from_db": false, - "name": "system_message", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "temperature": { - "advanced": false, - "display_name": "Temperature", - "dynamic": false, - "info": "", - "list": false, - "name": "temperature", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "float", - "value": 0.1 - } - } - }, - "type": "OpenAIModel" - }, - "dragging": false, - "height": 623, - "id": "OpenAIModel-HBuxy", - "position": { - "x": 664.0296638933031, - "y": 1026.5966174731725 - }, - "positionAbsolute": { - "x": 664.0296638933031, - "y": 1026.5966174731725 - }, - "selected": false, - "type": "genericNode", - "width": 384 - } - ], - "viewport": { - "x": 427.12410642709614, - "y": -361.39815091467085, - "zoom": 0.5562299357713679 - } - }, - "description": "This flow will get you experimenting with the basics of the UI, the Chat and the Prompt component. \n\nTry changing the Template in it to see how the model behaves. \nYou can change it to this and a Text Input into the `type_of_person` variable : \"Answer the user as if you were a pirate.\n\nUser: {user_input}\n\nAnswer: \" ", - "endpoint_name": null, - "id": "b0e19aab-2095-41ee-b91c-1168790cc68b", - "is_component": false, - "last_tested_version": "1.0.9", - "name": "Basic Prompting (Hello, World)" -} \ No newline at end of file diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Basic Prompting.json b/src/backend/base/langflow/initial_setup/starter_projects/Basic Prompting.json new file mode 100644 index 000000000000..ea71c7b74df9 --- /dev/null +++ b/src/backend/base/langflow/initial_setup/starter_projects/Basic Prompting.json @@ -0,0 +1,1061 @@ +{ + "data": { + "edges": [ + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "Prompt", + "id": "Prompt-euzzD", + "name": "prompt", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "system_message", + "id": "OpenAIModel-mMCO4", + "inputTypes": ["Message"], + "type": "str" + } + }, + "id": "reactflow__edge-Prompt-euzzD{œdataTypeœ:œPromptœ,œidœ:œPrompt-euzzDœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-mMCO4{œfieldNameœ:œsystem_messageœ,œidœ:œOpenAIModel-mMCO4œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "Prompt-euzzD", + "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-euzzDœ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", + "target": "OpenAIModel-mMCO4", + "targetHandle": "{œfieldNameœ: œsystem_messageœ, œidœ: œOpenAIModel-mMCO4œ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "ChatInput", + "id": "ChatInput-M2ktx", + "name": "message", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "input_value", + "id": "OpenAIModel-mMCO4", + "inputTypes": ["Message"], + "type": "str" + } + }, + "id": 
"reactflow__edge-ChatInput-M2ktx{œdataTypeœ:œChatInputœ,œidœ:œChatInput-M2ktxœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-mMCO4{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-mMCO4œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "ChatInput-M2ktx", + "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-M2ktxœ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}", + "target": "OpenAIModel-mMCO4", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œOpenAIModel-mMCO4œ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + }, + { + "className": "", + "data": { + "sourceHandle": { + "dataType": "OpenAIModel", + "id": "OpenAIModel-mMCO4", + "name": "text_output", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "input_value", + "id": "ChatOutput-1s4P8", + "inputTypes": ["Message"], + "type": "str" + } + }, + "id": "reactflow__edge-OpenAIModel-mMCO4{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-mMCO4œ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-1s4P8{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-1s4P8œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "OpenAIModel-mMCO4", + "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-mMCO4œ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}", + "target": "ChatOutput-1s4P8", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-1s4P8œ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + } + ], + "nodes": [ + { + "data": { + "description": "Get chat inputs from the Playground.", + "display_name": "Chat Input", + "id": "ChatInput-M2ktx", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Get chat inputs from the Playground.", + "display_name": "Chat Input", + "documentation": "", + "edited": false, + "field_order": [ + "input_value", + "store_message", + "sender", + "sender_name", + "session_id", + "files" + ], + "frozen": false, + "icon": "MessagesSquare", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Message", + "method": "message_response", + "name": "message", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "background_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Background Color", + "dynamic": false, + "info": "The background color of the icon.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "background_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "chat_icon": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Icon", + "dynamic": false, + "info": "The icon of the message.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "chat_icon", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.data.utils import 
IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_USER, MESSAGE_SENDER_USER\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n _background_color = self.background_color\n _text_color = self.text_color\n _icon = self.chat_icon\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\"background_color\": _background_color, \"text_color\": _text_color, \"icon\": _icon},\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n" + }, + "files": { + "advanced": true, + "display_name": "Files", + "dynamic": false, + "fileTypes": [ + "txt", + "md", + "mdx", + "csv", + "json", + "yaml", + "yml", + "xml", + "html", + "htm", + "pdf", + "docx", + "py", + "sh", + "sql", + "js", + "ts", + "tsx", + "jpg", + "jpeg", + "png", + "bmp", + "image" + ], + "file_path": "", + "info": "Files to be sent with the message.", + "list": true, + "name": "files", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "file", + "value": "" + }, + "input_value": { + "advanced": false, + "display_name": "Text", + "dynamic": false, + "info": "Message to be passed as input.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": 
"input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "Hello" + }, + "sender": { + "advanced": true, + "display_name": "Sender Type", + "dynamic": false, + "info": "Type of sender.", + "name": "sender", + "options": ["Machine", "User"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "str", + "value": "User" + }, + "sender_name": { + "advanced": true, + "display_name": "Sender Name", + "dynamic": false, + "info": "Name of the sender.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "sender_name", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "User" + }, + "session_id": { + "advanced": true, + "display_name": "Session ID", + "dynamic": false, + "info": "The session ID of the chat. If empty, the current session ID parameter will be used.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "session_id", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "should_store_message": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Store Messages", + "dynamic": false, + "info": "Store the message in the history.", + "list": false, + "name": "should_store_message", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "text_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Text Color", + "dynamic": false, + "info": "The text color of the name", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "text_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + } + } + }, + "type": "ChatInput" + }, + "dragging": false, + "height": 234, + "id": "ChatInput-M2ktx", + "position": { + "x": 689.5720422421635, + "y": 765.155834131403 + }, + "positionAbsolute": { + "x": 689.5720422421635, + "y": 765.155834131403 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt", + "id": "Prompt-euzzD", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": { + "template": [] + }, + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt", + "documentation": "", + "edited": false, + "field_order": ["template"], + "frozen": false, + "icon": "prompts", + "legacy": false, + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Prompt Message", + "method": "build_prompt", + "name": "prompt", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": 
true, + "title_case": false, + "type": "code", + "value": "from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.inputs.inputs import DefaultPromptField\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(self) -> Message:\n prompt = Message.from_template(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def _update_template(self, frontend_node: dict):\n prompt_template = frontend_node[\"template\"][\"template\"][\"value\"]\n custom_fields = frontend_node[\"custom_fields\"]\n frontend_node_template = frontend_node[\"template\"]\n _ = process_prompt_template(\n template=prompt_template,\n name=\"template\",\n custom_fields=custom_fields,\n frontend_node_template=frontend_node_template,\n )\n return frontend_node\n\n def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"This function is called after the code validation is done.\"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n # Kept it duplicated for backwards compatibility\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n\n def _get_fallback_input(self, **kwargs):\n return DefaultPromptField(**kwargs)\n" + }, + "template": { + "_input_type": "PromptInput", + "advanced": false, + "display_name": "Template", + "dynamic": false, + "info": "", + "list": false, + "load_from_db": false, + "name": "template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "type": "prompt", + "value": "Answer the user as if you were a pirate." + } + }, + "tool_mode": false + }, + "type": "Prompt" + }, + "dragging": false, + "height": 260, + "id": "Prompt-euzzD", + "position": { + "x": 690.2015147036818, + "y": 1018.5443911764344 + }, + "positionAbsolute": { + "x": 690.2015147036818, + "y": 1018.5443911764344 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "id": "undefined-emdQy", + "node": { + "description": "### ✅ Basic System Prompting README\n\nExperiment with AI behavior control using system prompts. 
\n\n#### Component Overview\n- **Chat Input:** User message entry point\n- **System Message:** Sets AI personality/behavior\n- **OpenAI Model:** Processes both prompts and generates responses\n- **Chat Output:** Displays the AI response in the Playground\n\n#### Quick Start\n- Add your **OpenAI API key** to the **OpenAI Model**\n- Modify the **System Prompt** template to change AI behavior\n- Use the **Playground** to start chatting\n\nThe default prompt makes the AI respond like a pirate! Try changing it to create different AI personalities.\n\nFor more details, check the [system prompting guide](https://docs.langflow.org/guides/system-prompting).", + "display_name": "Read Me", + "documentation": "", + "template": { + "backgroundColor": "blue" + } + } + }, + "dragging": false, + "height": 561, + "id": "undefined-emdQy", + "position": { + "x": 66.38770028934243, + "y": 749.744424427066 + }, + "positionAbsolute": { + "x": 66.38770028934243, + "y": 749.744424427066 + }, + "resizing": false, + "selected": true, + "style": { + "height": 561, + "width": 600 + }, + "type": "noteNode", + "width": 600 + }, + { + "data": { + "description": "Generates text using OpenAI LLMs.", + "display_name": "OpenAI", + "id": "OpenAIModel-mMCO4", + "node": { + "base_classes": ["LanguageModel", "Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Generates text using OpenAI LLMs.", + "display_name": "OpenAI", + "documentation": "", + "edited": false, + "field_order": [ + "input_value", + "system_message", + "stream", + "max_tokens", + "model_kwargs", + "json_mode", + "output_schema", + "model_name", + "openai_api_base", + "api_key", + "temperature", + "seed", + "output_parser" + ], + "frozen": false, + "icon": "OpenAI", + "legacy": false, + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Text", + "method": "text_response", + "name": "text_output", + "required_inputs": [], + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + }, + { + "cache": true, + "display_name": "Language Model", + "method": "build_model", + "name": "model_output", + "required_inputs": [], + "selected": "LanguageModel", + "types": ["LanguageModel"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "api_key": { + "_input_type": "SecretStrInput", + "advanced": false, + "display_name": "OpenAI API Key", + "dynamic": false, + "info": "The OpenAI API Key to use for the OpenAI model.", + "input_types": ["Message"], + "load_from_db": true, + "name": "api_key", + "password": true, + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "OPENAI_API_KEY" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "import operator\nfrom functools import reduce\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langflow.inputs import BoolInput, DictInput, DropdownInput, FloatInput, IntInput, SecretStrInput, StrInput\nfrom 
langflow.inputs.inputs import HandleInput\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = [\n *LCModelComponent._base_inputs,\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(\n name=\"model_kwargs\",\n display_name=\"Model Kwargs\",\n advanced=True,\n info=\"Additional keyword arguments to pass to the model.\",\n ),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. \"\n \"You must pass the word JSON in the prompt. \"\n \"If left blank, JSON mode will be disabled. [DEPRECATED]\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. \"\n \"Defaults to https://api.openai.com/v1. \"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n HandleInput(\n name=\"output_parser\",\n display_name=\"Output Parser\",\n info=\"The parser to use to parse the output of the model\",\n advanced=True,\n input_types=[\"OutputParser\"],\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n api_key = SecretStr(openai_api_key).get_secret_value() if openai_api_key else None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature if temperature is not None else 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\")\n else:\n output = output.bind(response_format={\"type\": \"json_object\"})\n\n return output\n\n def _get_exception_message(self, e: Exception):\n \"\"\"Get a message from an OpenAI exception.\n\n Args:\n e (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n try:\n from openai import BadRequestError\n except ImportError:\n return None\n if isinstance(e, 
BadRequestError):\n message = e.body.get(\"message\")\n if message:\n return message\n return None\n" + }, + "input_value": { + "_input_type": "MessageInput", + "advanced": false, + "display_name": "Input", + "dynamic": false, + "info": "", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "json_mode": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "JSON Mode", + "dynamic": false, + "info": "If True, it will output JSON regardless of passing a schema.", + "list": false, + "name": "json_mode", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + }, + "max_tokens": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Max Tokens", + "dynamic": false, + "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.", + "list": false, + "name": "max_tokens", + "placeholder": "", + "range_spec": { + "max": 128000, + "min": 0, + "step": 0.1, + "step_type": "float" + }, + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": "" + }, + "model_kwargs": { + "_input_type": "DictInput", + "advanced": true, + "display_name": "Model Kwargs", + "dynamic": false, + "info": "Additional keyword arguments to pass to the model.", + "list": false, + "name": "model_kwargs", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "model_name": { + "_input_type": "DropdownInput", + "advanced": false, + "combobox": false, + "display_name": "Model Name", + "dynamic": false, + "info": "", + "name": "model_name", + "options": [ + "gpt-4o-mini", + "gpt-4o", + "gpt-4-turbo", + "gpt-4-turbo-preview", + "gpt-4", + "gpt-3.5-turbo", + "gpt-3.5-turbo-0125" + ], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "gpt-4o-mini" + }, + "openai_api_base": { + "_input_type": "StrInput", + "advanced": true, + "display_name": "OpenAI API Base", + "dynamic": false, + "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.", + "list": false, + "load_from_db": false, + "name": "openai_api_base", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "output_parser": { + "_input_type": "HandleInput", + "advanced": true, + "display_name": "Output Parser", + "dynamic": false, + "info": "The parser to use to parse the output of the model", + "input_types": ["OutputParser"], + "list": false, + "name": "output_parser", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "output_schema": { + "_input_type": "DictInput", + "advanced": true, + "display_name": "Schema", + "dynamic": false, + "info": "The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled. 
[DEPRECATED]", + "list": true, + "name": "output_schema", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "seed": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Seed", + "dynamic": false, + "info": "The seed controls the reproducibility of the job.", + "list": false, + "name": "seed", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 1 + }, + "stream": { + "_input_type": "BoolInput", + "advanced": false, + "display_name": "Stream", + "dynamic": false, + "info": "Stream the response from the model. Streaming works only in Chat.", + "list": false, + "name": "stream", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + }, + "system_message": { + "_input_type": "MessageTextInput", + "advanced": false, + "display_name": "System Message", + "dynamic": false, + "info": "System message to pass to the model.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "system_message", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "temperature": { + "_input_type": "FloatInput", + "advanced": false, + "display_name": "Temperature", + "dynamic": false, + "info": "", + "list": false, + "name": "temperature", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "float", + "value": 0.1 + } + }, + "tool_mode": false + }, + "type": "OpenAIModel" + }, + "dragging": false, + "height": 630, + "id": "OpenAIModel-mMCO4", + "position": { + "x": 1081.0157946607428, + "y": 707.3740542546418 + }, + "positionAbsolute": { + "x": 1081.0157946607428, + "y": 707.3740542546418 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "id": "ChatOutput-1s4P8", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Display a chat message in the Playground.", + "display_name": "Chat Output", + "documentation": "", + "edited": false, + "field_order": [ + "input_value", + "should_store_message", + "sender", + "sender_name", + "session_id", + "data_template", + "background_color", + "chat_icon", + "text_color" + ], + "frozen": false, + "icon": "MessagesSquare", + "legacy": false, + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Message", + "method": "message_response", + "name": "message", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "background_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Background Color", + "dynamic": false, + "info": "The background color of the icon.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "background_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "chat_icon": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Icon", + "dynamic": false, + "info": "The icon of 
the message.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "chat_icon", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageInput, MessageTextInput, Output\nfrom langflow.schema.message import Message\nfrom langflow.schema.properties import Source\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.\",\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(\n display_name=\"Message\",\n name=\"message\",\n method=\"message_response\",\n ),\n ]\n\n def _build_source(self, _id: str | None, display_name: str | None, source: str | None) -> Source:\n source_dict = {}\n if _id:\n source_dict[\"id\"] = _id\n if display_name:\n source_dict[\"display_name\"] = display_name\n if source:\n source_dict[\"source\"] = source\n return Source(**source_dict)\n\n def message_response(self) -> Message:\n _source, _icon, _display_name, _source_id = self.get_properties_from_source_component()\n _background_color = self.background_color\n _text_color = self.text_color\n if self.chat_icon:\n _icon = self.chat_icon\n message = self.input_value if isinstance(self.input_value, Message) else Message(text=self.input_value)\n message.sender = self.sender\n message.sender_name = self.sender_name\n message.session_id = self.session_id\n message.flow_id = self.graph.flow_id if hasattr(self, \"graph\") else None\n message.properties.source = self._build_source(_source_id, _display_name, _source)\n message.properties.icon = _icon\n message.properties.background_color = _background_color\n message.properties.text_color = _text_color\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n" + }, + "data_template": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Data Template", + "dynamic": false, + "info": "Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "data_template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "{text}" + }, + "input_value": { + "_input_type": "MessageInput", + "advanced": false, + "display_name": "Text", + "dynamic": false, + "info": "Message to be passed as output.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "sender": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Sender Type", + "dynamic": false, + "info": "Type of sender.", + "name": "sender", + "options": ["Machine", "User"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "Machine" + }, + "sender_name": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Sender Name", + "dynamic": false, + "info": "Name of the sender.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "sender_name", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "AI" + }, + "session_id": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Session ID", + "dynamic": false, + "info": "The session ID of the chat. If empty, the current session ID parameter will be used.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "session_id", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "should_store_message": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Store Messages", + "dynamic": false, + "info": "Store the message in the history.", + "list": false, + "name": "should_store_message", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "text_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Text Color", + "dynamic": false, + "info": "The text color of the name", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "text_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + } + }, + "tool_mode": false + }, + "type": "ChatOutput" + }, + "dragging": false, + "height": 234, + "id": "ChatOutput-1s4P8", + "position": { + "x": 1444.936881624563, + "y": 872.7273956769025 + }, + "positionAbsolute": { + "x": 1444.936881624563, + "y": 872.7273956769025 + }, + "selected": false, + "type": "genericNode", + "width": 320 + } + ], + "viewport": { + "x": -21.631817700819965, + "y": -334.5576887147924, + "zoom": 0.7749929474098888 + } + }, + "description": "Get started with a simple prompt engineering flow. 
Customize AI responses by adjusting the system prompt template to create varied personalities.", + "endpoint_name": null, + "icon": "Braces", + "id": "1511c230-d446-43a7-bfc3-539e69ce05b8", + "gradient": "2", + "is_component": false, + "last_tested_version": "1.0.19.post2", + "name": "Basic Prompting", + "tags": ["chatbots"] +} diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Blog Writer.json b/src/backend/base/langflow/initial_setup/starter_projects/Blog Writer.json index 24651b62089c..8797902df058 100644 --- a/src/backend/base/langflow/initial_setup/starter_projects/Blog Writer.json +++ b/src/backend/base/langflow/initial_setup/starter_projects/Blog Writer.json @@ -2,136 +2,119 @@ "data": { "edges": [ { + "animated": false, "className": "", "data": { "sourceHandle": { "dataType": "URL", - "id": "URL-76lwY", + "id": "URL-ewId1", "name": "data", - "output_types": [ - "Data" - ] + "output_types": ["Data"] }, "targetHandle": { "fieldName": "data", - "id": "ParseData-jYhXf", - "inputTypes": [ - "Data" - ], + "id": "ParseData-Zwm0z", + "inputTypes": ["Data"], "type": "other" } }, - "id": "reactflow__edge-URL-76lwY{œdataTypeœ:œURLœ,œidœ:œURL-76lwYœ,œnameœ:œdataœ,œoutput_typesœ:[œDataœ]}-ParseData-jYhXf{œfieldNameœ:œdataœ,œidœ:œParseData-jYhXfœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}", - "source": "URL-76lwY", - "sourceHandle": "{œdataTypeœ: œURLœ, œidœ: œURL-76lwYœ, œnameœ: œdataœ, œoutput_typesœ: [œDataœ]}", - "target": "ParseData-jYhXf", - "targetHandle": "{œfieldNameœ: œdataœ, œidœ: œParseData-jYhXfœ, œinputTypesœ: [œDataœ], œtypeœ: œotherœ}" + "id": "reactflow__edge-URL-ewId1{œdataTypeœ:œURLœ,œidœ:œURL-ewId1œ,œnameœ:œdataœ,œoutput_typesœ:[œDataœ]}-ParseData-Zwm0z{œfieldNameœ:œdataœ,œidœ:œParseData-Zwm0zœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}", + "source": "URL-ewId1", + "sourceHandle": "{œdataTypeœ: œURLœ, œidœ: œURL-ewId1œ, œnameœ: œdataœ, œoutput_typesœ: [œDataœ]}", + "target": "ParseData-Zwm0z", + "targetHandle": "{œfieldNameœ: œdataœ, œidœ: œParseData-Zwm0zœ, œinputTypesœ: [œDataœ], œtypeœ: œotherœ}" }, { + "animated": false, "className": "", "data": { "sourceHandle": { "dataType": "ParseData", - "id": "ParseData-jYhXf", + "id": "ParseData-Zwm0z", "name": "text", - "output_types": [ - "Message" - ] + "output_types": ["Message"] }, "targetHandle": { "fieldName": "references", - "id": "Prompt-ABI8S", - "inputTypes": [ - "Message", - "Text" - ], + "id": "Prompt-WTVXx", + "inputTypes": ["Message", "Text"], "type": "str" } }, - "id": "reactflow__edge-ParseData-jYhXf{œdataTypeœ:œParseDataœ,œidœ:œParseData-jYhXfœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-Prompt-ABI8S{œfieldNameœ:œreferencesœ,œidœ:œPrompt-ABI8Sœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", - "source": "ParseData-jYhXf", - "sourceHandle": "{œdataTypeœ: œParseDataœ, œidœ: œParseData-jYhXfœ, œnameœ: œtextœ, œoutput_typesœ: [œMessageœ]}", - "target": "Prompt-ABI8S", - "targetHandle": "{œfieldNameœ: œreferencesœ, œidœ: œPrompt-ABI8Sœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-ParseData-Zwm0z{œdataTypeœ:œParseDataœ,œidœ:œParseData-Zwm0zœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-Prompt-WTVXx{œfieldNameœ:œreferencesœ,œidœ:œPrompt-WTVXxœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", + "source": "ParseData-Zwm0z", + "sourceHandle": "{œdataTypeœ: œParseDataœ, œidœ: œParseData-Zwm0zœ, œnameœ: œtextœ, œoutput_typesœ: [œMessageœ]}", + "target": "Prompt-WTVXx", + "targetHandle": "{œfieldNameœ: œreferencesœ, œidœ: œPrompt-WTVXxœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" }, { 
+ "animated": false, "className": "", "data": { "sourceHandle": { "dataType": "TextInput", - "id": "TextInput-jiXJB", + "id": "TextInput-PAceh", "name": "text", - "output_types": [ - "Message" - ] + "output_types": ["Message"] }, "targetHandle": { "fieldName": "instructions", - "id": "Prompt-ABI8S", - "inputTypes": [ - "Message", - "Text" - ], + "id": "Prompt-WTVXx", + "inputTypes": ["Message", "Text"], "type": "str" } }, - "id": "reactflow__edge-TextInput-jiXJB{œdataTypeœ:œTextInputœ,œidœ:œTextInput-jiXJBœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-Prompt-ABI8S{œfieldNameœ:œinstructionsœ,œidœ:œPrompt-ABI8Sœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", - "source": "TextInput-jiXJB", - "sourceHandle": "{œdataTypeœ: œTextInputœ, œidœ: œTextInput-jiXJBœ, œnameœ: œtextœ, œoutput_typesœ: [œMessageœ]}", - "target": "Prompt-ABI8S", - "targetHandle": "{œfieldNameœ: œinstructionsœ, œidœ: œPrompt-ABI8Sœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-TextInput-PAceh{œdataTypeœ:œTextInputœ,œidœ:œTextInput-PAcehœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-Prompt-WTVXx{œfieldNameœ:œinstructionsœ,œidœ:œPrompt-WTVXxœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", + "source": "TextInput-PAceh", + "sourceHandle": "{œdataTypeœ: œTextInputœ, œidœ: œTextInput-PAcehœ, œnameœ: œtextœ, œoutput_typesœ: [œMessageœ]}", + "target": "Prompt-WTVXx", + "targetHandle": "{œfieldNameœ: œinstructionsœ, œidœ: œPrompt-WTVXxœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" }, { + "animated": false, "className": "", "data": { "sourceHandle": { "dataType": "Prompt", - "id": "Prompt-ABI8S", + "id": "Prompt-WTVXx", "name": "prompt", - "output_types": [ - "Message" - ] + "output_types": ["Message"] }, "targetHandle": { "fieldName": "input_value", - "id": "OpenAIModel-JBO2p", - "inputTypes": [ - "Message" - ], + "id": "OpenAIModel-s4uQ3", + "inputTypes": ["Message"], "type": "str" } }, - "id": "reactflow__edge-Prompt-ABI8S{œdataTypeœ:œPromptœ,œidœ:œPrompt-ABI8Sœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-JBO2p{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-JBO2pœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", - "source": "Prompt-ABI8S", - "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-ABI8Sœ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", - "target": "OpenAIModel-JBO2p", - "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œOpenAIModel-JBO2pœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-Prompt-WTVXx{œdataTypeœ:œPromptœ,œidœ:œPrompt-WTVXxœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-s4uQ3{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-s4uQ3œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "Prompt-WTVXx", + "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-WTVXxœ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", + "target": "OpenAIModel-s4uQ3", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œOpenAIModel-s4uQ3œ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" }, { + "animated": false, "className": "", "data": { "sourceHandle": { "dataType": "OpenAIModel", - "id": "OpenAIModel-JBO2p", + "id": "OpenAIModel-s4uQ3", "name": "text_output", - "output_types": [ - "Message" - ] + "output_types": ["Message"] }, "targetHandle": { "fieldName": "input_value", - "id": "ChatOutput-uaX6T", - "inputTypes": [ - "Message" - ], + "id": "ChatOutput-uL64L", + "inputTypes": ["Message"], "type": "str" } }, - "id": 
"reactflow__edge-OpenAIModel-JBO2p{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-JBO2pœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-uaX6T{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-uaX6Tœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", - "source": "OpenAIModel-JBO2p", - "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-JBO2pœ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}", - "target": "ChatOutput-uaX6T", - "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-uaX6Tœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-OpenAIModel-s4uQ3{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-s4uQ3œ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-uL64L{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-uL64Lœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "OpenAIModel-s4uQ3", + "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-s4uQ3œ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}", + "target": "ChatOutput-uL64L", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-uL64Lœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" } ], "nodes": [ @@ -139,11 +122,9 @@ "data": { "description": "Fetch content from one or more URLs.", "display_name": "URL", - "id": "URL-76lwY", + "id": "URL-ewId1", "node": { - "base_classes": [ - "Data" - ], + "base_classes": ["Data"], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -151,11 +132,12 @@ "display_name": "URL", "documentation": "", "edited": false, - "field_order": [ - "urls" - ], + "field_order": ["urls"], "frozen": false, "icon": "layout-template", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, "output_types": [], "outputs": [ { @@ -164,9 +146,16 @@ "method": "fetch_content", "name": "data", "selected": "Data", - "types": [ - "Data" - ], + "types": ["Data"], + "value": "__UNDEFINED__" + }, + { + "cache": true, + "display_name": "Text", + "method": "fetch_content_text", + "name": "text", + "selected": "Message", + "types": ["Message"], "value": "__UNDEFINED__" } ], @@ -189,16 +178,31 @@ "show": true, "title_case": false, "type": "code", - "value": "import re\n\nfrom langchain_community.document_loaders.web_base import WebBaseLoader\n\nfrom langflow.custom import Component\nfrom langflow.io import MessageTextInput, Output\nfrom langflow.schema import Data\n\n\nclass URLComponent(Component):\n display_name = \"URL\"\n description = \"Fetch content from one or more URLs.\"\n icon = \"layout-template\"\n name = \"URL\"\n\n inputs = [\n MessageTextInput(\n name=\"urls\",\n display_name=\"URLs\",\n info=\"Enter one or more URLs, by clicking the '+' button.\",\n is_list=True,\n ),\n ]\n\n outputs = [\n Output(display_name=\"Data\", name=\"data\", method=\"fetch_content\"),\n ]\n\n def ensure_url(self, string: str) -> str:\n \"\"\"\n Ensures the given string is a URL by adding 'http://' if it doesn't start with 'http://' or 'https://'.\n Raises an error if the string is not a valid URL.\n\n Parameters:\n string (str): The string to be checked and possibly modified.\n\n Returns:\n str: The modified string that is ensured to be a URL.\n\n Raises:\n ValueError: If the string is not a valid URL.\n \"\"\"\n if not string.startswith((\"http://\", \"https://\")):\n string = \"http://\" + string\n\n # Basic URL validation regex\n url_regex = re.compile(\n r\"^(https?:\\/\\/)?\" # optional protocol\n r\"(www\\.)?\" # optional www\n r\"([a-zA-Z0-9.-]+)\" # domain\n r\"(\\.[a-zA-Z]{2,})?\" # top-level domain\n r\"(:\\d+)?\" # optional port\n r\"(\\/[^\\s]*)?$\", # 
optional path\n re.IGNORECASE,\n )\n\n if not url_regex.match(string):\n raise ValueError(f\"Invalid URL: {string}\")\n\n return string\n\n def fetch_content(self) -> list[Data]:\n urls = [self.ensure_url(url.strip()) for url in self.urls if url.strip()]\n loader = WebBaseLoader(web_paths=urls, encoding=\"utf-8\")\n docs = loader.load()\n data = [Data(text=doc.page_content, **doc.metadata) for doc in docs]\n self.status = data\n return data\n" + "value": "import re\n\nfrom langchain_community.document_loaders import AsyncHtmlLoader, WebBaseLoader\n\nfrom langflow.custom import Component\nfrom langflow.helpers.data import data_to_text\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.schema import Data\nfrom langflow.schema.message import Message\n\n\nclass URLComponent(Component):\n display_name = \"URL\"\n description = \"Fetch content from one or more URLs.\"\n icon = \"layout-template\"\n name = \"URL\"\n\n inputs = [\n MessageTextInput(\n name=\"urls\",\n display_name=\"URLs\",\n info=\"Enter one or more URLs, by clicking the '+' button.\",\n is_list=True,\n tool_mode=True,\n ),\n DropdownInput(\n name=\"format\",\n display_name=\"Output format\",\n info=\"Output format. Use 'Text' to extract the text from the HTML or 'Raw HTML' for the raw HTML content.\",\n options=[\"Text\", \"Raw HTML\"],\n value=\"Text\",\n ),\n ]\n\n outputs = [\n Output(display_name=\"Data\", name=\"data\", method=\"fetch_content\"),\n Output(display_name=\"Text\", name=\"text\", method=\"fetch_content_text\"),\n ]\n\n def ensure_url(self, string: str) -> str:\n \"\"\"Ensures the given string is a URL by adding 'http://' if it doesn't start with 'http://' or 'https://'.\n\n Raises an error if the string is not a valid URL.\n\n Parameters:\n string (str): The string to be checked and possibly modified.\n\n Returns:\n str: The modified string that is ensured to be a URL.\n\n Raises:\n ValueError: If the string is not a valid URL.\n \"\"\"\n if not string.startswith((\"http://\", \"https://\")):\n string = \"http://\" + string\n\n # Basic URL validation regex\n url_regex = re.compile(\n r\"^(https?:\\/\\/)?\" # optional protocol\n r\"(www\\.)?\" # optional www\n r\"([a-zA-Z0-9.-]+)\" # domain\n r\"(\\.[a-zA-Z]{2,})?\" # top-level domain\n r\"(:\\d+)?\" # optional port\n r\"(\\/[^\\s]*)?$\", # optional path\n re.IGNORECASE,\n )\n\n if not url_regex.match(string):\n msg = f\"Invalid URL: {string}\"\n raise ValueError(msg)\n\n return string\n\n def fetch_content(self) -> list[Data]:\n urls = [self.ensure_url(url.strip()) for url in self.urls if url.strip()]\n if self.format == \"Raw HTML\":\n loader = AsyncHtmlLoader(web_path=urls, encoding=\"utf-8\")\n else:\n loader = WebBaseLoader(web_paths=urls, encoding=\"utf-8\")\n docs = loader.load()\n data = [Data(text=doc.page_content, **doc.metadata) for doc in docs]\n self.status = data\n return data\n\n def fetch_content_text(self) -> Message:\n data = self.fetch_content()\n\n result_string = data_to_text(\"{text}\", data)\n self.status = result_string\n return Message(text=result_string)\n" + }, + "format": { + "_input_type": "DropdownInput", + "advanced": false, + "combobox": false, + "display_name": "Output format", + "dynamic": false, + "info": "Output format. 
Use 'Text' to extract the text from the HTML or 'Raw HTML' for the raw HTML content.", + "name": "format", + "options": ["Text", "Raw HTML"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "str", + "value": "Text" }, "urls": { "advanced": false, "display_name": "URLs", "dynamic": false, "info": "Enter one or more URLs, by clicking the '+' button.", - "input_types": [ - "Message" - ], + "input_types": ["Message"], "list": true, "load_from_db": false, "name": "urls", @@ -209,39 +213,34 @@ "trace_as_input": true, "trace_as_metadata": true, "type": "str", - "value": [ - "langflow.org/", - "docs.langflow.org/" - ] + "value": ["https://langflow.org/", "https://docs.langflow.org/"] } } }, "type": "URL" }, "dragging": false, - "height": 359, - "id": "URL-76lwY", + "height": 418, + "id": "URL-ewId1", "position": { - "x": 220.79156431407534, - "y": 498.8186168722667 + "x": 544.3302626182673, + "y": 495.2237424466647 }, "positionAbsolute": { - "x": 220.79156431407534, - "y": 498.8186168722667 + "x": 544.3302626182673, + "y": 495.2237424466647 }, "selected": false, "type": "genericNode", - "width": 384 + "width": 320 }, { "data": { "description": "Convert Data into plain text following a specified template.", "display_name": "Parse Data", - "id": "ParseData-jYhXf", + "id": "ParseData-Zwm0z", "node": { - "base_classes": [ - "Message" - ], + "base_classes": ["Message"], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -249,13 +248,12 @@ "display_name": "Parse Data", "documentation": "", "edited": false, - "field_order": [ - "data", - "template", - "sep" - ], + "field_order": ["data", "template", "sep"], "frozen": false, "icon": "braces", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, "output_types": [], "outputs": [ { @@ -264,9 +262,7 @@ "method": "parse_data", "name": "text", "selected": "Message", - "types": [ - "Message" - ], + "types": ["Message"], "value": "__UNDEFINED__" } ], @@ -289,16 +285,14 @@ "show": true, "title_case": false, "type": "code", - "value": "from langflow.custom import Component\nfrom langflow.helpers.data import data_to_text\nfrom langflow.io import DataInput, MultilineInput, Output, StrInput\nfrom langflow.schema.message import Message\n\n\nclass ParseDataComponent(Component):\n display_name = \"Parse Data\"\n description = \"Convert Data into plain text following a specified template.\"\n icon = \"braces\"\n name = \"ParseData\"\n\n inputs = [\n DataInput(name=\"data\", display_name=\"Data\", info=\"The data to convert to text.\"),\n MultilineInput(\n name=\"template\",\n display_name=\"Template\",\n info=\"The template to use for formatting the data. 
It can contain the keys {text}, {data} or any other key in the Data.\",\n value=\"{text}\",\n ),\n StrInput(name=\"sep\", display_name=\"Separator\", advanced=True, value=\"\\n\"),\n ]\n\n outputs = [\n Output(display_name=\"Text\", name=\"text\", method=\"parse_data\"),\n ]\n\n def parse_data(self) -> Message:\n data = self.data if isinstance(self.data, list) else [self.data]\n template = self.template\n\n result_string = data_to_text(template, data, sep=self.sep)\n self.status = result_string\n return Message(text=result_string)\n" + "value": "from langflow.custom import Component\nfrom langflow.helpers.data import data_to_text\nfrom langflow.io import DataInput, MultilineInput, Output, StrInput\nfrom langflow.schema.message import Message\n\n\nclass ParseDataComponent(Component):\n display_name = \"Parse Data\"\n description = \"Convert Data into plain text following a specified template.\"\n icon = \"braces\"\n name = \"ParseData\"\n\n inputs = [\n DataInput(name=\"data\", display_name=\"Data\", info=\"The data to convert to text.\"),\n MultilineInput(\n name=\"template\",\n display_name=\"Template\",\n info=\"The template to use for formatting the data. \"\n \"It can contain the keys {text}, {data} or any other key in the Data.\",\n value=\"{text}\",\n ),\n StrInput(name=\"sep\", display_name=\"Separator\", advanced=True, value=\"\\n\"),\n ]\n\n outputs = [\n Output(display_name=\"Text\", name=\"text\", method=\"parse_data\"),\n ]\n\n def parse_data(self) -> Message:\n data = self.data if isinstance(self.data, list) else [self.data]\n template = self.template\n\n result_string = data_to_text(template, data, sep=self.sep)\n self.status = result_string\n return Message(text=result_string)\n" }, "data": { "advanced": false, "display_name": "Data", "dynamic": false, "info": "The data to convert to text.", - "input_types": [ - "Data" - ], + "input_types": ["Data"], "list": false, "name": "data", "placeholder": "", @@ -331,9 +325,7 @@ "display_name": "Template", "dynamic": false, "info": "The template to use for formatting the data. 
It can contain the keys {text}, {data} or any other key in the Data.", - "input_types": [ - "Message" - ], + "input_types": ["Message"], "list": false, "load_from_db": false, "multiline": true, @@ -352,46 +344,42 @@ "type": "ParseData" }, "dragging": false, - "height": 385, - "id": "ParseData-jYhXf", + "height": 302, + "id": "ParseData-Zwm0z", "position": { - "x": 754.3607306709101, - "y": 736.8516961537598 + "x": 955.6736985046297, + "y": 702.7003891105396 }, "positionAbsolute": { - "x": 754.3607306709101, - "y": 736.8516961537598 + "x": 955.6736985046297, + "y": 702.7003891105396 }, "selected": false, "type": "genericNode", - "width": 384 + "width": 320 }, { "data": { "description": "Create a prompt template with dynamic variables.", "display_name": "Prompt", - "id": "Prompt-ABI8S", + "id": "Prompt-WTVXx", "node": { - "base_classes": [ - "Message" - ], + "base_classes": ["Message"], "beta": false, "conditional_paths": [], "custom_fields": { - "template": [ - "references", - "instructions" - ] + "template": ["references", "instructions"] }, "description": "Create a prompt template with dynamic variables.", "display_name": "Prompt", "documentation": "", "edited": false, - "field_order": [ - "template" - ], + "field_order": ["template"], "frozen": false, "icon": "prompts", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, "output_types": [], "outputs": [ { @@ -400,9 +388,7 @@ "method": "build_prompt", "name": "prompt", "selected": "Message", - "types": [ - "Message" - ], + "types": ["Message"], "value": "__UNDEFINED__" } ], @@ -425,7 +411,7 @@ "show": true, "title_case": false, "type": "code", - "value": "from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.inputs.inputs import DefaultPromptField\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(\n self,\n ) -> Message:\n prompt = await Message.from_template_and_variables(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def _update_template(self, frontend_node: dict):\n prompt_template = frontend_node[\"template\"][\"template\"][\"value\"]\n custom_fields = frontend_node[\"custom_fields\"]\n frontend_node_template = frontend_node[\"template\"]\n _ = process_prompt_template(\n template=prompt_template,\n name=\"template\",\n custom_fields=custom_fields,\n frontend_node_template=frontend_node_template,\n )\n return frontend_node\n\n def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"\n This function is called after the code validation is done.\n \"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n # Kept it duplicated for backwards compatibility\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab 
any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n\n def _get_fallback_input(self, **kwargs):\n return DefaultPromptField(**kwargs)\n" + "value": "from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.inputs.inputs import DefaultPromptField\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(self) -> Message:\n prompt = Message.from_template(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def _update_template(self, frontend_node: dict):\n prompt_template = frontend_node[\"template\"][\"template\"][\"value\"]\n custom_fields = frontend_node[\"custom_fields\"]\n frontend_node_template = frontend_node[\"template\"]\n _ = process_prompt_template(\n template=prompt_template,\n name=\"template\",\n custom_fields=custom_fields,\n frontend_node_template=frontend_node_template,\n )\n return frontend_node\n\n def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"This function is called after the code validation is done.\"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n # Kept it duplicated for backwards compatibility\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n\n def _get_fallback_input(self, **kwargs):\n return DefaultPromptField(**kwargs)\n" }, "instructions": { "advanced": false, @@ -435,10 +421,7 @@ "fileTypes": [], "file_path": "", "info": "", - "input_types": [ - "Message", - "Text" - ], + "input_types": ["Message", "Text"], "list": false, "load_from_db": false, "multiline": true, @@ -459,10 +442,7 @@ "fileTypes": [], "file_path": "", "info": "", - "input_types": [ - "Message", - "Text" - ], + "input_types": ["Message", "Text"], "list": false, "load_from_db": false, "multiline": true, @@ -496,29 +476,27 @@ "type": "Prompt" }, "dragging": false, - "height": 517, - "id": "Prompt-ABI8S", + "height": 433, + "id": "Prompt-WTVXx", "position": { - "x": 1368.0633591447076, - "y": 467.19448061224284 + "x": 1341.1018009526915, + "y": 456.4098573354365 }, "positionAbsolute": { - "x": 1368.0633591447076, - "y": 467.19448061224284 + "x": 1341.1018009526915, + "y": 456.4098573354365 }, "selected": false, "type": "genericNode", - "width": 384 + "width": 320 }, { "data": { "description": "Get text inputs from the Playground.", "display_name": "Instructions", - 
"id": "TextInput-jiXJB", + "id": "TextInput-PAceh", "node": { - "base_classes": [ - "Message" - ], + "base_classes": ["Message"], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -526,11 +504,12 @@ "display_name": "Instructions", "documentation": "", "edited": false, - "field_order": [ - "input_value" - ], + "field_order": ["input_value"], "frozen": false, "icon": "type", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, "output_types": [], "outputs": [ { @@ -539,9 +518,7 @@ "method": "text_response", "name": "text", "selected": "Message", - "types": [ - "Message" - ], + "types": ["Message"], "value": "__UNDEFINED__" } ], @@ -564,18 +541,18 @@ "show": true, "title_case": false, "type": "code", - "value": "from langflow.base.io.text import TextComponent\nfrom langflow.io import MessageTextInput, Output\nfrom langflow.schema.message import Message\n\n\nclass TextInputComponent(TextComponent):\n display_name = \"Text Input\"\n description = \"Get text inputs from the Playground.\"\n icon = \"type\"\n name = \"TextInput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Text to be passed as input.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Text\", name=\"text\", method=\"text_response\"),\n ]\n\n def text_response(self) -> Message:\n message = Message(\n text=self.input_value,\n )\n return message\n" + "value": "from langflow.base.io.text import TextComponent\nfrom langflow.io import MultilineInput, Output\nfrom langflow.schema.message import Message\n\n\nclass TextInputComponent(TextComponent):\n display_name = \"Text Input\"\n description = \"Get text inputs from the Playground.\"\n icon = \"type\"\n name = \"TextInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Text to be passed as input.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Text\", name=\"text\", method=\"text_response\"),\n ]\n\n def text_response(self) -> Message:\n return Message(\n text=self.input_value,\n )\n" }, "input_value": { + "_input_type": "MultilineInput", "advanced": false, "display_name": "Text", "dynamic": false, "info": "Text to be passed as input.", - "input_types": [ - "Message" - ], + "input_types": ["Message"], "list": false, "load_from_db": false, + "multiline": true, "name": "input_value", "placeholder": "", "required": false, @@ -591,29 +568,27 @@ "type": "TextInput" }, "dragging": false, - "height": 309, - "id": "TextInput-jiXJB", + "height": 234, + "id": "TextInput-PAceh", "position": { - "x": 743.7338453293725, - "y": 301.58775454952183 + "x": 955.8314364398983, + "y": 402.24423846638155 }, "positionAbsolute": { - "x": 743.7338453293725, - "y": 301.58775454952183 + "x": 955.8314364398983, + "y": 402.24423846638155 }, "selected": false, "type": "genericNode", - "width": 384 + "width": 320 }, { "data": { "description": "Display a chat message in the Playground.", "display_name": "Chat Output", - "id": "ChatOutput-uaX6T", + "id": "ChatOutput-uL64L", "node": { - "base_classes": [ - "Message" - ], + "base_classes": ["Message"], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -630,7 +605,10 @@ "data_template" ], "frozen": false, - "icon": "ChatOutput", + "icon": "MessagesSquare", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, "output_types": [], "outputs": [ { @@ -639,15 +617,51 @@ "method": "message_response", "name": "message", "selected": "Message", - "types": [ - "Message" - ], + "types": ["Message"], "value": "__UNDEFINED__" } ], 
"pinned": false, "template": { "_type": "Component", + "background_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Background Color", + "dynamic": false, + "info": "The background color of the icon.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "background_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "chat_icon": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Icon", + "dynamic": false, + "info": "The icon of the message.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "chat_icon", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, "code": { "advanced": true, "dynamic": true, @@ -664,16 +678,14 @@ "show": true, "title_case": false, "type": "code", - "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER, MESSAGE_SENDER_AI\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n" + "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageInput, MessageTextInput, Output\nfrom langflow.schema.message import Message\nfrom langflow.schema.properties import Source\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.\",\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(\n display_name=\"Message\",\n name=\"message\",\n method=\"message_response\",\n ),\n ]\n\n def _build_source(self, _id: str | None, display_name: str | None, source: str | None) -> Source:\n source_dict = {}\n if _id:\n source_dict[\"id\"] = _id\n if display_name:\n source_dict[\"display_name\"] = display_name\n if source:\n source_dict[\"source\"] = source\n return Source(**source_dict)\n\n def message_response(self) -> Message:\n _source, _icon, _display_name, _source_id = self.get_properties_from_source_component()\n _background_color = self.background_color\n _text_color = self.text_color\n if self.chat_icon:\n _icon = self.chat_icon\n message = self.input_value if isinstance(self.input_value, Message) else Message(text=self.input_value)\n message.sender = self.sender\n message.sender_name = self.sender_name\n message.session_id = self.session_id\n message.flow_id = self.graph.flow_id if hasattr(self, \"graph\") else None\n message.properties.source = self._build_source(_source_id, _display_name, _source)\n message.properties.icon = _icon\n message.properties.background_color = _background_color\n message.properties.text_color = _text_color\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n" }, "data_template": { "advanced": true, "display_name": "Data Template", "dynamic": false, "info": "Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.", - "input_types": [ - "Message" - ], + "input_types": ["Message"], "list": false, "load_from_db": false, "name": "data_template", @@ -691,9 +703,7 @@ "display_name": "Text", "dynamic": false, "info": "Message to be passed as output.", - "input_types": [ - "Message" - ], + "input_types": ["Message"], "list": false, "load_from_db": false, "name": "input_value", @@ -712,10 +722,7 @@ "dynamic": false, "info": "Type of sender.", "name": "sender", - "options": [ - "Machine", - "User" - ], + "options": ["Machine", "User"], "placeholder": "", "required": false, "show": true, @@ -729,9 +736,7 @@ "display_name": "Sender Name", "dynamic": false, "info": "Name of the sender.", - "input_types": [ - "Message" - ], + "input_types": ["Message"], "list": false, "load_from_db": false, "name": "sender_name", @@ -749,9 +754,7 @@ "display_name": "Session ID", "dynamic": false, "info": "The session ID of the chat. 
If empty, the current session ID parameter will be used.", - "input_types": [ - "Message" - ], + "input_types": ["Message"], "list": false, "load_from_db": false, "name": "session_id", @@ -779,36 +782,52 @@ "trace_as_metadata": true, "type": "bool", "value": true + }, + "text_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Text Color", + "dynamic": false, + "info": "The text color of the name", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "text_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" } } }, "type": "ChatOutput" }, "dragging": false, - "height": 309, - "id": "ChatOutput-uaX6T", + "height": 234, + "id": "ChatOutput-uL64L", "position": { - "x": 2449.3489426461606, - "y": 571.2449700910389 + "x": 2113.228183852361, + "y": 594.6116538574528 }, "positionAbsolute": { - "x": 2449.3489426461606, - "y": 571.2449700910389 + "x": 2113.228183852361, + "y": 594.6116538574528 }, "selected": false, "type": "genericNode", - "width": 384 + "width": 320 }, { "data": { "description": "Generates text using OpenAI LLMs.", "display_name": "OpenAI", - "id": "OpenAIModel-JBO2p", + "id": "OpenAIModel-s4uQ3", "node": { - "base_classes": [ - "LanguageModel", - "Message" - ], + "base_classes": ["LanguageModel", "Message"], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -832,6 +851,9 @@ ], "frozen": false, "icon": "OpenAI", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, "output_types": [], "outputs": [ { @@ -839,10 +861,9 @@ "display_name": "Text", "method": "text_response", "name": "text_output", + "required_inputs": [], "selected": "Message", - "types": [ - "Message" - ], + "types": ["Message"], "value": "__UNDEFINED__" }, { @@ -850,10 +871,9 @@ "display_name": "Language Model", "method": "build_model", "name": "model_output", + "required_inputs": [], "selected": "LanguageModel", - "types": [ - "LanguageModel" - ], + "types": ["LanguageModel"], "value": "__UNDEFINED__" } ], @@ -866,9 +886,7 @@ "display_name": "OpenAI API Key", "dynamic": false, "info": "The OpenAI API Key to use for the OpenAI model.", - "input_types": [ - "Message" - ], + "input_types": ["Message"], "load_from_db": true, "name": "api_key", "password": true, @@ -895,16 +913,14 @@ "show": true, "title_case": false, "type": "code", - "value": "import operator\nfrom functools import reduce\n\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature if temperature is not None else 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output # type: ignore\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" + "value": "import operator\nfrom functools import reduce\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langflow.inputs import BoolInput, DictInput, DropdownInput, FloatInput, IntInput, SecretStrInput, StrInput\nfrom langflow.inputs.inputs import HandleInput\n\n\nclass 
OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = [\n *LCModelComponent._base_inputs,\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(\n name=\"model_kwargs\",\n display_name=\"Model Kwargs\",\n advanced=True,\n info=\"Additional keyword arguments to pass to the model.\",\n ),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. \"\n \"You must pass the word JSON in the prompt. \"\n \"If left blank, JSON mode will be disabled. [DEPRECATED]\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. \"\n \"Defaults to https://api.openai.com/v1. \"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n HandleInput(\n name=\"output_parser\",\n display_name=\"Output Parser\",\n info=\"The parser to use to parse the output of the model\",\n advanced=True,\n input_types=[\"OutputParser\"],\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n api_key = SecretStr(openai_api_key).get_secret_value() if openai_api_key else None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature if temperature is not None else 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\")\n else:\n output = output.bind(response_format={\"type\": \"json_object\"})\n\n return output\n\n def _get_exception_message(self, e: Exception):\n \"\"\"Get a message from an OpenAI exception.\n\n Args:\n e (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n try:\n from openai import BadRequestError\n except ImportError:\n return None\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\")\n if message:\n 
return message\n return None\n" }, "input_value": { "advanced": false, "display_name": "Input", "dynamic": false, "info": "", - "input_types": [ - "Message" - ], + "input_types": ["Message"], "list": false, "load_from_db": false, "name": "input_value", @@ -951,7 +967,7 @@ "advanced": true, "display_name": "Model Kwargs", "dynamic": false, - "info": "", + "info": "Additional keyword arguments to pass to the model.", "list": false, "name": "model_kwargs", "placeholder": "", @@ -1001,11 +1017,28 @@ "type": "str", "value": "" }, + "output_parser": { + "_input_type": "HandleInput", + "advanced": true, + "display_name": "Output Parser", + "dynamic": false, + "info": "The parser to use to parse the output of the model", + "input_types": ["OutputParser"], + "list": false, + "name": "output_parser", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, "output_schema": { "advanced": true, "display_name": "Schema", "dynamic": false, - "info": "The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.", + "info": "The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled. [DEPRECATED]", "list": true, "name": "output_schema", "placeholder": "", @@ -1032,7 +1065,7 @@ "value": 1 }, "stream": { - "advanced": true, + "advanced": false, "display_name": "Stream", "dynamic": false, "info": "Stream the response from the model. Streaming works only in Chat.", @@ -1047,7 +1080,7 @@ "value": false }, "system_message": { - "advanced": true, + "advanced": false, "display_name": "System Message", "dynamic": false, "info": "System message to pass to the model.", @@ -1082,31 +1115,131 @@ "type": "OpenAIModel" }, "dragging": false, - "height": 623, - "id": "OpenAIModel-JBO2p", + "height": 495, + "id": "OpenAIModel-s4uQ3", "position": { - "x": 1950.3830456413473, - "y": 380.8161704718418 + "x": 1713.1213335516065, + "y": 456.3085334094866 }, "positionAbsolute": { - "x": 1950.3830456413473, - "y": 380.8161704718418 + "x": 1713.1213335516065, + "y": 456.3085334094866 }, "selected": false, "type": "genericNode", - "width": 384 + "width": 320 + }, + { + "data": { + "id": "note-5zVjK", + "node": { + "description": "## URL Component Setup\n\n**Purpose:**\nFetch and process content from the web to use as reference material for creating a blog post.\n\n**Instructions:**\n1. **Input URLs**: List the URLs of web pages whose content you want to fetch. Ensure the URLs start with `http://` or `https://`.\n2. 
**Select Output Format**:\n - **Text**: To extract plain text from the pages.\n - **Raw HTML**: To retrieve the raw HTML content for advanced uses.\n\n**Tips**:\n- Double-check URL formats to prevent any data fetching errors.\n- Use the '+' button to add multiple URLs as needed for comprehensive references.\n", + "display_name": "", + "documentation": "", + "template": { + "backgroundColor": "blue" + } + }, + "type": "note" + }, + "dragging": false, + "height": 329, + "id": "note-5zVjK", + "position": { + "x": 484.73635938598477, + "y": 153.29803159918163 + }, + "positionAbsolute": { + "x": 484.73635938598477, + "y": 153.29803159918163 + }, + "resizing": false, + "selected": false, + "style": { + "height": 329, + "width": 414 + }, + "type": "noteNode", + "width": 414 + }, + { + "data": { + "id": "note-TrHJa", + "node": { + "description": "# Blog Writing Flow Overview\n\n**Workflow Description:**\nThis flow assists in creating a blog post by using content fetched from URLs and user-provided instructions. It combines external references and user inputs to generate coherent and context-rich text.\n\n**Components**:\n1. **URL Component**: Fetches reference content from specified web pages.\n2. **Parse Data**: Converts the fetched content into a text format.\n3. **Text Input**: Accepts user-specific instructions for the blog post.\n4. **Prompt with Variables**: Merges references and instructions into a dynamic writing prompt.\n5. **OpenAI Model**: Generates the blog post using an AI language model.\n6. **Chat Output**: Displays the final blog text for user review and further refinement.\n\n**Steps to Execute**:\n1. Enter the relevant URLs and specify the output format in the **URL Component**.\n2. Provide detailed writing **Instructions** for AI to follow.\n3. Run the flow to generate the blog and view the result in **Chat Output**.\n\n**Benefits**:\n- Simplifies blog creation by using AI to structure and write content.\n- Incorporates comprehensive reference material to enhance post depth and accuracy.", + "display_name": "", + "documentation": "", + "template": {} + }, + "type": "note" + }, + "dragging": false, + "height": 509, + "id": "note-TrHJa", + "position": { + "x": -78.41970365609802, + "y": 405.04114164010207 + }, + "positionAbsolute": { + "x": -78.41970365609802, + "y": 405.04114164010207 + }, + "resizing": false, + "selected": false, + "style": { + "height": 509, + "width": 562 + }, + "type": "noteNode", + "width": 562 + }, + { + "data": { + "id": "note-b1YaN", + "node": { + "description": "## Get Your OpenAI API Key\n**Steps**:\n1. **Visit** [OpenAI's API Key Page](https://platform.openai.com/api-keys).\n\n2. **Log In/Sign Up**:\n - Log in or create a new OpenAI account.\n\n3. **Generate API Key**:\n - Click \"Create New Secret Key\" to obtain your key.\n\n4. **Store Your Key Securely**:\n - Note it down as it will only display once.\n\n5. 
**Enter API Key**:\n - Input your key in the OpenAI API Key field within the component setup.\n\nKeep your key safe and manage it responsibly!", + "display_name": "", + "documentation": "", + "template": { + "backgroundColor": "rose" + } + }, + "type": "note" + }, + "dragging": false, + "height": 324, + "id": "note-b1YaN", + "position": { + "x": 1703.974189852056, + "y": 125.15366878585462 + }, + "positionAbsolute": { + "x": 1703.974189852056, + "y": 125.15366878585462 + }, + "resizing": false, + "selected": false, + "style": { + "height": 324, + "width": 343 + }, + "type": "noteNode", + "width": 343 } ], "viewport": { - "x": -52.959712994147594, - "y": 41.95510708899229, - "zoom": 0.5873729194514925 + "x": 94.80835919355627, + "y": 105.45081997045315, + "zoom": 0.5284811262210728 } }, - "description": "This flow can be used to create a blog post following instructions from the user, using two other blogs as reference.", + "description": "Auto-generate a customized blog post from instructions and referenced articles.", "endpoint_name": null, - "id": "6b576678-66cd-4d6e-ab40-af1104f02c37", + "icon": "NotebookPen", + "id": "8b12aa0f-8b59-4806-a01f-5e545b5b1688", + "gradient": "4", "is_component": false, - "last_tested_version": "1.0.9", - "name": "Blog Writer" -} \ No newline at end of file + "last_tested_version": "1.0.19.post2", + "name": "Blog Writer", + "tags": ["chatbots", "content-generation"] +} diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Complex Agent.json b/src/backend/base/langflow/initial_setup/starter_projects/Complex Agent.json deleted file mode 100644 index f946f5b8ac48..000000000000 --- a/src/backend/base/langflow/initial_setup/starter_projects/Complex Agent.json +++ /dev/null @@ -1,4183 +0,0 @@ -{ - "data": { - "edges": [ - { - "className": "", - "data": { - "sourceHandle": { - "dataType": "HierarchicalCrewComponent", - "id": "HierarchicalCrewComponent-EfNrX", - "name": "output", - "output_types": [ - "Message" - ] - }, - "targetHandle": { - "fieldName": "input_value", - "id": "ChatOutput-ZEUNq", - "inputTypes": [ - "Message" - ], - "type": "str" - } - }, - "id": "reactflow__edge-HierarchicalCrewComponent-EfNrX{œdataTypeœ:œHierarchicalCrewComponentœ,œidœ:œHierarchicalCrewComponent-EfNrXœ,œnameœ:œoutputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-ZEUNq{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-ZEUNqœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", - "selected": false, - "source": "HierarchicalCrewComponent-EfNrX", - "sourceHandle": "{œdataTypeœ: œHierarchicalCrewComponentœ, œidœ: œHierarchicalCrewComponent-EfNrXœ, œnameœ: œoutputœ, œoutput_typesœ: [œMessageœ]}", - "target": "ChatOutput-ZEUNq", - "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-ZEUNqœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" - }, - { - "className": "", - "data": { - "sourceHandle": { - "dataType": "HierarchicalTaskComponent", - "id": "HierarchicalTaskComponent-yWaMT", - "name": "task_output", - "output_types": [ - "HierarchicalTask" - ] - }, - "targetHandle": { - "fieldName": "tasks", - "id": "HierarchicalCrewComponent-EfNrX", - "inputTypes": [ - "HierarchicalTask" - ], - "type": "other" - } - }, - "id": "reactflow__edge-HierarchicalTaskComponent-yWaMT{œdataTypeœ:œHierarchicalTaskComponentœ,œidœ:œHierarchicalTaskComponent-yWaMTœ,œnameœ:œtask_outputœ,œoutput_typesœ:[œHierarchicalTaskœ]}-HierarchicalCrewComponent-EfNrX{œfieldNameœ:œtasksœ,œidœ:œHierarchicalCrewComponent-EfNrXœ,œinputTypesœ:[œHierarchicalTaskœ],œtypeœ:œotherœ}", - "selected": false, - "source": 
"HierarchicalTaskComponent-yWaMT", - "sourceHandle": "{œdataTypeœ: œHierarchicalTaskComponentœ, œidœ: œHierarchicalTaskComponent-yWaMTœ, œnameœ: œtask_outputœ, œoutput_typesœ: [œHierarchicalTaskœ]}", - "target": "HierarchicalCrewComponent-EfNrX", - "targetHandle": "{œfieldNameœ: œtasksœ, œidœ: œHierarchicalCrewComponent-EfNrXœ, œinputTypesœ: [œHierarchicalTaskœ], œtypeœ: œotherœ}" - }, - { - "className": "", - "data": { - "sourceHandle": { - "dataType": "CrewAIAgentComponent", - "id": "CrewAIAgentComponent-Qm4en", - "name": "output", - "output_types": [ - "Agent" - ] - }, - "targetHandle": { - "fieldName": "agents", - "id": "HierarchicalCrewComponent-EfNrX", - "inputTypes": [ - "Agent" - ], - "type": "other" - } - }, - "id": "reactflow__edge-CrewAIAgentComponent-Qm4en{œdataTypeœ:œCrewAIAgentComponentœ,œidœ:œCrewAIAgentComponent-Qm4enœ,œnameœ:œoutputœ,œoutput_typesœ:[œAgentœ]}-HierarchicalCrewComponent-EfNrX{œfieldNameœ:œagentsœ,œidœ:œHierarchicalCrewComponent-EfNrXœ,œinputTypesœ:[œAgentœ],œtypeœ:œotherœ}", - "selected": false, - "source": "CrewAIAgentComponent-Qm4en", - "sourceHandle": "{œdataTypeœ: œCrewAIAgentComponentœ, œidœ: œCrewAIAgentComponent-Qm4enœ, œnameœ: œoutputœ, œoutput_typesœ: [œAgentœ]}", - "target": "HierarchicalCrewComponent-EfNrX", - "targetHandle": "{œfieldNameœ: œagentsœ, œidœ: œHierarchicalCrewComponent-EfNrXœ, œinputTypesœ: [œAgentœ], œtypeœ: œotherœ}" - }, - { - "className": "", - "data": { - "sourceHandle": { - "dataType": "OpenAIModel", - "id": "OpenAIModel-S9ZnF", - "name": "model_output", - "output_types": [ - "LanguageModel" - ] - }, - "targetHandle": { - "fieldName": "llm", - "id": "CrewAIAgentComponent-Qm4en", - "inputTypes": [ - "LanguageModel" - ], - "type": "other" - } - }, - "id": "reactflow__edge-OpenAIModel-S9ZnF{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-S9ZnFœ,œnameœ:œmodel_outputœ,œoutput_typesœ:[œLanguageModelœ]}-CrewAIAgentComponent-Qm4en{œfieldNameœ:œllmœ,œidœ:œCrewAIAgentComponent-Qm4enœ,œinputTypesœ:[œLanguageModelœ],œtypeœ:œotherœ}", - "selected": false, - "source": "OpenAIModel-S9ZnF", - "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-S9ZnFœ, œnameœ: œmodel_outputœ, œoutput_typesœ: [œLanguageModelœ]}", - "target": "CrewAIAgentComponent-Qm4en", - "targetHandle": "{œfieldNameœ: œllmœ, œidœ: œCrewAIAgentComponent-Qm4enœ, œinputTypesœ: [œLanguageModelœ], œtypeœ: œotherœ}" - }, - { - "className": "", - "data": { - "sourceHandle": { - "dataType": "CrewAIAgentComponent", - "id": "CrewAIAgentComponent-Q2BtZ", - "name": "output", - "output_types": [ - "Agent" - ] - }, - "targetHandle": { - "fieldName": "manager_agent", - "id": "HierarchicalCrewComponent-EfNrX", - "inputTypes": [ - "Agent" - ], - "type": "other" - } - }, - "id": "reactflow__edge-CrewAIAgentComponent-Q2BtZ{œdataTypeœ:œCrewAIAgentComponentœ,œidœ:œCrewAIAgentComponent-Q2BtZœ,œnameœ:œoutputœ,œoutput_typesœ:[œAgentœ]}-HierarchicalCrewComponent-EfNrX{œfieldNameœ:œmanager_agentœ,œidœ:œHierarchicalCrewComponent-EfNrXœ,œinputTypesœ:[œAgentœ],œtypeœ:œotherœ}", - "selected": false, - "source": "CrewAIAgentComponent-Q2BtZ", - "sourceHandle": "{œdataTypeœ: œCrewAIAgentComponentœ, œidœ: œCrewAIAgentComponent-Q2BtZœ, œnameœ: œoutputœ, œoutput_typesœ: [œAgentœ]}", - "target": "HierarchicalCrewComponent-EfNrX", - "targetHandle": "{œfieldNameœ: œmanager_agentœ, œidœ: œHierarchicalCrewComponent-EfNrXœ, œinputTypesœ: [œAgentœ], œtypeœ: œotherœ}" - }, - { - "className": "", - "data": { - "sourceHandle": { - "dataType": "OpenAIModel", - "id": "OpenAIModel-2V6yS", - "name": "model_output", - 
"output_types": [ - "LanguageModel" - ] - }, - "targetHandle": { - "fieldName": "llm", - "id": "CrewAIAgentComponent-Q2BtZ", - "inputTypes": [ - "LanguageModel" - ], - "type": "other" - } - }, - "id": "reactflow__edge-OpenAIModel-2V6yS{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-2V6ySœ,œnameœ:œmodel_outputœ,œoutput_typesœ:[œLanguageModelœ]}-CrewAIAgentComponent-Q2BtZ{œfieldNameœ:œllmœ,œidœ:œCrewAIAgentComponent-Q2BtZœ,œinputTypesœ:[œLanguageModelœ],œtypeœ:œotherœ}", - "selected": false, - "source": "OpenAIModel-2V6yS", - "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-2V6ySœ, œnameœ: œmodel_outputœ, œoutput_typesœ: [œLanguageModelœ]}", - "target": "CrewAIAgentComponent-Q2BtZ", - "targetHandle": "{œfieldNameœ: œllmœ, œidœ: œCrewAIAgentComponent-Q2BtZœ, œinputTypesœ: [œLanguageModelœ], œtypeœ: œotherœ}" - }, - { - "className": "", - "data": { - "sourceHandle": { - "dataType": "Prompt", - "id": "Prompt-AVtIX", - "name": "prompt", - "output_types": [ - "Message" - ] - }, - "targetHandle": { - "fieldName": "task_description", - "id": "HierarchicalTaskComponent-yWaMT", - "inputTypes": [ - "Message" - ], - "type": "str" - } - }, - "id": "reactflow__edge-Prompt-AVtIX{œdataTypeœ:œPromptœ,œidœ:œPrompt-AVtIXœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-HierarchicalTaskComponent-yWaMT{œfieldNameœ:œtask_descriptionœ,œidœ:œHierarchicalTaskComponent-yWaMTœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", - "selected": false, - "source": "Prompt-AVtIX", - "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-AVtIXœ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", - "target": "HierarchicalTaskComponent-yWaMT", - "targetHandle": "{œfieldNameœ: œtask_descriptionœ, œidœ: œHierarchicalTaskComponent-yWaMTœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" - }, - { - "className": "", - "data": { - "sourceHandle": { - "dataType": "ChatInput", - "id": "ChatInput-nPZcc", - "name": "message", - "output_types": [ - "Message" - ] - }, - "targetHandle": { - "fieldName": "query", - "id": "Prompt-AVtIX", - "inputTypes": [ - "Message", - "Text" - ], - "type": "str" - } - }, - "id": "reactflow__edge-ChatInput-nPZcc{œdataTypeœ:œChatInputœ,œidœ:œChatInput-nPZccœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Prompt-AVtIX{œfieldNameœ:œqueryœ,œidœ:œPrompt-AVtIXœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", - "selected": false, - "source": "ChatInput-nPZcc", - "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-nPZccœ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}", - "target": "Prompt-AVtIX", - "targetHandle": "{œfieldNameœ: œqueryœ, œidœ: œPrompt-AVtIXœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" - }, - { - "className": "", - "data": { - "sourceHandle": { - "dataType": "YFinanceTool", - "id": "YFinanceTool-tWOoS", - "name": "tool", - "output_types": [ - "Tool" - ] - }, - "targetHandle": { - "fieldName": "tools", - "id": "CrewAIAgentComponent-Qm4en", - "inputTypes": [ - "Tool" - ], - "type": "other" - } - }, - "id": "reactflow__edge-YFinanceTool-tWOoS{œdataTypeœ:œYFinanceToolœ,œidœ:œYFinanceTool-tWOoSœ,œnameœ:œtoolœ,œoutput_typesœ:[œToolœ]}-CrewAIAgentComponent-Qm4en{œfieldNameœ:œtoolsœ,œidœ:œCrewAIAgentComponent-Qm4enœ,œinputTypesœ:[œToolœ],œtypeœ:œotherœ}", - "selected": false, - "source": "YFinanceTool-tWOoS", - "sourceHandle": "{œdataTypeœ: œYFinanceToolœ, œidœ: œYFinanceTool-tWOoSœ, œnameœ: œtoolœ, œoutput_typesœ: [œToolœ]}", - "target": "CrewAIAgentComponent-Qm4en", - "targetHandle": "{œfieldNameœ: œtoolsœ, œidœ: œCrewAIAgentComponent-Qm4enœ, œinputTypesœ: [œToolœ], œtypeœ: œotherœ}" - }, - { - 
"className": "", - "data": { - "sourceHandle": { - "dataType": "Prompt", - "id": "Prompt-kSwv4", - "name": "prompt", - "output_types": [ - "Message" - ] - }, - "targetHandle": { - "fieldName": "input_value", - "id": "OpenAIModel-bjGnq", - "inputTypes": [ - "Message" - ], - "type": "str" - } - }, - "id": "reactflow__edge-Prompt-kSwv4{œdataTypeœ:œPromptœ,œidœ:œPrompt-kSwv4œ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-bjGnq{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-bjGnqœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", - "selected": false, - "source": "Prompt-kSwv4", - "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-kSwv4œ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", - "target": "OpenAIModel-bjGnq", - "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œOpenAIModel-bjGnqœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" - }, - { - "className": "", - "data": { - "sourceHandle": { - "dataType": "OpenAIModel", - "id": "OpenAIModel-bjGnq", - "name": "text_output", - "output_types": [ - "Message" - ] - }, - "targetHandle": { - "fieldName": "role", - "id": "CrewAIAgentComponent-Qm4en", - "inputTypes": [ - "Message" - ], - "type": "str" - } - }, - "id": "reactflow__edge-OpenAIModel-bjGnq{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-bjGnqœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-CrewAIAgentComponent-Qm4en{œfieldNameœ:œroleœ,œidœ:œCrewAIAgentComponent-Qm4enœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", - "selected": false, - "source": "OpenAIModel-bjGnq", - "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-bjGnqœ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}", - "target": "CrewAIAgentComponent-Qm4en", - "targetHandle": "{œfieldNameœ: œroleœ, œidœ: œCrewAIAgentComponent-Qm4enœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" - }, - { - "className": "", - "data": { - "sourceHandle": { - "dataType": "ChatInput", - "id": "ChatInput-nPZcc", - "name": "message", - "output_types": [ - "Message" - ] - }, - "targetHandle": { - "fieldName": "query", - "id": "Prompt-S0Qt3", - "inputTypes": [ - "Message", - "Text" - ], - "type": "str" - } - }, - "id": "reactflow__edge-ChatInput-nPZcc{œdataTypeœ:œChatInputœ,œidœ:œChatInput-nPZccœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Prompt-S0Qt3{œfieldNameœ:œqueryœ,œidœ:œPrompt-S0Qt3œ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", - "selected": false, - "source": "ChatInput-nPZcc", - "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-nPZccœ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}", - "target": "Prompt-S0Qt3", - "targetHandle": "{œfieldNameœ: œqueryœ, œidœ: œPrompt-S0Qt3œ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" - }, - { - "className": "", - "data": { - "sourceHandle": { - "dataType": "ChatInput", - "id": "ChatInput-nPZcc", - "name": "message", - "output_types": [ - "Message" - ] - }, - "targetHandle": { - "fieldName": "query", - "id": "Prompt-kSwv4", - "inputTypes": [ - "Message", - "Text" - ], - "type": "str" - } - }, - "id": "reactflow__edge-ChatInput-nPZcc{œdataTypeœ:œChatInputœ,œidœ:œChatInput-nPZccœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Prompt-kSwv4{œfieldNameœ:œqueryœ,œidœ:œPrompt-kSwv4œ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", - "selected": false, - "source": "ChatInput-nPZcc", - "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-nPZccœ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}", - "target": "Prompt-kSwv4", - "targetHandle": "{œfieldNameœ: œqueryœ, œidœ: œPrompt-kSwv4œ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" - }, - { - "className": "", - "data": { - 
"sourceHandle": { - "dataType": "Prompt", - "id": "Prompt-S0Qt3", - "name": "prompt", - "output_types": [ - "Message" - ] - }, - "targetHandle": { - "fieldName": "input_value", - "id": "OpenAIModel-GAfpF", - "inputTypes": [ - "Message" - ], - "type": "str" - } - }, - "id": "reactflow__edge-Prompt-S0Qt3{œdataTypeœ:œPromptœ,œidœ:œPrompt-S0Qt3œ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-GAfpF{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-GAfpFœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", - "selected": false, - "source": "Prompt-S0Qt3", - "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-S0Qt3œ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", - "target": "OpenAIModel-GAfpF", - "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œOpenAIModel-GAfpFœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" - }, - { - "className": "", - "data": { - "sourceHandle": { - "dataType": "OpenAIModel", - "id": "OpenAIModel-bjGnq", - "name": "text_output", - "output_types": [ - "Message" - ] - }, - "targetHandle": { - "fieldName": "role", - "id": "Prompt-S0Qt3", - "inputTypes": [ - "Message", - "Text" - ], - "type": "str" - } - }, - "id": "reactflow__edge-OpenAIModel-bjGnq{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-bjGnqœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-Prompt-S0Qt3{œfieldNameœ:œroleœ,œidœ:œPrompt-S0Qt3œ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", - "selected": false, - "source": "OpenAIModel-bjGnq", - "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-bjGnqœ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}", - "target": "Prompt-S0Qt3", - "targetHandle": "{œfieldNameœ: œroleœ, œidœ: œPrompt-S0Qt3œ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" - }, - { - "className": "", - "data": { - "sourceHandle": { - "dataType": "OpenAIModel", - "id": "OpenAIModel-GAfpF", - "name": "text_output", - "output_types": [ - "Message" - ] - }, - "targetHandle": { - "fieldName": "goal", - "id": "CrewAIAgentComponent-Qm4en", - "inputTypes": [ - "Message" - ], - "type": "str" - } - }, - "id": "reactflow__edge-OpenAIModel-GAfpF{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-GAfpFœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-CrewAIAgentComponent-Qm4en{œfieldNameœ:œgoalœ,œidœ:œCrewAIAgentComponent-Qm4enœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", - "selected": false, - "source": "OpenAIModel-GAfpF", - "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-GAfpFœ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}", - "target": "CrewAIAgentComponent-Qm4en", - "targetHandle": "{œfieldNameœ: œgoalœ, œidœ: œCrewAIAgentComponent-Qm4enœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" - }, - { - "className": "", - "data": { - "sourceHandle": { - "dataType": "Prompt", - "id": "Prompt-a0x5s", - "name": "prompt", - "output_types": [ - "Message" - ] - }, - "targetHandle": { - "fieldName": "input_value", - "id": "OpenAIModel-EnsCt", - "inputTypes": [ - "Message" - ], - "type": "str" - } - }, - "id": "reactflow__edge-Prompt-a0x5s{œdataTypeœ:œPromptœ,œidœ:œPrompt-a0x5sœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-EnsCt{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-EnsCtœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", - "selected": false, - "source": "Prompt-a0x5s", - "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-a0x5sœ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", - "target": "OpenAIModel-EnsCt", - "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œOpenAIModel-EnsCtœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" - }, - { - "className": "", - "data": { - "sourceHandle": { - 
"dataType": "OpenAIModel", - "id": "OpenAIModel-GAfpF", - "name": "text_output", - "output_types": [ - "Message" - ] - }, - "targetHandle": { - "fieldName": "goal", - "id": "Prompt-a0x5s", - "inputTypes": [ - "Message", - "Text" - ], - "type": "str" - } - }, - "id": "reactflow__edge-OpenAIModel-GAfpF{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-GAfpFœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-Prompt-a0x5s{œfieldNameœ:œgoalœ,œidœ:œPrompt-a0x5sœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", - "source": "OpenAIModel-GAfpF", - "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-GAfpFœ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}", - "target": "Prompt-a0x5s", - "targetHandle": "{œfieldNameœ: œgoalœ, œidœ: œPrompt-a0x5sœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" - }, - { - "className": "", - "data": { - "sourceHandle": { - "dataType": "OpenAIModel", - "id": "OpenAIModel-bjGnq", - "name": "text_output", - "output_types": [ - "Message" - ] - }, - "targetHandle": { - "fieldName": "role", - "id": "Prompt-a0x5s", - "inputTypes": [ - "Message", - "Text" - ], - "type": "str" - } - }, - "id": "reactflow__edge-OpenAIModel-bjGnq{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-bjGnqœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-Prompt-a0x5s{œfieldNameœ:œroleœ,œidœ:œPrompt-a0x5sœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", - "source": "OpenAIModel-bjGnq", - "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-bjGnqœ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}", - "target": "Prompt-a0x5s", - "targetHandle": "{œfieldNameœ: œroleœ, œidœ: œPrompt-a0x5sœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" - }, - { - "className": "", - "data": { - "sourceHandle": { - "dataType": "ChatInput", - "id": "ChatInput-nPZcc", - "name": "message", - "output_types": [ - "Message" - ] - }, - "targetHandle": { - "fieldName": "query", - "id": "Prompt-a0x5s", - "inputTypes": [ - "Message", - "Text" - ], - "type": "str" - } - }, - "id": "reactflow__edge-ChatInput-nPZcc{œdataTypeœ:œChatInputœ,œidœ:œChatInput-nPZccœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Prompt-a0x5s{œfieldNameœ:œqueryœ,œidœ:œPrompt-a0x5sœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", - "source": "ChatInput-nPZcc", - "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-nPZccœ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}", - "target": "Prompt-a0x5s", - "targetHandle": "{œfieldNameœ: œqueryœ, œidœ: œPrompt-a0x5sœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" - }, - { - "className": "", - "data": { - "sourceHandle": { - "dataType": "OpenAIModel", - "id": "OpenAIModel-EnsCt", - "name": "text_output", - "output_types": [ - "Message" - ] - }, - "targetHandle": { - "fieldName": "backstory", - "id": "CrewAIAgentComponent-Qm4en", - "inputTypes": [ - "Message" - ], - "type": "str" - } - }, - "id": "reactflow__edge-OpenAIModel-EnsCt{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-EnsCtœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-CrewAIAgentComponent-Qm4en{œfieldNameœ:œbackstoryœ,œidœ:œCrewAIAgentComponent-Qm4enœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", - "source": "OpenAIModel-EnsCt", - "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-EnsCtœ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}", - "target": "CrewAIAgentComponent-Qm4en", - "targetHandle": "{œfieldNameœ: œbackstoryœ, œidœ: œCrewAIAgentComponent-Qm4enœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" - }, - { - "className": "", - "data": { - "sourceHandle": { - "dataType": "SearchAPI", - "id": "SearchAPI-nLBPg", - 
"name": "api_build_tool", - "output_types": [ - "Tool" - ] - }, - "targetHandle": { - "fieldName": "tools", - "id": "CrewAIAgentComponent-Qm4en", - "inputTypes": [ - "Tool" - ], - "type": "other" - } - }, - "id": "reactflow__edge-SearchAPI-nLBPg{œdataTypeœ:œSearchAPIœ,œidœ:œSearchAPI-nLBPgœ,œnameœ:œapi_build_toolœ,œoutput_typesœ:[œToolœ]}-CrewAIAgentComponent-Qm4en{œfieldNameœ:œtoolsœ,œidœ:œCrewAIAgentComponent-Qm4enœ,œinputTypesœ:[œToolœ],œtypeœ:œotherœ}", - "source": "SearchAPI-nLBPg", - "sourceHandle": "{œdataTypeœ: œSearchAPIœ, œidœ: œSearchAPI-nLBPgœ, œnameœ: œapi_build_toolœ, œoutput_typesœ: [œToolœ]}", - "target": "CrewAIAgentComponent-Qm4en", - "targetHandle": "{œfieldNameœ: œtoolsœ, œidœ: œCrewAIAgentComponent-Qm4enœ, œinputTypesœ: [œToolœ], œtypeœ: œotherœ}" - } - ], - "nodes": [ - { - "data": { - "description": "Represents a group of agents, defining how they should collaborate and the tasks they should perform.", - "display_name": "Hierarchical Crew", - "id": "HierarchicalCrewComponent-EfNrX", - "node": { - "base_classes": [ - "Message" - ], - "beta": false, - "conditional_paths": [], - "custom_fields": {}, - "description": "Represents a group of agents, defining how they should collaborate and the tasks they should perform.", - "display_name": "Hierarchical Crew", - "documentation": "", - "edited": false, - "field_order": [ - "verbose", - "memory", - "use_cache", - "max_rpm", - "share_crew", - "function_calling_llm", - "agents", - "tasks", - "manager_llm", - "manager_agent" - ], - "frozen": false, - "icon": "CrewAI", - "output_types": [], - "outputs": [ - { - "cache": true, - "display_name": "Output", - "method": "build_output", - "name": "output", - "selected": "Message", - "types": [ - "Message" - ], - "value": "__UNDEFINED__" - } - ], - "pinned": false, - "template": { - "_type": "Component", - "agents": { - "advanced": false, - "display_name": "Agents", - "dynamic": false, - "info": "", - "input_types": [ - "Agent" - ], - "list": true, - "name": "agents", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "other", - "value": "" - }, - "code": { - "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", - "info": "", - "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, - "placeholder": "", - "required": true, - "show": true, - "title_case": false, - "type": "code", - "value": "from crewai import Crew, Process # type: ignore\n\nfrom langflow.base.agents.crewai.crew import BaseCrewComponent\nfrom langflow.io import HandleInput\n\n\nclass HierarchicalCrewComponent(BaseCrewComponent):\n display_name: str = \"Hierarchical Crew\"\n description: str = (\n \"Represents a group of agents, defining how they should collaborate and the tasks they should perform.\"\n )\n documentation: str = \"https://docs.crewai.com/how-to/Hierarchical/\"\n icon = \"CrewAI\"\n\n inputs = BaseCrewComponent._base_inputs + [\n HandleInput(name=\"agents\", display_name=\"Agents\", input_types=[\"Agent\"], is_list=True),\n HandleInput(name=\"tasks\", display_name=\"Tasks\", input_types=[\"HierarchicalTask\"], is_list=True),\n HandleInput(name=\"manager_llm\", display_name=\"Manager LLM\", input_types=[\"LanguageModel\"], required=False),\n HandleInput(name=\"manager_agent\", display_name=\"Manager Agent\", input_types=[\"Agent\"], required=False),\n ]\n\n def build_crew(self) -> Crew:\n tasks, agents = self.get_tasks_and_agents()\n crew = Crew(\n agents=agents,\n 
tasks=tasks,\n process=Process.hierarchical,\n verbose=self.verbose,\n memory=self.memory,\n cache=self.use_cache,\n max_rpm=self.max_rpm,\n share_crew=self.share_crew,\n function_calling_llm=self.function_calling_llm,\n manager_agent=self.manager_agent,\n manager_llm=self.manager_llm,\n step_callback=self.get_step_callback(),\n task_callback=self.get_task_callback(),\n )\n return crew\n" - }, - "function_calling_llm": { - "advanced": true, - "display_name": "Function Calling LLM", - "dynamic": false, - "info": "Turns the ReAct CrewAI agent into a function-calling agent", - "input_types": [ - "LanguageModel" - ], - "list": false, - "name": "function_calling_llm", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "other", - "value": "" - }, - "manager_agent": { - "advanced": false, - "display_name": "Manager Agent", - "dynamic": false, - "info": "", - "input_types": [ - "Agent" - ], - "list": false, - "name": "manager_agent", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "other", - "value": "" - }, - "manager_llm": { - "advanced": false, - "display_name": "Manager LLM", - "dynamic": false, - "info": "", - "input_types": [ - "LanguageModel" - ], - "list": false, - "name": "manager_llm", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "other", - "value": "" - }, - "max_rpm": { - "advanced": true, - "display_name": "Max RPM", - "dynamic": false, - "info": "", - "list": false, - "name": "max_rpm", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "int", - "value": 100 - }, - "memory": { - "advanced": true, - "display_name": "Memory", - "dynamic": false, - "info": "", - "list": false, - "name": "memory", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": false - }, - "share_crew": { - "advanced": true, - "display_name": "Share Crew", - "dynamic": false, - "info": "", - "list": false, - "name": "share_crew", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": false - }, - "tasks": { - "advanced": false, - "display_name": "Tasks", - "dynamic": false, - "info": "", - "input_types": [ - "HierarchicalTask" - ], - "list": true, - "name": "tasks", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "other", - "value": "" - }, - "use_cache": { - "advanced": true, - "display_name": "Cache", - "dynamic": false, - "info": "", - "list": false, - "name": "use_cache", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": true - }, - "verbose": { - "advanced": true, - "display_name": "Verbose", - "dynamic": false, - "info": "", - "list": false, - "name": "verbose", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "int", - "value": 0 - } - } - }, - "type": "HierarchicalCrewComponent" - }, - "height": 459, - "id": "HierarchicalCrewComponent-EfNrX", - "position": { - "x": 2444.845721347115, - "y": 1410.1850661630874 - }, - "selected": false, - "type": "genericNode", - "width": 384 - }, - { - "data": { - "id": "OpenAIModel-S9ZnF", - "node": { 
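The deleted Complex Agent flow carried its CrewAI crew builder as an escaped string in the `code` field above. Decoded (verbatim, with indentation restored), it reads:

```python
from crewai import Crew, Process  # type: ignore

from langflow.base.agents.crewai.crew import BaseCrewComponent
from langflow.io import HandleInput


class HierarchicalCrewComponent(BaseCrewComponent):
    display_name: str = "Hierarchical Crew"
    description: str = (
        "Represents a group of agents, defining how they should collaborate and the tasks they should perform."
    )
    documentation: str = "https://docs.crewai.com/how-to/Hierarchical/"
    icon = "CrewAI"

    inputs = BaseCrewComponent._base_inputs + [
        HandleInput(name="agents", display_name="Agents", input_types=["Agent"], is_list=True),
        HandleInput(name="tasks", display_name="Tasks", input_types=["HierarchicalTask"], is_list=True),
        HandleInput(name="manager_llm", display_name="Manager LLM", input_types=["LanguageModel"], required=False),
        HandleInput(name="manager_agent", display_name="Manager Agent", input_types=["Agent"], required=False),
    ]

    def build_crew(self) -> Crew:
        tasks, agents = self.get_tasks_and_agents()
        crew = Crew(
            agents=agents,
            tasks=tasks,
            process=Process.hierarchical,
            verbose=self.verbose,
            memory=self.memory,
            cache=self.use_cache,
            max_rpm=self.max_rpm,
            share_crew=self.share_crew,
            function_calling_llm=self.function_calling_llm,
            manager_agent=self.manager_agent,
            manager_llm=self.manager_llm,
            step_callback=self.get_step_callback(),
            task_callback=self.get_task_callback(),
        )
        return crew
```

CrewAI's hierarchical process expects a manager (`manager_llm` or `manager_agent`); in this flow the manager agent (`CrewAIAgentComponent-Q2BtZ`) was wired into the `manager_agent` handle, as the edges above show.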
- "base_classes": [ - "LanguageModel", - "Message" - ], - "beta": false, - "conditional_paths": [], - "custom_fields": {}, - "description": "Generates text using OpenAI LLMs.", - "display_name": "OpenAI", - "documentation": "", - "edited": false, - "field_order": [ - "input_value", - "max_tokens", - "model_kwargs", - "json_mode", - "output_schema", - "model_name", - "openai_api_base", - "openai_api_key", - "temperature", - "stream", - "system_message", - "seed" - ], - "frozen": false, - "icon": "OpenAI", - "output_types": [], - "outputs": [ - { - "cache": true, - "display_name": "Text", - "method": "text_response", - "name": "text_output", - "selected": "Message", - "types": [ - "Message" - ], - "value": "__UNDEFINED__" - }, - { - "cache": true, - "display_name": "Language Model", - "method": "build_model", - "name": "model_output", - "selected": "LanguageModel", - "types": [ - "LanguageModel" - ], - "value": "__UNDEFINED__" - } - ], - "pinned": false, - "template": { - "_type": "Component", - "api_key": { - "_input_type": "SecretStrInput", - "advanced": false, - "display_name": "OpenAI API Key", - "dynamic": false, - "info": "The OpenAI API Key to use for the OpenAI model.", - "input_types": [ - "Message" - ], - "load_from_db": true, - "name": "api_key", - "password": true, - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "type": "str", - "value": "OPENAI_API_KEY" - }, - "code": { - "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", - "info": "", - "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, - "placeholder": "", - "required": true, - "show": true, - "title_case": false, - "type": "code", - "value": "import operator\nfrom functools import reduce\n\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. 
You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature if temperature is not None else 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output # type: ignore\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" - }, - "input_value": { - "advanced": false, - "display_name": "Input", - "dynamic": false, - "info": "", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "name": "input_value", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "json_mode": { - "advanced": true, - "display_name": "JSON Mode", - "dynamic": false, - "info": "If True, it will output JSON regardless of passing a schema.", - "list": false, - "name": "json_mode", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": false - }, - "max_tokens": { - "advanced": true, - "display_name": "Max Tokens", - "dynamic": false, - "info": "The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.", - "list": false, - "name": "max_tokens", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "int", - "value": "" - }, - "model_kwargs": { - "advanced": true, - "display_name": "Model Kwargs", - "dynamic": false, - "info": "", - "list": false, - "name": "model_kwargs", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "type": "dict", - "value": {} - }, - "model_name": { - "advanced": false, - "display_name": "Model Name", - "dynamic": false, - "info": "", - "name": "model_name", - "options": [ - "gpt-4o-mini", - "gpt-4o", - "gpt-4-turbo", - "gpt-4-turbo-preview", - "gpt-4", - "gpt-3.5-turbo", - "gpt-3.5-turbo-0125" - ], - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "str", - "value": "gpt-3.5-turbo" - }, - "openai_api_base": { - "advanced": true, - "display_name": "OpenAI API Base", - "dynamic": false, - "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.", - "list": false, - "load_from_db": false, - "name": "openai_api_base", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "output_schema": { - "advanced": true, - "display_name": "Schema", - "dynamic": false, - "info": "The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.", - "list": true, - "name": "output_schema", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "type": "dict", - "value": {} - }, - "seed": { - "advanced": true, - "display_name": "Seed", - "dynamic": false, - "info": "The seed controls the reproducibility of the job.", - "list": false, - "name": "seed", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "int", - "value": 1 - }, - "stream": { - "advanced": true, - "display_name": "Stream", - "dynamic": false, - "info": "Stream the response from the model. 
Streaming works only in Chat.", - "list": false, - "name": "stream", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": false - }, - "system_message": { - "advanced": true, - "display_name": "System Message", - "dynamic": false, - "info": "System message to pass to the model.", - "list": false, - "load_from_db": false, - "name": "system_message", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "temperature": { - "advanced": false, - "display_name": "Temperature", - "dynamic": false, - "info": "", - "list": false, - "name": "temperature", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "float", - "value": 0.1 - } - } - }, - "type": "OpenAIModel" - }, - "dragging": false, - "height": 623, - "id": "OpenAIModel-S9ZnF", - "position": { - "x": 993.5222179419411, - "y": 2121.1120144471624 - }, - "positionAbsolute": { - "x": 993.5222179419411, - "y": 2121.1120144471624 - }, - "selected": false, - "type": "genericNode", - "width": 384 - }, - { - "data": { - "description": "Display a chat message in the Playground.", - "display_name": "Chat Output", - "id": "ChatOutput-ZEUNq", - "node": { - "base_classes": [ - "Message" - ], - "beta": false, - "conditional_paths": [], - "custom_fields": {}, - "description": "Display a chat message in the Playground.", - "display_name": "Chat Output", - "documentation": "", - "edited": false, - "field_order": [ - "input_value", - "should_store_message", - "sender", - "sender_name", - "session_id", - "data_template" - ], - "frozen": false, - "icon": "ChatOutput", - "output_types": [], - "outputs": [ - { - "cache": true, - "display_name": "Message", - "method": "message_response", - "name": "message", - "selected": "Message", - "types": [ - "Message" - ], - "value": "__UNDEFINED__" - } - ], - "pinned": false, - "template": { - "_type": "Component", - "code": { - "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", - "info": "", - "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, - "placeholder": "", - "required": true, - "show": true, - "title_case": false, - "type": "code", - "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER, MESSAGE_SENDER_AI\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n 
MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n" - }, - "data_template": { - "advanced": true, - "display_name": "Data Template", - "dynamic": false, - "info": "Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "name": "data_template", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "{text}" - }, - "input_value": { - "advanced": false, - "display_name": "Text", - "dynamic": false, - "info": "Message to be passed as output.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "name": "input_value", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "sender": { - "advanced": true, - "display_name": "Sender Type", - "dynamic": false, - "info": "Type of sender.", - "name": "sender", - "options": [ - "Machine", - "User" - ], - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "str", - "value": "Machine" - }, - "sender_name": { - "advanced": true, - "display_name": "Sender Name", - "dynamic": false, - "info": "Name of the sender.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "name": "sender_name", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "AI" - }, - "session_id": { - "advanced": true, - "display_name": "Session ID", - "dynamic": false, - "info": "The session ID of the chat. 
If empty, the current session ID parameter will be used.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "name": "session_id", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "should_store_message": { - "advanced": true, - "display_name": "Store Messages", - "dynamic": false, - "info": "Store the message in the history.", - "list": false, - "name": "should_store_message", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": true - } - } - }, - "type": "ChatOutput" - }, - "height": 309, - "id": "ChatOutput-ZEUNq", - "position": { - "x": 2947.7605810360546, - "y": 1557.6959660020289 - }, - "selected": false, - "type": "genericNode", - "width": 384 - }, - { - "data": { - "description": "Each task must have a description, an expected output and an agent responsible for execution.", - "display_name": "Hierarchical Task", - "id": "HierarchicalTaskComponent-yWaMT", - "node": { - "base_classes": [ - "HierarchicalTask" - ], - "beta": false, - "conditional_paths": [], - "custom_fields": {}, - "description": "Each task must have a description, an expected output and an agent responsible for execution.", - "display_name": "Hierarchical Task", - "documentation": "", - "edited": false, - "field_order": [ - "task_description", - "expected_output", - "tools" - ], - "frozen": false, - "icon": "CrewAI", - "output_types": [], - "outputs": [ - { - "cache": true, - "display_name": "Task", - "method": "build_task", - "name": "task_output", - "selected": "HierarchicalTask", - "types": [ - "HierarchicalTask" - ], - "value": "__UNDEFINED__" - } - ], - "pinned": false, - "template": { - "_type": "Component", - "code": { - "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", - "info": "", - "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, - "placeholder": "", - "required": true, - "show": true, - "title_case": false, - "type": "code", - "value": "from langflow.base.agents.crewai.tasks import HierarchicalTask\nfrom langflow.custom import Component\nfrom langflow.io import HandleInput, MultilineInput, Output\n\n\nclass HierarchicalTaskComponent(Component):\n display_name: str = \"Hierarchical Task\"\n description: str = \"Each task must have a description, an expected output and an agent responsible for execution.\"\n icon = \"CrewAI\"\n inputs = [\n MultilineInput(\n name=\"task_description\",\n display_name=\"Description\",\n info=\"Descriptive text detailing task's purpose and execution.\",\n ),\n MultilineInput(\n name=\"expected_output\",\n display_name=\"Expected Output\",\n info=\"Clear definition of expected task outcome.\",\n ),\n HandleInput(\n name=\"tools\",\n display_name=\"Tools\",\n input_types=[\"Tool\"],\n is_list=True,\n info=\"List of tools/resources limited for task execution. 
Uses the Agent tools by default.\",\n required=False,\n advanced=True,\n ),\n ]\n\n outputs = [\n Output(display_name=\"Task\", name=\"task_output\", method=\"build_task\"),\n ]\n\n def build_task(self) -> HierarchicalTask:\n task = HierarchicalTask(\n description=self.task_description,\n expected_output=self.expected_output,\n tools=self.tools or [],\n )\n self.status = task\n return task\n" - }, - "expected_output": { - "advanced": false, - "display_name": "Expected Output", - "dynamic": false, - "info": "Clear definition of expected task outcome.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "multiline": true, - "name": "expected_output", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "Succinct response that answers the User's query." - }, - "task_description": { - "advanced": false, - "display_name": "Description", - "dynamic": false, - "info": "Descriptive text detailing task's purpose and execution.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "multiline": true, - "name": "task_description", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "tools": { - "advanced": true, - "display_name": "Tools", - "dynamic": false, - "info": "List of tools/resources limited for task execution. Uses the Agent tools by default.", - "input_types": [ - "Tool" - ], - "list": true, - "name": "tools", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "other", - "value": "" - } - } - }, - "type": "HierarchicalTaskComponent" - }, - "height": 455, - "id": "HierarchicalTaskComponent-yWaMT", - "position": { - "x": 1940.5188074417165, - "y": 682.2998623189735 - }, - "selected": false, - "type": "genericNode", - "width": 384 - }, - { - "data": { - "description": "Represents an agent of CrewAI.", - "display_name": "CrewAI Agent", - "id": "CrewAIAgentComponent-Qm4en", - "node": { - "base_classes": [ - "Agent" - ], - "beta": false, - "conditional_paths": [], - "custom_fields": {}, - "description": "Represents an agent of CrewAI.", - "display_name": "CrewAI Agent", - "documentation": "https://docs.crewai.com/how-to/LLM-Connections/", - "edited": false, - "field_order": [ - "role", - "goal", - "backstory", - "tools", - "llm", - "memory", - "verbose", - "allow_delegation", - "allow_code_execution", - "kwargs" - ], - "frozen": false, - "icon": "CrewAI", - "output_types": [], - "outputs": [ - { - "cache": true, - "display_name": "Agent", - "method": "build_output", - "name": "output", - "selected": "Agent", - "types": [ - "Agent" - ], - "value": "__UNDEFINED__" - } - ], - "pinned": false, - "template": { - "_type": "Component", - "allow_code_execution": { - "advanced": true, - "display_name": "Allow Code Execution", - "dynamic": false, - "info": "Whether the agent is allowed to execute code.", - "list": false, - "name": "allow_code_execution", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": false - }, - "allow_delegation": { - "advanced": false, - "display_name": "Allow Delegation", - "dynamic": false, - "info": "Whether the agent is allowed to delegate tasks to other agents.", - "list": false, - "name": "allow_delegation", - "placeholder": "", - 
"required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": true - }, - "backstory": { - "advanced": false, - "display_name": "Backstory", - "dynamic": false, - "info": "The backstory of the agent.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "multiline": true, - "name": "backstory", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "code": { - "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", - "info": "", - "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, - "placeholder": "", - "required": true, - "show": true, - "title_case": false, - "type": "code", - "value": "from crewai import Agent # type: ignore\n\nfrom langflow.custom import Component\nfrom langflow.io import BoolInput, DictInput, HandleInput, MultilineInput, Output\n\n\nclass CrewAIAgentComponent(Component):\n display_name = \"CrewAI Agent\"\n description = \"Represents an agent of CrewAI.\"\n documentation: str = \"https://docs.crewai.com/how-to/LLM-Connections/\"\n icon = \"CrewAI\"\n\n inputs = [\n MultilineInput(name=\"role\", display_name=\"Role\", info=\"The role of the agent.\"),\n MultilineInput(name=\"goal\", display_name=\"Goal\", info=\"The objective of the agent.\"),\n MultilineInput(name=\"backstory\", display_name=\"Backstory\", info=\"The backstory of the agent.\"),\n HandleInput(\n name=\"tools\",\n display_name=\"Tools\",\n input_types=[\"Tool\"],\n is_list=True,\n info=\"Tools at agents disposal\",\n value=[],\n ),\n HandleInput(\n name=\"llm\",\n display_name=\"Language Model\",\n info=\"Language model that will run the agent.\",\n input_types=[\"LanguageModel\"],\n ),\n BoolInput(\n name=\"memory\",\n display_name=\"Memory\",\n info=\"Whether the agent should have memory or not\",\n advanced=True,\n value=True,\n ),\n BoolInput(\n name=\"verbose\",\n display_name=\"Verbose\",\n advanced=True,\n value=False,\n ),\n BoolInput(\n name=\"allow_delegation\",\n display_name=\"Allow Delegation\",\n info=\"Whether the agent is allowed to delegate tasks to other agents.\",\n value=True,\n ),\n BoolInput(\n name=\"allow_code_execution\",\n display_name=\"Allow Code Execution\",\n info=\"Whether the agent is allowed to execute code.\",\n value=False,\n advanced=True,\n ),\n DictInput(\n name=\"kwargs\",\n display_name=\"kwargs\",\n info=\"kwargs of agent.\",\n is_list=True,\n advanced=True,\n ),\n ]\n\n outputs = [\n Output(display_name=\"Agent\", name=\"output\", method=\"build_output\"),\n ]\n\n def build_output(self) -> Agent:\n kwargs = self.kwargs if self.kwargs else {}\n agent = Agent(\n role=self.role,\n goal=self.goal,\n backstory=self.backstory,\n llm=self.llm,\n verbose=self.verbose,\n memory=self.memory,\n tools=self.tools if self.tools else [],\n allow_delegation=self.allow_delegation,\n allow_code_execution=self.allow_code_execution,\n **kwargs,\n )\n self.status = repr(agent)\n return agent\n" - }, - "goal": { - "advanced": false, - "display_name": "Goal", - "dynamic": false, - "info": "The objective of the agent.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "multiline": true, - "name": "goal", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "kwargs": { - 
"advanced": true, - "display_name": "kwargs", - "dynamic": false, - "info": "kwargs of agent.", - "list": true, - "name": "kwargs", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "type": "dict", - "value": {} - }, - "llm": { - "advanced": false, - "display_name": "Language Model", - "dynamic": false, - "info": "Language model that will run the agent.", - "input_types": [ - "LanguageModel" - ], - "list": false, - "name": "llm", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "other", - "value": "" - }, - "memory": { - "advanced": true, - "display_name": "Memory", - "dynamic": false, - "info": "Whether the agent should have memory or not", - "list": false, - "name": "memory", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": true - }, - "role": { - "advanced": false, - "display_name": "Role", - "dynamic": false, - "info": "The role of the agent.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "multiline": true, - "name": "role", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "tools": { - "advanced": false, - "display_name": "Tools", - "dynamic": false, - "info": "Tools at agents disposal", - "input_types": [ - "Tool" - ], - "list": true, - "name": "tools", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "other", - "value": [] - }, - "verbose": { - "advanced": true, - "display_name": "Verbose", - "dynamic": false, - "info": "", - "list": false, - "name": "verbose", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": false - } - } - }, - "type": "CrewAIAgentComponent" - }, - "dragging": false, - "height": 665, - "id": "CrewAIAgentComponent-Qm4en", - "position": { - "x": 1397.4912377259789, - "y": 1242.739374306084 - }, - "selected": false, - "type": "genericNode", - "width": 384 - }, - { - "data": { - "description": "Represents an agent of CrewAI.", - "display_name": "CrewAI Agent", - "id": "CrewAIAgentComponent-Q2BtZ", - "node": { - "base_classes": [ - "Agent" - ], - "beta": false, - "conditional_paths": [], - "custom_fields": {}, - "description": "Represents an agent of CrewAI.", - "display_name": "CrewAI Agent", - "documentation": "https://docs.crewai.com/how-to/LLM-Connections/", - "edited": false, - "field_order": [ - "role", - "goal", - "backstory", - "tools", - "llm", - "memory", - "verbose", - "allow_delegation", - "allow_code_execution", - "kwargs" - ], - "frozen": false, - "icon": "CrewAI", - "output_types": [], - "outputs": [ - { - "cache": true, - "display_name": "Agent", - "method": "build_output", - "name": "output", - "selected": "Agent", - "types": [ - "Agent" - ], - "value": "__UNDEFINED__" - } - ], - "pinned": false, - "template": { - "_type": "Component", - "allow_code_execution": { - "advanced": true, - "display_name": "Allow Code Execution", - "dynamic": false, - "info": "Whether the agent is allowed to execute code.", - "list": false, - "name": "allow_code_execution", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": false - }, - "allow_delegation": { 
- "advanced": false, - "display_name": "Allow Delegation", - "dynamic": false, - "info": "Whether the agent is allowed to delegate tasks to other agents.", - "list": false, - "name": "allow_delegation", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": true - }, - "backstory": { - "advanced": false, - "display_name": "Backstory", - "dynamic": false, - "info": "The backstory of the agent.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "multiline": true, - "name": "backstory", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "You are polite and helpful. You've always been a beacon of politeness." - }, - "code": { - "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", - "info": "", - "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, - "placeholder": "", - "required": true, - "show": true, - "title_case": false, - "type": "code", - "value": "from crewai import Agent # type: ignore\n\nfrom langflow.custom import Component\nfrom langflow.io import BoolInput, DictInput, HandleInput, MultilineInput, Output\n\n\nclass CrewAIAgentComponent(Component):\n display_name = \"CrewAI Agent\"\n description = \"Represents an agent of CrewAI.\"\n documentation: str = \"https://docs.crewai.com/how-to/LLM-Connections/\"\n icon = \"CrewAI\"\n\n inputs = [\n MultilineInput(name=\"role\", display_name=\"Role\", info=\"The role of the agent.\"),\n MultilineInput(name=\"goal\", display_name=\"Goal\", info=\"The objective of the agent.\"),\n MultilineInput(name=\"backstory\", display_name=\"Backstory\", info=\"The backstory of the agent.\"),\n HandleInput(\n name=\"tools\",\n display_name=\"Tools\",\n input_types=[\"Tool\"],\n is_list=True,\n info=\"Tools at agents disposal\",\n value=[],\n ),\n HandleInput(\n name=\"llm\",\n display_name=\"Language Model\",\n info=\"Language model that will run the agent.\",\n input_types=[\"LanguageModel\"],\n ),\n BoolInput(\n name=\"memory\",\n display_name=\"Memory\",\n info=\"Whether the agent should have memory or not\",\n advanced=True,\n value=True,\n ),\n BoolInput(\n name=\"verbose\",\n display_name=\"Verbose\",\n advanced=True,\n value=False,\n ),\n BoolInput(\n name=\"allow_delegation\",\n display_name=\"Allow Delegation\",\n info=\"Whether the agent is allowed to delegate tasks to other agents.\",\n value=True,\n ),\n BoolInput(\n name=\"allow_code_execution\",\n display_name=\"Allow Code Execution\",\n info=\"Whether the agent is allowed to execute code.\",\n value=False,\n advanced=True,\n ),\n DictInput(\n name=\"kwargs\",\n display_name=\"kwargs\",\n info=\"kwargs of agent.\",\n is_list=True,\n advanced=True,\n ),\n ]\n\n outputs = [\n Output(display_name=\"Agent\", name=\"output\", method=\"build_output\"),\n ]\n\n def build_output(self) -> Agent:\n kwargs = self.kwargs if self.kwargs else {}\n agent = Agent(\n role=self.role,\n goal=self.goal,\n backstory=self.backstory,\n llm=self.llm,\n verbose=self.verbose,\n memory=self.memory,\n tools=self.tools if self.tools else [],\n allow_delegation=self.allow_delegation,\n allow_code_execution=self.allow_code_execution,\n **kwargs,\n )\n self.status = repr(agent)\n return agent\n" - }, - "goal": { - "advanced": false, - "display_name": "Goal", - "dynamic": false, - "info": "The objective of the agent.", - 
"input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "multiline": true, - "name": "goal", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "You can answer general questions from the User and may call others for help if needed." - }, - "kwargs": { - "advanced": true, - "display_name": "kwargs", - "dynamic": false, - "info": "kwargs of agent.", - "list": true, - "name": "kwargs", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "type": "dict", - "value": {} - }, - "llm": { - "advanced": false, - "display_name": "Language Model", - "dynamic": false, - "info": "Language model that will run the agent.", - "input_types": [ - "LanguageModel" - ], - "list": false, - "name": "llm", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "other", - "value": "" - }, - "memory": { - "advanced": true, - "display_name": "Memory", - "dynamic": false, - "info": "Whether the agent should have memory or not", - "list": false, - "name": "memory", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": true - }, - "role": { - "advanced": false, - "display_name": "Role", - "dynamic": false, - "info": "The role of the agent.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "multiline": true, - "name": "role", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "Manager" - }, - "tools": { - "advanced": false, - "display_name": "Tools", - "dynamic": false, - "info": "Tools at agents disposal", - "input_types": [ - "Tool" - ], - "list": true, - "name": "tools", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "other", - "value": [] - }, - "verbose": { - "advanced": true, - "display_name": "Verbose", - "dynamic": false, - "info": "", - "list": false, - "name": "verbose", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": false - } - } - }, - "type": "CrewAIAgentComponent" - }, - "dragging": false, - "height": 665, - "id": "CrewAIAgentComponent-Q2BtZ", - "position": { - "x": 1897.563645835175, - "y": 2043.8342912334688 - }, - "positionAbsolute": { - "x": 1897.563645835175, - "y": 2043.8342912334688 - }, - "selected": false, - "type": "genericNode", - "width": 384 - }, - { - "data": { - "id": "OpenAIModel-2V6yS", - "node": { - "base_classes": [ - "LanguageModel", - "Message" - ], - "beta": false, - "conditional_paths": [], - "custom_fields": {}, - "description": "Generates text using OpenAI LLMs.", - "display_name": "OpenAI", - "documentation": "", - "edited": false, - "field_order": [ - "input_value", - "max_tokens", - "model_kwargs", - "json_mode", - "output_schema", - "model_name", - "openai_api_base", - "openai_api_key", - "temperature", - "stream", - "system_message", - "seed" - ], - "frozen": false, - "icon": "OpenAI", - "output_types": [], - "outputs": [ - { - "cache": true, - "display_name": "Text", - "method": "text_response", - "name": "text_output", - "selected": "Message", - "types": [ - "Message" - ], - "value": "__UNDEFINED__" - }, - { - "cache": true, - 
"display_name": "Language Model", - "method": "build_model", - "name": "model_output", - "selected": "LanguageModel", - "types": [ - "LanguageModel" - ], - "value": "__UNDEFINED__" - } - ], - "pinned": false, - "template": { - "_type": "Component", - "api_key": { - "_input_type": "SecretStrInput", - "advanced": false, - "display_name": "OpenAI API Key", - "dynamic": false, - "info": "The OpenAI API Key to use for the OpenAI model.", - "input_types": [ - "Message" - ], - "load_from_db": true, - "name": "api_key", - "password": true, - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "type": "str", - "value": "OPENAI_API_KEY" - }, - "code": { - "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", - "info": "", - "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, - "placeholder": "", - "required": true, - "show": true, - "title_case": false, - "type": "code", - "value": "import operator\nfrom functools import reduce\n\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. 
You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature if temperature is not None else 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output # type: ignore\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" - }, - "input_value": { - "advanced": false, - "display_name": "Input", - "dynamic": false, - "info": "", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "name": "input_value", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "json_mode": { - "advanced": true, - "display_name": "JSON Mode", - "dynamic": false, - "info": "If True, it will output JSON regardless of passing a schema.", - "list": false, - "name": "json_mode", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": false - }, - "max_tokens": { - "advanced": true, - "display_name": "Max Tokens", - "dynamic": false, - "info": "The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.", - "list": false, - "name": "max_tokens", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "int", - "value": "" - }, - "model_kwargs": { - "advanced": true, - "display_name": "Model Kwargs", - "dynamic": false, - "info": "", - "list": false, - "name": "model_kwargs", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "type": "dict", - "value": {} - }, - "model_name": { - "advanced": false, - "display_name": "Model Name", - "dynamic": false, - "info": "", - "name": "model_name", - "options": [ - "gpt-4o-mini", - "gpt-4o", - "gpt-4-turbo", - "gpt-4-turbo-preview", - "gpt-4", - "gpt-3.5-turbo", - "gpt-3.5-turbo-0125" - ], - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "str", - "value": "gpt-4o" - }, - "openai_api_base": { - "advanced": true, - "display_name": "OpenAI API Base", - "dynamic": false, - "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.", - "list": false, - "load_from_db": false, - "name": "openai_api_base", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "output_schema": { - "advanced": true, - "display_name": "Schema", - "dynamic": false, - "info": "The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.", - "list": true, - "name": "output_schema", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "type": "dict", - "value": {} - }, - "seed": { - "advanced": true, - "display_name": "Seed", - "dynamic": false, - "info": "The seed controls the reproducibility of the job.", - "list": false, - "name": "seed", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "int", - "value": 1 - }, - "stream": { - "advanced": true, - "display_name": "Stream", - "dynamic": false, - "info": "Stream the response from the model. 
Streaming works only in Chat.", - "list": false, - "name": "stream", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": false - }, - "system_message": { - "advanced": true, - "display_name": "System Message", - "dynamic": false, - "info": "System message to pass to the model.", - "list": false, - "load_from_db": false, - "name": "system_message", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "temperature": { - "advanced": false, - "display_name": "Temperature", - "dynamic": false, - "info": "", - "list": false, - "name": "temperature", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "float", - "value": 0.1 - } - } - }, - "type": "OpenAIModel" - }, - "dragging": false, - "height": 623, - "id": "OpenAIModel-2V6yS", - "position": { - "x": 1689.7403176652529, - "y": 2778.554803586579 - }, - "positionAbsolute": { - "x": 1689.7403176652529, - "y": 2778.554803586579 - }, - "selected": false, - "type": "genericNode", - "width": 384 - }, - { - "data": { - "description": "Create a prompt template with dynamic variables.", - "display_name": "Prompt", - "id": "Prompt-AVtIX", - "node": { - "base_classes": [ - "Message" - ], - "beta": false, - "conditional_paths": [], - "custom_fields": { - "template": [ - "query" - ] - }, - "description": "Create a prompt template with dynamic variables.", - "display_name": "Prompt", - "documentation": "", - "edited": false, - "error": null, - "field_order": [ - "template" - ], - "frozen": false, - "full_path": null, - "icon": "prompts", - "is_composition": null, - "is_input": null, - "is_output": null, - "name": "", - "output_types": [], - "outputs": [ - { - "cache": true, - "display_name": "Prompt Message", - "method": "build_prompt", - "name": "prompt", - "selected": "Message", - "types": [ - "Message" - ], - "value": "__UNDEFINED__" - } - ], - "pinned": false, - "template": { - "_type": "Component", - "code": { - "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", - "info": "", - "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, - "placeholder": "", - "required": true, - "show": true, - "title_case": false, - "type": "code", - "value": "from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.inputs.inputs import DefaultPromptField\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(\n self,\n ) -> Message:\n prompt = await Message.from_template_and_variables(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def _update_template(self, frontend_node: dict):\n prompt_template = frontend_node[\"template\"][\"template\"][\"value\"]\n custom_fields = frontend_node[\"custom_fields\"]\n frontend_node_template = frontend_node[\"template\"]\n _ = 
process_prompt_template(\n                template=prompt_template,\n                name=\"template\",\n                custom_fields=custom_fields,\n                frontend_node_template=frontend_node_template,\n            )\n        return frontend_node\n\n    def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n        \"\"\"\n        This function is called after the code validation is done.\n        \"\"\"\n        frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n        template = frontend_node[\"template\"][\"template\"][\"value\"]\n        # Kept it duplicated for backwards compatibility\n        _ = process_prompt_template(\n            template=template,\n            name=\"template\",\n            custom_fields=frontend_node[\"custom_fields\"],\n            frontend_node_template=frontend_node[\"template\"],\n        )\n        # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n        # and update the frontend_node with those values\n        update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n        return frontend_node\n\n    def _get_fallback_input(self, **kwargs):\n        return DefaultPromptField(**kwargs)\n" - }, - "query": { - "advanced": false, - "display_name": "query", - "dynamic": false, - "field_type": "str", - "fileTypes": [], - "file_path": "", - "info": "", - "input_types": [ - "Message", - "Text" - ], - "list": false, - "load_from_db": false, - "multiline": true, - "name": "query", - "password": false, - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "type": "str", - "value": "" - }, - "template": { - "advanced": false, - "display_name": "Template", - "dynamic": false, - "info": "", - "list": false, - "name": "template", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "type": "prompt", - "value": "User's query:\n{query}\n\nRespond to the user with as much information as you can about the topic. Delegate if needed. If it is just a general query (e.g. a greeting) you can respond to them directly."
- } - } - }, - "type": "Prompt" - }, - "dragging": false, - "height": 423, - "id": "Prompt-AVtIX", - "position": { - "x": 1314.943965489173, - "y": 624.296875 - }, - "selected": false, - "type": "genericNode", - "width": 384 - }, - { - "data": { - "id": "ChatInput-nPZcc", - "node": { - "base_classes": [ - "Message" - ], - "beta": false, - "conditional_paths": [], - "custom_fields": {}, - "description": "Get chat inputs from the Playground.", - "display_name": "Chat Input", - "documentation": "", - "edited": false, - "field_order": [ - "input_value", - "should_store_message", - "sender", - "sender_name", - "session_id", - "files" - ], - "frozen": false, - "icon": "ChatInput", - "output_types": [], - "outputs": [ - { - "cache": true, - "display_name": "Message", - "method": "message_response", - "name": "message", - "selected": "Message", - "types": [ - "Message" - ], - "value": "__UNDEFINED__" - } - ], - "pinned": false, - "template": { - "_type": "Component", - "code": { - "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", - "info": "", - "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, - "placeholder": "", - "required": true, - "show": true, - "title_case": false, - "type": "code", - "value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_USER, MESSAGE_SENDER_NAME_USER\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n )\n\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n" - }, - "files": { - "advanced": true, - "display_name": "Files", - "dynamic": false, - "fileTypes": [ - "txt", - "md", - "mdx", - "csv", - "json", - "yaml", - "yml", - "xml", - "html", - "htm", - "pdf", - "docx", - "py", - "sh", - "sql", - "js", - "ts", - "tsx", - "jpg", - "jpeg", - "png", - "bmp", - "image" - ], - "file_path": "", - "info": "Files to be sent with the message.", - "list": true, - "name": "files", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "file", - "value": "" - }, - "input_value": { - "advanced": false, - "display_name": "Text", - "dynamic": false, - "info": "Message to be passed as input.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "multiline": true, - "name": "input_value", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "Could you search info about AAPL?" - }, - "sender": { - "advanced": true, - "display_name": "Sender Type", - "dynamic": false, - "info": "Type of sender.", - "name": "sender", - "options": [ - "Machine", - "User" - ], - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "str", - "value": "User" - }, - "sender_name": { - "advanced": true, - "display_name": "Sender Name", - "dynamic": false, - "info": "Name of the sender.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "name": "sender_name", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "User" - }, - "session_id": { - "advanced": true, - "display_name": "Session ID", - "dynamic": false, - "info": "The session ID of the chat. 
If empty, the current session ID parameter will be used.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "name": "session_id", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "should_store_message": { - "advanced": true, - "display_name": "Store Messages", - "dynamic": false, - "info": "Store the message in the history.", - "list": false, - "name": "should_store_message", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": true - } - } - }, - "type": "ChatInput" - }, - "dragging": false, - "height": 309, - "id": "ChatInput-nPZcc", - "position": { - "x": -812.219234501281, - "y": 283.9527676042414 - }, - "positionAbsolute": { - "x": -812.219234501281, - "y": 283.9527676042414 - }, - "selected": false, - "type": "genericNode", - "width": 384 - }, - { - "data": { - "description": "Tool for interacting with Yahoo Finance News.", - "display_name": "Yahoo Finance News Tool", - "id": "YFinanceTool-tWOoS", - "node": { - "base_classes": [ - "Tool" - ], - "beta": false, - "conditional_paths": [], - "custom_fields": {}, - "description": "Tool for interacting with Yahoo Finance News.", - "display_name": "Yahoo Finance News Tool", - "documentation": "", - "edited": false, - "field_order": [], - "frozen": false, - "output_types": [], - "outputs": [ - { - "cache": true, - "display_name": "Tool", - "method": "build_tool", - "name": "tool", - "selected": "Tool", - "types": [ - "Tool" - ], - "value": "__UNDEFINED__" - } - ], - "pinned": false, - "template": { - "_type": "Component", - "code": { - "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", - "info": "", - "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, - "placeholder": "", - "required": true, - "show": true, - "title_case": false, - "type": "code", - "value": "from typing import cast\n\nfrom langchain_community.tools.yahoo_finance_news import YahooFinanceNewsTool\n\nfrom langflow.custom import Component\nfrom langflow.field_typing import Tool\nfrom langflow.io import Output\n\n\nclass YfinanceToolComponent(Component):\n display_name = \"Yahoo Finance News Tool\"\n description = \"Tool for interacting with Yahoo Finance News.\"\n name = \"YFinanceTool\"\n\n outputs = [\n Output(display_name=\"Tool\", name=\"tool\", method=\"build_tool\"),\n ]\n\n def build_tool(self) -> Tool:\n return cast(Tool, YahooFinanceNewsTool())\n" - } - } - }, - "type": "YFinanceTool" - }, - "dragging": false, - "height": 219, - "id": "YFinanceTool-tWOoS", - "position": { - "x": 339.85802955438953, - "y": 941.0061737791777 - }, - "positionAbsolute": { - "x": 339.85802955438953, - "y": 941.0061737791777 - }, - "selected": false, - "type": "genericNode", - "width": 384 - }, - { - "data": { - "id": "OpenAIModel-bjGnq", - "node": { - "base_classes": [ - "LanguageModel", - "Message" - ], - "beta": false, - "conditional_paths": [], - "custom_fields": {}, - "description": "Generates text using OpenAI LLMs.", - "display_name": "OpenAI", - "documentation": "", - "edited": false, - "field_order": [ - "input_value", - "max_tokens", - "model_kwargs", - "json_mode", - "output_schema", - "model_name", - "openai_api_base", - "openai_api_key", - "temperature", - "stream", - "system_message", - "seed" - ], - "frozen": false, - "icon": "OpenAI", - "output_types": [], - 
"outputs": [ - { - "cache": true, - "display_name": "Text", - "method": "text_response", - "name": "text_output", - "selected": "Message", - "types": [ - "Message" - ], - "value": "__UNDEFINED__" - }, - { - "cache": true, - "display_name": "Language Model", - "method": "build_model", - "name": "model_output", - "selected": "LanguageModel", - "types": [ - "LanguageModel" - ], - "value": "__UNDEFINED__" - } - ], - "pinned": false, - "template": { - "_type": "Component", - "api_key": { - "_input_type": "SecretStrInput", - "advanced": false, - "display_name": "OpenAI API Key", - "dynamic": false, - "info": "The OpenAI API Key to use for the OpenAI model.", - "input_types": [ - "Message" - ], - "load_from_db": true, - "name": "api_key", - "password": true, - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "type": "str", - "value": "OPENAI_API_KEY" - }, - "code": { - "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", - "info": "", - "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, - "placeholder": "", - "required": true, - "show": true, - "title_case": false, - "type": "code", - "value": "import operator\nfrom functools import reduce\n\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. 
You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature if temperature is not None else 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output # type: ignore\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" - }, - "input_value": { - "advanced": false, - "display_name": "Input", - "dynamic": false, - "info": "", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "name": "input_value", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "json_mode": { - "advanced": true, - "display_name": "JSON Mode", - "dynamic": false, - "info": "If True, it will output JSON regardless of passing a schema.", - "list": false, - "name": "json_mode", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": false - }, - "max_tokens": { - "advanced": true, - "display_name": "Max Tokens", - "dynamic": false, - "info": "The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.", - "list": false, - "name": "max_tokens", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "int", - "value": "" - }, - "model_kwargs": { - "advanced": true, - "display_name": "Model Kwargs", - "dynamic": false, - "info": "", - "list": false, - "name": "model_kwargs", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "type": "dict", - "value": {} - }, - "model_name": { - "advanced": false, - "display_name": "Model Name", - "dynamic": false, - "info": "", - "name": "model_name", - "options": [ - "gpt-4o-mini", - "gpt-4o", - "gpt-4-turbo", - "gpt-4-turbo-preview", - "gpt-4", - "gpt-3.5-turbo", - "gpt-3.5-turbo-0125" - ], - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "str", - "value": "gpt-3.5-turbo" - }, - "openai_api_base": { - "advanced": true, - "display_name": "OpenAI API Base", - "dynamic": false, - "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.", - "list": false, - "load_from_db": false, - "name": "openai_api_base", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "output_schema": { - "advanced": true, - "display_name": "Schema", - "dynamic": false, - "info": "The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.", - "list": true, - "name": "output_schema", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "type": "dict", - "value": {} - }, - "seed": { - "advanced": true, - "display_name": "Seed", - "dynamic": false, - "info": "The seed controls the reproducibility of the job.", - "list": false, - "name": "seed", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "int", - "value": 1 - }, - "stream": { - "advanced": true, - "display_name": "Stream", - "dynamic": false, - "info": "Stream the response from the model. 
Streaming works only in Chat.", - "list": false, - "name": "stream", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": false - }, - "system_message": { - "advanced": true, - "display_name": "System Message", - "dynamic": false, - "info": "System message to pass to the model.", - "list": false, - "load_from_db": false, - "name": "system_message", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "temperature": { - "advanced": false, - "display_name": "Temperature", - "dynamic": false, - "info": "", - "list": false, - "name": "temperature", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "float", - "value": 0.1 - } - } - }, - "type": "OpenAIModel" - }, - "dragging": false, - "height": 623, - "id": "OpenAIModel-bjGnq", - "position": { - "x": -1421.3072930401338, - "y": 944.2116827656167 - }, - "positionAbsolute": { - "x": -1421.3072930401338, - "y": 944.2116827656167 - }, - "selected": false, - "type": "genericNode", - "width": 384 - }, - { - "data": { - "description": "Create a prompt template with dynamic variables.", - "display_name": "Prompt", - "id": "Prompt-kSwv4", - "node": { - "base_classes": [ - "Message" - ], - "beta": false, - "conditional_paths": [], - "custom_fields": { - "template": [ - "query" - ] - }, - "description": "Create a prompt template with dynamic variables.", - "display_name": "Role Prompt", - "documentation": "", - "edited": false, - "error": null, - "field_order": [ - "template" - ], - "frozen": false, - "full_path": null, - "icon": "prompts", - "is_composition": null, - "is_input": null, - "is_output": null, - "name": "", - "output_types": [], - "outputs": [ - { - "cache": true, - "display_name": "Prompt Message", - "hidden": null, - "method": "build_prompt", - "name": "prompt", - "selected": "Message", - "types": [ - "Message" - ], - "value": "__UNDEFINED__" - } - ], - "pinned": false, - "template": { - "_type": "Component", - "code": { - "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", - "info": "", - "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, - "placeholder": "", - "required": true, - "show": true, - "title_case": false, - "type": "code", - "value": "from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(\n self,\n ) -> Message:\n prompt = await Message.from_template_and_variables(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"\n This function is called after the code validation is done.\n \"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n template = 
frontend_node[\"template\"][\"template\"][\"value\"]\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n" - }, - "query": { - "advanced": false, - "display_name": "query", - "dynamic": false, - "field_type": "str", - "fileTypes": [], - "file_path": "", - "info": "", - "input_types": [ - "Message", - "Text" - ], - "list": false, - "load_from_db": false, - "multiline": true, - "name": "query", - "password": false, - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "type": "str", - "value": "" - }, - "template": { - "advanced": false, - "display_name": "Template", - "dynamic": false, - "info": "", - "list": false, - "name": "template", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "type": "prompt", - "value": "Define a Role that could execute or answer well the user's query.\n\nUser's query: {query}\n\nRole should be two words max. Something like \"Researcher\" or \"Software Developer\".\n" - } - } - }, - "type": "Prompt" - }, - "dragging": false, - "height": 423, - "id": "Prompt-kSwv4", - "position": { - "x": -2011.857599027479, - "y": 811.2903194233206 - }, - "positionAbsolute": { - "x": -2011.857599027479, - "y": 811.2903194233206 - }, - "selected": false, - "type": "genericNode", - "width": 384 - }, - { - "data": { - "id": "OpenAIModel-GAfpF", - "node": { - "base_classes": [ - "LanguageModel", - "Message" - ], - "beta": false, - "conditional_paths": [], - "custom_fields": {}, - "description": "Generates text using OpenAI LLMs.", - "display_name": "OpenAI", - "documentation": "", - "edited": false, - "field_order": [ - "input_value", - "max_tokens", - "model_kwargs", - "json_mode", - "output_schema", - "model_name", - "openai_api_base", - "openai_api_key", - "temperature", - "stream", - "system_message", - "seed" - ], - "frozen": false, - "icon": "OpenAI", - "output_types": [], - "outputs": [ - { - "cache": true, - "display_name": "Text", - "method": "text_response", - "name": "text_output", - "selected": "Message", - "types": [ - "Message" - ], - "value": "__UNDEFINED__" - }, - { - "cache": true, - "display_name": "Language Model", - "method": "build_model", - "name": "model_output", - "selected": "LanguageModel", - "types": [ - "LanguageModel" - ], - "value": "__UNDEFINED__" - } - ], - "pinned": false, - "template": { - "_type": "Component", - "api_key": { - "_input_type": "SecretStrInput", - "advanced": false, - "display_name": "OpenAI API Key", - "dynamic": false, - "info": "The OpenAI API Key to use for the OpenAI model.", - "input_types": [ - "Message" - ], - "load_from_db": true, - "name": "api_key", - "password": true, - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "type": "str", - "value": "OPENAI_API_KEY" - }, - "code": { - "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", - "info": "", - "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, - "placeholder": "", - "required": true, - "show": true, - "title_case": false, - "type": "code", - "value": "import operator\nfrom 
functools import reduce\n\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature if temperature is not None else 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output # type: ignore\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n 
Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" - }, - "input_value": { - "advanced": false, - "display_name": "Input", - "dynamic": false, - "info": "", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "name": "input_value", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "json_mode": { - "advanced": true, - "display_name": "JSON Mode", - "dynamic": false, - "info": "If True, it will output JSON regardless of passing a schema.", - "list": false, - "name": "json_mode", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": false - }, - "max_tokens": { - "advanced": true, - "display_name": "Max Tokens", - "dynamic": false, - "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.", - "list": false, - "name": "max_tokens", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "int", - "value": "" - }, - "model_kwargs": { - "advanced": true, - "display_name": "Model Kwargs", - "dynamic": false, - "info": "", - "list": false, - "name": "model_kwargs", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "type": "dict", - "value": {} - }, - "model_name": { - "advanced": false, - "display_name": "Model Name", - "dynamic": false, - "info": "", - "name": "model_name", - "options": [ - "gpt-4o-mini", - "gpt-4o", - "gpt-4-turbo", - "gpt-4-turbo-preview", - "gpt-4", - "gpt-3.5-turbo", - "gpt-3.5-turbo-0125" - ], - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "str", - "value": "gpt-3.5-turbo" - }, - "openai_api_base": { - "advanced": true, - "display_name": "OpenAI API Base", - "dynamic": false, - "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.", - "list": false, - "load_from_db": false, - "name": "openai_api_base", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "output_schema": { - "advanced": true, - "display_name": "Schema", - "dynamic": false, - "info": "The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.", - "list": true, - "name": "output_schema", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "type": "dict", - "value": {} - }, - "seed": { - "advanced": true, - "display_name": "Seed", - "dynamic": false, - "info": "The seed controls the reproducibility of the job.", - "list": false, - "name": "seed", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "int", - "value": 1 - }, - "stream": { - "advanced": true, - "display_name": "Stream", - "dynamic": false, - "info": "Stream the response from the model. 
Streaming works only in Chat.", - "list": false, - "name": "stream", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": false - }, - "system_message": { - "advanced": true, - "display_name": "System Message", - "dynamic": false, - "info": "System message to pass to the model.", - "list": false, - "load_from_db": false, - "name": "system_message", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "temperature": { - "advanced": false, - "display_name": "Temperature", - "dynamic": false, - "info": "", - "list": false, - "name": "temperature", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "float", - "value": 0.1 - } - } - }, - "type": "OpenAIModel" - }, - "dragging": false, - "height": 623, - "id": "OpenAIModel-GAfpF", - "position": { - "x": -652.5526340446298, - "y": 2170.3301251807097 - }, - "positionAbsolute": { - "x": -652.5526340446298, - "y": 2170.3301251807097 - }, - "selected": false, - "type": "genericNode", - "width": 384 - }, - { - "data": { - "description": "Create a prompt template with dynamic variables.", - "display_name": "Prompt", - "id": "Prompt-S0Qt3", - "node": { - "base_classes": [ - "Message" - ], - "beta": false, - "conditional_paths": [], - "custom_fields": { - "template": [ - "query", - "role" - ] - }, - "description": "Create a prompt template with dynamic variables.", - "display_name": "Goal Prompt", - "documentation": "", - "edited": false, - "error": null, - "field_order": [ - "template" - ], - "frozen": false, - "full_path": null, - "icon": "prompts", - "is_composition": null, - "is_input": null, - "is_output": null, - "name": "", - "output_types": [], - "outputs": [ - { - "cache": true, - "display_name": "Prompt Message", - "hidden": null, - "method": "build_prompt", - "name": "prompt", - "selected": "Message", - "types": [ - "Message" - ], - "value": "__UNDEFINED__" - } - ], - "pinned": false, - "template": { - "_type": "Component", - "code": { - "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", - "info": "", - "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, - "placeholder": "", - "required": true, - "show": true, - "title_case": false, - "type": "code", - "value": "from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(\n self,\n ) -> Message:\n prompt = await Message.from_template_and_variables(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"\n This function is called after the code validation is done.\n \"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n 
template = frontend_node[\"template\"][\"template\"][\"value\"]\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n" - }, - "query": { - "advanced": false, - "display_name": "query", - "dynamic": false, - "field_type": "str", - "fileTypes": [], - "file_path": "", - "info": "", - "input_types": [ - "Message", - "Text" - ], - "list": false, - "load_from_db": false, - "multiline": true, - "name": "query", - "password": false, - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "type": "str", - "value": "" - }, - "role": { - "advanced": false, - "display_name": "role", - "dynamic": false, - "field_type": "str", - "fileTypes": [], - "file_path": "", - "info": "", - "input_types": [ - "Message", - "Text" - ], - "list": false, - "load_from_db": false, - "multiline": true, - "name": "role", - "password": false, - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "type": "str", - "value": "" - }, - "template": { - "advanced": false, - "display_name": "Template", - "dynamic": false, - "info": "", - "list": false, - "name": "template", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "type": "prompt", - "value": "Define the Goal of this Role, given the User's Query. \nUser's query: {query}\n\nRole: {role}\n\nThe goal should be concise and specific.\nGoal: \n" - } - } - }, - "type": "Prompt" - }, - "dragging": false, - "height": 517, - "id": "Prompt-S0Qt3", - "position": { - "x": -1127.1897676702288, - "y": 1693.922415635935 - }, - "positionAbsolute": { - "x": -1127.1897676702288, - "y": 1693.922415635935 - }, - "selected": false, - "type": "genericNode", - "width": 384 - }, - { - "data": { - "id": "OpenAIModel-EnsCt", - "node": { - "base_classes": [ - "LanguageModel", - "Message" - ], - "beta": false, - "conditional_paths": [], - "custom_fields": {}, - "description": "Generates text using OpenAI LLMs.", - "display_name": "OpenAI", - "documentation": "", - "edited": false, - "field_order": [ - "input_value", - "max_tokens", - "model_kwargs", - "json_mode", - "output_schema", - "model_name", - "openai_api_base", - "openai_api_key", - "temperature", - "stream", - "system_message", - "seed" - ], - "frozen": false, - "icon": "OpenAI", - "output_types": [], - "outputs": [ - { - "cache": true, - "display_name": "Text", - "method": "text_response", - "name": "text_output", - "selected": "Message", - "types": [ - "Message" - ], - "value": "__UNDEFINED__" - }, - { - "cache": true, - "display_name": "Language Model", - "method": "build_model", - "name": "model_output", - "selected": "LanguageModel", - "types": [ - "LanguageModel" - ], - "value": "__UNDEFINED__" - } - ], - "pinned": false, - "template": { - "_type": "Component", - "api_key": { - "_input_type": "SecretStrInput", - "advanced": false, - "display_name": "OpenAI API Key", - "dynamic": false, - "info": "The OpenAI API Key to use for the OpenAI model.", - "input_types": [ - "Message" - ], - "load_from_db": true, - "name": "api_key", - "password": true, - "placeholder": "", - "required": false, - "show": true, - 
"title_case": false, - "type": "str", - "value": "OPENAI_API_KEY" - }, - "code": { - "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", - "info": "", - "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, - "placeholder": "", - "required": true, - "show": true, - "title_case": false, - "type": "code", - "value": "import operator\nfrom functools import reduce\n\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. 
You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature if temperature is not None else 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output # type: ignore\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" - }, - "input_value": { - "advanced": false, - "display_name": "Input", - "dynamic": false, - "info": "", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "name": "input_value", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "json_mode": { - "advanced": true, - "display_name": "JSON Mode", - "dynamic": false, - "info": "If True, it will output JSON regardless of passing a schema.", - "list": false, - "name": "json_mode", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": false - }, - "max_tokens": { - "advanced": true, - "display_name": "Max Tokens", - "dynamic": false, - "info": "The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.", - "list": false, - "name": "max_tokens", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "int", - "value": "" - }, - "model_kwargs": { - "advanced": true, - "display_name": "Model Kwargs", - "dynamic": false, - "info": "", - "list": false, - "name": "model_kwargs", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "type": "dict", - "value": {} - }, - "model_name": { - "advanced": false, - "display_name": "Model Name", - "dynamic": false, - "info": "", - "name": "model_name", - "options": [ - "gpt-4o-mini", - "gpt-4o", - "gpt-4-turbo", - "gpt-4-turbo-preview", - "gpt-4", - "gpt-3.5-turbo", - "gpt-3.5-turbo-0125" - ], - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "str", - "value": "gpt-3.5-turbo" - }, - "openai_api_base": { - "advanced": true, - "display_name": "OpenAI API Base", - "dynamic": false, - "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.", - "list": false, - "load_from_db": false, - "name": "openai_api_base", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "output_schema": { - "advanced": true, - "display_name": "Schema", - "dynamic": false, - "info": "The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.", - "list": true, - "name": "output_schema", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "type": "dict", - "value": {} - }, - "seed": { - "advanced": true, - "display_name": "Seed", - "dynamic": false, - "info": "The seed controls the reproducibility of the job.", - "list": false, - "name": "seed", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "int", - "value": 1 - }, - "stream": { - "advanced": true, - "display_name": "Stream", - "dynamic": false, - "info": "Stream the response from the model. 
Streaming works only in Chat.", - "list": false, - "name": "stream", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": false - }, - "system_message": { - "advanced": true, - "display_name": "System Message", - "dynamic": false, - "info": "System message to pass to the model.", - "list": false, - "load_from_db": false, - "name": "system_message", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "temperature": { - "advanced": false, - "display_name": "Temperature", - "dynamic": false, - "info": "", - "list": false, - "name": "temperature", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "float", - "value": 0.1 - } - } - }, - "type": "OpenAIModel" - }, - "dragging": false, - "height": 623, - "id": "OpenAIModel-EnsCt", - "position": { - "x": -173.231944282948, - "y": 3277.114857802737 - }, - "positionAbsolute": { - "x": -173.231944282948, - "y": 3277.114857802737 - }, - "selected": false, - "type": "genericNode", - "width": 384 - }, - { - "data": { - "description": "Create a prompt template with dynamic variables.", - "display_name": "Prompt", - "id": "Prompt-a0x5s", - "node": { - "base_classes": [ - "Message" - ], - "beta": false, - "conditional_paths": [], - "custom_fields": { - "template": [ - "query", - "role", - "goal" - ] - }, - "description": "Create a prompt template with dynamic variables.", - "display_name": "Prompt", - "documentation": "", - "edited": false, - "error": null, - "field_order": [ - "template" - ], - "frozen": false, - "full_path": null, - "icon": "prompts", - "is_composition": null, - "is_input": null, - "is_output": null, - "name": "", - "output_types": [], - "outputs": [ - { - "cache": true, - "display_name": "Prompt Message", - "method": "build_prompt", - "name": "prompt", - "selected": "Message", - "types": [ - "Message" - ], - "value": "__UNDEFINED__" - } - ], - "pinned": false, - "template": { - "_type": "Component", - "code": { - "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", - "info": "", - "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, - "placeholder": "", - "required": true, - "show": true, - "title_case": false, - "type": "code", - "value": "from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.inputs.inputs import DefaultPromptField\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(\n self,\n ) -> Message:\n prompt = await Message.from_template_and_variables(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def _update_template(self, frontend_node: dict):\n prompt_template = frontend_node[\"template\"][\"template\"][\"value\"]\n custom_fields = frontend_node[\"custom_fields\"]\n frontend_node_template = 
frontend_node[\"template\"]\n _ = process_prompt_template(\n template=prompt_template,\n name=\"template\",\n custom_fields=custom_fields,\n frontend_node_template=frontend_node_template,\n )\n return frontend_node\n\n def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"\n This function is called after the code validation is done.\n \"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n # Kept it duplicated for backwards compatibility\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n\n def _get_fallback_input(self, **kwargs):\n return DefaultPromptField(**kwargs)\n" - }, - "goal": { - "advanced": false, - "display_name": "goal", - "dynamic": false, - "field_type": "str", - "fileTypes": [], - "file_path": "", - "info": "", - "input_types": [ - "Message", - "Text" - ], - "list": false, - "load_from_db": false, - "multiline": true, - "name": "goal", - "password": false, - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "type": "str", - "value": "" - }, - "query": { - "advanced": false, - "display_name": "query", - "dynamic": false, - "field_type": "str", - "fileTypes": [], - "file_path": "", - "info": "", - "input_types": [ - "Message", - "Text" - ], - "list": false, - "load_from_db": false, - "multiline": true, - "name": "query", - "password": false, - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "type": "str", - "value": "" - }, - "role": { - "advanced": false, - "display_name": "role", - "dynamic": false, - "field_type": "str", - "fileTypes": [], - "file_path": "", - "info": "", - "input_types": [ - "Message", - "Text" - ], - "list": false, - "load_from_db": false, - "multiline": true, - "name": "role", - "password": false, - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "type": "str", - "value": "" - }, - "template": { - "advanced": false, - "display_name": "Template", - "dynamic": false, - "info": "", - "list": false, - "name": "template", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "type": "prompt", - "value": "Define a Backstory of this Role and Goal, given the User's Query. 
\nUser's query: {query}\n\nRole: {role}\nGoal: {goal}\n\nThe backstory should be specific and well aligned with the rest of the information.\nBackstory:" - } - } - }, - "type": "Prompt" - }, - "dragging": false, - "height": 611, - "id": "Prompt-a0x5s", - "position": { - "x": -559.9999554636487, - "y": 2893.2894056013133 - }, - "positionAbsolute": { - "x": -559.9999554636487, - "y": 2893.2894056013133 - }, - "selected": false, - "type": "genericNode", - "width": 384 - }, - { - "data": { - "description": "Call the searchapi.io API", - "display_name": "Search API", - "id": "SearchAPI-nLBPg", - "node": { - "base_classes": [ - "Data", - "Tool" - ], - "beta": false, - "conditional_paths": [], - "custom_fields": {}, - "description": "Call the searchapi.io API", - "display_name": "Search API", - "documentation": "https://www.searchapi.io/docs/google", - "edited": false, - "field_order": [ - "engine", - "api_key", - "input_value", - "search_params" - ], - "frozen": false, - "output_types": [], - "outputs": [ - { - "cache": true, - "display_name": "Data", - "method": "run_model", - "name": "api_run_model", - "selected": "Data", - "types": [ - "Data" - ], - "value": "__UNDEFINED__" - }, - { - "cache": true, - "display_name": "Tool", - "method": "build_tool", - "name": "api_build_tool", - "selected": "Tool", - "types": [ - "Tool" - ], - "value": "__UNDEFINED__" - } - ], - "pinned": false, - "template": { - "_type": "Component", - "api_key": { - "advanced": false, - "display_name": "SearchAPI API Key", - "dynamic": false, - "info": "", - "input_types": [ - "Message" - ], - "load_from_db": true, - "name": "api_key", - "password": true, - "placeholder": "", - "required": true, - "show": true, - "title_case": false, - "type": "str", - "value": "" - }, - "code": { - "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", - "info": "", - "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, - "placeholder": "", - "required": true, - "show": true, - "title_case": false, - "type": "code", - "value": "from typing import Union\n\nfrom langchain_community.utilities.searchapi import SearchApiAPIWrapper\n\nfrom langflow.base.langchain_utilities.model import LCToolComponent\nfrom langflow.inputs import SecretStrInput, MultilineInput, DictInput, MessageTextInput\nfrom langflow.schema import Data\nfrom langflow.field_typing import Tool\n\n\nclass SearchAPIComponent(LCToolComponent):\n display_name: str = \"Search API\"\n description: str = \"Call the searchapi.io API\"\n name = \"SearchAPI\"\n documentation: str = \"https://www.searchapi.io/docs/google\"\n\n inputs = [\n MessageTextInput(name=\"engine\", display_name=\"Engine\", value=\"google\"),\n SecretStrInput(name=\"api_key\", display_name=\"SearchAPI API Key\", required=True),\n MultilineInput(\n name=\"input_value\",\n display_name=\"Input\",\n ),\n DictInput(name=\"search_params\", display_name=\"Search parameters\", advanced=True, is_list=True),\n ]\n\n def run_model(self) -> Union[Data, list[Data]]:\n wrapper = self._build_wrapper()\n results = wrapper.results(query=self.input_value, **(self.search_params or {}))\n list_results = results.get(\"organic_results\", [])\n data = [Data(data=result, text=result[\"snippet\"]) for result in list_results]\n self.status = data\n return data\n\n def build_tool(self) -> Tool:\n wrapper = self._build_wrapper()\n return Tool(\n name=\"search_api\",\n description=\"Search for recent results.\",\n func=lambda x: wrapper.run(query=x, **(self.search_params or 
{})),\n )\n\n def _build_wrapper(self):\n return SearchApiAPIWrapper(engine=self.engine, searchapi_api_key=self.api_key)\n" - }, - "engine": { - "advanced": false, - "display_name": "Engine", - "dynamic": false, - "info": "", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "name": "engine", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "google" - }, - "input_value": { - "advanced": false, - "display_name": "Input", - "dynamic": false, - "info": "", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "multiline": true, - "name": "input_value", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "search_params": { - "advanced": true, - "display_name": "Search parameters", - "dynamic": false, - "info": "", - "list": true, - "name": "search_params", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "type": "dict", - "value": {} - } - } - }, - "type": "SearchAPI" - }, - "dragging": false, - "height": 545, - "id": "SearchAPI-nLBPg", - "position": { - "x": 333.3937684700711, - "y": 310.93183831810336 - }, - "positionAbsolute": { - "x": 333.3937684700711, - "y": 310.93183831810336 - }, - "selected": false, - "type": "genericNode", - "width": 384 - } - ], - "viewport": { - "x": 0, - "y": 0, - "zoom": 1 - } - }, - "description": "This Agent is created on the fly based on what the user asks and a Manager Agent calls it if needed.", - "endpoint_name": null, - "id": "07cd68d7-d864-4cfe-9901-0ccc61d6e80d", - "is_component": false, - "last_tested_version": "1.0.9", - "name": "Complex Agent" -} \ No newline at end of file diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Custom Component Maker.json b/src/backend/base/langflow/initial_setup/starter_projects/Custom Component Maker.json new file mode 100644 index 000000000000..60296156459e --- /dev/null +++ b/src/backend/base/langflow/initial_setup/starter_projects/Custom Component Maker.json @@ -0,0 +1,1832 @@ +{ + "data": { + "edges": [ + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "Memory", + "id": "Memory-tBe70", + "name": "messages_text", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "CHAT_HISTORY", + "id": "Prompt-WSv03", + "inputTypes": ["Message", "Text"], + "type": "str" + } + }, + "id": "reactflow__edge-Memory-tBe70{œdataTypeœ:œMemoryœ,œidœ:œMemory-tBe70œ,œnameœ:œmessages_textœ,œoutput_typesœ:[œMessageœ]}-Prompt-WSv03{œfieldNameœ:œCHAT_HISTORYœ,œidœ:œPrompt-WSv03œ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", + "selected": false, + "source": "Memory-tBe70", + "sourceHandle": "{œdataTypeœ: œMemoryœ, œidœ: œMemory-tBe70œ, œnameœ: œmessages_textœ, œoutput_typesœ: [œMessageœ]}", + "target": "Prompt-WSv03", + "targetHandle": "{œfieldNameœ: œCHAT_HISTORYœ, œidœ: œPrompt-WSv03œ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "ChatInput", + "id": "ChatInput-VUqPC", + "name": "message", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "USER_INPUT", + "id": "Prompt-WSv03", + "inputTypes": ["Message", "Text"], + "type": "str" + } + }, + "id": 
"reactflow__edge-ChatInput-VUqPC{œdataTypeœ:œChatInputœ,œidœ:œChatInput-VUqPCœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Prompt-WSv03{œfieldNameœ:œUSER_INPUTœ,œidœ:œPrompt-WSv03œ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", + "selected": false, + "source": "ChatInput-VUqPC", + "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-VUqPCœ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}", + "target": "Prompt-WSv03", + "targetHandle": "{œfieldNameœ: œUSER_INPUTœ, œidœ: œPrompt-WSv03œ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "Prompt", + "id": "Prompt-WSv03", + "name": "prompt", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "input_value", + "id": "AnthropicModel-laWKJ", + "inputTypes": ["Message"], + "type": "str" + } + }, + "id": "reactflow__edge-Prompt-WSv03{œdataTypeœ:œPromptœ,œidœ:œPrompt-WSv03œ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-AnthropicModel-laWKJ{œfieldNameœ:œinput_valueœ,œidœ:œAnthropicModel-laWKJœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "Prompt-WSv03", + "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-WSv03œ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", + "target": "AnthropicModel-laWKJ", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œAnthropicModel-laWKJœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "AnthropicModel", + "id": "AnthropicModel-laWKJ", + "name": "text_output", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "input_value", + "id": "ChatOutput-XNaWv", + "inputTypes": ["Message"], + "type": "str" + } + }, + "id": "reactflow__edge-AnthropicModel-laWKJ{œdataTypeœ:œAnthropicModelœ,œidœ:œAnthropicModel-laWKJœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-XNaWv{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-XNaWvœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "AnthropicModel-laWKJ", + "sourceHandle": "{œdataTypeœ: œAnthropicModelœ, œidœ: œAnthropicModel-laWKJœ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}", + "target": "ChatOutput-XNaWv", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-XNaWvœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "URL", + "id": "URL-9b1oo", + "name": "text", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "EXAMPLE_COMPONENTS", + "id": "Prompt-WSv03", + "inputTypes": ["Message", "Text"], + "type": "str" + } + }, + "id": "reactflow__edge-URL-9b1oo{œdataTypeœ:œURLœ,œidœ:œURL-9b1ooœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-Prompt-WSv03{œfieldNameœ:œEXAMPLE_COMPONENTSœ,œidœ:œPrompt-WSv03œ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", + "source": "URL-9b1oo", + "sourceHandle": "{œdataTypeœ: œURLœ, œidœ: œURL-9b1ooœ, œnameœ: œtextœ, œoutput_typesœ: [œMessageœ]}", + "target": "Prompt-WSv03", + "targetHandle": "{œfieldNameœ: œEXAMPLE_COMPONENTSœ, œidœ: œPrompt-WSv03œ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "URL", + "id": "URL-HTi1a", + "name": "text", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "BASE_COMPONENT_CODE", + "id": "Prompt-WSv03", + "inputTypes": ["Message", "Text"], + "type": "str" + } + }, + "id": 
"reactflow__edge-URL-HTi1a{œdataTypeœ:œURLœ,œidœ:œURL-HTi1aœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-Prompt-WSv03{œfieldNameœ:œBASE_COMPONENT_CODEœ,œidœ:œPrompt-WSv03œ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", + "source": "URL-HTi1a", + "sourceHandle": "{œdataTypeœ: œURLœ, œidœ: œURL-HTi1aœ, œnameœ: œtextœ, œoutput_typesœ: [œMessageœ]}", + "target": "Prompt-WSv03", + "targetHandle": "{œfieldNameœ: œBASE_COMPONENT_CODEœ, œidœ: œPrompt-WSv03œ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "URL", + "id": "URL-bqkBy", + "name": "text", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "CUSTOM_COMPONENT_CODE", + "id": "Prompt-WSv03", + "inputTypes": ["Message", "Text"], + "type": "str" + } + }, + "id": "reactflow__edge-URL-bqkBy{œdataTypeœ:œURLœ,œidœ:œURL-bqkByœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-Prompt-WSv03{œfieldNameœ:œCUSTOM_COMPONENT_CODEœ,œidœ:œPrompt-WSv03œ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", + "source": "URL-bqkBy", + "sourceHandle": "{œdataTypeœ: œURLœ, œidœ: œURL-bqkByœ, œnameœ: œtextœ, œoutput_typesœ: [œMessageœ]}", + "target": "Prompt-WSv03", + "targetHandle": "{œfieldNameœ: œCUSTOM_COMPONENT_CODEœ, œidœ: œPrompt-WSv03œ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" + } + ], + "nodes": [ + { + "data": { + "description": "Get chat inputs from the Playground.", + "display_name": "Chat Input", + "id": "ChatInput-VUqPC", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Get chat inputs from the Playground.", + "display_name": "Chat Input", + "documentation": "", + "edited": false, + "field_order": [ + "input_value", + "should_store_message", + "sender", + "sender_name", + "session_id", + "files", + "background_color", + "chat_icon", + "text_color" + ], + "frozen": false, + "icon": "MessagesSquare", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Message", + "method": "message_response", + "name": "message", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "background_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Background Color", + "dynamic": false, + "info": "The background color of the icon.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "background_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "chat_icon": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Icon", + "dynamic": false, + "info": "The icon of the message.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "chat_icon", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + 
"value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_USER, MESSAGE_SENDER_USER\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n _background_color = self.background_color\n _text_color = self.text_color\n _icon = self.chat_icon\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\"background_color\": _background_color, \"text_color\": _text_color, \"icon\": _icon},\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n" + }, + "files": { + "_input_type": "FileInput", + "advanced": true, + "display_name": "Files", + "dynamic": false, + "fileTypes": [ + "txt", + "md", + "mdx", + "csv", + "json", + "yaml", + "yml", + "xml", + "html", + "htm", + "pdf", + "docx", + "py", + "sh", + "sql", + "js", + "ts", + "tsx", + "jpg", + "jpeg", + "png", + "bmp", + "image" + ], + "file_path": "", + "info": "Files to be sent with the message.", + "list": true, + "name": "files", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "file", + "value": "" + }, + "input_value": { + "_input_type": "MultilineInput", + "advanced": false, + "display_name": "Text", + "dynamic": false, + "info": "Message to be passed as 
input.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "Failed to get YouTube transcripts: 1 validation error for Data\ndata\n Input should be a valid dictionary [type=dict_type, input_value=Document(metadata={'sourc...adding the API key and\"), input_type=Document]\n For further information visit https://errors.pydantic.dev/2.9/v/dict_type\n\n\nAlso, adapt the \"text\" bc it returns this \"Document(metadata={'source': 'UkV79sJAvz8'}, page_content=\"assembly AI is one of the leaders in transcription services so you can convert speech into text and they have many different products available on their platform and we can use assembly within langlow and the way we can get started is to First make an account with assembly Ai and once you get started you will be provided with an API key this is something we're going to need in langlow now back in link flow there are a few different components available and in this example we are using the start transcript component within this component we can provide a file and what I did is I uploaded a 1-hour talk by Andre karpati and this is intro to the large language models and this is an MP3 file so after adding the API key and then the audio file we can select a model that we want to use for the transcription there are two different options available here best and Nano now after you select the model you can either have the language detection on or leave defaults so I left everything in default and then I started the task and once we run the flow we get a transcript ID and attaching this component with the assembly AI pole transcript component we can now get the results and if we were to look at the results available able from this component there are quite a lot of fields that we can see as a result of this component and some of the most important ones you can see is the text from the transcript as you can see it's quite a large file and all of that was converted from speech to text easily by assembly AI it just took a few seconds and then we can see Word level timestamps if needed as what was spoken at what time the starting and end time for that and also the confidence if there are multiple speakers then it also identifies the speakers for us and then we can also see the utterances at different times so there's also word there's a full text and there's some additional information available here now we can use this data for many different things one is we can parse the transcript so we can just look at the full transcript that was available from this video or in this case this MP3 file and then we can also run to get subtitles and this could be used for any Services where we want to add subtitles in different formats so there is the SRT and the VT format available and the way this looks so I ran it for SRT We have basically the time stamps as well as the sentences those were converted from those time stamps and we can see that it goes on for the full length of the audio file and then if needed we can also convert that to vtt last thing is that if you have credits available in your assembly AI account you can also perform a summary of the audio or you could perhaps do some additional task so for example in our case we could say that create a summary of the transcript we could also say that create a 
blog post from the transcript or perhaps an essay from the transcript so we can get creative with the available information since the transcript of the file is now available and we can utilize that text for many different purposes the flow and the components should be available in the store be sure to add your API key in all of these components wherever it says to add the API key if not it might throw some errors and there are also some additional components available you can check those out based on your use cases as well give it a try and let us know if you found it helpful\")\" \n\nwe only want the page_content " + }, + "sender": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Sender Type", + "dynamic": false, + "info": "Type of sender.", + "name": "sender", + "options": ["Machine", "User"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "User" + }, + "sender_name": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Sender Name", + "dynamic": false, + "info": "Name of the sender.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "sender_name", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "User" + }, + "session_id": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Session ID", + "dynamic": false, + "info": "The session ID of the chat. If empty, the current session ID parameter will be used.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "session_id", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "should_store_message": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Store Messages", + "dynamic": false, + "info": "Store the message in the history.", + "list": false, + "name": "should_store_message", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "text_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Text Color", + "dynamic": false, + "info": "The text color of the name", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "text_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + } + }, + "tool_mode": false + }, + "type": "ChatInput" + }, + "dragging": false, + "height": 234, + "id": "ChatInput-VUqPC", + "position": { + "x": 1436.7228707197569, + "y": 1045.2749109595 + }, + "positionAbsolute": { + "x": 1436.7228707197569, + "y": 1045.2749109595 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "description": "Retrieves stored chat messages from Langflow tables or an external memory.", + "display_name": "Chat Memory", + "id": "Memory-tBe70", + "node": { + "base_classes": ["Data", "Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Retrieves stored chat messages from Langflow tables or an external memory.", 
+ "display_name": "Chat Memory", + "documentation": "", + "edited": false, + "field_order": [ + "memory", + "sender", + "sender_name", + "n_messages", + "session_id", + "order", + "template" + ], + "frozen": false, + "icon": "message-square-more", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Data", + "method": "retrieve_messages", + "name": "messages", + "selected": "Data", + "types": ["Data"], + "value": "__UNDEFINED__" + }, + { + "cache": true, + "display_name": "Text", + "method": "retrieve_messages_as_text", + "name": "messages_text", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langchain.memory import ConversationBufferMemory\n\nfrom langflow.custom import Component\nfrom langflow.field_typing import BaseChatMemory\nfrom langflow.helpers.data import data_to_text\nfrom langflow.inputs import HandleInput\nfrom langflow.io import DropdownInput, IntInput, MessageTextInput, MultilineInput, Output\nfrom langflow.memory import LCBuiltinChatMemory, get_messages\nfrom langflow.schema import Data\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_USER\n\n\nclass MemoryComponent(Component):\n display_name = \"Message History\"\n description = \"Retrieves stored chat messages from Langflow tables or an external memory.\"\n icon = \"message-square-more\"\n name = \"Memory\"\n\n inputs = [\n HandleInput(\n name=\"memory\",\n display_name=\"External Memory\",\n input_types=[\"BaseChatMessageHistory\"],\n info=\"Retrieve messages from an external memory. If empty, it will use the Langflow tables.\",\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER, \"Machine and User\"],\n value=\"Machine and User\",\n info=\"Filter by sender type.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Filter by sender name.\",\n advanced=True,\n ),\n IntInput(\n name=\"n_messages\",\n display_name=\"Number of Messages\",\n value=100,\n info=\"Number of messages to retrieve.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n DropdownInput(\n name=\"order\",\n display_name=\"Order\",\n options=[\"Ascending\", \"Descending\"],\n value=\"Ascending\",\n info=\"Order of the messages.\",\n advanced=True,\n ),\n MultilineInput(\n name=\"template\",\n display_name=\"Template\",\n info=\"The template to use for formatting the data. 
\"\n \"It can contain the keys {text}, {sender} or any other key in the message data.\",\n value=\"{sender_name}: {text}\",\n advanced=True,\n ),\n ]\n\n outputs = [\n Output(display_name=\"Data\", name=\"messages\", method=\"retrieve_messages\"),\n Output(display_name=\"Text\", name=\"messages_text\", method=\"retrieve_messages_as_text\"),\n ]\n\n def retrieve_messages(self) -> Data:\n sender = self.sender\n sender_name = self.sender_name\n session_id = self.session_id\n n_messages = self.n_messages\n order = \"DESC\" if self.order == \"Descending\" else \"ASC\"\n\n if sender == \"Machine and User\":\n sender = None\n\n if self.memory:\n # override session_id\n self.memory.session_id = session_id\n\n stored = self.memory.messages\n # langchain memories are supposed to return messages in ascending order\n if order == \"DESC\":\n stored = stored[::-1]\n if n_messages:\n stored = stored[:n_messages]\n stored = [Message.from_lc_message(m) for m in stored]\n if sender:\n expected_type = MESSAGE_SENDER_AI if sender == MESSAGE_SENDER_AI else MESSAGE_SENDER_USER\n stored = [m for m in stored if m.type == expected_type]\n else:\n stored = get_messages(\n sender=sender,\n sender_name=sender_name,\n session_id=session_id,\n limit=n_messages,\n order=order,\n )\n self.status = stored\n return stored\n\n def retrieve_messages_as_text(self) -> Message:\n stored_text = data_to_text(self.template, self.retrieve_messages())\n self.status = stored_text\n return Message(text=stored_text)\n\n def build_lc_memory(self) -> BaseChatMemory:\n chat_memory = self.memory or LCBuiltinChatMemory(flow_id=self.flow_id, session_id=self.session_id)\n return ConversationBufferMemory(chat_memory=chat_memory)\n" + }, + "memory": { + "_input_type": "HandleInput", + "advanced": false, + "display_name": "External Memory", + "dynamic": false, + "info": "Retrieve messages from an external memory. 
If empty, it will use the Langflow tables.", + "input_types": ["BaseChatMessageHistory"], + "list": false, + "name": "memory", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "n_messages": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Number of Messages", + "dynamic": false, + "info": "Number of messages to retrieve.", + "list": false, + "name": "n_messages", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 100 + }, + "order": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Order", + "dynamic": false, + "info": "Order of the messages.", + "name": "order", + "options": ["Ascending", "Descending"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "Ascending" + }, + "sender": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Sender Type", + "dynamic": false, + "info": "Filter by sender type.", + "name": "sender", + "options": ["Machine", "User", "Machine and User"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "Machine and User" + }, + "sender_name": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Sender Name", + "dynamic": false, + "info": "Filter by sender name.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "sender_name", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "session_id": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Session ID", + "dynamic": false, + "info": "The session ID of the chat. If empty, the current session ID parameter will be used.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "session_id", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "template": { + "_input_type": "MultilineInput", + "advanced": true, + "display_name": "Template", + "dynamic": false, + "info": "The template to use for formatting the data. 
It can contain the keys {text}, {sender} or any other key in the message data.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "{sender_name}: {text}" + } + }, + "tool_mode": false + }, + "type": "Memory" + }, + "dragging": false, + "height": 264, + "id": "Memory-tBe70", + "position": { + "x": 1830.6888981898887, + "y": 946.1205963195098 + }, + "positionAbsolute": { + "x": 1830.6888981898887, + "y": 946.1205963195098 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt", + "id": "Prompt-WSv03", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": { + "template": [ + "BASE_COMPONENT_CODE", + "CUSTOM_COMPONENT_CODE", + "EXAMPLE_COMPONENTS", + "CHAT_HISTORY", + "USER_INPUT" + ] + }, + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt", + "documentation": "", + "edited": false, + "field_order": ["template"], + "frozen": false, + "icon": "prompts", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Prompt Message", + "method": "build_prompt", + "name": "prompt", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "BASE_COMPONENT_CODE": { + "advanced": false, + "display_name": "BASE_COMPONENT_CODE", + "dynamic": false, + "field_type": "str", + "fileTypes": [], + "file_path": "", + "info": "", + "input_types": ["Message", "Text"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "BASE_COMPONENT_CODE", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "" + }, + "CHAT_HISTORY": { + "advanced": false, + "display_name": "CHAT_HISTORY", + "dynamic": false, + "field_type": "str", + "fileTypes": [], + "file_path": "", + "info": "", + "input_types": ["Message", "Text"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "CHAT_HISTORY", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "" + }, + "CUSTOM_COMPONENT_CODE": { + "advanced": false, + "display_name": "CUSTOM_COMPONENT_CODE", + "dynamic": false, + "field_type": "str", + "fileTypes": [], + "file_path": "", + "info": "", + "input_types": ["Message", "Text"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "CUSTOM_COMPONENT_CODE", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "" + }, + "EXAMPLE_COMPONENTS": { + "advanced": false, + "display_name": "EXAMPLE_COMPONENTS", + "dynamic": false, + "field_type": "str", + "fileTypes": [], + "file_path": "", + "info": "", + "input_types": ["Message", "Text"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "EXAMPLE_COMPONENTS", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "" + }, + "USER_INPUT": { + "advanced": false, + "display_name": "USER_INPUT", + "dynamic": false, + "field_type": "str", + "fileTypes": [], + "file_path": "", + "info": "", 
+ "input_types": ["Message", "Text"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "USER_INPUT", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "" + }, + "_type": "Component", + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.inputs.inputs import DefaultPromptField\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(self) -> Message:\n prompt = Message.from_template(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def _update_template(self, frontend_node: dict):\n prompt_template = frontend_node[\"template\"][\"template\"][\"value\"]\n custom_fields = frontend_node[\"custom_fields\"]\n frontend_node_template = frontend_node[\"template\"]\n _ = process_prompt_template(\n template=prompt_template,\n name=\"template\",\n custom_fields=custom_fields,\n frontend_node_template=frontend_node_template,\n )\n return frontend_node\n\n def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"This function is called after the code validation is done.\"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n # Kept it duplicated for backwards compatibility\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n\n def _get_fallback_input(self, **kwargs):\n return DefaultPromptField(**kwargs)\n" + }, + "template": { + "_input_type": "PromptInput", + "advanced": false, + "display_name": "Template", + "dynamic": false, + "info": "", + "list": false, + "load_from_db": false, + "name": "template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "type": "prompt", + "value": "\nYou are an AI assistant specialized in creating Langflow components based on user requirements. 
Your task is to generate the code for a custom Langflow component according to the user's specifications.\n\nFirst, review the following code snippets for reference:\n\n\n{BASE_COMPONENT_CODE}\n\n\n\n{CUSTOM_COMPONENT_CODE}\n\n\n\n{EXAMPLE_COMPONENTS}\n\n\nNow, follow these steps to create a custom Langflow component:\n\n1. Analyze the user's input to determine the requirements for the component.\n2. Use an section to plan out the component structure and features based on the user's requirements.\n3. Generate the code for the custom component, using the provided code snippets as reference and inspiration.\n4. Provide a brief explanation of the component's functionality and how to use it.\n\nHere's the chat history and user input:\n\n\n{CHAT_HISTORY}\n\n\n\n{USER_INPUT}\n\n\nBased on the user's input, create a custom Langflow component that meets their requirements. Your response should include:\n\n1. \n Use this section to analyze the user's requirements and plan the component structure.\n\n\n2. \n Generate the complete code for the custom Langflow component here.\n\n\n3. \n Provide a brief explanation of the component's functionality and how to use it.\n\n\nRemember to:\n- Use the provided code snippets as a reference, but create a unique component tailored to the user's needs.\n- Include all necessary imports and class definitions.\n- Implement the required inputs, outputs, and any additional features specified by the user.\n- Use clear and descriptive variable names and comments to enhance code readability.\n- Ensure that the component follows Langflow best practices and conventions.\n\nIf the user's input is unclear or lacks specific details, make reasonable assumptions based on the context and explain these assumptions in your response.\n\n" + } + }, + "tool_mode": false + }, + "type": "Prompt" + }, + "dragging": false, + "height": 693, + "id": "Prompt-WSv03", + "position": { + "x": 2219.5265974825707, + "y": 521.6320563271215 + }, + "positionAbsolute": { + "x": 2219.5265974825707, + "y": 521.6320563271215 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "description": "Display a chat message in the Playground.", + "display_name": "Chat Output", + "id": "ChatOutput-XNaWv", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Display a chat message in the Playground.", + "display_name": "Chat Output", + "documentation": "", + "edited": false, + "field_order": [ + "input_value", + "should_store_message", + "sender", + "sender_name", + "session_id", + "data_template", + "background_color", + "chat_icon", + "text_color" + ], + "frozen": false, + "icon": "MessagesSquare", + "legacy": false, + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Message", + "method": "message_response", + "name": "message", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "background_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Background Color", + "dynamic": false, + "info": "The background color of the icon.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "background_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "chat_icon": { 
+ "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Icon", + "dynamic": false, + "info": "The icon of the message.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "chat_icon", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageInput, MessageTextInput, Output\nfrom langflow.schema.message import Message\nfrom langflow.schema.properties import Source\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.\",\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(\n display_name=\"Message\",\n name=\"message\",\n method=\"message_response\",\n ),\n ]\n\n def _build_source(self, _id: str | None, display_name: str | None, source: str | None) -> Source:\n source_dict = {}\n if _id:\n source_dict[\"id\"] = _id\n if display_name:\n source_dict[\"display_name\"] = display_name\n if source:\n source_dict[\"source\"] = source\n return Source(**source_dict)\n\n def message_response(self) -> Message:\n _source, _icon, _display_name, _source_id = self.get_properties_from_source_component()\n _background_color = self.background_color\n _text_color = self.text_color\n if self.chat_icon:\n _icon = self.chat_icon\n message = self.input_value if isinstance(self.input_value, Message) else Message(text=self.input_value)\n message.sender = self.sender\n message.sender_name = self.sender_name\n message.session_id = self.session_id\n message.flow_id = self.graph.flow_id if hasattr(self, \"graph\") else None\n message.properties.source = self._build_source(_source_id, _display_name, _source)\n message.properties.icon = _icon\n message.properties.background_color = _background_color\n message.properties.text_color = _text_color\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n" + }, + "data_template": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Data Template", + "dynamic": false, + "info": "Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "data_template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "{text}" + }, + "input_value": { + "_input_type": "MessageInput", + "advanced": false, + "display_name": "Text", + "dynamic": false, + "info": "Message to be passed as output.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "sender": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Sender Type", + "dynamic": false, + "info": "Type of sender.", + "name": "sender", + "options": ["Machine", "User"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "Machine" + }, + "sender_name": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Sender Name", + "dynamic": false, + "info": "Name of the sender.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "sender_name", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "AI" + }, + "session_id": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Session ID", + "dynamic": false, + "info": "The session ID of the chat. 
If empty, the current session ID parameter will be used.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "session_id", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "should_store_message": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Store Messages", + "dynamic": false, + "info": "Store the message in the history.", + "list": false, + "name": "should_store_message", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "text_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Text Color", + "dynamic": false, + "info": "The text color of the name", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "text_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + } + }, + "tool_mode": false + }, + "type": "ChatOutput" + }, + "dragging": false, + "height": 234, + "id": "ChatOutput-XNaWv", + "position": { + "x": 2947.267779013826, + "y": 891.8123698756774 + }, + "positionAbsolute": { + "x": 2947.267779013826, + "y": 891.8123698756774 + }, + "selected": true, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "id": "note-ybslb", + "node": { + "description": "# Fetch Components Code\n\nUsing the URL component, we extract the code of a few classes from GitHub to provide as examples to the LLM.\n\nThis ensures we are always up to date with recent information from the codebase.", + "display_name": "", + "documentation": "", + "template": {} + }, + "type": "note" + }, + "dragging": false, + "height": 325, + "id": "note-ybslb", + "position": { + "x": 1430.2014058924922, + "y": -19.30392196909918 + }, + "positionAbsolute": { + "x": 1430.2014058924922, + "y": -19.30392196909918 + }, + "selected": false, + "type": "noteNode", + "width": 325 + }, + { + "data": { + "id": "note-7Judu", + "node": { + "description": "# 🛠️ Custom Component Generator 🚀\n\nHi! I'm here to help you create custom components for Langflow. Think of me as your technical partner who can help turn your ideas into working components! \n\n## 🎯 How to Work With Me\n\n### 1. 💭 Tell Me What You Want to Build\nSimply describe what you want your component to do in plain English. For example:\n- \"I need a component that sends Slack messages\"\n- \"I want to create a tool that can process CSV files\"\n- \"I need something that can translate text\"\n\n### 2. 📚 Share Any Relevant Information\nIf you're working with a specific:\n- 🔑 API or service (just share the documentation link or main endpoints)\n- 📄 File format\n- 🔄 Data structure\n- 🔧 Existing component you want to modify\n\n### 3. 🎨 Let Me Help Design It\nI'll help by:\n- 📊 Breaking down complex requirements into manageable pieces\n- 💡 Suggesting the best way to structure inputs and outputs\n- ⚙️ Creating the component code\n- 📝 Explaining how to use it\n\n### 4. 
🔄 Iterative Refinement\nWe can then:\n- ✅ Test and refine the component\n- ⭐ Add features\n- 🔧 Modify behavior\n- 🛡️ Improve error handling\n- 📖 Add documentation\n\n## 🚀 What I Can Help With\n\nI can help create components that:\n- 📊 Process different file types (CSV, JSON, Excel, etc.)\n- 🔌 Integrate with external APIs\n- 🔄 Transform data\n- 🔀 Route messages\n- 🌐 Handle web requests\n- 🎯 Parse structured data\n- ✨ And much more!\n\n## 💡 Tips for Best Results\n\n1. **Be Specific** 🎯: The more details you provide about what you want to accomplish, the better I can help.\n\n2. **Share Examples** 📋: If you have example data or specific use cases, share them.\n\n3. **Ask Questions** ❓: Don't hesitate to ask for clarification or modifications.\n\nJust start by telling me what kind of component you'd like to create, and I'll guide you through the process! \n\nReady to build something awesome? 🚀 Let's get started!", + "display_name": "", + "documentation": "", + "template": {} + }, + "type": "note" + }, + "dragging": false, + "height": 573, + "id": "note-7Judu", + "position": { + "x": 807.6293964045135, + "y": 605.6504562080672 + }, + "positionAbsolute": { + "x": 807.6293964045135, + "y": 605.6504562080672 + }, + "resizing": false, + "selected": false, + "style": { + "height": 573, + "width": 564 + }, + "type": "noteNode", + "width": 564 + }, + { + "data": { + "id": "AnthropicModel-laWKJ", + "node": { + "base_classes": ["LanguageModel", "Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Generate text using Anthropic Chat&Completion LLMs with prefill support.", + "display_name": "Anthropic", + "documentation": "", + "edited": false, + "field_order": [ + "input_value", + "system_message", + "stream", + "max_tokens", + "model", + "anthropic_api_key", + "temperature", + "anthropic_api_url", + "prefill", + "output_parser" + ], + "frozen": false, + "icon": "Anthropic", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Text", + "method": "text_response", + "name": "text_output", + "required_inputs": [], + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + }, + { + "cache": true, + "display_name": "Language Model", + "method": "build_model", + "name": "model_output", + "required_inputs": [], + "selected": "LanguageModel", + "types": ["LanguageModel"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "anthropic_api_key": { + "_input_type": "SecretStrInput", + "advanced": false, + "display_name": "Anthropic API Key", + "dynamic": false, + "info": "Your Anthropic API key.", + "input_types": ["Message"], + "load_from_db": true, + "name": "anthropic_api_key", + "password": true, + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "ANTHROPIC_API_KEY" + }, + "anthropic_api_url": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Anthropic API URL", + "dynamic": false, + "info": "Endpoint of the Anthropic API. 
Defaults to 'https://api.anthropic.com' if not specified.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "anthropic_api_url", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs.inputs import HandleInput\nfrom langflow.io import DropdownInput, FloatInput, IntInput, MessageTextInput, SecretStrInput\n\n\nclass AnthropicModelComponent(LCModelComponent):\n display_name = \"Anthropic\"\n description = \"Generate text using Anthropic Chat&Completion LLMs with prefill support.\"\n icon = \"Anthropic\"\n name = \"AnthropicModel\"\n\n inputs = [\n *LCModelComponent._base_inputs,\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n value=4096,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n ),\n DropdownInput(\n name=\"model\",\n display_name=\"Model Name\",\n options=[\n \"claude-3-5-sonnet-20240620\",\n \"claude-3-opus-20240229\",\n \"claude-3-sonnet-20240229\",\n \"claude-3-haiku-20240307\",\n ],\n info=\"https://python.langchain.com/docs/integrations/chat/anthropic\",\n value=\"claude-3-5-sonnet-20240620\",\n ),\n SecretStrInput(name=\"anthropic_api_key\", display_name=\"Anthropic API Key\", info=\"Your Anthropic API key.\"),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n MessageTextInput(\n name=\"anthropic_api_url\",\n display_name=\"Anthropic API URL\",\n advanced=True,\n info=\"Endpoint of the Anthropic API. Defaults to 'https://api.anthropic.com' if not specified.\",\n ),\n MessageTextInput(\n name=\"prefill\", display_name=\"Prefill\", info=\"Prefill text to guide the model's response.\", advanced=True\n ),\n HandleInput(\n name=\"output_parser\",\n display_name=\"Output Parser\",\n info=\"The parser to use to parse the output of the model\",\n advanced=True,\n input_types=[\"OutputParser\"],\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n try:\n from langchain_anthropic.chat_models import ChatAnthropic\n except ImportError as e:\n msg = \"langchain_anthropic is not installed. 
Please install it with `pip install langchain_anthropic`.\"\n raise ImportError(msg) from e\n model = self.model\n anthropic_api_key = self.anthropic_api_key\n max_tokens = self.max_tokens\n temperature = self.temperature\n anthropic_api_url = self.anthropic_api_url or \"https://api.anthropic.com\"\n\n try:\n output = ChatAnthropic(\n model=model,\n anthropic_api_key=(SecretStr(anthropic_api_key).get_secret_value() if anthropic_api_key else None),\n max_tokens_to_sample=max_tokens,\n temperature=temperature,\n anthropic_api_url=anthropic_api_url,\n streaming=self.stream,\n )\n except Exception as e:\n msg = \"Could not connect to Anthropic API.\"\n raise ValueError(msg) from e\n\n return output\n\n def _get_exception_message(self, exception: Exception) -> str | None:\n \"\"\"Get a message from an Anthropic exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n try:\n from anthropic import BadRequestError\n except ImportError:\n return None\n if isinstance(exception, BadRequestError):\n message = exception.body.get(\"error\", {}).get(\"message\")\n if message:\n return message\n return None\n" + }, + "input_value": { + "_input_type": "MessageInput", + "advanced": false, + "display_name": "Input", + "dynamic": false, + "info": "", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "max_tokens": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Max Tokens", + "dynamic": false, + "info": "The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.", + "list": false, + "name": "max_tokens", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 4096 + }, + "model": { + "_input_type": "DropdownInput", + "advanced": false, + "combobox": false, + "display_name": "Model Name", + "dynamic": false, + "info": "https://python.langchain.com/docs/integrations/chat/anthropic", + "name": "model", + "options": [ + "claude-3-5-sonnet-20240620", + "claude-3-opus-20240229", + "claude-3-sonnet-20240229", + "claude-3-haiku-20240307" + ], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "str", + "value": "claude-3-5-sonnet-20240620" + }, + "output_parser": { + "_input_type": "HandleInput", + "advanced": true, + "display_name": "Output Parser", + "dynamic": false, + "info": "The parser to use to parse the output of the model", + "input_types": ["OutputParser"], + "list": false, + "name": "output_parser", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "prefill": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Prefill", + "dynamic": false, + "info": "Prefill text to guide the model's response.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "prefill", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "stream": { + "_input_type": "BoolInput", + "advanced": false, + "display_name": "Stream", + "dynamic": false, + "info": "Stream the response from the model. 
Streaming works only in Chat.", + "list": false, + "name": "stream", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + }, + "system_message": { + "_input_type": "MessageTextInput", + "advanced": false, + "display_name": "System Message", + "dynamic": false, + "info": "System message to pass to the model.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "system_message", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "temperature": { + "_input_type": "FloatInput", + "advanced": false, + "display_name": "Temperature", + "dynamic": false, + "info": "", + "list": false, + "name": "temperature", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "float", + "value": 0.1 + } + } + }, + "type": "AnthropicModel" + }, + "dragging": false, + "height": 515, + "id": "AnthropicModel-laWKJ", + "position": { + "x": 2587.564685535714, + "y": 646.2448246136587 + }, + "positionAbsolute": { + "x": 2587.564685535714, + "y": 646.2448246136587 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "id": "URL-HTi1a", + "node": { + "base_classes": ["Data", "Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Fetch content from one or more URLs.", + "display_name": "URL", + "documentation": "", + "edited": false, + "field_order": ["urls", "format"], + "frozen": false, + "icon": "layout-template", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Data", + "method": "fetch_content", + "name": "data", + "selected": "Data", + "types": ["Data"], + "value": "__UNDEFINED__" + }, + { + "cache": true, + "display_name": "Text", + "method": "fetch_content_text", + "name": "text", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "import re\n\nfrom langchain_community.document_loaders import AsyncHtmlLoader, WebBaseLoader\n\nfrom langflow.custom import Component\nfrom langflow.helpers.data import data_to_text\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.schema import Data\nfrom langflow.schema.message import Message\n\n\nclass URLComponent(Component):\n display_name = \"URL\"\n description = \"Fetch content from one or more URLs.\"\n icon = \"layout-template\"\n name = \"URL\"\n\n inputs = [\n MessageTextInput(\n name=\"urls\",\n display_name=\"URLs\",\n info=\"Enter one or more URLs, by clicking the '+' button.\",\n is_list=True,\n tool_mode=True,\n ),\n DropdownInput(\n name=\"format\",\n display_name=\"Output format\",\n info=\"Output format. 
Use 'Text' to extract the text from the HTML or 'Raw HTML' for the raw HTML content.\",\n options=[\"Text\", \"Raw HTML\"],\n value=\"Text\",\n ),\n ]\n\n outputs = [\n Output(display_name=\"Data\", name=\"data\", method=\"fetch_content\"),\n Output(display_name=\"Text\", name=\"text\", method=\"fetch_content_text\"),\n ]\n\n def ensure_url(self, string: str) -> str:\n \"\"\"Ensures the given string is a URL by adding 'http://' if it doesn't start with 'http://' or 'https://'.\n\n Raises an error if the string is not a valid URL.\n\n Parameters:\n string (str): The string to be checked and possibly modified.\n\n Returns:\n str: The modified string that is ensured to be a URL.\n\n Raises:\n ValueError: If the string is not a valid URL.\n \"\"\"\n if not string.startswith((\"http://\", \"https://\")):\n string = \"http://\" + string\n\n # Basic URL validation regex\n url_regex = re.compile(\n r\"^(https?:\\/\\/)?\" # optional protocol\n r\"(www\\.)?\" # optional www\n r\"([a-zA-Z0-9.-]+)\" # domain\n r\"(\\.[a-zA-Z]{2,})?\" # top-level domain\n r\"(:\\d+)?\" # optional port\n r\"(\\/[^\\s]*)?$\", # optional path\n re.IGNORECASE,\n )\n\n if not url_regex.match(string):\n msg = f\"Invalid URL: {string}\"\n raise ValueError(msg)\n\n return string\n\n def fetch_content(self) -> list[Data]:\n urls = [self.ensure_url(url.strip()) for url in self.urls if url.strip()]\n if self.format == \"Raw HTML\":\n loader = AsyncHtmlLoader(web_path=urls, encoding=\"utf-8\")\n else:\n loader = WebBaseLoader(web_paths=urls, encoding=\"utf-8\")\n docs = loader.load()\n data = [Data(text=doc.page_content, **doc.metadata) for doc in docs]\n self.status = data\n return data\n\n def fetch_content_text(self) -> Message:\n data = self.fetch_content()\n\n result_string = data_to_text(\"{text}\", data)\n self.status = result_string\n return Message(text=result_string)\n" + }, + "format": { + "_input_type": "DropdownInput", + "advanced": false, + "combobox": false, + "display_name": "Output format", + "dynamic": false, + "info": "Output format. 
Use 'Text' to extract the text from the HTML or 'Raw HTML' for the raw HTML content.", + "name": "format", + "options": ["Text", "Raw HTML"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "Text" + }, + "urls": { + "_input_type": "MessageTextInput", + "advanced": false, + "display_name": "URLs", + "dynamic": false, + "info": "Enter one or more URLs, by clicking the '+' button.", + "input_types": ["Message"], + "list": true, + "load_from_db": false, + "name": "urls", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": true, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": [ + "https://raw.githubusercontent.com/langflow-ai/langflow/refs/heads/main/src/backend/base/langflow/custom/custom_component/component.py" + ] + } + }, + "tool_mode": false + }, + "type": "URL" + }, + "dragging": false, + "height": 368, + "id": "URL-HTi1a", + "position": { + "x": 1436.3617127766433, + "y": 264.218898085405 + }, + "positionAbsolute": { + "x": 1436.3617127766433, + "y": 264.218898085405 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "id": "URL-9b1oo", + "node": { + "base_classes": ["Data", "Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Fetch content from one or more URLs.", + "display_name": "URL", + "documentation": "", + "edited": false, + "field_order": ["urls", "format"], + "frozen": false, + "icon": "layout-template", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Data", + "method": "fetch_content", + "name": "data", + "selected": "Data", + "types": ["Data"], + "value": "__UNDEFINED__" + }, + { + "cache": true, + "display_name": "Text", + "method": "fetch_content_text", + "name": "text", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "import re\n\nfrom langchain_community.document_loaders import AsyncHtmlLoader, WebBaseLoader\n\nfrom langflow.custom import Component\nfrom langflow.helpers.data import data_to_text\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.schema import Data\nfrom langflow.schema.message import Message\n\n\nclass URLComponent(Component):\n display_name = \"URL\"\n description = \"Fetch content from one or more URLs.\"\n icon = \"layout-template\"\n name = \"URL\"\n\n inputs = [\n MessageTextInput(\n name=\"urls\",\n display_name=\"URLs\",\n info=\"Enter one or more URLs, by clicking the '+' button.\",\n is_list=True,\n tool_mode=True,\n ),\n DropdownInput(\n name=\"format\",\n display_name=\"Output format\",\n info=\"Output format. 
Use 'Text' to extract the text from the HTML or 'Raw HTML' for the raw HTML content.\",\n options=[\"Text\", \"Raw HTML\"],\n value=\"Text\",\n ),\n ]\n\n outputs = [\n Output(display_name=\"Data\", name=\"data\", method=\"fetch_content\"),\n Output(display_name=\"Text\", name=\"text\", method=\"fetch_content_text\"),\n ]\n\n def ensure_url(self, string: str) -> str:\n \"\"\"Ensures the given string is a URL by adding 'http://' if it doesn't start with 'http://' or 'https://'.\n\n Raises an error if the string is not a valid URL.\n\n Parameters:\n string (str): The string to be checked and possibly modified.\n\n Returns:\n str: The modified string that is ensured to be a URL.\n\n Raises:\n ValueError: If the string is not a valid URL.\n \"\"\"\n if not string.startswith((\"http://\", \"https://\")):\n string = \"http://\" + string\n\n # Basic URL validation regex\n url_regex = re.compile(\n r\"^(https?:\\/\\/)?\" # optional protocol\n r\"(www\\.)?\" # optional www\n r\"([a-zA-Z0-9.-]+)\" # domain\n r\"(\\.[a-zA-Z]{2,})?\" # top-level domain\n r\"(:\\d+)?\" # optional port\n r\"(\\/[^\\s]*)?$\", # optional path\n re.IGNORECASE,\n )\n\n if not url_regex.match(string):\n msg = f\"Invalid URL: {string}\"\n raise ValueError(msg)\n\n return string\n\n def fetch_content(self) -> list[Data]:\n urls = [self.ensure_url(url.strip()) for url in self.urls if url.strip()]\n if self.format == \"Raw HTML\":\n loader = AsyncHtmlLoader(web_path=urls, encoding=\"utf-8\")\n else:\n loader = WebBaseLoader(web_paths=urls, encoding=\"utf-8\")\n docs = loader.load()\n data = [Data(text=doc.page_content, **doc.metadata) for doc in docs]\n self.status = data\n return data\n\n def fetch_content_text(self) -> Message:\n data = self.fetch_content()\n\n result_string = data_to_text(\"{text}\", data)\n self.status = result_string\n return Message(text=result_string)\n" + }, + "format": { + "_input_type": "DropdownInput", + "advanced": false, + "combobox": false, + "display_name": "Output format", + "dynamic": false, + "info": "Output format. 
Use 'Text' to extract the text from the HTML or 'Raw HTML' for the raw HTML content.", + "name": "format", + "options": ["Text", "Raw HTML"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "Text" + }, + "urls": { + "_input_type": "MessageTextInput", + "advanced": false, + "display_name": "URLs", + "dynamic": false, + "info": "Enter one or more URLs, by clicking the '+' button.", + "input_types": ["Message"], + "list": true, + "load_from_db": false, + "name": "urls", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": true, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": [ + "https://raw.githubusercontent.com/langflow-ai/langflow/refs/heads/main/src/backend/base/langflow/components/helpers/extract_key.py", + "https://raw.githubusercontent.com/langflow-ai/langflow/refs/heads/main/src/backend/base/langflow/components/helpers/data_conditional_router.py", + "https://raw.githubusercontent.com/langflow-ai/langflow/refs/heads/main/src/backend/base/langflow/components/tools/calculator.py", + "https://raw.githubusercontent.com/langflow-ai/langflow/refs/heads/main/src/backend/base/langflow/components/tools/tavily_search.py", + "https://raw.githubusercontent.com/langflow-ai/langflow/refs/heads/main/src/backend/base/langflow/components/models/ollama.py" + ] + } + }, + "tool_mode": false + }, + "type": "URL" + }, + "dragging": false, + "height": 568, + "id": "URL-9b1oo", + "position": { + "x": 1837.4098708175009, + "y": 323.6213535047409 + }, + "positionAbsolute": { + "x": 1837.4098708175009, + "y": 323.6213535047409 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "id": "URL-bqkBy", + "node": { + "base_classes": ["Data", "Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Fetch content from one or more URLs.", + "display_name": "URL", + "documentation": "", + "edited": false, + "field_order": ["urls", "format"], + "frozen": false, + "icon": "layout-template", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Data", + "method": "fetch_content", + "name": "data", + "selected": "Data", + "types": ["Data"], + "value": "__UNDEFINED__" + }, + { + "cache": true, + "display_name": "Text", + "method": "fetch_content_text", + "name": "text", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "import re\n\nfrom langchain_community.document_loaders import AsyncHtmlLoader, WebBaseLoader\n\nfrom langflow.custom import Component\nfrom langflow.helpers.data import data_to_text\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.schema import Data\nfrom langflow.schema.message import Message\n\n\nclass URLComponent(Component):\n display_name = \"URL\"\n description = \"Fetch content from one or more URLs.\"\n icon = \"layout-template\"\n name = \"URL\"\n\n inputs = [\n MessageTextInput(\n name=\"urls\",\n 
display_name=\"URLs\",\n info=\"Enter one or more URLs, by clicking the '+' button.\",\n is_list=True,\n tool_mode=True,\n ),\n DropdownInput(\n name=\"format\",\n display_name=\"Output format\",\n info=\"Output format. Use 'Text' to extract the text from the HTML or 'Raw HTML' for the raw HTML content.\",\n options=[\"Text\", \"Raw HTML\"],\n value=\"Text\",\n ),\n ]\n\n outputs = [\n Output(display_name=\"Data\", name=\"data\", method=\"fetch_content\"),\n Output(display_name=\"Text\", name=\"text\", method=\"fetch_content_text\"),\n ]\n\n def ensure_url(self, string: str) -> str:\n \"\"\"Ensures the given string is a URL by adding 'http://' if it doesn't start with 'http://' or 'https://'.\n\n Raises an error if the string is not a valid URL.\n\n Parameters:\n string (str): The string to be checked and possibly modified.\n\n Returns:\n str: The modified string that is ensured to be a URL.\n\n Raises:\n ValueError: If the string is not a valid URL.\n \"\"\"\n if not string.startswith((\"http://\", \"https://\")):\n string = \"http://\" + string\n\n # Basic URL validation regex\n url_regex = re.compile(\n r\"^(https?:\\/\\/)?\" # optional protocol\n r\"(www\\.)?\" # optional www\n r\"([a-zA-Z0-9.-]+)\" # domain\n r\"(\\.[a-zA-Z]{2,})?\" # top-level domain\n r\"(:\\d+)?\" # optional port\n r\"(\\/[^\\s]*)?$\", # optional path\n re.IGNORECASE,\n )\n\n if not url_regex.match(string):\n msg = f\"Invalid URL: {string}\"\n raise ValueError(msg)\n\n return string\n\n def fetch_content(self) -> list[Data]:\n urls = [self.ensure_url(url.strip()) for url in self.urls if url.strip()]\n if self.format == \"Raw HTML\":\n loader = AsyncHtmlLoader(web_path=urls, encoding=\"utf-8\")\n else:\n loader = WebBaseLoader(web_paths=urls, encoding=\"utf-8\")\n docs = loader.load()\n data = [Data(text=doc.page_content, **doc.metadata) for doc in docs]\n self.status = data\n return data\n\n def fetch_content_text(self) -> Message:\n data = self.fetch_content()\n\n result_string = data_to_text(\"{text}\", data)\n self.status = result_string\n return Message(text=result_string)\n" + }, + "format": { + "_input_type": "DropdownInput", + "advanced": false, + "combobox": false, + "display_name": "Output format", + "dynamic": false, + "info": "Output format. 
Use 'Text' to extract the text from the HTML or 'Raw HTML' for the raw HTML content.", + "name": "format", + "options": ["Text", "Raw HTML"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "Text" + }, + "urls": { + "_input_type": "MessageTextInput", + "advanced": false, + "display_name": "URLs", + "dynamic": false, + "info": "Enter one or more URLs, by clicking the '+' button.", + "input_types": ["Message"], + "list": true, + "load_from_db": false, + "name": "urls", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": true, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": [ + "https://raw.githubusercontent.com/langflow-ai/langflow/refs/heads/main/src/backend/base/langflow/components/custom_component/custom_component.py" + ] + } + }, + "tool_mode": false + }, + "type": "URL" + }, + "dragging": false, + "height": 368, + "id": "URL-bqkBy", + "position": { + "x": 1436.982480021523, + "y": 651.1409296825055 + }, + "positionAbsolute": { + "x": 1436.982480021523, + "y": 651.1409296825055 + }, + "selected": false, + "type": "genericNode", + "width": 320 + } + ], + "viewport": { + "x": -776.9383804523111, + "y": -4.900459222819279, + "zoom": 0.5322184854204348 + } + }, + "description": "Generates well-structured code for custom components following Langflow's specifications.", + "endpoint_name": null, + "icon": "SquareCode", + "id": "aabe02e5-255f-447a-8ec2-be6f43955a43", + "gradient": "1", + "is_component": false, + "last_tested_version": "1.0.19.post2", + "name": "Custom Component Generator", + "tags": ["coding", "web-scraping"] +} diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Document Q&A.json b/src/backend/base/langflow/initial_setup/starter_projects/Document Q&A.json new file mode 100644 index 000000000000..65078b8ec4c7 --- /dev/null +++ b/src/backend/base/langflow/initial_setup/starter_projects/Document Q&A.json @@ -0,0 +1,1442 @@ +{ + "data": { + "edges": [ + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "ChatInput", + "id": "ChatInput-1lWBj", + "name": "message", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "input_value", + "id": "OpenAIModel-HIx8w", + "inputTypes": ["Message"], + "type": "str" + } + }, + "id": "reactflow__edge-ChatInput-1lWBj{œdataTypeœ:œChatInputœ,œidœ:œChatInput-1lWBjœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-HIx8w{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-HIx8wœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "ChatInput-1lWBj", + "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-1lWBjœ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}", + "target": "OpenAIModel-HIx8w", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œOpenAIModel-HIx8wœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "OpenAIModel", + "id": "OpenAIModel-HIx8w", + "name": "text_output", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "input_value", + "id": "ChatOutput-hKFON", + "inputTypes": ["Message"], + "type": "str" + } + }, + "id": "reactflow__edge-OpenAIModel-HIx8w{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-HIx8wœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-hKFON{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-hKFONœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + 
"source": "OpenAIModel-HIx8w", + "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-HIx8wœ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}", + "target": "ChatOutput-hKFON", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-hKFONœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + }, + { + "className": "", + "data": { + "sourceHandle": { + "dataType": "File", + "id": "File-dlDLp", + "name": "data", + "output_types": ["Data"] + }, + "targetHandle": { + "fieldName": "data", + "id": "ParseData-mIiSz", + "inputTypes": ["Data"], + "type": "other" + } + }, + "id": "reactflow__edge-File-dlDLp{œdataTypeœ:œFileœ,œidœ:œFile-dlDLpœ,œnameœ:œdataœ,œoutput_typesœ:[œDataœ]}-ParseData-mIiSz{œfieldNameœ:œdataœ,œidœ:œParseData-mIiSzœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}", + "source": "File-dlDLp", + "sourceHandle": "{œdataTypeœ: œFileœ, œidœ: œFile-dlDLpœ, œnameœ: œdataœ, œoutput_typesœ: [œDataœ]}", + "target": "ParseData-mIiSz", + "targetHandle": "{œfieldNameœ: œdataœ, œidœ: œParseData-mIiSzœ, œinputTypesœ: [œDataœ], œtypeœ: œotherœ}" + }, + { + "data": { + "sourceHandle": { + "dataType": "ParseData", + "id": "ParseData-mIiSz", + "name": "text", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "Document", + "id": "Prompt-L5CiD", + "inputTypes": ["Message", "Text"], + "type": "str" + } + }, + "id": "reactflow__edge-ParseData-mIiSz{œdataTypeœ:œParseDataœ,œidœ:œParseData-mIiSzœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-Prompt-L5CiD{œfieldNameœ:œDocumentœ,œidœ:œPrompt-L5CiDœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", + "source": "ParseData-mIiSz", + "sourceHandle": "{œdataTypeœ: œParseDataœ, œidœ: œParseData-mIiSzœ, œnameœ: œtextœ, œoutput_typesœ: [œMessageœ]}", + "target": "Prompt-L5CiD", + "targetHandle": "{œfieldNameœ: œDocumentœ, œidœ: œPrompt-L5CiDœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" + }, + { + "data": { + "sourceHandle": { + "dataType": "Prompt", + "id": "Prompt-L5CiD", + "name": "prompt", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "system_message", + "id": "OpenAIModel-HIx8w", + "inputTypes": ["Message"], + "type": "str" + } + }, + "id": "reactflow__edge-Prompt-L5CiD{œdataTypeœ:œPromptœ,œidœ:œPrompt-L5CiDœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-HIx8w{œfieldNameœ:œsystem_messageœ,œidœ:œOpenAIModel-HIx8wœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "Prompt-L5CiD", + "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-L5CiDœ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", + "target": "OpenAIModel-HIx8w", + "targetHandle": "{œfieldNameœ: œsystem_messageœ, œidœ: œOpenAIModel-HIx8wœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + } + ], + "nodes": [ + { + "data": { + "description": "Get chat inputs from the Playground.", + "display_name": "Chat Input", + "id": "ChatInput-1lWBj", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Get chat inputs from the Playground.", + "display_name": "Chat Input", + "documentation": "", + "edited": false, + "field_order": [ + "input_value", + "store_message", + "sender", + "sender_name", + "session_id", + "files" + ], + "frozen": false, + "icon": "MessagesSquare", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Message", + "method": "message_response", + "name": "message", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + 
"template": { + "_type": "Component", + "background_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Background Color", + "dynamic": false, + "info": "The background color of the icon.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "background_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "chat_icon": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Icon", + "dynamic": false, + "info": "The icon of the message.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "chat_icon", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_USER, MESSAGE_SENDER_USER\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n _background_color = self.background_color\n _text_color = self.text_color\n _icon = self.chat_icon\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\"background_color\": _background_color, \"text_color\": _text_color, \"icon\": _icon},\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n" + }, + "files": { + "advanced": true, + "display_name": "Files", + "dynamic": false, + "fileTypes": [ + "txt", + "md", + "mdx", + "csv", + "json", + "yaml", + "yml", + "xml", + "html", + "htm", + "pdf", + "docx", + "py", + "sh", + "sql", + "js", + "ts", + "tsx", + "jpg", + "jpeg", + "png", + "bmp", + "image" + ], + "file_path": "", + "info": "Files to be sent with the message.", + "list": true, + "name": "files", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "file", + "value": "" + }, + "input_value": { + "advanced": false, + "display_name": "Text", + "dynamic": false, + "info": "Message to be passed as input.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "What is this document about?" + }, + "sender": { + "advanced": true, + "display_name": "Sender Type", + "dynamic": false, + "info": "Type of sender.", + "name": "sender", + "options": ["Machine", "User"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "str", + "value": "User" + }, + "sender_name": { + "advanced": true, + "display_name": "Sender Name", + "dynamic": false, + "info": "Name of the sender.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "sender_name", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "User" + }, + "session_id": { + "advanced": true, + "display_name": "Session ID", + "dynamic": false, + "info": "The session ID of the chat.
If empty, the current session ID parameter will be used.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "session_id", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "should_store_message": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Store Messages", + "dynamic": false, + "info": "Store the message in the history.", + "list": false, + "name": "should_store_message", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "text_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Text Color", + "dynamic": false, + "info": "The text color of the name", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "text_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + } + } + }, + "type": "ChatInput" + }, + "dragging": false, + "height": 234, + "id": "ChatInput-1lWBj", + "position": { + "x": 516.7529480335185, + "y": 237.04967879541528 + }, + "positionAbsolute": { + "x": 516.7529480335185, + "y": 237.04967879541528 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "description": "Display a chat message in the Playground.", + "display_name": "Chat Output", + "id": "ChatOutput-hKFON", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Display a chat message in the Playground.", + "display_name": "Chat Output", + "documentation": "", + "edited": false, + "field_order": [ + "input_value", + "should_store_message", + "sender", + "sender_name", + "session_id", + "data_template", + "background_color", + "chat_icon", + "text_color" + ], + "frozen": false, + "icon": "MessagesSquare", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Message", + "method": "message_response", + "name": "message", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "background_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Background Color", + "dynamic": false, + "info": "The background color of the icon.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "background_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "chat_icon": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Icon", + "dynamic": false, + "info": "The icon of the message.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "chat_icon", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + 
"password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageInput, MessageTextInput, Output\nfrom langflow.schema.message import Message\nfrom langflow.schema.properties import Source\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.\",\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(\n display_name=\"Message\",\n name=\"message\",\n method=\"message_response\",\n ),\n ]\n\n def _build_source(self, _id: str | None, display_name: str | None, source: str | None) -> Source:\n source_dict = {}\n if _id:\n source_dict[\"id\"] = _id\n if display_name:\n source_dict[\"display_name\"] = display_name\n if source:\n source_dict[\"source\"] = source\n return Source(**source_dict)\n\n def message_response(self) -> Message:\n _source, _icon, _display_name, _source_id = self.get_properties_from_source_component()\n _background_color = self.background_color\n _text_color = self.text_color\n if self.chat_icon:\n _icon = self.chat_icon\n message = self.input_value if isinstance(self.input_value, Message) else Message(text=self.input_value)\n message.sender = self.sender\n message.sender_name = self.sender_name\n message.session_id = self.session_id\n message.flow_id = self.graph.flow_id if hasattr(self, \"graph\") else None\n message.properties.source = self._build_source(_source_id, _display_name, _source)\n message.properties.icon = _icon\n message.properties.background_color = _background_color\n message.properties.text_color = _text_color\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = self.send_message(\n message,\n )\n self.message.value = stored_message\n message = 
stored_message\n\n self.status = message\n return message\n" + }, + "data_template": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Data Template", + "dynamic": false, + "info": "Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "data_template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "{text}" + }, + "input_value": { + "_input_type": "MessageInput", + "advanced": false, + "display_name": "Text", + "dynamic": false, + "info": "Message to be passed as output.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "sender": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Sender Type", + "dynamic": false, + "info": "Type of sender.", + "name": "sender", + "options": ["Machine", "User"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "Machine" + }, + "sender_name": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Sender Name", + "dynamic": false, + "info": "Name of the sender.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "sender_name", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "AI" + }, + "session_id": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Session ID", + "dynamic": false, + "info": "The session ID of the chat. 
If empty, the current session ID parameter will be used.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "session_id", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "should_store_message": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Store Messages", + "dynamic": false, + "info": "Store the message in the history.", + "list": false, + "name": "should_store_message", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "text_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Text Color", + "dynamic": false, + "info": "The text color of the name", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "text_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + } + }, + "tool_mode": false + }, + "type": "ChatOutput" + }, + "dragging": false, + "height": 234, + "id": "ChatOutput-hKFON", + "position": { + "x": 1631.3766926569258, + "y": 136.66509468115308 + }, + "positionAbsolute": { + "x": 1631.3766926569258, + "y": 136.66509468115308 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "description": "Convert Data into plain text following a specified template.", + "display_name": "Parse Data", + "id": "ParseData-mIiSz", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Convert Data into plain text following a specified template.", + "display_name": "Parse Data", + "documentation": "", + "edited": false, + "field_order": ["data", "template", "sep"], + "frozen": false, + "icon": "braces", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Text", + "method": "parse_data", + "name": "text", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.custom import Component\nfrom langflow.helpers.data import data_to_text\nfrom langflow.io import DataInput, MultilineInput, Output, StrInput\nfrom langflow.schema.message import Message\n\n\nclass ParseDataComponent(Component):\n display_name = \"Parse Data\"\n description = \"Convert Data into plain text following a specified template.\"\n icon = \"braces\"\n name = \"ParseData\"\n\n inputs = [\n DataInput(name=\"data\", display_name=\"Data\", info=\"The data to convert to text.\"),\n MultilineInput(\n name=\"template\",\n display_name=\"Template\",\n info=\"The template to use for formatting the data. 
\"\n \"It can contain the keys {text}, {data} or any other key in the Data.\",\n value=\"{text}\",\n ),\n StrInput(name=\"sep\", display_name=\"Separator\", advanced=True, value=\"\\n\"),\n ]\n\n outputs = [\n Output(display_name=\"Text\", name=\"text\", method=\"parse_data\"),\n ]\n\n def parse_data(self) -> Message:\n data = self.data if isinstance(self.data, list) else [self.data]\n template = self.template\n\n result_string = data_to_text(template, data, sep=self.sep)\n self.status = result_string\n return Message(text=result_string)\n" + }, + "data": { + "advanced": false, + "display_name": "Data", + "dynamic": false, + "info": "The data to convert to text.", + "input_types": ["Data"], + "list": false, + "name": "data", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "sep": { + "advanced": true, + "display_name": "Separator", + "dynamic": false, + "info": "", + "list": false, + "load_from_db": false, + "name": "sep", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "str", + "value": "\n" + }, + "template": { + "advanced": false, + "display_name": "Template", + "dynamic": false, + "info": "The template to use for formatting the data. It can contain the keys {text}, {data} or any other key in the Data.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "{text}" + } + } + }, + "type": "ParseData" + }, + "dragging": false, + "height": 302, + "id": "ParseData-mIiSz", + "position": { + "x": 514.8054600415829, + "y": -117.1921617826383 + }, + "positionAbsolute": { + "x": 514.8054600415829, + "y": -117.1921617826383 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "id": "note-Tz3ZY", + "node": { + "description": "## Get Your OpenAI API Key\n\n**Steps**:\n\n1. **Visit** [OpenAI's API Key Page](https://platform.openai.com/api-keys).\n\n2. **Log In/Sign Up**:\n - Log in or create a new OpenAI account.\n\n3. **Generate API Key**:\n - Click \"Create New Secret Key\" to obtain your key.\n\n4. **Store Your Key Securely**:\n - Note it down as it will only display once.\n\n5. 
**Enter API Key**:\n - Input your key in the OpenAI API Key field within the component setup.\n\nKeep your key safe and manage it responsibly!", + "display_name": "", + "documentation": "", + "template": { + "backgroundColor": "rose" + } + }, + "type": "note" + }, + "dragging": true, + "height": 325, + "id": "note-Tz3ZY", + "position": { + "x": 1253.2038187140245, + "y": -421.5721019678553 + }, + "positionAbsolute": { + "x": 1253.2038187140245, + "y": -421.5721019678553 + }, + "selected": false, + "type": "noteNode", + "width": 325 + }, + { + "data": { + "id": "OpenAIModel-HIx8w", + "node": { + "base_classes": ["LanguageModel", "Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Generates text using OpenAI LLMs.", + "display_name": "OpenAI", + "documentation": "", + "edited": false, + "field_order": [ + "input_value", + "system_message", + "stream", + "max_tokens", + "model_kwargs", + "json_mode", + "output_schema", + "model_name", + "openai_api_base", + "api_key", + "temperature", + "seed", + "output_parser" + ], + "frozen": false, + "icon": "OpenAI", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Text", + "method": "text_response", + "name": "text_output", + "required_inputs": [], + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + }, + { + "cache": true, + "display_name": "Language Model", + "method": "build_model", + "name": "model_output", + "required_inputs": [], + "selected": "LanguageModel", + "types": ["LanguageModel"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "api_key": { + "_input_type": "SecretStrInput", + "advanced": false, + "display_name": "OpenAI API Key", + "dynamic": false, + "info": "The OpenAI API Key to use for the OpenAI model.", + "input_types": ["Message"], + "load_from_db": true, + "name": "api_key", + "password": true, + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "OPENAI_API_KEY" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "import operator\nfrom functools import reduce\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langflow.inputs import BoolInput, DictInput, DropdownInput, FloatInput, IntInput, SecretStrInput, StrInput\nfrom langflow.inputs.inputs import HandleInput\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = [\n *LCModelComponent._base_inputs,\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(\n name=\"model_kwargs\",\n display_name=\"Model Kwargs\",\n advanced=True,\n info=\"Additional keyword arguments to pass to the model.\",\n ),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. \"\n \"You must pass the word JSON in the prompt. \"\n \"If left blank, JSON mode will be disabled. [DEPRECATED]\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. \"\n \"Defaults to https://api.openai.com/v1. \"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n HandleInput(\n name=\"output_parser\",\n display_name=\"Output Parser\",\n info=\"The parser to use to parse the output of the model\",\n advanced=True,\n input_types=[\"OutputParser\"],\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n api_key = SecretStr(openai_api_key).get_secret_value() if openai_api_key else None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature if temperature is not None else 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\")\n else:\n output = output.bind(response_format={\"type\": \"json_object\"})\n\n return output\n\n def _get_exception_message(self, e: Exception):\n \"\"\"Get a message from an OpenAI exception.\n\n Args:\n e (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n try:\n from openai import BadRequestError\n except ImportError:\n return None\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\")\n if message:\n return message\n return None\n" + }, + "input_value": { + "_input_type": "MessageInput", + "advanced": false, + "display_name": "Input", + "dynamic": false, + "info": "", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + 
"trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "json_mode": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "JSON Mode", + "dynamic": false, + "info": "If True, it will output JSON regardless of passing a schema.", + "list": false, + "name": "json_mode", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + }, + "max_tokens": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Max Tokens", + "dynamic": false, + "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.", + "list": false, + "name": "max_tokens", + "placeholder": "", + "range_spec": { + "max": 128000, + "min": 0, + "step": 0.1, + "step_type": "float" + }, + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": "" + }, + "model_kwargs": { + "_input_type": "DictInput", + "advanced": true, + "display_name": "Model Kwargs", + "dynamic": false, + "info": "Additional keyword arguments to pass to the model.", + "list": false, + "name": "model_kwargs", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "model_name": { + "_input_type": "DropdownInput", + "advanced": false, + "combobox": false, + "display_name": "Model Name", + "dynamic": false, + "info": "", + "name": "model_name", + "options": [ + "gpt-4o-mini", + "gpt-4o", + "gpt-4-turbo", + "gpt-4-turbo-preview", + "gpt-4", + "gpt-3.5-turbo", + "gpt-3.5-turbo-0125" + ], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "gpt-4o-mini" + }, + "openai_api_base": { + "_input_type": "StrInput", + "advanced": true, + "display_name": "OpenAI API Base", + "dynamic": false, + "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.", + "list": false, + "load_from_db": false, + "name": "openai_api_base", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "output_parser": { + "_input_type": "HandleInput", + "advanced": true, + "display_name": "Output Parser", + "dynamic": false, + "info": "The parser to use to parse the output of the model", + "input_types": ["OutputParser"], + "list": false, + "name": "output_parser", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "output_schema": { + "_input_type": "DictInput", + "advanced": true, + "display_name": "Schema", + "dynamic": false, + "info": "The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled. 
[DEPRECATED]", + "list": true, + "name": "output_schema", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "seed": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Seed", + "dynamic": false, + "info": "The seed controls the reproducibility of the job.", + "list": false, + "name": "seed", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 1 + }, + "stream": { + "_input_type": "BoolInput", + "advanced": false, + "display_name": "Stream", + "dynamic": false, + "info": "Stream the response from the model. Streaming works only in Chat.", + "list": false, + "name": "stream", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + }, + "system_message": { + "_input_type": "MessageTextInput", + "advanced": false, + "display_name": "System Message", + "dynamic": false, + "info": "System message to pass to the model.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "system_message", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "temperature": { + "_input_type": "FloatInput", + "advanced": false, + "display_name": "Temperature", + "dynamic": false, + "info": "", + "list": false, + "name": "temperature", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "float", + "value": 0.1 + } + }, + "tool_mode": false + }, + "type": "OpenAIModel" + }, + "dragging": false, + "height": 630, + "id": "OpenAIModel-HIx8w", + "position": { + "x": 1259.2100978002586, + "y": -88.15692253090975 + }, + "positionAbsolute": { + "x": 1259.2100978002586, + "y": -88.15692253090975 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "id": "note-XGIUa", + "node": { + "description": "# Document Q&A\n\n**Purpose:**\nThis flow leverages a language model to answer questions based on content from a loaded document. It's ideal for obtaining quick insights from PDFs or other text files by asking direct questions.\n\n**Components**:\n1. **File Component**: Loads and processes your document in supported formats.\n2. **Parse Data**: Converts the loaded document into text using a specified template for consistent processing.\n3. **Prompt Component**: Forms a structured query by combining the parsed document content with user questions.\n4. **OpenAI Model**: Engages OpenAI's language model to generate responses to queries based on the document context.\n5. **Chat Input/Output**: Facilitates user queries and displays AI-generated answers seamlessly.\n\n**Steps to Use**:\n1. **Upload Document**: Use the \"File\" component to upload a document or text file you want to query.\n2. **Enter Question**: Through the \"Chat Input\" field, type your question related to the document content.\n3. **Run the Flow**: Activate the flow to process the input and generate an answer using the OpenAI model.\n4. 
**View Response**: Read the generated answer in the \"Chat Output\" field for immediate insights.\n\n**Benefits**:\n- Simplifies the process of extracting information from documents.\n- Provides a user-friendly interface for interactive document exploration using AI.\n", + "display_name": "", + "documentation": "", + "template": {} + }, + "type": "note" + }, + "dragging": false, + "height": 452, + "id": "note-XGIUa", + "position": { + "x": -338.7070086205371, + "y": -177.11912020709357 + }, + "positionAbsolute": { + "x": -338.7070086205371, + "y": -177.11912020709357 + }, + "resizing": false, + "selected": false, + "style": { + "height": 452, + "width": 469 + }, + "type": "noteNode", + "width": 469 + }, + { + "data": { + "id": "File-dlDLp", + "node": { + "base_classes": ["Data"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Load a file to be used in your project.", + "display_name": "File", + "documentation": "", + "edited": false, + "field_order": [ + "path", + "silent_errors", + "use_multithreading", + "concurrency_multithreading" + ], + "frozen": false, + "icon": "file-text", + "legacy": false, + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Data", + "method": "load_file", + "name": "data", + "selected": "Data", + "types": ["Data"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from pathlib import Path\nfrom tempfile import NamedTemporaryFile\nfrom zipfile import ZipFile, is_zipfile\n\nfrom langflow.base.data.utils import TEXT_FILE_TYPES, parallel_load_data, parse_text_file_to_data\nfrom langflow.custom import Component\nfrom langflow.io import BoolInput, FileInput, IntInput, Output\nfrom langflow.schema import Data\n\n\nclass FileComponent(Component):\n \"\"\"Handles loading of individual or zipped text files.\n\n Processes multiple valid files within a zip archive if provided.\n\n Attributes:\n display_name: Display name of the component.\n description: Brief component description.\n icon: Icon to represent the component.\n name: Identifier for the component.\n inputs: Inputs required by the component.\n outputs: Output of the component after processing files.\n \"\"\"\n\n display_name = \"File\"\n description = \"Load a file to be used in your project.\"\n icon = \"file-text\"\n name = \"File\"\n\n inputs = [\n FileInput(\n name=\"path\",\n display_name=\"Path\",\n file_types=[*TEXT_FILE_TYPES, \"zip\"],\n info=f\"Supported file types: {', '.join([*TEXT_FILE_TYPES, 'zip'])}\",\n ),\n BoolInput(\n name=\"silent_errors\",\n display_name=\"Silent Errors\",\n advanced=True,\n info=\"If true, errors will not raise an exception.\",\n ),\n BoolInput(\n name=\"use_multithreading\",\n display_name=\"Use Multithreading\",\n advanced=True,\n info=\"If true, parallel processing will be enabled for zip files.\",\n ),\n IntInput(\n name=\"concurrency_multithreading\",\n display_name=\"Multithreading Concurrency\",\n advanced=True,\n info=\"The maximum number of workers to use, if concurrency is enabled\",\n value=4,\n ),\n ]\n\n outputs = [Output(display_name=\"Data\", name=\"data\", method=\"load_file\")]\n\n def load_file(self) -> Data:\n \"\"\"Load and 
parse file(s) from a zip archive.\n\n Raises:\n ValueError: If no file is uploaded or file path is invalid.\n\n Returns:\n Data: Parsed data from file(s).\n \"\"\"\n # Check if the file path is provided\n if not self.path:\n self.log(\"File path is missing.\")\n msg = \"Please upload a file for processing.\"\n\n raise ValueError(msg)\n\n resolved_path = Path(self.resolve_path(self.path))\n try:\n # Check if the file is a zip archive\n if is_zipfile(resolved_path):\n self.log(f\"Processing zip file: {resolved_path.name}.\")\n\n return self._process_zip_file(\n resolved_path,\n silent_errors=self.silent_errors,\n parallel=self.use_multithreading,\n )\n\n self.log(f\"Processing single file: {resolved_path.name}.\")\n\n return self._process_single_file(resolved_path, silent_errors=self.silent_errors)\n except FileNotFoundError:\n self.log(f\"File not found: {resolved_path.name}.\")\n\n raise\n\n def _process_zip_file(self, zip_path: Path, *, silent_errors: bool = False, parallel: bool = False) -> Data:\n \"\"\"Process text files within a zip archive.\n\n Args:\n zip_path: Path to the zip file.\n silent_errors: Suppresses errors if True.\n parallel: Enables parallel processing if True.\n\n Returns:\n list[Data]: Combined data from all valid files.\n\n Raises:\n ValueError: If no valid files found in the archive.\n \"\"\"\n data: list[Data] = []\n with ZipFile(zip_path, \"r\") as zip_file:\n # Filter file names based on extensions in TEXT_FILE_TYPES and ignore hidden files\n valid_files = [\n name\n for name in zip_file.namelist()\n if (\n any(name.endswith(ext) for ext in TEXT_FILE_TYPES)\n and not name.startswith(\"__MACOSX\")\n and not name.startswith(\".\")\n )\n ]\n\n # Raise an error if no valid files found\n if not valid_files:\n self.log(\"No valid files in the zip archive.\")\n\n # Return empty data if silent_errors is True\n if silent_errors:\n return data # type: ignore[return-value]\n\n # Raise an error if no valid files found\n msg = \"No valid files in the zip archive.\"\n raise ValueError(msg)\n\n # Define a function to process each file\n def process_file(file_name, silent_errors=silent_errors):\n with NamedTemporaryFile(delete=False) as temp_file:\n temp_path = Path(temp_file.name).with_name(file_name)\n with zip_file.open(file_name) as file_content:\n temp_path.write_bytes(file_content.read())\n try:\n return self._process_single_file(temp_path, silent_errors=silent_errors)\n finally:\n temp_path.unlink()\n\n # Process files in parallel if specified\n if parallel:\n self.log(\n f\"Initializing parallel Thread Pool Executor with max workers: \"\n f\"{self.concurrency_multithreading}.\"\n )\n\n # Process files in parallel\n initial_data = parallel_load_data(\n valid_files,\n silent_errors=silent_errors,\n load_function=process_file,\n max_concurrency=self.concurrency_multithreading,\n )\n\n # Filter out empty data\n data = list(filter(None, initial_data))\n else:\n # Sequential processing\n data = [process_file(file_name) for file_name in valid_files]\n\n self.log(f\"Successfully processed zip file: {zip_path.name}.\")\n\n return data # type: ignore[return-value]\n\n def _process_single_file(self, file_path: Path, *, silent_errors: bool = False) -> Data:\n \"\"\"Process a single file.\n\n Args:\n file_path: Path to the file.\n silent_errors: Suppresses errors if True.\n\n Returns:\n Data: Parsed data from the file.\n\n Raises:\n ValueError: For unsupported file formats.\n \"\"\"\n # Check if the file type is supported\n if not any(file_path.suffix == ext for ext in [\".\" + f for 
f in TEXT_FILE_TYPES]):\n self.log(f\"Unsupported file type: {file_path.suffix}\")\n\n # Return empty data if silent_errors is True\n if silent_errors:\n return Data()\n\n msg = f\"Unsupported file type: {file_path.suffix}\"\n raise ValueError(msg)\n\n try:\n # Parse the text file as appropriate\n data = parse_text_file_to_data(str(file_path), silent_errors=silent_errors) # type: ignore[assignment]\n if not data:\n data = Data()\n\n self.log(f\"Successfully processed file: {file_path.name}.\")\n except Exception as e:\n self.log(f\"Error processing file {file_path.name}: {e}\")\n\n # Return empty data if silent_errors is True\n if not silent_errors:\n raise\n\n data = Data()\n\n return data\n" + }, + "concurrency_multithreading": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Multithreading Concurrency", + "dynamic": false, + "info": "The maximum number of workers to use, if concurrency is enabled", + "list": false, + "name": "concurrency_multithreading", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 4 + }, + "path": { + "_input_type": "FileInput", + "advanced": false, + "display_name": "Path", + "dynamic": false, + "fileTypes": [ + "txt", + "md", + "mdx", + "csv", + "json", + "yaml", + "yml", + "xml", + "html", + "htm", + "pdf", + "docx", + "py", + "sh", + "sql", + "js", + "ts", + "tsx", + "zip" + ], + "file_path": "", + "info": "Supported file types: txt, md, mdx, csv, json, yaml, yml, xml, html, htm, pdf, docx, py, sh, sql, js, ts, tsx, zip", + "list": false, + "name": "path", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "file", + "value": "" + }, + "silent_errors": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Silent Errors", + "dynamic": false, + "info": "If true, errors will not raise an exception.", + "list": false, + "name": "silent_errors", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + }, + "use_multithreading": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Use Multithreading", + "dynamic": false, + "info": "If true, parallel processing will be enabled for zip files.", + "list": false, + "name": "use_multithreading", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + } + }, + "tool_mode": false + }, + "type": "File" + }, + "dragging": false, + "height": 232, + "id": "File-dlDLp", + "position": { + "x": 155.39382083637838, + "y": -82.32805525710685 + }, + "positionAbsolute": { + "x": 155.39382083637838, + "y": -82.32805525710685 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt", + "id": "Prompt-L5CiD", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": { + "template": ["Document"] + }, + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt", + "documentation": "", + "edited": false, + "error": null, + "field_order": ["template"], + "frozen": false, + "full_path": null, + "icon": "prompts", + "is_composition": null, + "is_input": null, + "is_output": null, + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + 
"name": "", + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Prompt Message", + "method": "build_prompt", + "name": "prompt", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "Document": { + "advanced": false, + "display_name": "Document", + "dynamic": false, + "field_type": "str", + "fileTypes": [], + "file_path": "", + "info": "", + "input_types": ["Message", "Text"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "Document", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "" + }, + "_type": "Component", + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.inputs.inputs import DefaultPromptField\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(self) -> Message:\n prompt = Message.from_template(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def _update_template(self, frontend_node: dict):\n prompt_template = frontend_node[\"template\"][\"template\"][\"value\"]\n custom_fields = frontend_node[\"custom_fields\"]\n frontend_node_template = frontend_node[\"template\"]\n _ = process_prompt_template(\n template=prompt_template,\n name=\"template\",\n custom_fields=custom_fields,\n frontend_node_template=frontend_node_template,\n )\n return frontend_node\n\n def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"This function is called after the code validation is done.\"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n # Kept it duplicated for backwards compatibility\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n\n def _get_fallback_input(self, **kwargs):\n return DefaultPromptField(**kwargs)\n" + }, + "template": { + "advanced": false, + "display_name": "Template", + "dynamic": false, + "info": "", + "list": false, + "load_from_db": false, + "name": "template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "prompt", + "value": "Answer user's questions based on the 
document below:\n\n---\n\n{Document}\n\n---\n\nQuestion:" + } + }, + "tool_mode": false + }, + "type": "Prompt" + }, + "dragging": false, + "height": 347, + "id": "Prompt-L5CiD", + "position": { + "x": 895.1947781377585, + "y": -59.89409263992732 + }, + "positionAbsolute": { + "x": 895.1947781377585, + "y": -59.89409263992732 + }, + "selected": false, + "type": "genericNode", + "width": 320 + } + ], + "viewport": { + "x": 262.21464656923195, + "y": 450.3754323717522, + "zoom": 0.5739369419687381 + } + }, + "description": "Integrates PDF reading with a language model to answer document-specific questions. Ideal for small-scale texts, it facilitates direct queries with immediate insights.", + "endpoint_name": null, + "icon": "FileQuestion", + "id": "febba2f9-69b3-484b-8aef-65626810ec8a", + "gradient": "3", + "is_component": false, + "last_tested_version": "1.0.19.post2", + "name": "Document Q&A", + "tags": ["rag", "q-a", "openai"] +} diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Document QA.json b/src/backend/base/langflow/initial_setup/starter_projects/Document QA.json deleted file mode 100644 index 5a8fe5dabb0a..000000000000 --- a/src/backend/base/langflow/initial_setup/starter_projects/Document QA.json +++ /dev/null @@ -1,1262 +0,0 @@ -{ - "data": { - "edges": [ - { - "className": "", - "data": { - "sourceHandle": { - "dataType": "ChatInput", - "id": "ChatInput-Emi4q", - "name": "message", - "output_types": [ - "Message" - ] - }, - "targetHandle": { - "fieldName": "Question", - "id": "Prompt-n8yRL", - "inputTypes": [ - "Message", - "Text" - ], - "type": "str" - } - }, - "id": "reactflow__edge-ChatInput-Emi4q{œdataTypeœ:œChatInputœ,œidœ:œChatInput-Emi4qœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Prompt-n8yRL{œfieldNameœ:œQuestionœ,œidœ:œPrompt-n8yRLœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", - "source": "ChatInput-Emi4q", - "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-Emi4qœ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}", - "target": "Prompt-n8yRL", - "targetHandle": "{œfieldNameœ: œQuestionœ, œidœ: œPrompt-n8yRLœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" - }, - { - "className": "", - "data": { - "sourceHandle": { - "dataType": "Prompt", - "id": "Prompt-n8yRL", - "name": "prompt", - "output_types": [ - "Message" - ] - }, - "targetHandle": { - "fieldName": "input_value", - "id": "OpenAIModel-1hwZ2", - "inputTypes": [ - "Message" - ], - "type": "str" - } - }, - "id": "reactflow__edge-Prompt-n8yRL{œdataTypeœ:œPromptœ,œidœ:œPrompt-n8yRLœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-1hwZ2{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-1hwZ2œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", - "source": "Prompt-n8yRL", - "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-n8yRLœ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", - "target": "OpenAIModel-1hwZ2", - "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œOpenAIModel-1hwZ2œ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" - }, - { - "className": "", - "data": { - "sourceHandle": { - "dataType": "OpenAIModel", - "id": "OpenAIModel-1hwZ2", - "name": "text_output", - "output_types": [ - "Message" - ] - }, - "targetHandle": { - "fieldName": "input_value", - "id": "ChatOutput-sD0lp", - "inputTypes": [ - "Message" - ], - "type": "str" - } - }, - "id": 
"reactflow__edge-OpenAIModel-1hwZ2{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-1hwZ2œ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-sD0lp{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-sD0lpœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", - "source": "OpenAIModel-1hwZ2", - "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-1hwZ2œ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}", - "target": "ChatOutput-sD0lp", - "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-sD0lpœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" - }, - { - "className": "", - "data": { - "sourceHandle": { - "dataType": "ParseData", - "id": "ParseData-qYLes", - "name": "text", - "output_types": [ - "Message" - ] - }, - "targetHandle": { - "fieldName": "Document", - "id": "Prompt-n8yRL", - "inputTypes": [ - "Message", - "Text" - ], - "type": "str" - } - }, - "id": "reactflow__edge-ParseData-qYLes{œdataTypeœ:œParseDataœ,œidœ:œParseData-qYLesœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-Prompt-n8yRL{œfieldNameœ:œDocumentœ,œidœ:œPrompt-n8yRLœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", - "source": "ParseData-qYLes", - "sourceHandle": "{œdataTypeœ: œParseDataœ, œidœ: œParseData-qYLesœ, œnameœ: œtextœ, œoutput_typesœ: [œMessageœ]}", - "target": "Prompt-n8yRL", - "targetHandle": "{œfieldNameœ: œDocumentœ, œidœ: œPrompt-n8yRLœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" - }, - { - "className": "", - "data": { - "sourceHandle": { - "dataType": "File", - "id": "File-0oa6O", - "name": "data", - "output_types": [ - "Data" - ] - }, - "targetHandle": { - "fieldName": "data", - "id": "ParseData-qYLes", - "inputTypes": [ - "Data" - ], - "type": "other" - } - }, - "id": "reactflow__edge-File-0oa6O{œdataTypeœ:œFileœ,œidœ:œFile-0oa6Oœ,œnameœ:œdataœ,œoutput_typesœ:[œDataœ]}-ParseData-qYLes{œfieldNameœ:œdataœ,œidœ:œParseData-qYLesœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}", - "source": "File-0oa6O", - "sourceHandle": "{œdataTypeœ: œFileœ, œidœ: œFile-0oa6Oœ, œnameœ: œdataœ, œoutput_typesœ: [œDataœ]}", - "target": "ParseData-qYLes", - "targetHandle": "{œfieldNameœ: œdataœ, œidœ: œParseData-qYLesœ, œinputTypesœ: [œDataœ], œtypeœ: œotherœ}" - } - ], - "nodes": [ - { - "data": { - "description": "Create a prompt template with dynamic variables.", - "display_name": "Prompt", - "id": "Prompt-n8yRL", - "node": { - "base_classes": [ - "Message" - ], - "beta": false, - "conditional_paths": [], - "custom_fields": { - "template": [ - "Document", - "Question" - ] - }, - "description": "Create a prompt template with dynamic variables.", - "display_name": "Prompt", - "documentation": "", - "edited": false, - "field_order": [ - "template" - ], - "frozen": false, - "icon": "prompts", - "output_types": [], - "outputs": [ - { - "cache": true, - "display_name": "Prompt Message", - "method": "build_prompt", - "name": "prompt", - "selected": "Message", - "types": [ - "Message" - ], - "value": "__UNDEFINED__" - } - ], - "pinned": false, - "template": { - "Document": { - "advanced": false, - "display_name": "Document", - "dynamic": false, - "field_type": "str", - "fileTypes": [], - "file_path": "", - "info": "", - "input_types": [ - "Message", - "Text" - ], - "list": false, - "load_from_db": false, - "multiline": true, - "name": "Document", - "password": false, - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "type": "str", - "value": "" - }, - "Question": { - "advanced": false, - "display_name": "Question", - "dynamic": false, - "field_type": "str", - "fileTypes": [], - "file_path": "", - 
"info": "", - "input_types": [ - "Message", - "Text" - ], - "list": false, - "load_from_db": false, - "multiline": true, - "name": "Question", - "password": false, - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "type": "str", - "value": "" - }, - "_type": "Component", - "code": { - "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", - "info": "", - "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, - "placeholder": "", - "required": true, - "show": true, - "title_case": false, - "type": "code", - "value": "from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.inputs.inputs import DefaultPromptField\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(\n self,\n ) -> Message:\n prompt = await Message.from_template_and_variables(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def _update_template(self, frontend_node: dict):\n prompt_template = frontend_node[\"template\"][\"template\"][\"value\"]\n custom_fields = frontend_node[\"custom_fields\"]\n frontend_node_template = frontend_node[\"template\"]\n _ = process_prompt_template(\n template=prompt_template,\n name=\"template\",\n custom_fields=custom_fields,\n frontend_node_template=frontend_node_template,\n )\n return frontend_node\n\n def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"\n This function is called after the code validation is done.\n \"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n # Kept it duplicated for backwards compatibility\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n\n def _get_fallback_input(self, **kwargs):\n return DefaultPromptField(**kwargs)\n" - }, - "template": { - "advanced": false, - "display_name": "Template", - "dynamic": false, - "info": "", - "list": false, - "load_from_db": false, - "name": "template", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "type": "prompt", - "value": "Answer user's questions based on the document below:\n\n---\n\n{Document}\n\n---\n\nQuestion:\n{Question}\n\nAnswer:\n" - } - } - }, - "type": "Prompt" - }, - "dragging": false, - "height": 517, - "id": "Prompt-n8yRL", - "position": { - "x": 637.3518652087848, - "y": 47.191730368560215 - }, - "positionAbsolute": { - "x": 637.3518652087848, - "y": 47.191730368560215 - }, - "selected": false, - 
"type": "genericNode", - "width": 384 - }, - { - "data": { - "description": "Get chat inputs from the Playground.", - "display_name": "Chat Input", - "id": "ChatInput-Emi4q", - "node": { - "base_classes": [ - "Message" - ], - "beta": false, - "conditional_paths": [], - "custom_fields": {}, - "description": "Get chat inputs from the Playground.", - "display_name": "Chat Input", - "documentation": "", - "edited": false, - "field_order": [ - "input_value", - "store_message", - "sender", - "sender_name", - "session_id", - "files" - ], - "frozen": false, - "icon": "ChatInput", - "output_types": [], - "outputs": [ - { - "cache": true, - "display_name": "Message", - "method": "message_response", - "name": "message", - "selected": "Message", - "types": [ - "Message" - ], - "value": "__UNDEFINED__" - } - ], - "pinned": false, - "template": { - "_type": "Component", - "code": { - "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", - "info": "", - "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, - "placeholder": "", - "required": true, - "show": true, - "title_case": false, - "type": "code", - "value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_USER, MESSAGE_SENDER_NAME_USER\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n )\n\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n" - }, - "files": { - "advanced": true, - "display_name": "Files", - "dynamic": false, - "fileTypes": [ - "txt", - "md", - "mdx", - "csv", - "json", - "yaml", - "yml", - "xml", - "html", - "htm", - "pdf", - "docx", - "py", - "sh", - "sql", - "js", - "ts", - "tsx", - "jpg", - "jpeg", - "png", - "bmp", - "image" - ], - "file_path": "", - "info": "Files to be sent with the message.", - "list": true, - "name": "files", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "file", - "value": "" - }, - "input_value": { - "advanced": false, - "display_name": "Text", - "dynamic": false, - "info": "Message to be passed as input.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "multiline": true, - "name": "input_value", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "sender": { - "advanced": true, - "display_name": "Sender Type", - "dynamic": false, - "info": "Type of sender.", - "name": "sender", - "options": [ - "Machine", - "User" - ], - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "str", - "value": "User" - }, - "sender_name": { - "advanced": true, - "display_name": "Sender Name", - "dynamic": false, - "info": "Name of the sender.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "name": "sender_name", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "User" - }, - "session_id": { - "advanced": true, - "display_name": "Session ID", - "dynamic": false, - "info": "The session ID of the chat. 
If empty, the current session ID parameter will be used.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "name": "session_id", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "should_store_message": { - "_input_type": "BoolInput", - "advanced": true, - "display_name": "Store Messages", - "dynamic": false, - "info": "Store the message in the history.", - "list": false, - "name": "should_store_message", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": true - } - } - }, - "type": "ChatInput" - }, - "dragging": false, - "height": 309, - "id": "ChatInput-Emi4q", - "position": { - "x": 50.08709924122684, - "y": 320.88186720121615 - }, - "positionAbsolute": { - "x": 50.08709924122684, - "y": 320.88186720121615 - }, - "selected": false, - "type": "genericNode", - "width": 384 - }, - { - "data": { - "description": "Display a chat message in the Playground.", - "display_name": "Chat Output", - "id": "ChatOutput-sD0lp", - "node": { - "base_classes": [ - "Message" - ], - "beta": false, - "conditional_paths": [], - "custom_fields": {}, - "description": "Display a chat message in the Playground.", - "display_name": "Chat Output", - "documentation": "", - "edited": false, - "field_order": [ - "input_value", - "store_message", - "sender", - "sender_name", - "session_id", - "data_template" - ], - "frozen": false, - "icon": "ChatOutput", - "output_types": [], - "outputs": [ - { - "cache": true, - "display_name": "Message", - "method": "message_response", - "name": "message", - "selected": "Message", - "types": [ - "Message" - ], - "value": "__UNDEFINED__" - } - ], - "pinned": false, - "template": { - "_type": "Component", - "code": { - "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", - "info": "", - "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, - "placeholder": "", - "required": true, - "show": true, - "title_case": false, - "type": "code", - "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER, MESSAGE_SENDER_AI\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n" - }, - "data_template": { - "advanced": true, - "display_name": "Data Template", - "dynamic": false, - "info": "Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "name": "data_template", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "{text}" - }, - "input_value": { - "advanced": false, - "display_name": "Text", - "dynamic": false, - "info": "Message to be passed as output.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "name": "input_value", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "sender": { - "advanced": true, - "display_name": "Sender Type", - "dynamic": false, - "info": "Type of sender.", - "name": "sender", - "options": [ - "Machine", - "User" - ], - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "str", - "value": "Machine" - }, - "sender_name": { - "advanced": true, - "display_name": "Sender Name", - "dynamic": false, - "info": "Name of the sender.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "name": "sender_name", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "AI" - }, - "session_id": { - "advanced": true, - "display_name": "Session ID", - "dynamic": false, - "info": "The session ID of the chat. 
If empty, the current session ID parameter will be used.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "name": "session_id", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "should_store_message": { - "_input_type": "BoolInput", - "advanced": true, - "display_name": "Store Messages", - "dynamic": false, - "info": "Store the message in the history.", - "list": false, - "name": "should_store_message", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": true - } - } - }, - "type": "ChatOutput" - }, - "dragging": false, - "height": 309, - "id": "ChatOutput-sD0lp", - "position": { - "x": 1831.1359796346408, - "y": 139.5174517327903 - }, - "positionAbsolute": { - "x": 1831.1359796346408, - "y": 139.5174517327903 - }, - "selected": false, - "type": "genericNode", - "width": 384 - }, - { - "data": { - "description": "Generates text using OpenAI LLMs.", - "display_name": "OpenAI", - "id": "OpenAIModel-1hwZ2", - "node": { - "base_classes": [ - "LanguageModel", - "Message" - ], - "beta": false, - "conditional_paths": [], - "custom_fields": {}, - "description": "Generates text using OpenAI LLMs.", - "display_name": "OpenAI", - "documentation": "", - "edited": false, - "field_order": [ - "input_value", - "max_tokens", - "model_kwargs", - "json_mode", - "output_schema", - "model_name", - "openai_api_base", - "openai_api_key", - "temperature", - "stream", - "system_message", - "seed" - ], - "frozen": false, - "icon": "OpenAI", - "output_types": [], - "outputs": [ - { - "cache": true, - "display_name": "Text", - "method": "text_response", - "name": "text_output", - "selected": "Message", - "types": [ - "Message" - ], - "value": "__UNDEFINED__" - }, - { - "cache": true, - "display_name": "Language Model", - "method": "build_model", - "name": "model_output", - "selected": "LanguageModel", - "types": [ - "LanguageModel" - ], - "value": "__UNDEFINED__" - } - ], - "pinned": false, - "template": { - "_type": "Component", - "api_key": { - "_input_type": "SecretStrInput", - "advanced": false, - "display_name": "OpenAI API Key", - "dynamic": false, - "info": "The OpenAI API Key to use for the OpenAI model.", - "input_types": [ - "Message" - ], - "load_from_db": true, - "name": "api_key", - "password": true, - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "type": "str", - "value": "OPENAI_API_KEY" - }, - "code": { - "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", - "info": "", - "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, - "placeholder": "", - "required": true, - "show": true, - "title_case": false, - "type": "code", - "value": "import operator\nfrom functools import reduce\n\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = 
\"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature if temperature is not None else 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output # type: ignore\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" - }, - "input_value": { - "advanced": false, - "display_name": "Input", - "dynamic": false, - "info": "", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "name": "input_value", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - 
"type": "str", - "value": "" - }, - "json_mode": { - "advanced": true, - "display_name": "JSON Mode", - "dynamic": false, - "info": "If True, it will output JSON regardless of passing a schema.", - "list": false, - "name": "json_mode", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": false - }, - "max_tokens": { - "advanced": true, - "display_name": "Max Tokens", - "dynamic": false, - "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.", - "list": false, - "name": "max_tokens", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "int", - "value": "" - }, - "model_kwargs": { - "advanced": true, - "display_name": "Model Kwargs", - "dynamic": false, - "info": "", - "list": false, - "name": "model_kwargs", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "type": "dict", - "value": {} - }, - "model_name": { - "advanced": false, - "display_name": "Model Name", - "dynamic": false, - "info": "", - "name": "model_name", - "options": [ - "gpt-4o-mini", - "gpt-4o", - "gpt-4-turbo", - "gpt-4-turbo-preview", - "gpt-4", - "gpt-3.5-turbo", - "gpt-3.5-turbo-0125" - ], - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "str", - "value": "gpt-4o" - }, - "openai_api_base": { - "advanced": true, - "display_name": "OpenAI API Base", - "dynamic": false, - "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.", - "list": false, - "load_from_db": false, - "name": "openai_api_base", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "output_schema": { - "advanced": true, - "display_name": "Schema", - "dynamic": false, - "info": "The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.", - "list": true, - "name": "output_schema", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "type": "dict", - "value": {} - }, - "seed": { - "advanced": true, - "display_name": "Seed", - "dynamic": false, - "info": "The seed controls the reproducibility of the job.", - "list": false, - "name": "seed", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "int", - "value": 1 - }, - "stream": { - "advanced": true, - "display_name": "Stream", - "dynamic": false, - "info": "Stream the response from the model. 
Streaming works only in Chat.", - "list": false, - "name": "stream", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": false - }, - "system_message": { - "advanced": true, - "display_name": "System Message", - "dynamic": false, - "info": "System message to pass to the model.", - "list": false, - "load_from_db": false, - "name": "system_message", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "temperature": { - "advanced": false, - "display_name": "Temperature", - "dynamic": false, - "info": "", - "list": false, - "name": "temperature", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "float", - "value": 0.1 - } - } - }, - "type": "OpenAIModel" - }, - "dragging": false, - "height": 623, - "id": "OpenAIModel-1hwZ2", - "position": { - "x": 1264.0039093582332, - "y": -67.93731748926709 - }, - "positionAbsolute": { - "x": 1264.0039093582332, - "y": -67.93731748926709 - }, - "selected": false, - "type": "genericNode", - "width": 384 - }, - { - "data": { - "description": "Convert Data into plain text following a specified template.", - "display_name": "Parse Data", - "id": "ParseData-qYLes", - "node": { - "base_classes": [ - "Message" - ], - "beta": false, - "conditional_paths": [], - "custom_fields": {}, - "description": "Convert Data into plain text following a specified template.", - "display_name": "Parse Data", - "documentation": "", - "edited": false, - "field_order": [ - "data", - "template", - "sep" - ], - "frozen": false, - "icon": "braces", - "output_types": [], - "outputs": [ - { - "cache": true, - "display_name": "Text", - "method": "parse_data", - "name": "text", - "selected": "Message", - "types": [ - "Message" - ], - "value": "__UNDEFINED__" - } - ], - "pinned": false, - "template": { - "_type": "Component", - "code": { - "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", - "info": "", - "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, - "placeholder": "", - "required": true, - "show": true, - "title_case": false, - "type": "code", - "value": "from langflow.custom import Component\nfrom langflow.helpers.data import data_to_text\nfrom langflow.io import DataInput, MultilineInput, Output, StrInput\nfrom langflow.schema.message import Message\n\n\nclass ParseDataComponent(Component):\n display_name = \"Parse Data\"\n description = \"Convert Data into plain text following a specified template.\"\n icon = \"braces\"\n name = \"ParseData\"\n\n inputs = [\n DataInput(name=\"data\", display_name=\"Data\", info=\"The data to convert to text.\"),\n MultilineInput(\n name=\"template\",\n display_name=\"Template\",\n info=\"The template to use for formatting the data. 
It can contain the keys {text}, {data} or any other key in the Data.\",\n value=\"{text}\",\n ),\n StrInput(name=\"sep\", display_name=\"Separator\", advanced=True, value=\"\\n\"),\n ]\n\n outputs = [\n Output(display_name=\"Text\", name=\"text\", method=\"parse_data\"),\n ]\n\n def parse_data(self) -> Message:\n data = self.data if isinstance(self.data, list) else [self.data]\n template = self.template\n\n result_string = data_to_text(template, data, sep=self.sep)\n self.status = result_string\n return Message(text=result_string)\n" - }, - "data": { - "advanced": false, - "display_name": "Data", - "dynamic": false, - "info": "The data to convert to text.", - "input_types": [ - "Data" - ], - "list": false, - "name": "data", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "other", - "value": "" - }, - "sep": { - "advanced": true, - "display_name": "Separator", - "dynamic": false, - "info": "", - "list": false, - "load_from_db": false, - "name": "sep", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "str", - "value": "\n" - }, - "template": { - "advanced": false, - "display_name": "Template", - "dynamic": false, - "info": "The template to use for formatting the data. It can contain the keys {text}, {data} or any other key in the Data.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "multiline": true, - "name": "template", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "{text}" - } - } - }, - "type": "ParseData" - }, - "dragging": false, - "height": 385, - "id": "ParseData-qYLes", - "position": { - "x": 87.26129917199853, - "y": -181.46350622708565 - }, - "positionAbsolute": { - "x": 87.26129917199853, - "y": -181.46350622708565 - }, - "selected": false, - "type": "genericNode", - "width": 384 - }, - { - "data": { - "description": "A generic file loader.", - "display_name": "File", - "id": "File-0oa6O", - "node": { - "base_classes": [ - "Data" - ], - "beta": false, - "conditional_paths": [], - "custom_fields": {}, - "description": "A generic file loader.", - "display_name": "File", - "documentation": "", - "edited": false, - "field_order": [ - "path", - "silent_errors" - ], - "frozen": false, - "icon": "file-text", - "output_types": [], - "outputs": [ - { - "cache": true, - "display_name": "Data", - "method": "load_file", - "name": "data", - "selected": "Data", - "types": [ - "Data" - ], - "value": "__UNDEFINED__" - } - ], - "pinned": false, - "template": { - "_type": "Component", - "code": { - "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", - "info": "", - "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, - "placeholder": "", - "required": true, - "show": true, - "title_case": false, - "type": "code", - "value": "from pathlib import Path\n\nfrom langflow.base.data.utils import TEXT_FILE_TYPES, parse_text_file_to_data\nfrom langflow.custom import Component\nfrom langflow.io import BoolInput, FileInput, Output\nfrom langflow.schema import Data\n\n\nclass FileComponent(Component):\n display_name = \"File\"\n description = \"A generic file loader.\"\n icon = \"file-text\"\n name = \"File\"\n\n inputs = [\n FileInput(\n name=\"path\",\n display_name=\"Path\",\n file_types=TEXT_FILE_TYPES,\n 
info=f\"Supported file types: {', '.join(TEXT_FILE_TYPES)}\",\n ),\n BoolInput(\n name=\"silent_errors\",\n display_name=\"Silent Errors\",\n advanced=True,\n info=\"If true, errors will not raise an exception.\",\n ),\n ]\n\n outputs = [\n Output(display_name=\"Data\", name=\"data\", method=\"load_file\"),\n ]\n\n def load_file(self) -> Data:\n if not self.path:\n raise ValueError(\"Please, upload a file to use this component.\")\n resolved_path = self.resolve_path(self.path)\n silent_errors = self.silent_errors\n\n extension = Path(resolved_path).suffix[1:].lower()\n\n if extension == \"doc\":\n raise ValueError(\"doc files are not supported. Please save as .docx\")\n if extension not in TEXT_FILE_TYPES:\n raise ValueError(f\"Unsupported file type: {extension}\")\n\n data = parse_text_file_to_data(resolved_path, silent_errors)\n self.status = data if data else \"No data\"\n return data or Data()\n" - }, - "path": { - "advanced": false, - "display_name": "Path", - "dynamic": false, - "fileTypes": [ - "txt", - "md", - "mdx", - "csv", - "json", - "yaml", - "yml", - "xml", - "html", - "htm", - "pdf", - "docx", - "py", - "sh", - "sql", - "js", - "ts", - "tsx" - ], - "file_path": "049e2133-b45d-44a3-906a-1e8be93d9d7b/Prompt Engineering Guide 3ff66c517bff423c863bbb1c0eb21be8.md", - "info": "Supported file types: txt, md, mdx, csv, json, yaml, yml, xml, html, htm, pdf, docx, py, sh, sql, js, ts, tsx", - "list": false, - "name": "path", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "file", - "value": "" - }, - "silent_errors": { - "advanced": true, - "display_name": "Silent Errors", - "dynamic": false, - "info": "If true, errors will not raise an exception.", - "list": false, - "name": "silent_errors", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": false - } - } - }, - "type": "File" - }, - "dragging": false, - "height": 301, - "id": "File-0oa6O", - "position": { - "x": -462.90407701896845, - "y": -316.82165433756165 - }, - "positionAbsolute": { - "x": -462.90407701896845, - "y": -316.82165433756165 - }, - "selected": false, - "type": "genericNode", - "width": 384 - } - ], - "viewport": { - "x": 338.5057346133326, - "y": 271.3577975536086, - "zoom": 0.36856730432277524 - } - }, - "description": "This flow integrates PDF reading with a language model to answer document-specific questions. 
Ideal for small-scale texts, it facilitates direct queries with immediate insights.", - "endpoint_name": null, - "id": "6fb449a7-563c-446e-82d2-36f3defb9a48", - "is_component": false, - "last_tested_version": "1.0.9", - "name": "Document QA" -} \ No newline at end of file diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Hierarchical Agent.json b/src/backend/base/langflow/initial_setup/starter_projects/Hierarchical Agent.json deleted file mode 100644 index ce0a3442a364..000000000000 --- a/src/backend/base/langflow/initial_setup/starter_projects/Hierarchical Agent.json +++ /dev/null @@ -1,2761 +0,0 @@ -{ - "data": { - "edges": [ - { - "className": "", - "data": { - "sourceHandle": { - "dataType": "HierarchicalCrewComponent", - "id": "HierarchicalCrewComponent-Y0Uvf", - "name": "output", - "output_types": [ - "Message" - ] - }, - "targetHandle": { - "fieldName": "input_value", - "id": "ChatOutput-VzVJK", - "inputTypes": [ - "Message" - ], - "type": "str" - } - }, - "id": "reactflow__edge-HierarchicalCrewComponent-Y0Uvf{œdataTypeœ:œHierarchicalCrewComponentœ,œidœ:œHierarchicalCrewComponent-Y0Uvfœ,œnameœ:œoutputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-VzVJK{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-VzVJKœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", - "selected": false, - "source": "HierarchicalCrewComponent-Y0Uvf", - "sourceHandle": "{œdataTypeœ: œHierarchicalCrewComponentœ, œidœ: œHierarchicalCrewComponent-Y0Uvfœ, œnameœ: œoutputœ, œoutput_typesœ: [œMessageœ]}", - "target": "ChatOutput-VzVJK", - "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-VzVJKœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" - }, - { - "className": "", - "data": { - "sourceHandle": { - "dataType": "HierarchicalTaskComponent", - "id": "HierarchicalTaskComponent-hE8H5", - "name": "task_output", - "output_types": [ - "HierarchicalTask" - ] - }, - "targetHandle": { - "fieldName": "tasks", - "id": "HierarchicalCrewComponent-Y0Uvf", - "inputTypes": [ - "HierarchicalTask" - ], - "type": "other" - } - }, - "id": "reactflow__edge-HierarchicalTaskComponent-hE8H5{œdataTypeœ:œHierarchicalTaskComponentœ,œidœ:œHierarchicalTaskComponent-hE8H5œ,œnameœ:œtask_outputœ,œoutput_typesœ:[œHierarchicalTaskœ]}-HierarchicalCrewComponent-Y0Uvf{œfieldNameœ:œtasksœ,œidœ:œHierarchicalCrewComponent-Y0Uvfœ,œinputTypesœ:[œHierarchicalTaskœ],œtypeœ:œotherœ}", - "selected": false, - "source": "HierarchicalTaskComponent-hE8H5", - "sourceHandle": "{œdataTypeœ: œHierarchicalTaskComponentœ, œidœ: œHierarchicalTaskComponent-hE8H5œ, œnameœ: œtask_outputœ, œoutput_typesœ: [œHierarchicalTaskœ]}", - "target": "HierarchicalCrewComponent-Y0Uvf", - "targetHandle": "{œfieldNameœ: œtasksœ, œidœ: œHierarchicalCrewComponent-Y0Uvfœ, œinputTypesœ: [œHierarchicalTaskœ], œtypeœ: œotherœ}" - }, - { - "className": "", - "data": { - "sourceHandle": { - "dataType": "CrewAIAgentComponent", - "id": "CrewAIAgentComponent-EbpXd", - "name": "output", - "output_types": [ - "Agent" - ] - }, - "targetHandle": { - "fieldName": "agents", - "id": "HierarchicalCrewComponent-Y0Uvf", - "inputTypes": [ - "Agent" - ], - "type": "other" - } - }, - "id": "reactflow__edge-CrewAIAgentComponent-EbpXd{œdataTypeœ:œCrewAIAgentComponentœ,œidœ:œCrewAIAgentComponent-EbpXdœ,œnameœ:œoutputœ,œoutput_typesœ:[œAgentœ]}-HierarchicalCrewComponent-Y0Uvf{œfieldNameœ:œagentsœ,œidœ:œHierarchicalCrewComponent-Y0Uvfœ,œinputTypesœ:[œAgentœ],œtypeœ:œotherœ}", - "selected": false, - "source": "CrewAIAgentComponent-EbpXd", - "sourceHandle": "{œdataTypeœ: œCrewAIAgentComponentœ, œidœ: 
œCrewAIAgentComponent-EbpXdœ, œnameœ: œoutputœ, œoutput_typesœ: [œAgentœ]}", - "target": "HierarchicalCrewComponent-Y0Uvf", - "targetHandle": "{œfieldNameœ: œagentsœ, œidœ: œHierarchicalCrewComponent-Y0Uvfœ, œinputTypesœ: [œAgentœ], œtypeœ: œotherœ}" - }, - { - "className": "", - "data": { - "sourceHandle": { - "dataType": "OpenAIModel", - "id": "OpenAIModel-Yjtpu", - "name": "model_output", - "output_types": [ - "LanguageModel" - ] - }, - "targetHandle": { - "fieldName": "llm", - "id": "CrewAIAgentComponent-EbpXd", - "inputTypes": [ - "LanguageModel" - ], - "type": "other" - } - }, - "id": "reactflow__edge-OpenAIModel-Yjtpu{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-Yjtpuœ,œnameœ:œmodel_outputœ,œoutput_typesœ:[œLanguageModelœ]}-CrewAIAgentComponent-EbpXd{œfieldNameœ:œllmœ,œidœ:œCrewAIAgentComponent-EbpXdœ,œinputTypesœ:[œLanguageModelœ],œtypeœ:œotherœ}", - "selected": false, - "source": "OpenAIModel-Yjtpu", - "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-Yjtpuœ, œnameœ: œmodel_outputœ, œoutput_typesœ: [œLanguageModelœ]}", - "target": "CrewAIAgentComponent-EbpXd", - "targetHandle": "{œfieldNameœ: œllmœ, œidœ: œCrewAIAgentComponent-EbpXdœ, œinputTypesœ: [œLanguageModelœ], œtypeœ: œotherœ}" - }, - { - "className": "", - "data": { - "sourceHandle": { - "dataType": "CrewAIAgentComponent", - "id": "CrewAIAgentComponent-9D8ao", - "name": "output", - "output_types": [ - "Agent" - ] - }, - "targetHandle": { - "fieldName": "manager_agent", - "id": "HierarchicalCrewComponent-Y0Uvf", - "inputTypes": [ - "Agent" - ], - "type": "other" - } - }, - "id": "reactflow__edge-CrewAIAgentComponent-9D8ao{œdataTypeœ:œCrewAIAgentComponentœ,œidœ:œCrewAIAgentComponent-9D8aoœ,œnameœ:œoutputœ,œoutput_typesœ:[œAgentœ]}-HierarchicalCrewComponent-Y0Uvf{œfieldNameœ:œmanager_agentœ,œidœ:œHierarchicalCrewComponent-Y0Uvfœ,œinputTypesœ:[œAgentœ],œtypeœ:œotherœ}", - "selected": false, - "source": "CrewAIAgentComponent-9D8ao", - "sourceHandle": "{œdataTypeœ: œCrewAIAgentComponentœ, œidœ: œCrewAIAgentComponent-9D8aoœ, œnameœ: œoutputœ, œoutput_typesœ: [œAgentœ]}", - "target": "HierarchicalCrewComponent-Y0Uvf", - "targetHandle": "{œfieldNameœ: œmanager_agentœ, œidœ: œHierarchicalCrewComponent-Y0Uvfœ, œinputTypesœ: [œAgentœ], œtypeœ: œotherœ}" - }, - { - "className": "", - "data": { - "sourceHandle": { - "dataType": "OpenAIModel", - "id": "OpenAIModel-HgNnu", - "name": "model_output", - "output_types": [ - "LanguageModel" - ] - }, - "targetHandle": { - "fieldName": "llm", - "id": "CrewAIAgentComponent-9D8ao", - "inputTypes": [ - "LanguageModel" - ], - "type": "other" - } - }, - "id": "reactflow__edge-OpenAIModel-HgNnu{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-HgNnuœ,œnameœ:œmodel_outputœ,œoutput_typesœ:[œLanguageModelœ]}-CrewAIAgentComponent-9D8ao{œfieldNameœ:œllmœ,œidœ:œCrewAIAgentComponent-9D8aoœ,œinputTypesœ:[œLanguageModelœ],œtypeœ:œotherœ}", - "selected": false, - "source": "OpenAIModel-HgNnu", - "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-HgNnuœ, œnameœ: œmodel_outputœ, œoutput_typesœ: [œLanguageModelœ]}", - "target": "CrewAIAgentComponent-9D8ao", - "targetHandle": "{œfieldNameœ: œllmœ, œidœ: œCrewAIAgentComponent-9D8aoœ, œinputTypesœ: [œLanguageModelœ], œtypeœ: œotherœ}" - }, - { - "className": "", - "data": { - "sourceHandle": { - "dataType": "Prompt", - "id": "Prompt-eqGhn", - "name": "prompt", - "output_types": [ - "Message" - ] - }, - "targetHandle": { - "fieldName": "task_description", - "id": "HierarchicalTaskComponent-hE8H5", - "inputTypes": [ - "Message" - ], - "type": "str" - 
} - }, - "id": "reactflow__edge-Prompt-eqGhn{œdataTypeœ:œPromptœ,œidœ:œPrompt-eqGhnœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-HierarchicalTaskComponent-hE8H5{œfieldNameœ:œtask_descriptionœ,œidœ:œHierarchicalTaskComponent-hE8H5œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", - "source": "Prompt-eqGhn", - "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-eqGhnœ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", - "target": "HierarchicalTaskComponent-hE8H5", - "targetHandle": "{œfieldNameœ: œtask_descriptionœ, œidœ: œHierarchicalTaskComponent-hE8H5œ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" - }, - { - "className": "", - "data": { - "sourceHandle": { - "dataType": "ChatInput", - "id": "ChatInput-xgRl9", - "name": "message", - "output_types": [ - "Message" - ] - }, - "targetHandle": { - "fieldName": "query", - "id": "Prompt-eqGhn", - "inputTypes": [ - "Message", - "Text" - ], - "type": "str" - } - }, - "id": "reactflow__edge-ChatInput-xgRl9{œdataTypeœ:œChatInputœ,œidœ:œChatInput-xgRl9œ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Prompt-eqGhn{œfieldNameœ:œqueryœ,œidœ:œPrompt-eqGhnœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", - "source": "ChatInput-xgRl9", - "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-xgRl9œ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}", - "target": "Prompt-eqGhn", - "targetHandle": "{œfieldNameœ: œqueryœ, œidœ: œPrompt-eqGhnœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" - }, - { - "className": "", - "data": { - "sourceHandle": { - "dataType": "CrewAIAgentComponent", - "id": "CrewAIAgentComponent-UMpxO", - "name": "output", - "output_types": [ - "Agent" - ] - }, - "targetHandle": { - "fieldName": "agents", - "id": "HierarchicalCrewComponent-Y0Uvf", - "inputTypes": [ - "Agent" - ], - "type": "other" - } - }, - "id": "reactflow__edge-CrewAIAgentComponent-UMpxO{œdataTypeœ:œCrewAIAgentComponentœ,œidœ:œCrewAIAgentComponent-UMpxOœ,œnameœ:œoutputœ,œoutput_typesœ:[œAgentœ]}-HierarchicalCrewComponent-Y0Uvf{œfieldNameœ:œagentsœ,œidœ:œHierarchicalCrewComponent-Y0Uvfœ,œinputTypesœ:[œAgentœ],œtypeœ:œotherœ}", - "source": "CrewAIAgentComponent-UMpxO", - "sourceHandle": "{œdataTypeœ: œCrewAIAgentComponentœ, œidœ: œCrewAIAgentComponent-UMpxOœ, œnameœ: œoutputœ, œoutput_typesœ: [œAgentœ]}", - "target": "HierarchicalCrewComponent-Y0Uvf", - "targetHandle": "{œfieldNameœ: œagentsœ, œidœ: œHierarchicalCrewComponent-Y0Uvfœ, œinputTypesœ: [œAgentœ], œtypeœ: œotherœ}" - }, - { - "className": "", - "data": { - "sourceHandle": { - "dataType": "OpenAIModel", - "id": "OpenAIModel-Yjtpu", - "name": "model_output", - "output_types": [ - "LanguageModel" - ] - }, - "targetHandle": { - "fieldName": "llm", - "id": "CrewAIAgentComponent-UMpxO", - "inputTypes": [ - "LanguageModel" - ], - "type": "other" - } - }, - "id": "reactflow__edge-OpenAIModel-Yjtpu{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-Yjtpuœ,œnameœ:œmodel_outputœ,œoutput_typesœ:[œLanguageModelœ]}-CrewAIAgentComponent-UMpxO{œfieldNameœ:œllmœ,œidœ:œCrewAIAgentComponent-UMpxOœ,œinputTypesœ:[œLanguageModelœ],œtypeœ:œotherœ}", - "source": "OpenAIModel-Yjtpu", - "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-Yjtpuœ, œnameœ: œmodel_outputœ, œoutput_typesœ: [œLanguageModelœ]}", - "target": "CrewAIAgentComponent-UMpxO", - "targetHandle": "{œfieldNameœ: œllmœ, œidœ: œCrewAIAgentComponent-UMpxOœ, œinputTypesœ: [œLanguageModelœ], œtypeœ: œotherœ}" - }, - { - "className": "", - "data": { - "sourceHandle": { - "dataType": "SearchAPI", - "id": "SearchAPI-Yokat", - "name": "api_build_tool", - "output_types": [ - "Tool" 
- ] - }, - "targetHandle": { - "fieldName": "tools", - "id": "CrewAIAgentComponent-EbpXd", - "inputTypes": [ - "Tool" - ], - "type": "other" - } - }, - "id": "reactflow__edge-SearchAPI-Yokat{œdataTypeœ:œSearchAPIœ,œidœ:œSearchAPI-Yokatœ,œnameœ:œapi_build_toolœ,œoutput_typesœ:[œToolœ]}-CrewAIAgentComponent-EbpXd{œfieldNameœ:œtoolsœ,œidœ:œCrewAIAgentComponent-EbpXdœ,œinputTypesœ:[œToolœ],œtypeœ:œotherœ}", - "source": "SearchAPI-Yokat", - "sourceHandle": "{œdataTypeœ: œSearchAPIœ, œidœ: œSearchAPI-Yokatœ, œnameœ: œapi_build_toolœ, œoutput_typesœ: [œToolœ]}", - "target": "CrewAIAgentComponent-EbpXd", - "targetHandle": "{œfieldNameœ: œtoolsœ, œidœ: œCrewAIAgentComponent-EbpXdœ, œinputTypesœ: [œToolœ], œtypeœ: œotherœ}" - } - ], - "nodes": [ - { - "data": { - "description": "Represents a group of agents, defining how they should collaborate and the tasks they should perform.", - "display_name": "Hierarchical Crew", - "id": "HierarchicalCrewComponent-Y0Uvf", - "node": { - "base_classes": [ - "Message" - ], - "beta": false, - "conditional_paths": [], - "custom_fields": {}, - "description": "Represents a group of agents, defining how they should collaborate and the tasks they should perform.", - "display_name": "Hierarchical Crew", - "documentation": "", - "edited": false, - "field_order": [ - "verbose", - "memory", - "use_cache", - "max_rpm", - "share_crew", - "function_calling_llm", - "agents", - "tasks", - "manager_llm", - "manager_agent" - ], - "frozen": false, - "icon": "CrewAI", - "output_types": [], - "outputs": [ - { - "cache": true, - "display_name": "Output", - "method": "build_output", - "name": "output", - "selected": "Message", - "types": [ - "Message" - ], - "value": "__UNDEFINED__" - } - ], - "pinned": false, - "template": { - "_type": "Component", - "agents": { - "advanced": false, - "display_name": "Agents", - "dynamic": false, - "info": "", - "input_types": [ - "Agent" - ], - "list": true, - "name": "agents", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "other", - "value": "" - }, - "code": { - "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", - "info": "", - "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, - "placeholder": "", - "required": true, - "show": true, - "title_case": false, - "type": "code", - "value": "from crewai import Crew, Process # type: ignore\n\nfrom langflow.base.agents.crewai.crew import BaseCrewComponent\nfrom langflow.io import HandleInput\n\n\nclass HierarchicalCrewComponent(BaseCrewComponent):\n display_name: str = \"Hierarchical Crew\"\n description: str = (\n \"Represents a group of agents, defining how they should collaborate and the tasks they should perform.\"\n )\n documentation: str = \"https://docs.crewai.com/how-to/Hierarchical/\"\n icon = \"CrewAI\"\n\n inputs = BaseCrewComponent._base_inputs + [\n HandleInput(name=\"agents\", display_name=\"Agents\", input_types=[\"Agent\"], is_list=True),\n HandleInput(name=\"tasks\", display_name=\"Tasks\", input_types=[\"HierarchicalTask\"], is_list=True),\n HandleInput(name=\"manager_llm\", display_name=\"Manager LLM\", input_types=[\"LanguageModel\"], required=False),\n HandleInput(name=\"manager_agent\", display_name=\"Manager Agent\", input_types=[\"Agent\"], required=False),\n ]\n\n def build_crew(self) -> Crew:\n tasks, agents = self.get_tasks_and_agents()\n crew = Crew(\n agents=agents,\n tasks=tasks,\n process=Process.hierarchical,\n 
verbose=self.verbose,\n memory=self.memory,\n cache=self.use_cache,\n max_rpm=self.max_rpm,\n share_crew=self.share_crew,\n function_calling_llm=self.function_calling_llm,\n manager_agent=self.manager_agent,\n manager_llm=self.manager_llm,\n step_callback=self.get_step_callback(),\n task_callback=self.get_task_callback(),\n )\n return crew\n" - }, - "function_calling_llm": { - "advanced": true, - "display_name": "Function Calling LLM", - "dynamic": false, - "info": "Turns the ReAct CrewAI agent into a function-calling agent", - "input_types": [ - "LanguageModel" - ], - "list": false, - "name": "function_calling_llm", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "other", - "value": "" - }, - "manager_agent": { - "advanced": false, - "display_name": "Manager Agent", - "dynamic": false, - "info": "", - "input_types": [ - "Agent" - ], - "list": false, - "name": "manager_agent", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "other", - "value": "" - }, - "manager_llm": { - "advanced": false, - "display_name": "Manager LLM", - "dynamic": false, - "info": "", - "input_types": [ - "LanguageModel" - ], - "list": false, - "name": "manager_llm", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "other", - "value": "" - }, - "max_rpm": { - "advanced": true, - "display_name": "Max RPM", - "dynamic": false, - "info": "", - "list": false, - "name": "max_rpm", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "int", - "value": 100 - }, - "memory": { - "advanced": true, - "display_name": "Memory", - "dynamic": false, - "info": "", - "list": false, - "name": "memory", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": false - }, - "share_crew": { - "advanced": true, - "display_name": "Share Crew", - "dynamic": false, - "info": "", - "list": false, - "name": "share_crew", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": false - }, - "tasks": { - "advanced": false, - "display_name": "Tasks", - "dynamic": false, - "info": "", - "input_types": [ - "HierarchicalTask" - ], - "list": true, - "name": "tasks", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "other", - "value": "" - }, - "use_cache": { - "advanced": true, - "display_name": "Cache", - "dynamic": false, - "info": "", - "list": false, - "name": "use_cache", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": true - }, - "verbose": { - "advanced": true, - "display_name": "Verbose", - "dynamic": false, - "info": "", - "list": false, - "name": "verbose", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "int", - "value": 0 - } - } - }, - "type": "HierarchicalCrewComponent" - }, - "dragging": false, - "height": 516, - "id": "HierarchicalCrewComponent-Y0Uvf", - "position": { - "x": 568, - "y": 352.296875 - }, - "selected": false, - "type": "genericNode", - "width": 432 - }, - { - "data": { - "description": "Generates text using OpenAI LLMs.", - "display_name": "OpenAI", - 
"id": "OpenAIModel-Yjtpu", - "node": { - "base_classes": [ - "LanguageModel", - "Message" - ], - "beta": false, - "conditional_paths": [], - "custom_fields": {}, - "description": "Generates text using OpenAI LLMs.", - "display_name": "OpenAI", - "documentation": "", - "edited": false, - "field_order": [ - "input_value", - "system_message", - "stream", - "max_tokens", - "model_kwargs", - "json_mode", - "output_schema", - "model_name", - "openai_api_base", - "api_key", - "temperature", - "seed" - ], - "frozen": false, - "icon": "OpenAI", - "output_types": [], - "outputs": [ - { - "cache": true, - "display_name": "Text", - "method": "text_response", - "name": "text_output", - "selected": "Message", - "types": [ - "Message" - ], - "value": "__UNDEFINED__" - }, - { - "cache": true, - "display_name": "Language Model", - "method": "build_model", - "name": "model_output", - "selected": "LanguageModel", - "types": [ - "LanguageModel" - ], - "value": "__UNDEFINED__" - } - ], - "pinned": false, - "template": { - "_type": "Component", - "api_key": { - "advanced": false, - "display_name": "OpenAI API Key", - "dynamic": false, - "info": "The OpenAI API Key to use for the OpenAI model.", - "input_types": [ - "Message" - ], - "load_from_db": true, - "name": "api_key", - "password": true, - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "type": "str", - "value": "" - }, - "code": { - "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", - "info": "", - "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, - "placeholder": "", - "required": true, - "show": true, - "title_case": false, - "type": "code", - "value": "import operator\nfrom functools import reduce\n\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. 
You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature if temperature is not None else 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output # type: ignore\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" - }, - "input_value": { - "advanced": false, - "display_name": "Input", - "dynamic": false, - "info": "", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "name": "input_value", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "json_mode": { - "advanced": true, - "display_name": "JSON Mode", - "dynamic": false, - "info": "If True, it will output JSON regardless of passing a schema.", - "list": false, - "name": "json_mode", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": false - }, - "max_tokens": { - "advanced": true, - "display_name": "Max Tokens", - "dynamic": false, - "info": "The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.", - "list": false, - "name": "max_tokens", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "int", - "value": "" - }, - "model_kwargs": { - "advanced": true, - "display_name": "Model Kwargs", - "dynamic": false, - "info": "", - "list": false, - "name": "model_kwargs", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "type": "dict", - "value": {} - }, - "model_name": { - "advanced": false, - "display_name": "Model Name", - "dynamic": false, - "info": "", - "load_from_db": false, - "name": "model_name", - "options": [ - "gpt-4o-mini", - "gpt-4o", - "gpt-4-turbo", - "gpt-4-turbo-preview", - "gpt-4", - "gpt-3.5-turbo", - "gpt-3.5-turbo-0125" - ], - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "str", - "value": "gpt-4o" - }, - "openai_api_base": { - "advanced": true, - "display_name": "OpenAI API Base", - "dynamic": false, - "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.", - "list": false, - "load_from_db": false, - "name": "openai_api_base", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "output_schema": { - "advanced": true, - "display_name": "Schema", - "dynamic": false, - "info": "The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.", - "list": true, - "name": "output_schema", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "type": "dict", - "value": {} - }, - "seed": { - "advanced": true, - "display_name": "Seed", - "dynamic": false, - "info": "The seed controls the reproducibility of the job.", - "list": false, - "name": "seed", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "int", - "value": 1 - }, - "stream": { - "advanced": true, - "display_name": "Stream", - "dynamic": false, - "info": "Stream the response from the model. 
Streaming works only in Chat.", - "list": false, - "name": "stream", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": false - }, - "system_message": { - "advanced": true, - "display_name": "System Message", - "dynamic": false, - "info": "System message to pass to the model.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "name": "system_message", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "temperature": { - "advanced": false, - "display_name": "Temperature", - "dynamic": false, - "info": "", - "list": false, - "name": "temperature", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "float", - "value": 0.1 - } - } - }, - "type": "OpenAIModel" - }, - "dragging": false, - "height": 700, - "id": "OpenAIModel-Yjtpu", - "position": { - "x": -1222.8457213471152, - "y": 699.9983009041272 - }, - "positionAbsolute": { - "x": -1222.8457213471152, - "y": 699.9983009041272 - }, - "selected": false, - "type": "genericNode", - "width": 432 - }, - { - "data": { - "description": "Display a chat message in the Playground.", - "display_name": "Chat Output", - "id": "ChatOutput-VzVJK", - "node": { - "base_classes": [ - "Message" - ], - "beta": false, - "conditional_paths": [], - "custom_fields": {}, - "description": "Display a chat message in the Playground.", - "display_name": "Chat Output", - "documentation": "", - "edited": false, - "field_order": [ - "input_value", - "should_store_message", - "sender", - "sender_name", - "session_id", - "data_template" - ], - "frozen": false, - "icon": "ChatOutput", - "output_types": [], - "outputs": [ - { - "cache": true, - "display_name": "Message", - "method": "message_response", - "name": "message", - "selected": "Message", - "types": [ - "Message" - ], - "value": "__UNDEFINED__" - } - ], - "pinned": false, - "template": { - "_type": "Component", - "code": { - "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", - "info": "", - "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, - "placeholder": "", - "required": true, - "show": true, - "title_case": false, - "type": "code", - "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER, MESSAGE_SENDER_AI\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the 
sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n" - }, - "data_template": { - "advanced": true, - "display_name": "Data Template", - "dynamic": false, - "info": "Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "name": "data_template", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "{text}" - }, - "input_value": { - "advanced": false, - "display_name": "Text", - "dynamic": false, - "info": "Message to be passed as output.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "name": "input_value", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "sender": { - "advanced": true, - "display_name": "Sender Type", - "dynamic": false, - "info": "Type of sender.", - "name": "sender", - "options": [ - "Machine", - "User" - ], - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "str", - "value": "Machine" - }, - "sender_name": { - "advanced": true, - "display_name": "Sender Name", - "dynamic": false, - "info": "Name of the sender.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "name": "sender_name", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "AI" - }, - "session_id": { - "advanced": true, - "display_name": "Session ID", - "dynamic": false, - "info": "The session ID of the chat. 
If empty, the current session ID parameter will be used.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "name": "session_id", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "should_store_message": { - "advanced": true, - "display_name": "Store Messages", - "dynamic": false, - "info": "Store the message in the history.", - "list": false, - "name": "should_store_message", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": true - } - } - }, - "type": "ChatOutput" - }, - "dragging": false, - "height": 347, - "id": "ChatOutput-VzVJK", - "position": { - "x": 1070.9148596889393, - "y": 499.80777483894144 - }, - "positionAbsolute": { - "x": 1070.9148596889393, - "y": 499.80777483894144 - }, - "selected": false, - "type": "genericNode", - "width": 432 - }, - { - "data": { - "description": "Each task must have a description, an expected output and an agent responsible for execution.", - "display_name": "Hierarchical Task", - "id": "HierarchicalTaskComponent-hE8H5", - "node": { - "base_classes": [ - "HierarchicalTask" - ], - "beta": false, - "conditional_paths": [], - "custom_fields": {}, - "description": "Each task must have a description, an expected output and an agent responsible for execution.", - "display_name": "Hierarchical Task", - "documentation": "", - "edited": false, - "field_order": [ - "task_description", - "expected_output", - "tools" - ], - "frozen": false, - "icon": "CrewAI", - "output_types": [], - "outputs": [ - { - "cache": true, - "display_name": "Task", - "method": "build_task", - "name": "task_output", - "selected": "HierarchicalTask", - "types": [ - "HierarchicalTask" - ], - "value": "__UNDEFINED__" - } - ], - "pinned": false, - "template": { - "_type": "Component", - "code": { - "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", - "info": "", - "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, - "placeholder": "", - "required": true, - "show": true, - "title_case": false, - "type": "code", - "value": "from langflow.base.agents.crewai.tasks import HierarchicalTask\nfrom langflow.custom import Component\nfrom langflow.io import HandleInput, MultilineInput, Output\n\n\nclass HierarchicalTaskComponent(Component):\n display_name: str = \"Hierarchical Task\"\n description: str = \"Each task must have a description, an expected output and an agent responsible for execution.\"\n icon = \"CrewAI\"\n inputs = [\n MultilineInput(\n name=\"task_description\",\n display_name=\"Description\",\n info=\"Descriptive text detailing task's purpose and execution.\",\n ),\n MultilineInput(\n name=\"expected_output\",\n display_name=\"Expected Output\",\n info=\"Clear definition of expected task outcome.\",\n ),\n HandleInput(\n name=\"tools\",\n display_name=\"Tools\",\n input_types=[\"Tool\"],\n is_list=True,\n info=\"List of tools/resources limited for task execution. 
Uses the Agent tools by default.\",\n required=False,\n advanced=True,\n ),\n ]\n\n outputs = [\n Output(display_name=\"Task\", name=\"task_output\", method=\"build_task\"),\n ]\n\n def build_task(self) -> HierarchicalTask:\n task = HierarchicalTask(\n description=self.task_description,\n expected_output=self.expected_output,\n tools=self.tools or [],\n )\n self.status = task\n return task\n" - }, - "expected_output": { - "advanced": false, - "display_name": "Expected Output", - "dynamic": false, - "info": "Clear definition of expected task outcome.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "multiline": true, - "name": "expected_output", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "Succinct response that answers the User's query." - }, - "task_description": { - "advanced": false, - "display_name": "Description", - "dynamic": false, - "info": "Descriptive text detailing task's purpose and execution.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "multiline": true, - "name": "task_description", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "tools": { - "advanced": true, - "display_name": "Tools", - "dynamic": false, - "info": "List of tools/resources limited for task execution. Uses the Agent tools by default.", - "input_types": [ - "Tool" - ], - "list": true, - "name": "tools", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "other", - "value": "" - } - } - }, - "type": "HierarchicalTaskComponent" - }, - "dragging": false, - "height": 480, - "id": "HierarchicalTaskComponent-hE8H5", - "position": { - "x": 63.673086094601445, - "y": -375.5883288441139 - }, - "positionAbsolute": { - "x": 63.673086094601445, - "y": -375.5883288441139 - }, - "selected": false, - "type": "genericNode", - "width": 432 - }, - { - "data": { - "description": "Represents an agent of CrewAI.", - "display_name": "CrewAI Agent", - "id": "CrewAIAgentComponent-EbpXd", - "node": { - "base_classes": [ - "Agent" - ], - "beta": false, - "conditional_paths": [], - "custom_fields": {}, - "description": "Represents an agent of CrewAI.", - "display_name": "CrewAI Agent", - "documentation": "https://docs.crewai.com/how-to/LLM-Connections/", - "edited": false, - "field_order": [ - "role", - "goal", - "backstory", - "tools", - "llm", - "memory", - "verbose", - "allow_delegation", - "allow_code_execution", - "kwargs" - ], - "frozen": false, - "icon": "CrewAI", - "output_types": [], - "outputs": [ - { - "cache": true, - "display_name": "Agent", - "method": "build_output", - "name": "output", - "selected": "Agent", - "types": [ - "Agent" - ], - "value": "__UNDEFINED__" - } - ], - "pinned": false, - "template": { - "_type": "Component", - "allow_code_execution": { - "advanced": true, - "display_name": "Allow Code Execution", - "dynamic": false, - "info": "Whether the agent is allowed to execute code.", - "list": false, - "name": "allow_code_execution", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": false - }, - "allow_delegation": { - "advanced": false, - "display_name": "Allow Delegation", - "dynamic": false, - "info": "Whether the agent is allowed to 
delegate tasks to other agents.", - "list": false, - "name": "allow_delegation", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": true - }, - "backstory": { - "advanced": false, - "display_name": "Backstory", - "dynamic": false, - "info": "The backstory of the agent.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "multiline": true, - "name": "backstory", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "You are a reliable researcher and journalist " - }, - "code": { - "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", - "info": "", - "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, - "placeholder": "", - "required": true, - "show": true, - "title_case": false, - "type": "code", - "value": "from crewai import Agent # type: ignore\n\nfrom langflow.custom import Component\nfrom langflow.io import BoolInput, DictInput, HandleInput, MultilineInput, Output\n\n\nclass CrewAIAgentComponent(Component):\n display_name = \"CrewAI Agent\"\n description = \"Represents an agent of CrewAI.\"\n documentation: str = \"https://docs.crewai.com/how-to/LLM-Connections/\"\n icon = \"CrewAI\"\n\n inputs = [\n MultilineInput(name=\"role\", display_name=\"Role\", info=\"The role of the agent.\"),\n MultilineInput(name=\"goal\", display_name=\"Goal\", info=\"The objective of the agent.\"),\n MultilineInput(name=\"backstory\", display_name=\"Backstory\", info=\"The backstory of the agent.\"),\n HandleInput(\n name=\"tools\",\n display_name=\"Tools\",\n input_types=[\"Tool\"],\n is_list=True,\n info=\"Tools at agents disposal\",\n value=[],\n ),\n HandleInput(\n name=\"llm\",\n display_name=\"Language Model\",\n info=\"Language model that will run the agent.\",\n input_types=[\"LanguageModel\"],\n ),\n BoolInput(\n name=\"memory\",\n display_name=\"Memory\",\n info=\"Whether the agent should have memory or not\",\n advanced=True,\n value=True,\n ),\n BoolInput(\n name=\"verbose\",\n display_name=\"Verbose\",\n advanced=True,\n value=False,\n ),\n BoolInput(\n name=\"allow_delegation\",\n display_name=\"Allow Delegation\",\n info=\"Whether the agent is allowed to delegate tasks to other agents.\",\n value=True,\n ),\n BoolInput(\n name=\"allow_code_execution\",\n display_name=\"Allow Code Execution\",\n info=\"Whether the agent is allowed to execute code.\",\n value=False,\n advanced=True,\n ),\n DictInput(\n name=\"kwargs\",\n display_name=\"kwargs\",\n info=\"kwargs of agent.\",\n is_list=True,\n advanced=True,\n ),\n ]\n\n outputs = [\n Output(display_name=\"Agent\", name=\"output\", method=\"build_output\"),\n ]\n\n def build_output(self) -> Agent:\n kwargs = self.kwargs if self.kwargs else {}\n agent = Agent(\n role=self.role,\n goal=self.goal,\n backstory=self.backstory,\n llm=self.llm,\n verbose=self.verbose,\n memory=self.memory,\n tools=self.tools if self.tools else [],\n allow_delegation=self.allow_delegation,\n allow_code_execution=self.allow_code_execution,\n **kwargs,\n )\n self.status = repr(agent)\n return agent\n" - }, - "goal": { - "advanced": false, - "display_name": "Goal", - "dynamic": false, - "info": "The objective of the agent.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "multiline": true, - "name": "goal", - "placeholder": "", - "required": false, 
- "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "Search for information about the User's query and answer as best as you can" - }, - "kwargs": { - "advanced": true, - "display_name": "kwargs", - "dynamic": false, - "info": "kwargs of agent.", - "list": true, - "name": "kwargs", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "type": "dict", - "value": {} - }, - "llm": { - "advanced": false, - "display_name": "Language Model", - "dynamic": false, - "info": "Language model that will run the agent.", - "input_types": [ - "LanguageModel" - ], - "list": false, - "name": "llm", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "other", - "value": "" - }, - "memory": { - "advanced": true, - "display_name": "Memory", - "dynamic": false, - "info": "Whether the agent should have memory or not", - "list": false, - "name": "memory", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": true - }, - "role": { - "advanced": false, - "display_name": "Role", - "dynamic": false, - "info": "The role of the agent.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "multiline": true, - "name": "role", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "Research" - }, - "tools": { - "advanced": false, - "display_name": "Tools", - "dynamic": false, - "info": "Tools at agents disposal", - "input_types": [ - "Tool" - ], - "list": true, - "name": "tools", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "other", - "value": [] - }, - "verbose": { - "advanced": true, - "display_name": "Verbose", - "dynamic": false, - "info": "", - "list": false, - "name": "verbose", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": false - } - } - }, - "type": "CrewAIAgentComponent" - }, - "dragging": false, - "height": 746, - "id": "CrewAIAgentComponent-EbpXd", - "position": { - "x": -479.3544836211363, - "y": 184.85118314299666 - }, - "positionAbsolute": { - "x": -479.3544836211363, - "y": 184.85118314299666 - }, - "selected": false, - "type": "genericNode", - "width": 432 - }, - { - "data": { - "description": "Represents an agent of CrewAI.", - "display_name": "CrewAI Agent", - "id": "CrewAIAgentComponent-9D8ao", - "node": { - "base_classes": [ - "Agent" - ], - "beta": false, - "conditional_paths": [], - "custom_fields": {}, - "description": "Represents an agent of CrewAI.", - "display_name": "CrewAI Agent", - "documentation": "https://docs.crewai.com/how-to/LLM-Connections/", - "edited": false, - "field_order": [ - "role", - "goal", - "backstory", - "tools", - "llm", - "memory", - "verbose", - "allow_delegation", - "allow_code_execution", - "kwargs" - ], - "frozen": false, - "icon": "CrewAI", - "output_types": [], - "outputs": [ - { - "cache": true, - "display_name": "Agent", - "method": "build_output", - "name": "output", - "selected": "Agent", - "types": [ - "Agent" - ], - "value": "__UNDEFINED__" - } - ], - "pinned": false, - "template": { - "_type": "Component", - "allow_code_execution": { - "advanced": true, - "display_name": "Allow Code 
Execution", - "dynamic": false, - "info": "Whether the agent is allowed to execute code.", - "list": false, - "name": "allow_code_execution", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": false - }, - "allow_delegation": { - "advanced": false, - "display_name": "Allow Delegation", - "dynamic": false, - "info": "Whether the agent is allowed to delegate tasks to other agents.", - "list": false, - "name": "allow_delegation", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": true - }, - "backstory": { - "advanced": false, - "display_name": "Backstory", - "dynamic": false, - "info": "The backstory of the agent.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "multiline": true, - "name": "backstory", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "You are polite and helpful. You've always been a beacon of politeness." - }, - "code": { - "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", - "info": "", - "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, - "placeholder": "", - "required": true, - "show": true, - "title_case": false, - "type": "code", - "value": "from crewai import Agent # type: ignore\n\nfrom langflow.custom import Component\nfrom langflow.io import BoolInput, DictInput, HandleInput, MultilineInput, Output\n\n\nclass CrewAIAgentComponent(Component):\n display_name = \"CrewAI Agent\"\n description = \"Represents an agent of CrewAI.\"\n documentation: str = \"https://docs.crewai.com/how-to/LLM-Connections/\"\n icon = \"CrewAI\"\n\n inputs = [\n MultilineInput(name=\"role\", display_name=\"Role\", info=\"The role of the agent.\"),\n MultilineInput(name=\"goal\", display_name=\"Goal\", info=\"The objective of the agent.\"),\n MultilineInput(name=\"backstory\", display_name=\"Backstory\", info=\"The backstory of the agent.\"),\n HandleInput(\n name=\"tools\",\n display_name=\"Tools\",\n input_types=[\"Tool\"],\n is_list=True,\n info=\"Tools at agents disposal\",\n value=[],\n ),\n HandleInput(\n name=\"llm\",\n display_name=\"Language Model\",\n info=\"Language model that will run the agent.\",\n input_types=[\"LanguageModel\"],\n ),\n BoolInput(\n name=\"memory\",\n display_name=\"Memory\",\n info=\"Whether the agent should have memory or not\",\n advanced=True,\n value=True,\n ),\n BoolInput(\n name=\"verbose\",\n display_name=\"Verbose\",\n advanced=True,\n value=False,\n ),\n BoolInput(\n name=\"allow_delegation\",\n display_name=\"Allow Delegation\",\n info=\"Whether the agent is allowed to delegate tasks to other agents.\",\n value=True,\n ),\n BoolInput(\n name=\"allow_code_execution\",\n display_name=\"Allow Code Execution\",\n info=\"Whether the agent is allowed to execute code.\",\n value=False,\n advanced=True,\n ),\n DictInput(\n name=\"kwargs\",\n display_name=\"kwargs\",\n info=\"kwargs of agent.\",\n is_list=True,\n advanced=True,\n ),\n ]\n\n outputs = [\n Output(display_name=\"Agent\", name=\"output\", method=\"build_output\"),\n ]\n\n def build_output(self) -> Agent:\n kwargs = self.kwargs if self.kwargs else {}\n agent = Agent(\n role=self.role,\n goal=self.goal,\n backstory=self.backstory,\n llm=self.llm,\n verbose=self.verbose,\n memory=self.memory,\n 
tools=self.tools if self.tools else [],\n allow_delegation=self.allow_delegation,\n allow_code_execution=self.allow_code_execution,\n **kwargs,\n )\n self.status = repr(agent)\n return agent\n" - }, - "goal": { - "advanced": false, - "display_name": "Goal", - "dynamic": false, - "info": "The objective of the agent.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "multiline": true, - "name": "goal", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "You can answer general questions from the User and may call others for help if needed." - }, - "kwargs": { - "advanced": true, - "display_name": "kwargs", - "dynamic": false, - "info": "kwargs of agent.", - "list": true, - "name": "kwargs", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "type": "dict", - "value": {} - }, - "llm": { - "advanced": false, - "display_name": "Language Model", - "dynamic": false, - "info": "Language model that will run the agent.", - "input_types": [ - "LanguageModel" - ], - "list": false, - "name": "llm", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "other", - "value": "" - }, - "memory": { - "advanced": true, - "display_name": "Memory", - "dynamic": false, - "info": "Whether the agent should have memory or not", - "list": false, - "name": "memory", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": true - }, - "role": { - "advanced": false, - "display_name": "Role", - "dynamic": false, - "info": "The role of the agent.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "multiline": true, - "name": "role", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "Manager" - }, - "tools": { - "advanced": false, - "display_name": "Tools", - "dynamic": false, - "info": "Tools at agents disposal", - "input_types": [ - "Tool" - ], - "list": true, - "name": "tools", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "other", - "value": [] - }, - "verbose": { - "advanced": true, - "display_name": "Verbose", - "dynamic": false, - "info": "", - "list": false, - "name": "verbose", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": false - } - } - }, - "type": "CrewAIAgentComponent" - }, - "dragging": false, - "height": 746, - "id": "CrewAIAgentComponent-9D8ao", - "position": { - "x": -451.69853654304495, - "y": 1664.003205846104 - }, - "positionAbsolute": { - "x": -451.69853654304495, - "y": 1664.003205846104 - }, - "selected": false, - "type": "genericNode", - "width": 432 - }, - { - "data": { - "description": "Generates text using OpenAI LLMs.", - "display_name": "OpenAI", - "id": "OpenAIModel-HgNnu", - "node": { - "base_classes": [ - "LanguageModel", - "Message" - ], - "beta": false, - "conditional_paths": [], - "custom_fields": {}, - "description": "Generates text using OpenAI LLMs.", - "display_name": "OpenAI", - "documentation": "", - "edited": false, - "field_order": [ - "input_value", - "system_message", - "stream", - "max_tokens", - "model_kwargs", - "json_mode", 
- "output_schema", - "model_name", - "openai_api_base", - "api_key", - "temperature", - "seed" - ], - "frozen": false, - "icon": "OpenAI", - "output_types": [], - "outputs": [ - { - "cache": true, - "display_name": "Text", - "method": "text_response", - "name": "text_output", - "selected": "Message", - "types": [ - "Message" - ], - "value": "__UNDEFINED__" - }, - { - "cache": true, - "display_name": "Language Model", - "method": "build_model", - "name": "model_output", - "selected": "LanguageModel", - "types": [ - "LanguageModel" - ], - "value": "__UNDEFINED__" - } - ], - "pinned": false, - "template": { - "_type": "Component", - "api_key": { - "advanced": false, - "display_name": "OpenAI API Key", - "dynamic": false, - "info": "The OpenAI API Key to use for the OpenAI model.", - "input_types": [ - "Message" - ], - "load_from_db": true, - "name": "api_key", - "password": true, - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "type": "str", - "value": "" - }, - "code": { - "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", - "info": "", - "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, - "placeholder": "", - "required": true, - "show": true, - "title_case": false, - "type": "code", - "value": "import operator\nfrom functools import reduce\n\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. 
You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature if temperature is not None else 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output # type: ignore\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" - }, - "input_value": { - "advanced": false, - "display_name": "Input", - "dynamic": false, - "info": "", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "name": "input_value", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "json_mode": { - "advanced": true, - "display_name": "JSON Mode", - "dynamic": false, - "info": "If True, it will output JSON regardless of passing a schema.", - "list": false, - "name": "json_mode", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": false - }, - "max_tokens": { - "advanced": true, - "display_name": "Max Tokens", - "dynamic": false, - "info": "The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.", - "list": false, - "name": "max_tokens", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "int", - "value": "" - }, - "model_kwargs": { - "advanced": true, - "display_name": "Model Kwargs", - "dynamic": false, - "info": "", - "list": false, - "name": "model_kwargs", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "type": "dict", - "value": {} - }, - "model_name": { - "advanced": false, - "display_name": "Model Name", - "dynamic": false, - "info": "", - "load_from_db": false, - "name": "model_name", - "options": [ - "gpt-4o-mini", - "gpt-4o", - "gpt-4-turbo", - "gpt-4-turbo-preview", - "gpt-4", - "gpt-3.5-turbo", - "gpt-3.5-turbo-0125" - ], - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "str", - "value": "gpt-4o" - }, - "openai_api_base": { - "advanced": true, - "display_name": "OpenAI API Base", - "dynamic": false, - "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.", - "list": false, - "load_from_db": false, - "name": "openai_api_base", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "output_schema": { - "advanced": true, - "display_name": "Schema", - "dynamic": false, - "info": "The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.", - "list": true, - "name": "output_schema", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "type": "dict", - "value": {} - }, - "seed": { - "advanced": true, - "display_name": "Seed", - "dynamic": false, - "info": "The seed controls the reproducibility of the job.", - "list": false, - "name": "seed", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "int", - "value": 1 - }, - "stream": { - "advanced": true, - "display_name": "Stream", - "dynamic": false, - "info": "Stream the response from the model. 
Streaming works only in Chat.", - "list": false, - "name": "stream", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": false - }, - "system_message": { - "advanced": true, - "display_name": "System Message", - "dynamic": false, - "info": "System message to pass to the model.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "name": "system_message", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "temperature": { - "advanced": false, - "display_name": "Temperature", - "dynamic": false, - "info": "", - "list": false, - "name": "temperature", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "float", - "value": 0.1 - } - } - }, - "type": "OpenAIModel" - }, - "dragging": false, - "height": 700, - "id": "OpenAIModel-HgNnu", - "position": { - "x": -1157.0105401589535, - "y": 1699.4665001507685 - }, - "positionAbsolute": { - "x": -1157.0105401589535, - "y": 1699.4665001507685 - }, - "selected": false, - "type": "genericNode", - "width": 432 - }, - { - "data": { - "description": "Create a prompt template with dynamic variables.", - "display_name": "Prompt", - "id": "Prompt-eqGhn", - "node": { - "base_classes": [ - "Message" - ], - "beta": false, - "conditional_paths": [], - "custom_fields": { - "template": [ - "query" - ] - }, - "description": "Create a prompt template with dynamic variables.", - "display_name": "Prompt", - "documentation": "", - "edited": false, - "error": null, - "field_order": [ - "template" - ], - "frozen": false, - "full_path": null, - "icon": "prompts", - "is_composition": null, - "is_input": null, - "is_output": null, - "name": "", - "output_types": [], - "outputs": [ - { - "cache": true, - "display_name": "Prompt Message", - "method": "build_prompt", - "name": "prompt", - "selected": "Message", - "types": [ - "Message" - ], - "value": "__UNDEFINED__" - } - ], - "pinned": false, - "template": { - "_type": "Component", - "code": { - "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", - "info": "", - "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, - "placeholder": "", - "required": true, - "show": true, - "title_case": false, - "type": "code", - "value": "from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.inputs.inputs import DefaultPromptField\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(\n self,\n ) -> Message:\n prompt = await Message.from_template_and_variables(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def _update_template(self, frontend_node: dict):\n prompt_template = frontend_node[\"template\"][\"template\"][\"value\"]\n custom_fields = 
frontend_node[\"custom_fields\"]\n frontend_node_template = frontend_node[\"template\"]\n _ = process_prompt_template(\n template=prompt_template,\n name=\"template\",\n custom_fields=custom_fields,\n frontend_node_template=frontend_node_template,\n )\n return frontend_node\n\n def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"\n This function is called after the code validation is done.\n \"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n # Kept it duplicated for backwards compatibility\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n\n def _get_fallback_input(self, **kwargs):\n return DefaultPromptField(**kwargs)\n" - }, - "query": { - "advanced": false, - "display_name": "query", - "dynamic": false, - "field_type": "str", - "fileTypes": [], - "file_path": "", - "info": "", - "input_types": [ - "Message", - "Text" - ], - "list": false, - "load_from_db": false, - "multiline": true, - "name": "query", - "password": false, - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "type": "str", - "value": "" - }, - "template": { - "advanced": false, - "display_name": "Template", - "dynamic": false, - "info": "", - "list": false, - "name": "template", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "type": "prompt", - "value": "User's query:\n{query}\n\nRespond to the user with as much as information as you can about the topic. Delete if needed. If it is just a general query (e.g a greeting) you can respond them directly." 
- } - } - }, - "type": "Prompt" - }, - "dragging": false, - "height": 475, - "id": "Prompt-eqGhn", - "position": { - "x": -561.9017558579421, - "y": -433.5913161630874 - }, - "positionAbsolute": { - "x": -561.9017558579421, - "y": -433.5913161630874 - }, - "selected": false, - "type": "genericNode", - "width": 432 - }, - { - "data": { - "id": "ChatInput-xgRl9", - "node": { - "base_classes": [ - "Message" - ], - "beta": false, - "conditional_paths": [], - "custom_fields": {}, - "description": "Get chat inputs from the Playground.", - "display_name": "Chat Input", - "documentation": "", - "edited": false, - "field_order": [ - "input_value", - "should_store_message", - "sender", - "sender_name", - "session_id", - "files" - ], - "frozen": false, - "icon": "ChatInput", - "output_types": [], - "outputs": [ - { - "cache": true, - "display_name": "Message", - "method": "message_response", - "name": "message", - "selected": "Message", - "types": [ - "Message" - ], - "value": "__UNDEFINED__" - } - ], - "pinned": false, - "template": { - "_type": "Component", - "code": { - "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", - "info": "", - "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, - "placeholder": "", - "required": true, - "show": true, - "title_case": false, - "type": "code", - "value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_USER, MESSAGE_SENDER_NAME_USER\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n )\n\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n" - }, - "files": { - "advanced": true, - "display_name": "Files", - "dynamic": false, - "fileTypes": [ - "txt", - "md", - "mdx", - "csv", - "json", - "yaml", - "yml", - "xml", - "html", - "htm", - "pdf", - "docx", - "py", - "sh", - "sql", - "js", - "ts", - "tsx", - "jpg", - "jpeg", - "png", - "bmp", - "image" - ], - "file_path": "", - "info": "Files to be sent with the message.", - "list": true, - "name": "files", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "file", - "value": "" - }, - "input_value": { - "advanced": false, - "display_name": "Text", - "dynamic": false, - "info": "Message to be passed as input.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "multiline": true, - "name": "input_value", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "What is Langflow?" - }, - "sender": { - "advanced": true, - "display_name": "Sender Type", - "dynamic": false, - "info": "Type of sender.", - "name": "sender", - "options": [ - "Machine", - "User" - ], - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "str", - "value": "User" - }, - "sender_name": { - "advanced": true, - "display_name": "Sender Name", - "dynamic": false, - "info": "Name of the sender.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "name": "sender_name", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "User" - }, - "session_id": { - "advanced": true, - "display_name": "Session ID", - "dynamic": false, - "info": "The session ID of the chat. 
If empty, the current session ID parameter will be used.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "name": "session_id", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "should_store_message": { - "advanced": true, - "display_name": "Store Messages", - "dynamic": false, - "info": "Store the message in the history.", - "list": false, - "name": "should_store_message", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": true - } - } - }, - "type": "ChatInput" - }, - "dragging": false, - "height": 347, - "id": "ChatInput-xgRl9", - "position": { - "x": -1227.1161422332996, - "y": -380.26504479310324 - }, - "positionAbsolute": { - "x": -1227.1161422332996, - "y": -380.26504479310324 - }, - "selected": false, - "type": "genericNode", - "width": 432 - }, - { - "data": { - "description": "Represents an agent of CrewAI.", - "display_name": "CrewAI Agent", - "id": "CrewAIAgentComponent-UMpxO", - "node": { - "base_classes": [ - "Agent" - ], - "beta": false, - "conditional_paths": [], - "custom_fields": {}, - "description": "Represents an agent of CrewAI.", - "display_name": "CrewAI Agent", - "documentation": "https://docs.crewai.com/how-to/LLM-Connections/", - "edited": false, - "field_order": [ - "role", - "goal", - "backstory", - "tools", - "llm", - "memory", - "verbose", - "allow_delegation", - "allow_code_execution", - "kwargs" - ], - "frozen": false, - "icon": "CrewAI", - "output_types": [], - "outputs": [ - { - "cache": true, - "display_name": "Agent", - "method": "build_output", - "name": "output", - "selected": "Agent", - "types": [ - "Agent" - ], - "value": "__UNDEFINED__" - } - ], - "pinned": false, - "template": { - "_type": "Component", - "allow_code_execution": { - "advanced": true, - "display_name": "Allow Code Execution", - "dynamic": false, - "info": "Whether the agent is allowed to execute code.", - "list": false, - "name": "allow_code_execution", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": false - }, - "allow_delegation": { - "advanced": false, - "display_name": "Allow Delegation", - "dynamic": false, - "info": "Whether the agent is allowed to delegate tasks to other agents.", - "list": false, - "name": "allow_delegation", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": true - }, - "backstory": { - "advanced": false, - "display_name": "Backstory", - "dynamic": false, - "info": "The backstory of the agent.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "multiline": true, - "name": "backstory", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "You are a reliable researcher and journalist " - }, - "code": { - "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", - "info": "", - "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, - "placeholder": "", - "required": true, - "show": true, - "title_case": false, - "type": "code", - "value": "from crewai import Agent # type: ignore\n\nfrom langflow.custom import Component\nfrom langflow.io import 
BoolInput, DictInput, HandleInput, MultilineInput, Output\n\n\nclass CrewAIAgentComponent(Component):\n display_name = \"CrewAI Agent\"\n description = \"Represents an agent of CrewAI.\"\n documentation: str = \"https://docs.crewai.com/how-to/LLM-Connections/\"\n icon = \"CrewAI\"\n\n inputs = [\n MultilineInput(name=\"role\", display_name=\"Role\", info=\"The role of the agent.\"),\n MultilineInput(name=\"goal\", display_name=\"Goal\", info=\"The objective of the agent.\"),\n MultilineInput(name=\"backstory\", display_name=\"Backstory\", info=\"The backstory of the agent.\"),\n HandleInput(\n name=\"tools\",\n display_name=\"Tools\",\n input_types=[\"Tool\"],\n is_list=True,\n info=\"Tools at agents disposal\",\n value=[],\n ),\n HandleInput(\n name=\"llm\",\n display_name=\"Language Model\",\n info=\"Language model that will run the agent.\",\n input_types=[\"LanguageModel\"],\n ),\n BoolInput(\n name=\"memory\",\n display_name=\"Memory\",\n info=\"Whether the agent should have memory or not\",\n advanced=True,\n value=True,\n ),\n BoolInput(\n name=\"verbose\",\n display_name=\"Verbose\",\n advanced=True,\n value=False,\n ),\n BoolInput(\n name=\"allow_delegation\",\n display_name=\"Allow Delegation\",\n info=\"Whether the agent is allowed to delegate tasks to other agents.\",\n value=True,\n ),\n BoolInput(\n name=\"allow_code_execution\",\n display_name=\"Allow Code Execution\",\n info=\"Whether the agent is allowed to execute code.\",\n value=False,\n advanced=True,\n ),\n DictInput(\n name=\"kwargs\",\n display_name=\"kwargs\",\n info=\"kwargs of agent.\",\n is_list=True,\n advanced=True,\n ),\n ]\n\n outputs = [\n Output(display_name=\"Agent\", name=\"output\", method=\"build_output\"),\n ]\n\n def build_output(self) -> Agent:\n kwargs = self.kwargs if self.kwargs else {}\n agent = Agent(\n role=self.role,\n goal=self.goal,\n backstory=self.backstory,\n llm=self.llm,\n verbose=self.verbose,\n memory=self.memory,\n tools=self.tools if self.tools else [],\n allow_delegation=self.allow_delegation,\n allow_code_execution=self.allow_code_execution,\n **kwargs,\n )\n self.status = repr(agent)\n return agent\n" - }, - "goal": { - "advanced": false, - "display_name": "Goal", - "dynamic": false, - "info": "The objective of the agent.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "multiline": true, - "name": "goal", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "Evaluate the information for misleading or biased data." 
- }, - "kwargs": { - "advanced": true, - "display_name": "kwargs", - "dynamic": false, - "info": "kwargs of agent.", - "list": true, - "name": "kwargs", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "type": "dict", - "value": {} - }, - "llm": { - "advanced": false, - "display_name": "Language Model", - "dynamic": false, - "info": "Language model that will run the agent.", - "input_types": [ - "LanguageModel" - ], - "list": false, - "name": "llm", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "other", - "value": "" - }, - "memory": { - "advanced": true, - "display_name": "Memory", - "dynamic": false, - "info": "Whether the agent should have memory or not", - "list": false, - "name": "memory", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": true - }, - "role": { - "advanced": false, - "display_name": "Role", - "dynamic": false, - "info": "The role of the agent.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "multiline": true, - "name": "role", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "Editor" - }, - "tools": { - "advanced": false, - "display_name": "Tools", - "dynamic": false, - "info": "Tools at agents disposal", - "input_types": [ - "Tool" - ], - "list": true, - "name": "tools", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "other", - "value": [] - }, - "verbose": { - "advanced": true, - "display_name": "Verbose", - "dynamic": false, - "info": "", - "list": false, - "name": "verbose", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": false - } - } - }, - "type": "CrewAIAgentComponent" - }, - "dragging": false, - "height": 746, - "id": "CrewAIAgentComponent-UMpxO", - "position": { - "x": -397.2627184897183, - "y": 892.6280687419107 - }, - "positionAbsolute": { - "x": -397.2627184897183, - "y": 892.6280687419107 - }, - "selected": false, - "type": "genericNode", - "width": 432 - }, - { - "data": { - "id": "SearchAPI-Yokat", - "node": { - "base_classes": [ - "Data", - "Tool" - ], - "beta": false, - "conditional_paths": [], - "custom_fields": {}, - "description": "Call the searchapi.io API", - "display_name": "Search API", - "documentation": "https://www.searchapi.io/docs/google", - "edited": false, - "field_order": [ - "engine", - "api_key", - "input_value", - "search_params" - ], - "frozen": false, - "output_types": [], - "outputs": [ - { - "cache": true, - "display_name": "Data", - "method": "run_model", - "name": "api_run_model", - "selected": "Data", - "types": [ - "Data" - ], - "value": "__UNDEFINED__" - }, - { - "cache": true, - "display_name": "Tool", - "method": "build_tool", - "name": "api_build_tool", - "selected": "Tool", - "types": [ - "Tool" - ], - "value": "__UNDEFINED__" - } - ], - "pinned": false, - "template": { - "_type": "Component", - "api_key": { - "advanced": false, - "display_name": "SearchAPI API Key", - "dynamic": false, - "info": "", - "input_types": [ - "Message" - ], - "load_from_db": true, - "name": "api_key", - "password": true, - "placeholder": "", - "required": true, - "show": true, - "title_case": false, - "type": "str", 
- "value": "" - }, - "code": { - "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", - "info": "", - "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, - "placeholder": "", - "required": true, - "show": true, - "title_case": false, - "type": "code", - "value": "from typing import Union\n\nfrom langchain_community.utilities.searchapi import SearchApiAPIWrapper\n\nfrom langflow.base.langchain_utilities.model import LCToolComponent\nfrom langflow.inputs import SecretStrInput, MultilineInput, DictInput, MessageTextInput\nfrom langflow.schema import Data\nfrom langflow.field_typing import Tool\n\n\nclass SearchAPIComponent(LCToolComponent):\n display_name: str = \"Search API\"\n description: str = \"Call the searchapi.io API\"\n name = \"SearchAPI\"\n documentation: str = \"https://www.searchapi.io/docs/google\"\n\n inputs = [\n MessageTextInput(name=\"engine\", display_name=\"Engine\", value=\"google\"),\n SecretStrInput(name=\"api_key\", display_name=\"SearchAPI API Key\", required=True),\n MultilineInput(\n name=\"input_value\",\n display_name=\"Input\",\n ),\n DictInput(name=\"search_params\", display_name=\"Search parameters\", advanced=True, is_list=True),\n ]\n\n def run_model(self) -> Union[Data, list[Data]]:\n wrapper = self._build_wrapper()\n results = wrapper.results(query=self.input_value, **(self.search_params or {}))\n list_results = results.get(\"organic_results\", [])\n data = [Data(data=result, text=result[\"snippet\"]) for result in list_results]\n self.status = data\n return data\n\n def build_tool(self) -> Tool:\n wrapper = self._build_wrapper()\n return Tool(\n name=\"search_api\",\n description=\"Search for recent results.\",\n func=lambda x: wrapper.run(query=x, **(self.search_params or {})),\n )\n\n def _build_wrapper(self):\n return SearchApiAPIWrapper(engine=self.engine, searchapi_api_key=self.api_key)\n" - }, - "engine": { - "advanced": false, - "display_name": "Engine", - "dynamic": false, - "info": "", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "name": "engine", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "google" - }, - "input_value": { - "advanced": false, - "display_name": "Input", - "dynamic": false, - "info": "", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "multiline": true, - "name": "input_value", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "search_params": { - "advanced": true, - "display_name": "Search parameters", - "dynamic": false, - "info": "", - "list": true, - "name": "search_params", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "type": "dict", - "value": {} - } - } - }, - "type": "SearchAPI" - }, - "dragging": false, - "height": 612, - "id": "SearchAPI-Yokat", - "position": { - "x": -1229.2392382339021, - "y": 59.93107827969652 - }, - "positionAbsolute": { - "x": -1229.2392382339021, - "y": 59.93107827969652 - }, - "selected": false, - "type": "genericNode", - "width": 432 - } - ], - "viewport": { - "x": 538.2178429916559, - "y": 231.5938714083456, - "zoom": 0.25005698994252296 - } - }, - "description": "This Agentic Flow has a Manager and a couple Workers. 
The manager can answer the user's question straightaway or delegate the task to other agents.", - "endpoint_name": null, - "id": "6302081f-5af2-4b9f-bd19-2baaf7218ba6", - "is_component": false, - "last_tested_version": "1.0.12", - "name": "Hierarchical Tasks Agent" -} \ No newline at end of file diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Image Sentiment Analysis.json b/src/backend/base/langflow/initial_setup/starter_projects/Image Sentiment Analysis.json new file mode 100644 index 000000000000..7a78a1a256e2 --- /dev/null +++ b/src/backend/base/langflow/initial_setup/starter_projects/Image Sentiment Analysis.json @@ -0,0 +1,1499 @@ +{ + "data": { + "edges": [ + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "StructuredOutputComponent", + "id": "StructuredOutputComponent-2koen", + "name": "structured_output", + "output_types": ["Data"] + }, + "targetHandle": { + "fieldName": "data", + "id": "ParseData-qwHwN", + "inputTypes": ["Data"], + "type": "other" + } + }, + "id": "reactflow__edge-StructuredOutputComponent-2koen{œdataTypeœ:œStructuredOutputComponentœ,œidœ:œStructuredOutputComponent-2koenœ,œnameœ:œstructured_outputœ,œoutput_typesœ:[œDataœ]}-ParseData-qwHwN{œfieldNameœ:œdataœ,œidœ:œParseData-qwHwNœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}", + "source": "StructuredOutputComponent-2koen", + "sourceHandle": "{œdataTypeœ: œStructuredOutputComponentœ, œidœ: œStructuredOutputComponent-2koenœ, œnameœ: œstructured_outputœ, œoutput_typesœ: [œDataœ]}", + "target": "ParseData-qwHwN", + "targetHandle": "{œfieldNameœ: œdataœ, œidœ: œParseData-qwHwNœ, œinputTypesœ: [œDataœ], œtypeœ: œotherœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "ParseData", + "id": "ParseData-qwHwN", + "name": "text", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "input_value", + "id": "ChatOutput-pZKpN", + "inputTypes": ["Message"], + "type": "str" + } + }, + "id": "reactflow__edge-ParseData-qwHwN{œdataTypeœ:œParseDataœ,œidœ:œParseData-qwHwNœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-pZKpN{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-pZKpNœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "ParseData-qwHwN", + "sourceHandle": "{œdataTypeœ: œParseDataœ, œidœ: œParseData-qwHwNœ, œnameœ: œtextœ, œoutput_typesœ: [œMessageœ]}", + "target": "ChatOutput-pZKpN", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-pZKpNœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "OpenAIModel", + "id": "OpenAIModel-9oYd2", + "name": "text_output", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "input_value", + "id": "StructuredOutputComponent-2koen", + "inputTypes": ["Message"], + "type": "str" + } + }, + "id": "reactflow__edge-OpenAIModel-9oYd2{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-9oYd2œ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-StructuredOutputComponent-2koen{œfieldNameœ:œinput_valueœ,œidœ:œStructuredOutputComponent-2koenœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "OpenAIModel-9oYd2", + "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-9oYd2œ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}", + "target": "StructuredOutputComponent-2koen", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œStructuredOutputComponent-2koenœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + }, + { + "animated": false, + "className": "", + "data": { + 
"sourceHandle": { + "dataType": "OpenAIModel", + "id": "OpenAIModel-9oYd2", + "name": "model_output", + "output_types": ["LanguageModel"] + }, + "targetHandle": { + "fieldName": "llm", + "id": "StructuredOutputComponent-2koen", + "inputTypes": ["LanguageModel"], + "type": "other" + } + }, + "id": "reactflow__edge-OpenAIModel-9oYd2{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-9oYd2œ,œnameœ:œmodel_outputœ,œoutput_typesœ:[œLanguageModelœ]}-StructuredOutputComponent-2koen{œfieldNameœ:œllmœ,œidœ:œStructuredOutputComponent-2koenœ,œinputTypesœ:[œLanguageModelœ],œtypeœ:œotherœ}", + "source": "OpenAIModel-9oYd2", + "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-9oYd2œ, œnameœ: œmodel_outputœ, œoutput_typesœ: [œLanguageModelœ]}", + "target": "StructuredOutputComponent-2koen", + "targetHandle": "{œfieldNameœ: œllmœ, œidœ: œStructuredOutputComponent-2koenœ, œinputTypesœ: [œLanguageModelœ], œtypeœ: œotherœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "ChatInput", + "id": "ChatInput-G7si2", + "name": "message", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "input_value", + "id": "OpenAIModel-9oYd2", + "inputTypes": ["Message"], + "type": "str" + } + }, + "id": "reactflow__edge-ChatInput-G7si2{œdataTypeœ:œChatInputœ,œidœ:œChatInput-G7si2œ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-9oYd2{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-9oYd2œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "ChatInput-G7si2", + "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-G7si2œ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}", + "target": "OpenAIModel-9oYd2", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œOpenAIModel-9oYd2œ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + }, + { + "data": { + "sourceHandle": { + "dataType": "Prompt", + "id": "Prompt-ySGe4", + "name": "prompt", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "system_message", + "id": "OpenAIModel-9oYd2", + "inputTypes": ["Message"], + "type": "str" + } + }, + "id": "reactflow__edge-Prompt-ySGe4{œdataTypeœ:œPromptœ,œidœ:œPrompt-ySGe4œ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-9oYd2{œfieldNameœ:œsystem_messageœ,œidœ:œOpenAIModel-9oYd2œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "Prompt-ySGe4", + "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-ySGe4œ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", + "target": "OpenAIModel-9oYd2", + "targetHandle": "{œfieldNameœ: œsystem_messageœ, œidœ: œOpenAIModel-9oYd2œ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + } + ], + "nodes": [ + { + "data": { + "description": "Get chat inputs from the Playground.", + "display_name": "Chat Input", + "id": "ChatInput-G7si2", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Get chat inputs from the Playground.", + "display_name": "Chat Input", + "documentation": "", + "edited": false, + "field_order": [ + "input_value", + "should_store_message", + "sender", + "sender_name", + "session_id", + "files", + "background_color", + "chat_icon", + "text_color" + ], + "frozen": false, + "icon": "MessagesSquare", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Message", + "method": "message_response", + "name": "message", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + 
"_type": "Component", + "background_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Background Color", + "dynamic": false, + "info": "The background color of the icon.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "background_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "chat_icon": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Icon", + "dynamic": false, + "info": "The icon of the message.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "chat_icon", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_USER, MESSAGE_SENDER_USER\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n _background_color = self.background_color\n _text_color = self.text_color\n _icon = self.chat_icon\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\"background_color\": _background_color, \"text_color\": _text_color, \"icon\": _icon},\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n" + }, + "files": { + "_input_type": "FileInput", + "advanced": true, + "display_name": "Files", + "dynamic": false, + "fileTypes": [ + "txt", + "md", + "mdx", + "csv", + "json", + "yaml", + "yml", + "xml", + "html", + "htm", + "pdf", + "docx", + "py", + "sh", + "sql", + "js", + "ts", + "tsx", + "jpg", + "jpeg", + "png", + "bmp", + "image" + ], + "file_path": "", + "info": "Files to be sent with the message.", + "list": true, + "name": "files", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "file", + "value": "" + }, + "input_value": { + "_input_type": "MultilineInput", + "advanced": false, + "display_name": "Text", + "dynamic": false, + "info": "Message to be passed as input.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "sender": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Sender Type", + "dynamic": false, + "info": "Type of sender.", + "name": "sender", + "options": ["Machine", "User"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "User" + }, + "sender_name": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Sender Name", + "dynamic": false, + "info": "Name of the sender.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "sender_name", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "User" + }, + "session_id": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Session ID", + "dynamic": false, + "info": "The session ID of the chat. 
If empty, the current session ID parameter will be used.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "session_id", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "should_store_message": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Store Messages", + "dynamic": false, + "info": "Store the message in the history.", + "list": false, + "name": "should_store_message", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "text_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Text Color", + "dynamic": false, + "info": "The text color of the name", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "text_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + } + }, + "tool_mode": false + }, + "type": "ChatInput" + }, + "dragging": false, + "height": 234, + "id": "ChatInput-G7si2", + "position": { + "x": 1258.8272095125978, + "y": 367.0048451335054 + }, + "positionAbsolute": { + "x": 1258.8272095125978, + "y": 367.0048451335054 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "description": "Display a chat message in the Playground.", + "display_name": "Chat Output", + "id": "ChatOutput-pZKpN", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Display a chat message in the Playground.", + "display_name": "Chat Output", + "documentation": "", + "edited": false, + "field_order": [ + "input_value", + "should_store_message", + "sender", + "sender_name", + "session_id", + "data_template", + "background_color", + "chat_icon", + "text_color" + ], + "frozen": false, + "icon": "MessagesSquare", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Message", + "method": "message_response", + "name": "message", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "background_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Background Color", + "dynamic": false, + "info": "The background color of the icon.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "background_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "chat_icon": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Icon", + "dynamic": false, + "info": "The icon of the message.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "chat_icon", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + 
"load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageInput, MessageTextInput, Output\nfrom langflow.schema.message import Message\nfrom langflow.schema.properties import Source\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.\",\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(\n display_name=\"Message\",\n name=\"message\",\n method=\"message_response\",\n ),\n ]\n\n def _build_source(self, _id: str | None, display_name: str | None, source: str | None) -> Source:\n source_dict = {}\n if _id:\n source_dict[\"id\"] = _id\n if display_name:\n source_dict[\"display_name\"] = display_name\n if source:\n source_dict[\"source\"] = source\n return Source(**source_dict)\n\n def message_response(self) -> Message:\n _source, _icon, _display_name, _source_id = self.get_properties_from_source_component()\n _background_color = self.background_color\n _text_color = self.text_color\n if self.chat_icon:\n _icon = self.chat_icon\n message = self.input_value if isinstance(self.input_value, Message) else Message(text=self.input_value)\n message.sender = self.sender\n message.sender_name = self.sender_name\n message.session_id = self.session_id\n message.flow_id = self.graph.flow_id if hasattr(self, \"graph\") else None\n message.properties.source = self._build_source(_source_id, _display_name, _source)\n message.properties.icon = _icon\n message.properties.background_color = _background_color\n message.properties.text_color = _text_color\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = self.send_message(\n 
message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n" + }, + "data_template": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Data Template", + "dynamic": false, + "info": "Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "data_template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "{text}" + }, + "input_value": { + "_input_type": "MessageInput", + "advanced": false, + "display_name": "Text", + "dynamic": false, + "info": "Message to be passed as output.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "sender": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Sender Type", + "dynamic": false, + "info": "Type of sender.", + "name": "sender", + "options": ["Machine", "User"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "Machine" + }, + "sender_name": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Sender Name", + "dynamic": false, + "info": "Name of the sender.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "sender_name", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "AI" + }, + "session_id": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Session ID", + "dynamic": false, + "info": "The session ID of the chat. 
If empty, the current session ID parameter will be used.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "session_id", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "should_store_message": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Store Messages", + "dynamic": false, + "info": "Store the message in the history.", + "list": false, + "name": "should_store_message", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "text_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Text Color", + "dynamic": false, + "info": "The text color of the name", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "text_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + } + }, + "tool_mode": false + }, + "type": "ChatOutput" + }, + "dragging": false, + "height": 234, + "id": "ChatOutput-pZKpN", + "position": { + "x": 2742.72534045604, + "y": 681.9098282545469 + }, + "positionAbsolute": { + "x": 2742.72534045604, + "y": 681.9098282545469 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "id": "note-pSMKi", + "node": { + "description": "# Image Sentiment Analysis\nWelcome to the Image Sentiment Classifier - an AI tool for quick image sentiment analysis!\n\n## Instructions\n\n1. **Prepare Your Image**\n - Image should be clear and visible\n\n2. **Upload Options**\n - Open the Playground\n - Click file attachment icon\n - Or drag and drop into playground\n\n3. **Wait for Analysis**\n - System will process the image\n - Uses zero-shot learning\n - Classification happens automatically\n\n4. **Review Results**\n - Get classification: Positive/Negative/Neutral\n - Review confidence level\n - Check reasoning if provided\n\n5. **Expected Classifications**\n - Positive: Happy scenes, smiles, celebrations\n - Negative: Sad scenes, problems, conflicts\n - Neutral: Objects, landscapes, neutral scenes\n\nRemember: The clearer the image, the more accurate the classification! 📸✨", + "display_name": "", + "documentation": "", + "template": {} + }, + "type": "note" + }, + "dragging": false, + "height": 583, + "id": "note-pSMKi", + "position": { + "x": 791.7294511578832, + "y": 340.1333942936967 + }, + "positionAbsolute": { + "x": 791.7294511578832, + "y": 340.1333942936967 + }, + "resizing": false, + "selected": false, + "style": { + "height": 583, + "width": 436 + }, + "type": "noteNode", + "width": 436 + }, + { + "data": { + "description": "Transforms LLM responses into **structured data formats**. Ideal for extracting specific information or creating consistent outputs.", + "display_name": "Structured Output", + "id": "StructuredOutputComponent-2koen", + "node": { + "base_classes": ["Data"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Transforms LLM responses into **structured data formats**. 
Ideal for extracting specific information or creating consistent outputs.", + "display_name": "Structured Output", + "documentation": "", + "edited": false, + "field_order": [ + "llm", + "input_value", + "schema_name", + "output_schema", + "multiple" + ], + "frozen": false, + "icon": "braces", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Structured Output", + "method": "build_structured_output", + "name": "structured_output", + "selected": "Data", + "types": ["Data"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from typing import cast\n\nfrom pydantic import BaseModel, Field, create_model\n\nfrom langflow.base.models.chat_result import get_chat_result\nfrom langflow.custom import Component\nfrom langflow.field_typing.constants import LanguageModel\nfrom langflow.helpers.base_model import build_model_from_schema\nfrom langflow.io import BoolInput, HandleInput, MessageTextInput, Output, StrInput, TableInput\nfrom langflow.schema.data import Data\n\n\nclass StructuredOutputComponent(Component):\n display_name = \"Structured Output\"\n description = (\n \"Transforms LLM responses into **structured data formats**. Ideal for extracting specific information \"\n \"or creating consistent outputs.\"\n )\n icon = \"braces\"\n\n inputs = [\n HandleInput(\n name=\"llm\",\n display_name=\"Language Model\",\n info=\"The language model to use to generate the structured output.\",\n input_types=[\"LanguageModel\"],\n ),\n MessageTextInput(name=\"input_value\", display_name=\"Input message\"),\n StrInput(\n name=\"schema_name\",\n display_name=\"Schema Name\",\n info=\"Provide a name for the output data schema.\",\n ),\n TableInput(\n name=\"output_schema\",\n display_name=\"Output Schema\",\n info=\"Define the structure and data types for the model's output.\",\n table_schema=[\n {\n \"name\": \"name\",\n \"display_name\": \"Name\",\n \"type\": \"str\",\n \"description\": \"Specify the name of the output field.\",\n },\n {\n \"name\": \"description\",\n \"display_name\": \"Description\",\n \"type\": \"str\",\n \"description\": \"Describe the purpose of the output field.\",\n },\n {\n \"name\": \"type\",\n \"display_name\": \"Type\",\n \"type\": \"str\",\n \"description\": (\n \"Indicate the data type of the output field \" \"(e.g., str, int, float, bool, list, dict).\"\n ),\n \"default\": \"text\",\n },\n {\n \"name\": \"multiple\",\n \"display_name\": \"Multiple\",\n \"type\": \"boolean\",\n \"description\": \"Set to True if this output field should be a list of the specified type.\",\n \"default\": \"False\",\n },\n ],\n ),\n BoolInput(\n name=\"multiple\",\n display_name=\"Generate Multiple\",\n info=\"Set to True if the model should generate a list of outputs instead of a single output.\",\n ),\n ]\n\n outputs = [\n Output(name=\"structured_output\", display_name=\"Structured Output\", method=\"build_structured_output\"),\n ]\n\n def build_structured_output(self) -> Data:\n if not hasattr(self.llm, \"with_structured_output\"):\n msg = \"Language model does not support structured output.\"\n raise TypeError(msg)\n if not self.output_schema:\n 
msg = \"Output schema cannot be empty\"\n raise ValueError(msg)\n\n _output_model = build_model_from_schema(self.output_schema)\n if self.multiple:\n output_model = create_model(\n self.schema_name,\n objects=(list[_output_model], Field(description=f\"A list of {self.schema_name}.\")), # type: ignore[valid-type]\n )\n else:\n output_model = _output_model\n try:\n llm_with_structured_output = cast(LanguageModel, self.llm).with_structured_output(schema=output_model) # type: ignore[valid-type, attr-defined]\n\n except NotImplementedError as exc:\n msg = f\"{self.llm.__class__.__name__} does not support structured output.\"\n raise TypeError(msg) from exc\n config_dict = {\n \"run_name\": self.display_name,\n \"project_name\": self.get_project_name(),\n \"callbacks\": self.get_langchain_callbacks(),\n }\n output = get_chat_result(runnable=llm_with_structured_output, input_value=self.input_value, config=config_dict)\n if isinstance(output, BaseModel):\n output_dict = output.model_dump()\n else:\n msg = f\"Output should be a Pydantic BaseModel, got {type(output)} ({output})\"\n raise TypeError(msg)\n return Data(data=output_dict)\n" + }, + "input_value": { + "_input_type": "MessageTextInput", + "advanced": false, + "display_name": "Input message", + "dynamic": false, + "info": "", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "llm": { + "_input_type": "HandleInput", + "advanced": false, + "display_name": "Language Model", + "dynamic": false, + "info": "The language model to use to generate the structured output.", + "input_types": ["LanguageModel"], + "list": false, + "name": "llm", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "multiple": { + "_input_type": "BoolInput", + "advanced": false, + "display_name": "Generate Multiple", + "dynamic": false, + "info": "Set to True if the model should generate a list of outputs instead of a single output.", + "list": false, + "name": "multiple", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + }, + "output_schema": { + "_input_type": "TableInput", + "advanced": false, + "display_name": "Output Schema", + "dynamic": false, + "info": "Define the structure and data types for the model's output.", + "is_list": true, + "load_from_db": false, + "name": "output_schema", + "placeholder": "", + "required": false, + "show": true, + "table_schema": { + "columns": [ + { + "description": "Specify the name of the output field.", + "display_name": "Name", + "filterable": true, + "formatter": "text", + "name": "name", + "sortable": true, + "type": "text" + }, + { + "description": "Describe the purpose of the output field.", + "display_name": "Description", + "filterable": true, + "formatter": "text", + "name": "description", + "sortable": true, + "type": "text" + }, + { + "default": "text", + "description": "Indicate the data type of the output field (e.g., str, int, float, bool, list, dict).", + "display_name": "Type", + "filterable": true, + "formatter": "text", + "name": "type", + "sortable": true, + "type": "text" + }, + { + "default": "False", + "description": "Set to True if this output field should be a list of the 
specified type.", + "display_name": "Multiple", + "filterable": true, + "formatter": "text", + "name": "multiple", + "sortable": true, + "type": "boolean" + } + ] + }, + "title_case": false, + "trace_as_metadata": true, + "type": "table", + "value": [ + { + "description": "A Positive|Negative value that represents the image.", + "multiple": "False", + "name": "sentiment", + "type": "text" + }, + { + "description": "Brief Description of the image", + "multiple": "False", + "name": "description", + "type": "text" + } + ] + }, + "schema_name": { + "_input_type": "StrInput", + "advanced": false, + "display_name": "Schema Name", + "dynamic": false, + "info": "Provide a name for the output data schema.", + "list": false, + "load_from_db": false, + "name": "schema_name", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "str", + "value": "image_classification" + } + }, + "tool_mode": false + }, + "type": "StructuredOutputComponent" + }, + "dragging": false, + "height": 541, + "id": "StructuredOutputComponent-2koen", + "position": { + "x": 2029.441019694193, + "y": 414.7974622616549 + }, + "positionAbsolute": { + "x": 2029.441019694193, + "y": 414.7974622616549 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "id": "ParseData-qwHwN", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Convert Data into plain text following a specified template.", + "display_name": "Parse Data", + "documentation": "", + "edited": false, + "field_order": ["data", "template", "sep"], + "frozen": false, + "icon": "braces", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Text", + "method": "parse_data", + "name": "text", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.custom import Component\nfrom langflow.helpers.data import data_to_text\nfrom langflow.io import DataInput, MultilineInput, Output, StrInput\nfrom langflow.schema.message import Message\n\n\nclass ParseDataComponent(Component):\n display_name = \"Parse Data\"\n description = \"Convert Data into plain text following a specified template.\"\n icon = \"braces\"\n name = \"ParseData\"\n\n inputs = [\n DataInput(name=\"data\", display_name=\"Data\", info=\"The data to convert to text.\"),\n MultilineInput(\n name=\"template\",\n display_name=\"Template\",\n info=\"The template to use for formatting the data. 
\"\n \"It can contain the keys {text}, {data} or any other key in the Data.\",\n value=\"{text}\",\n ),\n StrInput(name=\"sep\", display_name=\"Separator\", advanced=True, value=\"\\n\"),\n ]\n\n outputs = [\n Output(display_name=\"Text\", name=\"text\", method=\"parse_data\"),\n ]\n\n def parse_data(self) -> Message:\n data = self.data if isinstance(self.data, list) else [self.data]\n template = self.template\n\n result_string = data_to_text(template, data, sep=self.sep)\n self.status = result_string\n return Message(text=result_string)\n" + }, + "data": { + "_input_type": "DataInput", + "advanced": false, + "display_name": "Data", + "dynamic": false, + "info": "The data to convert to text.", + "input_types": ["Data"], + "list": false, + "name": "data", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "sep": { + "_input_type": "StrInput", + "advanced": true, + "display_name": "Separator", + "dynamic": false, + "info": "", + "list": false, + "load_from_db": false, + "name": "sep", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "str", + "value": "\n" + }, + "template": { + "_input_type": "MultilineInput", + "advanced": false, + "display_name": "Template", + "dynamic": false, + "info": "The template to use for formatting the data. It can contain the keys {text}, {data} or any other key in the Data.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "Sentiment: {sentiment} \n\nDescription: {description} " + } + }, + "tool_mode": false + }, + "type": "ParseData" + }, + "dragging": false, + "height": 302, + "id": "ParseData-qwHwN", + "position": { + "x": 2389.490977317181, + "y": 646.9530981549555 + }, + "positionAbsolute": { + "x": 2389.490977317181, + "y": 646.9530981549555 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "description": "Generates text using OpenAI LLMs.", + "display_name": "OpenAI", + "id": "OpenAIModel-9oYd2", + "node": { + "base_classes": ["LanguageModel", "Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Generates text using OpenAI LLMs.", + "display_name": "OpenAI", + "documentation": "", + "edited": false, + "field_order": [ + "input_value", + "system_message", + "stream", + "max_tokens", + "model_kwargs", + "json_mode", + "output_schema", + "model_name", + "openai_api_base", + "api_key", + "temperature", + "seed", + "output_parser" + ], + "frozen": false, + "icon": "OpenAI", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Text", + "method": "text_response", + "name": "text_output", + "required_inputs": [], + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + }, + { + "cache": true, + "display_name": "Language Model", + "method": "build_model", + "name": "model_output", + "required_inputs": [], + "selected": "LanguageModel", + "types": ["LanguageModel"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "api_key": { + "_input_type": "SecretStrInput", + 
"advanced": false, + "display_name": "OpenAI API Key", + "dynamic": false, + "info": "The OpenAI API Key to use for the OpenAI model.", + "input_types": ["Message"], + "load_from_db": true, + "name": "api_key", + "password": true, + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "OPENAI_API_KEY" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "import operator\nfrom functools import reduce\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langflow.inputs import BoolInput, DictInput, DropdownInput, FloatInput, IntInput, SecretStrInput, StrInput\nfrom langflow.inputs.inputs import HandleInput\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = [\n *LCModelComponent._base_inputs,\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(\n name=\"model_kwargs\",\n display_name=\"Model Kwargs\",\n advanced=True,\n info=\"Additional keyword arguments to pass to the model.\",\n ),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. \"\n \"You must pass the word JSON in the prompt. \"\n \"If left blank, JSON mode will be disabled. [DEPRECATED]\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. \"\n \"Defaults to https://api.openai.com/v1. 
\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n HandleInput(\n name=\"output_parser\",\n display_name=\"Output Parser\",\n info=\"The parser to use to parse the output of the model\",\n advanced=True,\n input_types=[\"OutputParser\"],\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n api_key = SecretStr(openai_api_key).get_secret_value() if openai_api_key else None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature if temperature is not None else 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\")\n else:\n output = output.bind(response_format={\"type\": \"json_object\"})\n\n return output\n\n def _get_exception_message(self, e: Exception):\n \"\"\"Get a message from an OpenAI exception.\n\n Args:\n e (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n try:\n from openai import BadRequestError\n except ImportError:\n return None\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\")\n if message:\n return message\n return None\n" + }, + "input_value": { + "_input_type": "MessageInput", + "advanced": false, + "display_name": "Input", + "dynamic": false, + "info": "", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "json_mode": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "JSON Mode", + "dynamic": false, + "info": "If True, it will output JSON regardless of passing a schema.", + "list": false, + "name": "json_mode", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + }, + "max_tokens": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Max Tokens", + "dynamic": false, + "info": "The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.", + "list": false, + "name": "max_tokens", + "placeholder": "", + "range_spec": { + "max": 128000, + "min": 0, + "step": 0.1, + "step_type": "float" + }, + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": "" + }, + "model_kwargs": { + "_input_type": "DictInput", + "advanced": true, + "display_name": "Model Kwargs", + "dynamic": false, + "info": "Additional keyword arguments to pass to the model.", + "list": false, + "name": "model_kwargs", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "model_name": { + "_input_type": "DropdownInput", + "advanced": false, + "combobox": false, + "display_name": "Model Name", + "dynamic": false, + "info": "", + "name": "model_name", + "options": [ + "gpt-4o-mini", + "gpt-4o", + "gpt-4-turbo", + "gpt-4-turbo-preview", + "gpt-4", + "gpt-3.5-turbo", + "gpt-3.5-turbo-0125" + ], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "gpt-4o-mini" + }, + "openai_api_base": { + "_input_type": "StrInput", + "advanced": true, + "display_name": "OpenAI API Base", + "dynamic": false, + "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.", + "list": false, + "load_from_db": false, + "name": "openai_api_base", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "output_parser": { + "_input_type": "HandleInput", + "advanced": true, + "display_name": "Output Parser", + "dynamic": false, + "info": "The parser to use to parse the output of the model", + "input_types": ["OutputParser"], + "list": false, + "name": "output_parser", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "output_schema": { + "_input_type": "DictInput", + "advanced": true, + "display_name": "Schema", + "dynamic": false, + "info": "The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled. [DEPRECATED]", + "list": true, + "name": "output_schema", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "seed": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Seed", + "dynamic": false, + "info": "The seed controls the reproducibility of the job.", + "list": false, + "name": "seed", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 1 + }, + "stream": { + "_input_type": "BoolInput", + "advanced": false, + "display_name": "Stream", + "dynamic": false, + "info": "Stream the response from the model. 
Streaming works only in Chat.", + "list": false, + "name": "stream", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + }, + "system_message": { + "_input_type": "MessageTextInput", + "advanced": false, + "display_name": "System Message", + "dynamic": false, + "info": "System message to pass to the model.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "system_message", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "temperature": { + "_input_type": "FloatInput", + "advanced": false, + "display_name": "Temperature", + "dynamic": false, + "info": "", + "list": false, + "name": "temperature", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "float", + "value": 0.1 + } + }, + "tool_mode": false + }, + "type": "OpenAIModel" + }, + "dragging": false, + "height": 630, + "id": "OpenAIModel-9oYd2", + "position": { + "x": 1644.460882817477, + "y": 381.35290572159937 + }, + "positionAbsolute": { + "x": 1644.460882817477, + "y": 381.35290572159937 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt", + "id": "Prompt-ySGe4", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": { + "template": [] + }, + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt", + "documentation": "", + "edited": false, + "field_order": ["template"], + "frozen": false, + "icon": "prompts", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Prompt Message", + "method": "build_prompt", + "name": "prompt", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.inputs.inputs import DefaultPromptField\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(self) -> Message:\n prompt = Message.from_template(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def _update_template(self, frontend_node: dict):\n prompt_template = frontend_node[\"template\"][\"template\"][\"value\"]\n custom_fields = frontend_node[\"custom_fields\"]\n 
frontend_node_template = frontend_node[\"template\"]\n _ = process_prompt_template(\n template=prompt_template,\n name=\"template\",\n custom_fields=custom_fields,\n frontend_node_template=frontend_node_template,\n )\n return frontend_node\n\n def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"This function is called after the code validation is done.\"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n # Kept it duplicated for backwards compatibility\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n\n def _get_fallback_input(self, **kwargs):\n return DefaultPromptField(**kwargs)\n" + }, + "template": { + "_input_type": "PromptInput", + "advanced": false, + "display_name": "Template", + "dynamic": false, + "info": "", + "list": false, + "load_from_db": false, + "name": "template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "type": "prompt", + "value": "Classify the image into neutral, negative or positive. " + } + }, + "tool_mode": false + }, + "type": "Prompt" + }, + "dragging": false, + "height": 260, + "id": "Prompt-ySGe4", + "position": { + "x": 1262.0179832573751, + "y": 632.1360181124842 + }, + "positionAbsolute": { + "x": 1262.0179832573751, + "y": 632.1360181124842 + }, + "selected": false, + "type": "genericNode", + "width": 320 + } + ], + "viewport": { + "x": -767.6296307973882, + "y": -46.714664128928234, + "zoom": 0.798427315747602 + } + }, + "description": "Analyzes images and categorizes them as positive, negative, or neutral using zero-shot learning.", + "endpoint_name": null, + "icon": "Image", + "id": "0caf0da8-c233-4fc5-9df3-41bb58403885", + "gradient": "2", + "is_component": false, + "last_tested_version": "1.0.19.post2", + "name": "Image Sentiment Analysis", + "tags": ["classification"] +} diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Instagram Copywriter.json b/src/backend/base/langflow/initial_setup/starter_projects/Instagram Copywriter.json new file mode 100644 index 000000000000..194c14838c8f --- /dev/null +++ b/src/backend/base/langflow/initial_setup/starter_projects/Instagram Copywriter.json @@ -0,0 +1,2851 @@ +{ + "data": { + "edges": [ + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "TextInput", + "id": "TextInput-VURDN", + "name": "text", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "guidelines", + "id": "Prompt-vFWlB", + "inputTypes": ["Message", "Text"], + "type": "str" + } + }, + "id": "reactflow__edge-TextInput-VURDN{œdataTypeœ:œTextInputœ,œidœ:œTextInput-VURDNœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-Prompt-vFWlB{œfieldNameœ:œguidelinesœ,œidœ:œPrompt-vFWlBœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", + "source": "TextInput-VURDN", + "sourceHandle": "{œdataTypeœ: œTextInputœ, œidœ: œTextInput-VURDNœ, œnameœ: œtextœ, œoutput_typesœ: [œMessageœ]}", + "target": "Prompt-vFWlB", + "targetHandle": "{œfieldNameœ: 
œguidelinesœ, œidœ: œPrompt-vFWlBœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "OpenAIModel", + "id": "OpenAIModel-e0DLW", + "name": "text_output", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "post", + "id": "Prompt-0R6oq", + "inputTypes": ["Message", "Text"], + "type": "str" + } + }, + "id": "reactflow__edge-OpenAIModel-e0DLW{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-e0DLWœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-Prompt-0R6oq{œfieldNameœ:œpostœ,œidœ:œPrompt-0R6oqœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", + "source": "OpenAIModel-e0DLW", + "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-e0DLWœ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}", + "target": "Prompt-0R6oq", + "targetHandle": "{œfieldNameœ: œpostœ, œidœ: œPrompt-0R6oqœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "Prompt", + "id": "Prompt-vFWlB", + "name": "prompt", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "input_value", + "id": "OpenAIModel-e0DLW", + "inputTypes": ["Message"], + "type": "str" + } + }, + "id": "reactflow__edge-Prompt-vFWlB{œdataTypeœ:œPromptœ,œidœ:œPrompt-vFWlBœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-e0DLW{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-e0DLWœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "Prompt-vFWlB", + "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-vFWlBœ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", + "target": "OpenAIModel-e0DLW", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œOpenAIModel-e0DLWœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "TavilyAISearch", + "id": "TavilyAISearch-AN1Hv", + "name": "api_build_tool", + "output_types": ["Tool"] + }, + "targetHandle": { + "fieldName": "tools", + "id": "Agent-9Wf58", + "inputTypes": ["Tool", "BaseTool", "StructuredTool"], + "type": "other" + } + }, + "id": "reactflow__edge-TavilyAISearch-AN1Hv{œdataTypeœ:œTavilyAISearchœ,œidœ:œTavilyAISearch-AN1Hvœ,œnameœ:œapi_build_toolœ,œoutput_typesœ:[œToolœ]}-Agent-9Wf58{œfieldNameœ:œtoolsœ,œidœ:œAgent-9Wf58œ,œinputTypesœ:[œToolœ,œBaseToolœ,œStructuredToolœ],œtypeœ:œotherœ}", + "selected": false, + "source": "TavilyAISearch-AN1Hv", + "sourceHandle": "{œdataTypeœ: œTavilyAISearchœ, œidœ: œTavilyAISearch-AN1Hvœ, œnameœ: œapi_build_toolœ, œoutput_typesœ: [œToolœ]}", + "target": "Agent-9Wf58", + "targetHandle": "{œfieldNameœ: œtoolsœ, œidœ: œAgent-9Wf58œ, œinputTypesœ: [œToolœ, œBaseToolœ, œStructuredToolœ], œtypeœ: œotherœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "ChatInput", + "id": "ChatInput-RN2Gt", + "name": "message", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "input_value", + "id": "Agent-9Wf58", + "inputTypes": ["Message"], + "type": "str" + } + }, + "id": "reactflow__edge-ChatInput-RN2Gt{œdataTypeœ:œChatInputœ,œidœ:œChatInput-RN2Gtœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Agent-9Wf58{œfieldNameœ:œinput_valueœ,œidœ:œAgent-9Wf58œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "ChatInput-RN2Gt", + "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-RN2Gtœ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}", + "target": "Agent-9Wf58", + "targetHandle": "{œfieldNameœ: œinput_valueœ, 
œidœ: œAgent-9Wf58œ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "Agent", + "id": "Agent-9Wf58", + "name": "response", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "context", + "id": "Prompt-vFWlB", + "inputTypes": ["Message", "Text"], + "type": "str" + } + }, + "id": "reactflow__edge-Agent-9Wf58{œdataTypeœ:œAgentœ,œidœ:œAgent-9Wf58œ,œnameœ:œresponseœ,œoutput_typesœ:[œMessageœ]}-Prompt-vFWlB{œfieldNameœ:œcontextœ,œidœ:œPrompt-vFWlBœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", + "source": "Agent-9Wf58", + "sourceHandle": "{œdataTypeœ: œAgentœ, œidœ: œAgent-9Wf58œ, œnameœ: œresponseœ, œoutput_typesœ: [œMessageœ]}", + "target": "Prompt-vFWlB", + "targetHandle": "{œfieldNameœ: œcontextœ, œidœ: œPrompt-vFWlBœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "Prompt", + "id": "Prompt-0R6oq", + "name": "prompt", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "input_value", + "id": "OpenAIModel-XHfFc", + "inputTypes": ["Message"], + "type": "str" + } + }, + "id": "reactflow__edge-Prompt-0R6oq{œdataTypeœ:œPromptœ,œidœ:œPrompt-0R6oqœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-XHfFc{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-XHfFcœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "Prompt-0R6oq", + "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-0R6oqœ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", + "target": "OpenAIModel-XHfFc", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œOpenAIModel-XHfFcœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "OpenAIModel", + "id": "OpenAIModel-e0DLW", + "name": "text_output", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "post", + "id": "Prompt-Z1RBN", + "inputTypes": ["Message", "Text"], + "type": "str" + } + }, + "id": "reactflow__edge-OpenAIModel-e0DLW{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-e0DLWœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-Prompt-Z1RBN{œfieldNameœ:œpostœ,œidœ:œPrompt-Z1RBNœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", + "source": "OpenAIModel-e0DLW", + "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-e0DLWœ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}", + "target": "Prompt-Z1RBN", + "targetHandle": "{œfieldNameœ: œpostœ, œidœ: œPrompt-Z1RBNœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "OpenAIModel", + "id": "OpenAIModel-XHfFc", + "name": "text_output", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "image_description", + "id": "Prompt-Z1RBN", + "inputTypes": ["Message", "Text"], + "type": "str" + } + }, + "id": "reactflow__edge-OpenAIModel-XHfFc{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-XHfFcœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-Prompt-Z1RBN{œfieldNameœ:œimage_descriptionœ,œidœ:œPrompt-Z1RBNœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", + "source": "OpenAIModel-XHfFc", + "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-XHfFcœ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}", + "target": "Prompt-Z1RBN", + "targetHandle": "{œfieldNameœ: œimage_descriptionœ, œidœ: œPrompt-Z1RBNœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" + }, + { + "animated": false, + 
"className": "", + "data": { + "sourceHandle": { + "dataType": "Prompt", + "id": "Prompt-Z1RBN", + "name": "prompt", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "input_value", + "id": "ChatOutput-1gPMj", + "inputTypes": ["Message"], + "type": "str" + } + }, + "id": "reactflow__edge-Prompt-Z1RBN{œdataTypeœ:œPromptœ,œidœ:œPrompt-Z1RBNœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-1gPMj{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-1gPMjœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "Prompt-Z1RBN", + "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-Z1RBNœ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", + "target": "ChatOutput-1gPMj", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-1gPMjœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + } + ], + "nodes": [ + { + "data": { + "id": "ChatInput-RN2Gt", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Get chat inputs from the Playground.", + "display_name": "Chat Input", + "documentation": "", + "edited": false, + "field_order": [ + "input_value", + "should_store_message", + "sender", + "sender_name", + "session_id", + "files", + "background_color", + "chat_icon", + "text_color" + ], + "frozen": false, + "icon": "MessagesSquare", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Message", + "method": "message_response", + "name": "message", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "background_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Background Color", + "dynamic": false, + "info": "The background color of the icon.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "background_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "chat_icon": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Icon", + "dynamic": false, + "info": "The icon of the message.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "chat_icon", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_USER, MESSAGE_SENDER_USER\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n 
value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n _background_color = self.background_color\n _text_color = self.text_color\n _icon = self.chat_icon\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\"background_color\": _background_color, \"text_color\": _text_color, \"icon\": _icon},\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n" + }, + "files": { + "_input_type": "FileInput", + "advanced": true, + "display_name": "Files", + "dynamic": false, + "fileTypes": [ + "txt", + "md", + "mdx", + "csv", + "json", + "yaml", + "yml", + "xml", + "html", + "htm", + "pdf", + "docx", + "py", + "sh", + "sql", + "js", + "ts", + "tsx", + "jpg", + "jpeg", + "png", + "bmp", + "image" + ], + "file_path": "", + "info": "Files to be sent with the message.", + "list": true, + "name": "files", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "file", + "value": "" + }, + "input_value": { + "_input_type": "MultilineInput", + "advanced": false, + "display_name": "Text", + "dynamic": false, + "info": "Message to be passed as input.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "Create a Langflow post" + }, + "sender": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Sender Type", + "dynamic": false, + "info": "Type of sender.", + "name": "sender", + "options": ["Machine", "User"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": 
"str", + "value": "User" + }, + "sender_name": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Sender Name", + "dynamic": false, + "info": "Name of the sender.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "sender_name", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "User" + }, + "session_id": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Session ID", + "dynamic": false, + "info": "The session ID of the chat. If empty, the current session ID parameter will be used.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "session_id", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "should_store_message": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Store Messages", + "dynamic": false, + "info": "Store the message in the history.", + "list": false, + "name": "should_store_message", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "text_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Text Color", + "dynamic": false, + "info": "The text color of the name", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "text_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + } + } + }, + "type": "ChatInput" + }, + "dragging": false, + "height": 234, + "id": "ChatInput-RN2Gt", + "position": { + "x": 5183.264962599111, + "y": 3024.7129453201533 + }, + "positionAbsolute": { + "x": 5183.264962599111, + "y": 3024.7129453201533 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt", + "id": "Prompt-vFWlB", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": { + "template": ["context", "guidelines"] + }, + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt", + "documentation": "", + "edited": false, + "field_order": ["template"], + "frozen": false, + "icon": "prompts", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Prompt Message", + "method": "build_prompt", + "name": "prompt", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.inputs.inputs import DefaultPromptField\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import 
update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(self) -> Message:\n prompt = Message.from_template(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def _update_template(self, frontend_node: dict):\n prompt_template = frontend_node[\"template\"][\"template\"][\"value\"]\n custom_fields = frontend_node[\"custom_fields\"]\n frontend_node_template = frontend_node[\"template\"]\n _ = process_prompt_template(\n template=prompt_template,\n name=\"template\",\n custom_fields=custom_fields,\n frontend_node_template=frontend_node_template,\n )\n return frontend_node\n\n def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"This function is called after the code validation is done.\"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n # Kept it duplicated for backwards compatibility\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n\n def _get_fallback_input(self, **kwargs):\n return DefaultPromptField(**kwargs)\n" + }, + "context": { + "advanced": false, + "display_name": "context", + "dynamic": false, + "field_type": "str", + "fileTypes": [], + "file_path": "", + "info": "", + "input_types": ["Message", "Text"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "context", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "" + }, + "guidelines": { + "advanced": false, + "display_name": "guidelines", + "dynamic": false, + "field_type": "str", + "fileTypes": [], + "file_path": "", + "info": "", + "input_types": ["Message", "Text"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "guidelines", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "" + }, + "template": { + "_input_type": "PromptInput", + "advanced": false, + "display_name": "Template", + "dynamic": false, + "info": "", + "list": false, + "load_from_db": false, + "name": "template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "type": "prompt", + "value": "Based on the following context: \n\n{context} \n\n\nFollow these guidelines: \n\n{guidelines}" + } + }, + "tool_mode": false + }, + "type": "Prompt" + }, + "dragging": false, + "height": 433, + "id": "Prompt-vFWlB", + "position": { + "x": 6013.179772864059, + "y": 2937.851014457363 + }, + "positionAbsolute": { + "x": 6013.179772864059, + "y": 2937.851014457363 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "id": "TextInput-VURDN", + 
"node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Get text inputs from the Playground.", + "display_name": "Text Input", + "documentation": "", + "edited": false, + "field_order": ["input_value"], + "frozen": false, + "icon": "type", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Text", + "method": "text_response", + "name": "text", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.io.text import TextComponent\nfrom langflow.io import MultilineInput, Output\nfrom langflow.schema.message import Message\n\n\nclass TextInputComponent(TextComponent):\n display_name = \"Text Input\"\n description = \"Get text inputs from the Playground.\"\n icon = \"type\"\n name = \"TextInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Text to be passed as input.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Text\", name=\"text\", method=\"text_response\"),\n ]\n\n def text_response(self) -> Message:\n return Message(\n text=self.input_value,\n )\n" + }, + "input_value": { + "_input_type": "MultilineInput", + "advanced": false, + "display_name": "Text", + "dynamic": false, + "info": "Text to be passed as input.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "Build a post for Instagram:\n\n1. **Opening Line**: Start with a powerful, intriguing question or statement to grab attention. Example: \"Ever wondered what it feels like to fly?\"\n\n2. **Main Content**: \n - Briefly share a personal story or insight related to the opening line. Keep it engaging and relatable.\n - Include valuable information or a lesson learned that your audience can benefit from.\n\n3. **Emojis**: Integrate emojis naturally within your text to emphasize key points and add a playful tone.\n\n4. **Call to Action (CTA)**: End with a clear CTA. Encourage your audience to share their thoughts, experiences, or to take a specific action. Example: \"Share your dream adventure in the comments! 🌍✈️\"\n\n5. **Hashtags**: Conclude with a selection of relevant hashtags. Place them at the end of your post to maintain focus on your message." 
+ } + } + }, + "type": "TextInput" + }, + "dragging": false, + "height": 234, + "id": "TextInput-VURDN", + "position": { + "x": 5671.190001393486, + "y": 3422.371192525402 + }, + "positionAbsolute": { + "x": 5671.190001393486, + "y": 3422.371192525402 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "description": "Generates text using OpenAI LLMs.", + "display_name": "OpenAI", + "id": "OpenAIModel-e0DLW", + "node": { + "base_classes": ["LanguageModel", "Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Generates text using OpenAI LLMs.", + "display_name": "OpenAI", + "documentation": "", + "edited": false, + "field_order": [ + "input_value", + "system_message", + "stream", + "max_tokens", + "model_kwargs", + "json_mode", + "output_schema", + "model_name", + "openai_api_base", + "api_key", + "temperature", + "seed", + "output_parser" + ], + "frozen": false, + "icon": "OpenAI", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Text", + "method": "text_response", + "name": "text_output", + "required_inputs": [], + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + }, + { + "cache": true, + "display_name": "Language Model", + "method": "build_model", + "name": "model_output", + "required_inputs": [], + "selected": "LanguageModel", + "types": ["LanguageModel"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "api_key": { + "_input_type": "SecretStrInput", + "advanced": false, + "display_name": "OpenAI API Key", + "dynamic": false, + "info": "The OpenAI API Key to use for the OpenAI model.", + "input_types": ["Message"], + "load_from_db": true, + "name": "api_key", + "password": true, + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "OPENAI_API_KEY" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "import operator\nfrom functools import reduce\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langflow.inputs import BoolInput, DictInput, DropdownInput, FloatInput, IntInput, SecretStrInput, StrInput\nfrom langflow.inputs.inputs import HandleInput\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = [\n *LCModelComponent._base_inputs,\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(\n name=\"model_kwargs\",\n display_name=\"Model Kwargs\",\n advanced=True,\n info=\"Additional keyword arguments to pass to the model.\",\n ),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. \"\n \"You must pass the word JSON in the prompt. \"\n \"If left blank, JSON mode will be disabled. [DEPRECATED]\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. \"\n \"Defaults to https://api.openai.com/v1. \"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n HandleInput(\n name=\"output_parser\",\n display_name=\"Output Parser\",\n info=\"The parser to use to parse the output of the model\",\n advanced=True,\n input_types=[\"OutputParser\"],\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n api_key = SecretStr(openai_api_key).get_secret_value() if openai_api_key else None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature if temperature is not None else 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\")\n else:\n output = output.bind(response_format={\"type\": \"json_object\"})\n\n return output\n\n def _get_exception_message(self, e: Exception):\n \"\"\"Get a message from an OpenAI exception.\n\n Args:\n e (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n try:\n from openai import BadRequestError\n except ImportError:\n return None\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\")\n if message:\n return message\n return None\n" + }, + "input_value": { + "_input_type": "MessageInput", + "advanced": false, + "display_name": "Input", + "dynamic": false, + "info": "", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + 
"trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "json_mode": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "JSON Mode", + "dynamic": false, + "info": "If True, it will output JSON regardless of passing a schema.", + "list": false, + "name": "json_mode", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + }, + "max_tokens": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Max Tokens", + "dynamic": false, + "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.", + "list": false, + "name": "max_tokens", + "placeholder": "", + "range_spec": { + "max": 128000, + "min": 0, + "step": 0.1, + "step_type": "float" + }, + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": "" + }, + "model_kwargs": { + "_input_type": "DictInput", + "advanced": true, + "display_name": "Model Kwargs", + "dynamic": false, + "info": "Additional keyword arguments to pass to the model.", + "list": false, + "name": "model_kwargs", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "model_name": { + "_input_type": "DropdownInput", + "advanced": false, + "combobox": false, + "display_name": "Model Name", + "dynamic": false, + "info": "", + "name": "model_name", + "options": [ + "gpt-4o-mini", + "gpt-4o", + "gpt-4-turbo", + "gpt-4-turbo-preview", + "gpt-4", + "gpt-3.5-turbo", + "gpt-3.5-turbo-0125" + ], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "gpt-4o-mini" + }, + "openai_api_base": { + "_input_type": "StrInput", + "advanced": true, + "display_name": "OpenAI API Base", + "dynamic": false, + "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.", + "list": false, + "load_from_db": false, + "name": "openai_api_base", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "output_parser": { + "_input_type": "HandleInput", + "advanced": true, + "display_name": "Output Parser", + "dynamic": false, + "info": "The parser to use to parse the output of the model", + "input_types": ["OutputParser"], + "list": false, + "name": "output_parser", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "output_schema": { + "_input_type": "DictInput", + "advanced": true, + "display_name": "Schema", + "dynamic": false, + "info": "The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled. 
[DEPRECATED]", + "list": true, + "name": "output_schema", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "seed": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Seed", + "dynamic": false, + "info": "The seed controls the reproducibility of the job.", + "list": false, + "name": "seed", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 1 + }, + "stream": { + "_input_type": "BoolInput", + "advanced": false, + "display_name": "Stream", + "dynamic": false, + "info": "Stream the response from the model. Streaming works only in Chat.", + "list": false, + "name": "stream", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + }, + "system_message": { + "_input_type": "MessageTextInput", + "advanced": false, + "display_name": "System Message", + "dynamic": false, + "info": "System message to pass to the model.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "system_message", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "temperature": { + "_input_type": "FloatInput", + "advanced": false, + "display_name": "Temperature", + "dynamic": false, + "info": "", + "list": false, + "name": "temperature", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "float", + "value": 0.1 + } + }, + "tool_mode": false + }, + "type": "OpenAIModel" + }, + "dragging": false, + "height": 543, + "id": "OpenAIModel-e0DLW", + "position": { + "x": 6427.182886017446, + "y": 2891.554378731566 + }, + "positionAbsolute": { + "x": 6427.182886017446, + "y": 2891.554378731566 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt", + "id": "Prompt-0R6oq", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": { + "template": ["post"] + }, + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt", + "documentation": "", + "edited": false, + "field_order": ["template"], + "frozen": false, + "icon": "prompts", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Prompt Message", + "method": "build_prompt", + "name": "prompt", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.inputs.inputs import DefaultPromptField\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import 
update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(self) -> Message:\n prompt = Message.from_template(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def _update_template(self, frontend_node: dict):\n prompt_template = frontend_node[\"template\"][\"template\"][\"value\"]\n custom_fields = frontend_node[\"custom_fields\"]\n frontend_node_template = frontend_node[\"template\"]\n _ = process_prompt_template(\n template=prompt_template,\n name=\"template\",\n custom_fields=custom_fields,\n frontend_node_template=frontend_node_template,\n )\n return frontend_node\n\n def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"This function is called after the code validation is done.\"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n # Kept it duplicated for backwards compatibility\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n\n def _get_fallback_input(self, **kwargs):\n return DefaultPromptField(**kwargs)\n" + }, + "post": { + "advanced": false, + "display_name": "post", + "dynamic": false, + "field_type": "str", + "fileTypes": [], + "file_path": "", + "info": "", + "input_types": ["Message", "Text"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "post", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "" + }, + "template": { + "_input_type": "PromptInput", + "advanced": false, + "display_name": "Template", + "dynamic": false, + "info": "", + "list": false, + "load_from_db": false, + "name": "template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "type": "prompt", + "value": "Based on the following post: \n\n{post} \n\nCraft a compelling prompt for image generator involving a blend of specificity, creativity, and clarity. Begin with a clear, concise description of the subject or scene you envision, incorporating specific details such as the setting, mood, and any key elements that are crucial to your vision. It's important to use descriptive language that conveys not just the visual aspects but also the emotional tone or atmosphere you wish to capture. Modifiers that specify the style, technique, or artistic influences can greatly enhance the prompt, guiding the AI to produce results that align closely with your expectations. Additionally, consider including any particular textures, lighting styles, or perspectives that will help refine the image to your liking. 
The goal is to provide Leonardo AI with a well-rounded, detailed description that leaves little room for ambiguity, enabling it to generate an image that closely matches your request.\n\nA good prompt should read like a brief to an artist, containing all the necessary information but leaving enough creative freedom for the AI to work effectively. It's a delicate balance between being overly prescriptive and too vague. The inclusion of what to avoid, using negative prompts, can also be helpful in steering the AI away from undesired outcomes. Remember, the effectiveness of a prompt often improves with experimentation and iteration, refining your approach based on the results you receive.\n\nExample 1: \"Create a digital painting of a serene lakeside at dusk, reflecting the vibrant hues of the sunset. The scene should be framed by weeping willows, with a lone wooden rowboat gently bobbing on the water's surface. Aim for a realistic style with a touch of impressionism, focusing on the interplay of light and shadow.\"\n\nExample 2: \"Illustrate a bustling medieval marketplace scene, vibrant and full of life, set within a walled city. Include diverse merchants, from a blacksmith to a spice trader, and townsfolk in period attire. The artwork should capture the dynamic energy of the market, with attention to historical accuracy and rich, earthy colors. Opt for a detailed, digital illustration style that brings out the textures of fabrics, metals, and natural elements.\"" + } + }, + "tool_mode": false + }, + "type": "Prompt" + }, + "dragging": false, + "height": 347, + "id": "Prompt-0R6oq", + "position": { + "x": 6786.650693383261, + "y": 3042.4668667721307 + }, + "positionAbsolute": { + "x": 6786.650693383261, + "y": 3042.4668667721307 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "description": "Display a chat message in the Playground.", + "display_name": "Chat Output", + "id": "ChatOutput-1gPMj", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Display a chat message in the Playground.", + "display_name": "Chat Output", + "documentation": "", + "edited": false, + "field_order": [ + "input_value", + "should_store_message", + "sender", + "sender_name", + "session_id", + "data_template", + "background_color", + "chat_icon", + "text_color" + ], + "frozen": false, + "icon": "MessagesSquare", + "legacy": false, + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Message", + "method": "message_response", + "name": "message", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "background_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Background Color", + "dynamic": false, + "info": "The background color of the icon.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "background_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "chat_icon": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Icon", + "dynamic": false, + "info": "The icon of the message.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "chat_icon", + "placeholder": "", + "required": false, + "show": 
true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageInput, MessageTextInput, Output\nfrom langflow.schema.message import Message\nfrom langflow.schema.properties import Source\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.\",\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(\n display_name=\"Message\",\n name=\"message\",\n method=\"message_response\",\n ),\n ]\n\n def _build_source(self, _id: str | None, display_name: str | None, source: str | None) -> Source:\n source_dict = {}\n if _id:\n source_dict[\"id\"] = _id\n if display_name:\n source_dict[\"display_name\"] = display_name\n if source:\n source_dict[\"source\"] = source\n return Source(**source_dict)\n\n def message_response(self) -> Message:\n _source, _icon, _display_name, _source_id = self.get_properties_from_source_component()\n _background_color = self.background_color\n _text_color = self.text_color\n if self.chat_icon:\n _icon = self.chat_icon\n message = self.input_value if isinstance(self.input_value, Message) else Message(text=self.input_value)\n message.sender = self.sender\n message.sender_name = self.sender_name\n message.session_id = self.session_id\n message.flow_id = self.graph.flow_id if hasattr(self, \"graph\") else None\n message.properties.source = self._build_source(_source_id, _display_name, _source)\n message.properties.icon = _icon\n message.properties.background_color = _background_color\n message.properties.text_color = _text_color\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n" + }, + "data_template": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Data Template", + "dynamic": false, + "info": "Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "data_template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "{text}" + }, + "input_value": { + "_input_type": "MessageInput", + "advanced": false, + "display_name": "Text", + "dynamic": false, + "info": "Message to be passed as output.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "sender": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Sender Type", + "dynamic": false, + "info": "Type of sender.", + "name": "sender", + "options": ["Machine", "User"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "Machine" + }, + "sender_name": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Sender Name", + "dynamic": false, + "info": "Name of the sender.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "sender_name", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "AI" + }, + "session_id": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Session ID", + "dynamic": false, + "info": "The session ID of the chat. 
If empty, the current session ID parameter will be used.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "session_id", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "should_store_message": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Store Messages", + "dynamic": false, + "info": "Store the message in the history.", + "list": false, + "name": "should_store_message", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "text_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Text Color", + "dynamic": false, + "info": "The text color of the name", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "text_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + } + }, + "tool_mode": false + }, + "type": "ChatOutput" + }, + "dragging": false, + "height": 234, + "id": "ChatOutput-1gPMj", + "position": { + "x": 7980.617825443558, + "y": 3377.2219674389726 + }, + "positionAbsolute": { + "x": 7980.617825443558, + "y": 3377.2219674389726 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "description": "Define the agent's instructions, then enter a task to complete using tools.", + "display_name": "Agent", + "id": "Agent-9Wf58", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Define the agent's instructions, then enter a task to complete using tools.", + "display_name": "Agent", + "documentation": "", + "edited": false, + "field_order": [ + "agent_llm", + "max_tokens", + "model_kwargs", + "json_mode", + "output_schema", + "model_name", + "openai_api_base", + "api_key", + "temperature", + "seed", + "output_parser", + "system_prompt", + "tools", + "input_value", + "handle_parsing_errors", + "verbose", + "max_iterations", + "agent_description", + "memory", + "sender", + "sender_name", + "n_messages", + "session_id", + "order", + "template", + "add_current_date_tool" + ], + "frozen": false, + "icon": "bot", + "legacy": false, + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Response", + "method": "message_response", + "name": "response", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "add_current_date_tool": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Add tool Current Date", + "dynamic": false, + "info": "If true, will add a tool to the agent that returns the current date.", + "list": false, + "name": "add_current_date_tool", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "agent_description": { + "_input_type": "MultilineInput", + "advanced": true, + "display_name": "Agent Description", + "dynamic": false, + "info": "The description of the agent. This is only used when in Tool Mode. 
Defaults to 'A helpful assistant with access to the following tools:' and tools are added dynamically.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "agent_description", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "A helpful assistant with access to the following tools:" + }, + "agent_llm": { + "_input_type": "DropdownInput", + "advanced": false, + "combobox": false, + "display_name": "Model Provider", + "dynamic": false, + "info": "The provider of the language model that the agent will use to generate responses.", + "input_types": [], + "name": "agent_llm", + "options": [ + "Amazon Bedrock", + "Anthropic", + "Azure OpenAI", + "Groq", + "NVIDIA", + "OpenAI", + "Custom" + ], + "placeholder": "", + "real_time_refresh": true, + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "OpenAI" + }, + "api_key": { + "_input_type": "SecretStrInput", + "advanced": false, + "display_name": "OpenAI API Key", + "dynamic": false, + "info": "The OpenAI API Key to use for the OpenAI model.", + "input_types": ["Message"], + "load_from_db": true, + "name": "api_key", + "password": true, + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "OPENAI_API_KEY" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langchain_core.tools import StructuredTool\n\nfrom langflow.base.agents.agent import LCToolsAgentComponent\nfrom langflow.base.models.model_input_constants import ALL_PROVIDER_FIELDS, MODEL_PROVIDERS_DICT\nfrom langflow.base.models.model_utils import get_model_name\nfrom langflow.components.helpers import CurrentDateComponent\nfrom langflow.components.helpers.memory import MemoryComponent\nfrom langflow.components.langchain_utilities.tool_calling import ToolCallingAgentComponent\nfrom langflow.io import BoolInput, DropdownInput, MultilineInput, Output\nfrom langflow.schema.dotdict import dotdict\nfrom langflow.schema.message import Message\n\n\ndef set_advanced_true(component_input):\n component_input.advanced = True\n return component_input\n\n\nclass AgentComponent(ToolCallingAgentComponent):\n display_name: str = \"Agent\"\n description: str = \"Define the agent's instructions, then enter a task to complete using tools.\"\n icon = \"bot\"\n beta = False\n name = \"Agent\"\n\n memory_inputs = [set_advanced_true(component_input) for component_input in MemoryComponent().inputs]\n\n inputs = [\n DropdownInput(\n name=\"agent_llm\",\n display_name=\"Model Provider\",\n info=\"The provider of the language model that the agent will use to generate responses.\",\n options=[*sorted(MODEL_PROVIDERS_DICT.keys()), \"Custom\"],\n value=\"OpenAI\",\n real_time_refresh=True,\n input_types=[],\n ),\n *MODEL_PROVIDERS_DICT[\"OpenAI\"][\"inputs\"],\n MultilineInput(\n name=\"system_prompt\",\n display_name=\"Agent Instructions\",\n info=\"System Prompt: Initial instructions and context provided to guide the agent's behavior.\",\n value=\"You are a helpful assistant that can use tools to answer questions and perform 
tasks.\",\n advanced=False,\n ),\n *LCToolsAgentComponent._base_inputs,\n *memory_inputs,\n BoolInput(\n name=\"add_current_date_tool\",\n display_name=\"Add tool Current Date\",\n advanced=True,\n info=\"If true, will add a tool to the agent that returns the current date.\",\n value=True,\n ),\n ]\n outputs = [Output(name=\"response\", display_name=\"Response\", method=\"message_response\")]\n\n async def message_response(self) -> Message:\n llm_model, display_name = self.get_llm()\n self.model_name = get_model_name(llm_model, display_name=display_name)\n if llm_model is None:\n msg = \"No language model selected\"\n raise ValueError(msg)\n self.chat_history = self.get_memory_data()\n\n if self.add_current_date_tool:\n if not isinstance(self.tools, list): # type: ignore[has-type]\n self.tools = []\n # Convert CurrentDateComponent to a StructuredTool\n current_date_tool = CurrentDateComponent().to_toolkit()[0]\n if isinstance(current_date_tool, StructuredTool):\n self.tools.append(current_date_tool)\n else:\n msg = \"CurrentDateComponent must be converted to a StructuredTool\"\n raise ValueError(msg)\n\n if not self.tools:\n msg = \"Tools are required to run the agent.\"\n raise ValueError(msg)\n self.set(\n llm=llm_model,\n tools=self.tools,\n chat_history=self.chat_history,\n input_value=self.input_value,\n system_prompt=self.system_prompt,\n )\n agent = self.create_agent_runnable()\n return await self.run_agent(agent)\n\n def get_memory_data(self):\n memory_kwargs = {\n component_input.name: getattr(self, f\"{component_input.name}\") for component_input in self.memory_inputs\n }\n\n return MemoryComponent().set(**memory_kwargs).retrieve_messages()\n\n def get_llm(self):\n if isinstance(self.agent_llm, str):\n try:\n provider_info = MODEL_PROVIDERS_DICT.get(self.agent_llm)\n if provider_info:\n component_class = provider_info.get(\"component_class\")\n display_name = component_class.display_name\n inputs = provider_info.get(\"inputs\")\n prefix = provider_info.get(\"prefix\", \"\")\n return self._build_llm_model(component_class, inputs, prefix), display_name\n except Exception as e:\n msg = f\"Error building {self.agent_llm} language model\"\n raise ValueError(msg) from e\n return self.agent_llm, None\n\n def _build_llm_model(self, component, inputs, prefix=\"\"):\n model_kwargs = {input_.name: getattr(self, f\"{prefix}{input_.name}\") for input_ in inputs}\n return component.set(**model_kwargs).build_model()\n\n def delete_fields(self, build_config: dotdict, fields: dict | list[str]) -> None:\n \"\"\"Delete specified fields from build_config.\"\"\"\n for field in fields:\n build_config.pop(field, None)\n\n def update_input_types(self, build_config: dotdict) -> dotdict:\n \"\"\"Update input types for all fields in build_config.\"\"\"\n for key, value in build_config.items():\n if isinstance(value, dict):\n if value.get(\"input_types\") is None:\n build_config[key][\"input_types\"] = []\n elif hasattr(value, \"input_types\") and value.input_types is None:\n value.input_types = []\n return build_config\n\n def update_build_config(self, build_config: dotdict, field_value: str, field_name: str | None = None) -> dotdict:\n # Iterate over all providers in the MODEL_PROVIDERS_DICT\n # Existing logic for updating build_config\n if field_name == \"agent_llm\":\n provider_info = MODEL_PROVIDERS_DICT.get(field_value)\n if provider_info:\n component_class = provider_info.get(\"component_class\")\n if component_class and hasattr(component_class, \"update_build_config\"):\n # Call the component 
class's update_build_config method\n build_config = component_class.update_build_config(build_config, field_value, field_name)\n\n provider_configs: dict[str, tuple[dict, list[dict]]] = {\n provider: (\n MODEL_PROVIDERS_DICT[provider][\"fields\"],\n [\n MODEL_PROVIDERS_DICT[other_provider][\"fields\"]\n for other_provider in MODEL_PROVIDERS_DICT\n if other_provider != provider\n ],\n )\n for provider in MODEL_PROVIDERS_DICT\n }\n if field_value in provider_configs:\n fields_to_add, fields_to_delete = provider_configs[field_value]\n\n # Delete fields from other providers\n for fields in fields_to_delete:\n self.delete_fields(build_config, fields)\n\n # Add provider-specific fields\n if field_value == \"OpenAI\" and not any(field in build_config for field in fields_to_add):\n build_config.update(fields_to_add)\n else:\n build_config.update(fields_to_add)\n # Reset input types for agent_llm\n build_config[\"agent_llm\"][\"input_types\"] = []\n elif field_value == \"Custom\":\n # Delete all provider fields\n self.delete_fields(build_config, ALL_PROVIDER_FIELDS)\n # Update with custom component\n custom_component = DropdownInput(\n name=\"agent_llm\",\n display_name=\"Language Model\",\n options=[*sorted(MODEL_PROVIDERS_DICT.keys()), \"Custom\"],\n value=\"Custom\",\n real_time_refresh=True,\n input_types=[\"LanguageModel\"],\n )\n build_config.update({\"agent_llm\": custom_component.to_dict()})\n # Update input types for all fields\n build_config = self.update_input_types(build_config)\n\n # Validate required keys\n default_keys = [\n \"code\",\n \"_type\",\n \"agent_llm\",\n \"tools\",\n \"input_value\",\n \"add_current_date_tool\",\n \"system_prompt\",\n \"agent_description\",\n \"max_iterations\",\n \"handle_parsing_errors\",\n \"verbose\",\n ]\n missing_keys = [key for key in default_keys if key not in build_config]\n if missing_keys:\n msg = f\"Missing required keys in build_config: {missing_keys}\"\n raise ValueError(msg)\n if isinstance(self.agent_llm, str) and self.agent_llm in MODEL_PROVIDERS_DICT:\n provider_info = MODEL_PROVIDERS_DICT.get(self.agent_llm)\n if provider_info:\n component_class = provider_info.get(\"component_class\")\n prefix = provider_info.get(\"prefix\")\n if component_class and hasattr(component_class, \"update_build_config\"):\n # Call each component class's update_build_config method\n # remove the prefix from the field_name\n if isinstance(field_name, str) and isinstance(prefix, str):\n field_name = field_name.replace(prefix, \"\")\n build_config = component_class.update_build_config(build_config, field_value, field_name)\n\n return build_config\n" + }, + "handle_parsing_errors": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Handle Parse Errors", + "dynamic": false, + "info": "Should the Agent fix errors when reading user input for better processing?", + "list": false, + "name": "handle_parsing_errors", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "input_value": { + "_input_type": "MessageTextInput", + "advanced": false, + "display_name": "Input", + "dynamic": false, + "info": "The input provided by the user for the agent to process.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": true, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + 
"json_mode": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "JSON Mode", + "dynamic": false, + "info": "If True, it will output JSON regardless of passing a schema.", + "list": false, + "name": "json_mode", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + }, + "max_iterations": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Max Iterations", + "dynamic": false, + "info": "The maximum number of attempts the agent can make to complete its task before it stops.", + "list": false, + "name": "max_iterations", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 15 + }, + "max_tokens": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Max Tokens", + "dynamic": false, + "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.", + "list": false, + "name": "max_tokens", + "placeholder": "", + "range_spec": { + "max": 128000, + "min": 0, + "step": 0.1, + "step_type": "float" + }, + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": "" + }, + "memory": { + "_input_type": "HandleInput", + "advanced": true, + "display_name": "External Memory", + "dynamic": false, + "info": "Retrieve messages from an external memory. If empty, it will use the Langflow tables.", + "input_types": ["BaseChatMessageHistory"], + "list": false, + "name": "memory", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "model_kwargs": { + "_input_type": "DictInput", + "advanced": true, + "display_name": "Model Kwargs", + "dynamic": false, + "info": "Additional keyword arguments to pass to the model.", + "list": false, + "name": "model_kwargs", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "model_name": { + "_input_type": "DropdownInput", + "advanced": false, + "combobox": false, + "display_name": "Model Name", + "dynamic": false, + "info": "", + "name": "model_name", + "options": [ + "gpt-4o-mini", + "gpt-4o", + "gpt-4-turbo", + "gpt-4-turbo-preview", + "gpt-4", + "gpt-3.5-turbo", + "gpt-3.5-turbo-0125" + ], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "gpt-4o-mini" + }, + "n_messages": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Number of Messages", + "dynamic": false, + "info": "Number of messages to retrieve.", + "list": false, + "name": "n_messages", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 100 + }, + "openai_api_base": { + "_input_type": "StrInput", + "advanced": true, + "display_name": "OpenAI API Base", + "dynamic": false, + "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. 
You can change this to use other APIs like JinaChat, LocalAI and Prem.", + "list": false, + "load_from_db": false, + "name": "openai_api_base", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "order": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Order", + "dynamic": false, + "info": "Order of the messages.", + "name": "order", + "options": ["Ascending", "Descending"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "Ascending" + }, + "output_parser": { + "_input_type": "HandleInput", + "advanced": true, + "display_name": "Output Parser", + "dynamic": false, + "info": "The parser to use to parse the output of the model", + "input_types": ["OutputParser"], + "list": false, + "name": "output_parser", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "output_schema": { + "_input_type": "DictInput", + "advanced": true, + "display_name": "Schema", + "dynamic": false, + "info": "The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled. [DEPRECATED]", + "list": true, + "name": "output_schema", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "seed": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Seed", + "dynamic": false, + "info": "The seed controls the reproducibility of the job.", + "list": false, + "name": "seed", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 1 + }, + "sender": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Sender Type", + "dynamic": false, + "info": "Filter by sender type.", + "name": "sender", + "options": ["Machine", "User", "Machine and User"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "Machine and User" + }, + "sender_name": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Sender Name", + "dynamic": false, + "info": "Filter by sender name.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "sender_name", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "session_id": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Session ID", + "dynamic": false, + "info": "The session ID of the chat. 
If empty, the current session ID parameter will be used.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "session_id", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "system_prompt": { + "_input_type": "MultilineInput", + "advanced": false, + "display_name": "Agent Instructions", + "dynamic": false, + "info": "System Prompt: Initial instructions and context provided to guide the agent's behavior.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "system_prompt", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "You are a helpful AI assistant. Use the following information from a web search to answer the user's question. If the search results don't contain relevant information, say so and offer to help with something else.\n\n{input}" + }, + "temperature": { + "_input_type": "FloatInput", + "advanced": true, + "display_name": "Temperature", + "dynamic": false, + "info": "", + "list": false, + "name": "temperature", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "float", + "value": 0.1 + }, + "template": { + "_input_type": "MultilineInput", + "advanced": true, + "display_name": "Template", + "dynamic": false, + "info": "The template to use for formatting the data. It can contain the keys {text}, {sender} or any other key in the message data.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "{sender_name}: {text}" + }, + "tools": { + "_input_type": "HandleInput", + "advanced": false, + "display_name": "Tools", + "dynamic": false, + "info": "These are the tools that the agent can use to help with tasks.", + "input_types": ["Tool", "BaseTool", "StructuredTool"], + "list": true, + "name": "tools", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "verbose": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Verbose", + "dynamic": false, + "info": "", + "list": false, + "name": "verbose", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + } + }, + "tool_mode": false + }, + "type": "Agent" + }, + "dragging": false, + "height": 650, + "id": "Agent-9Wf58", + "position": { + "x": 5665.465212822881, + "y": 2760.0819124193113 + }, + "positionAbsolute": { + "x": 5665.465212822881, + "y": 2760.0819124193113 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "description": "**Tavily AI** is a search engine optimized for LLMs and RAG, aimed at efficient, quick, and persistent search results. 
It can be used independently or as an agent tool.\n\nNote: Check 'Advanced' for all options.\n", + "display_name": "Tavily AI Search", + "id": "TavilyAISearch-AN1Hv", + "node": { + "base_classes": ["Data", "Tool"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "**Tavily AI** is a search engine optimized for LLMs and RAG, aimed at efficient, quick, and persistent search results. It can be used independently or as an agent tool.\n\nNote: Check 'Advanced' for all options.\n", + "display_name": "Tavily AI Search", + "documentation": "https://docs.tavily.com/", + "edited": false, + "field_order": [ + "api_key", + "query", + "search_depth", + "topic", + "max_results", + "include_images", + "include_answer" + ], + "frozen": false, + "icon": "TavilyIcon", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Data", + "method": "run_model", + "name": "api_run_model", + "required_inputs": ["api_key"], + "selected": "Data", + "types": ["Data"], + "value": "__UNDEFINED__" + }, + { + "cache": true, + "display_name": "Tool", + "method": "build_tool", + "name": "api_build_tool", + "required_inputs": ["api_key"], + "selected": "Tool", + "types": ["Tool"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "api_key": { + "_input_type": "SecretStrInput", + "advanced": false, + "display_name": "Tavily API Key", + "dynamic": false, + "info": "Your Tavily API Key.", + "input_types": ["Message"], + "load_from_db": true, + "name": "api_key", + "password": true, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "str", + "value": "" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from enum import Enum\n\nimport httpx\nfrom langchain.tools import StructuredTool\nfrom langchain_core.tools import ToolException\nfrom loguru import logger\nfrom pydantic import BaseModel, Field\n\nfrom langflow.base.langchain_utilities.model import LCToolComponent\nfrom langflow.field_typing import Tool\nfrom langflow.inputs import BoolInput, DropdownInput, IntInput, MessageTextInput, SecretStrInput\nfrom langflow.schema import Data\n\n\nclass TavilySearchDepth(Enum):\n BASIC = \"basic\"\n ADVANCED = \"advanced\"\n\n\nclass TavilySearchTopic(Enum):\n GENERAL = \"general\"\n NEWS = \"news\"\n\n\nclass TavilySearchSchema(BaseModel):\n query: str = Field(..., description=\"The search query you want to execute with Tavily.\")\n search_depth: TavilySearchDepth = Field(TavilySearchDepth.BASIC, description=\"The depth of the search.\")\n topic: TavilySearchTopic = Field(TavilySearchTopic.GENERAL, description=\"The category of the search.\")\n max_results: int = Field(5, description=\"The maximum number of search results to return.\")\n include_images: bool = Field(default=False, description=\"Include a list of query-related images in the response.\")\n include_answer: bool = Field(default=False, description=\"Include a short answer to original query.\")\n\n\nclass TavilySearchToolComponent(LCToolComponent):\n display_name = \"Tavily AI Search\"\n description = \"\"\"**Tavily AI** is a search engine optimized for LLMs and RAG, \\\n aimed at efficient, quick, and 
persistent search results. It can be used independently or as an agent tool.\n\nNote: Check 'Advanced' for all options.\n\"\"\"\n icon = \"TavilyIcon\"\n name = \"TavilyAISearch\"\n documentation = \"https://docs.tavily.com/\"\n\n inputs = [\n SecretStrInput(\n name=\"api_key\",\n display_name=\"Tavily API Key\",\n required=True,\n info=\"Your Tavily API Key.\",\n ),\n MessageTextInput(\n name=\"query\",\n display_name=\"Search Query\",\n info=\"The search query you want to execute with Tavily.\",\n ),\n DropdownInput(\n name=\"search_depth\",\n display_name=\"Search Depth\",\n info=\"The depth of the search.\",\n options=list(TavilySearchDepth),\n value=TavilySearchDepth.ADVANCED,\n advanced=True,\n ),\n DropdownInput(\n name=\"topic\",\n display_name=\"Search Topic\",\n info=\"The category of the search.\",\n options=list(TavilySearchTopic),\n value=TavilySearchTopic.GENERAL,\n advanced=True,\n ),\n IntInput(\n name=\"max_results\",\n display_name=\"Max Results\",\n info=\"The maximum number of search results to return.\",\n value=5,\n advanced=True,\n ),\n BoolInput(\n name=\"include_images\",\n display_name=\"Include Images\",\n info=\"Include a list of query-related images in the response.\",\n value=True,\n advanced=True,\n ),\n BoolInput(\n name=\"include_answer\",\n display_name=\"Include Answer\",\n info=\"Include a short answer to original query.\",\n value=True,\n advanced=True,\n ),\n ]\n\n def run_model(self) -> list[Data]:\n # Convert string values to enum instances with validation\n try:\n search_depth_enum = (\n self.search_depth\n if isinstance(self.search_depth, TavilySearchDepth)\n else TavilySearchDepth(str(self.search_depth).lower())\n )\n except ValueError as e:\n error_message = f\"Invalid search depth value: {e!s}\"\n self.status = error_message\n return [Data(data={\"error\": error_message})]\n\n try:\n topic_enum = (\n self.topic if isinstance(self.topic, TavilySearchTopic) else TavilySearchTopic(str(self.topic).lower())\n )\n except ValueError as e:\n error_message = f\"Invalid topic value: {e!s}\"\n self.status = error_message\n return [Data(data={\"error\": error_message})]\n\n return self._tavily_search(\n self.query,\n search_depth=search_depth_enum,\n topic=topic_enum,\n max_results=self.max_results,\n include_images=self.include_images,\n include_answer=self.include_answer,\n )\n\n def build_tool(self) -> Tool:\n return StructuredTool.from_function(\n name=\"tavily_search\",\n description=\"Perform a web search using the Tavily API.\",\n func=self._tavily_search,\n args_schema=TavilySearchSchema,\n )\n\n def _tavily_search(\n self,\n query: str,\n *,\n search_depth: TavilySearchDepth = TavilySearchDepth.BASIC,\n topic: TavilySearchTopic = TavilySearchTopic.GENERAL,\n max_results: int = 5,\n include_images: bool = False,\n include_answer: bool = False,\n ) -> list[Data]:\n # Validate enum values\n if not isinstance(search_depth, TavilySearchDepth):\n msg = f\"Invalid search_depth value: {search_depth}\"\n raise TypeError(msg)\n if not isinstance(topic, TavilySearchTopic):\n msg = f\"Invalid topic value: {topic}\"\n raise TypeError(msg)\n\n try:\n url = \"https://api.tavily.com/search\"\n headers = {\n \"content-type\": \"application/json\",\n \"accept\": \"application/json\",\n }\n payload = {\n \"api_key\": self.api_key,\n \"query\": query,\n \"search_depth\": search_depth.value,\n \"topic\": topic.value,\n \"max_results\": max_results,\n \"include_images\": include_images,\n \"include_answer\": include_answer,\n }\n\n with httpx.Client() as client:\n 
response = client.post(url, json=payload, headers=headers)\n\n response.raise_for_status()\n search_results = response.json()\n\n data_results = [\n Data(\n data={\n \"title\": result.get(\"title\"),\n \"url\": result.get(\"url\"),\n \"content\": result.get(\"content\"),\n \"score\": result.get(\"score\"),\n }\n )\n for result in search_results.get(\"results\", [])\n ]\n\n if include_answer and search_results.get(\"answer\"):\n data_results.insert(0, Data(data={\"answer\": search_results[\"answer\"]}))\n\n if include_images and search_results.get(\"images\"):\n data_results.append(Data(data={\"images\": search_results[\"images\"]}))\n\n self.status = data_results # type: ignore[assignment]\n\n except httpx.HTTPStatusError as e:\n error_message = f\"HTTP error: {e.response.status_code} - {e.response.text}\"\n logger.debug(error_message)\n self.status = error_message\n raise ToolException(error_message) from e\n except Exception as e:\n error_message = f\"Unexpected error: {e}\"\n logger.opt(exception=True).debug(\"Error running Tavily Search\")\n self.status = error_message\n raise ToolException(error_message) from e\n return data_results\n" + }, + "include_answer": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Include Answer", + "dynamic": false, + "info": "Include a short answer to original query.", + "list": false, + "name": "include_answer", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "include_images": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Include Images", + "dynamic": false, + "info": "Include a list of query-related images in the response.", + "list": false, + "name": "include_images", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "max_results": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Max Results", + "dynamic": false, + "info": "The maximum number of search results to return.", + "list": false, + "name": "max_results", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 5 + }, + "query": { + "_input_type": "MessageTextInput", + "advanced": false, + "display_name": "Search Query", + "dynamic": false, + "info": "The search query you want to execute with Tavily.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "query", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "search_depth": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Search Depth", + "dynamic": false, + "info": "The depth of the search.", + "load_from_db": false, + "name": "search_depth", + "options": ["basic", "advanced"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "advanced" + }, + "topic": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Search Topic", + "dynamic": false, + "info": "The category of the search.", + "load_from_db": false, + "name": "topic", + "options": ["general", "news"], + "placeholder": "", + "required": false, + "show": true, + 
"title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "general" + } + }, + "tool_mode": false + }, + "type": "TavilyAISearch" + }, + "dragging": false, + "height": 481, + "id": "TavilyAISearch-AN1Hv", + "position": { + "x": 5178.005987190226, + "y": 3496.7214582697484 + }, + "positionAbsolute": { + "x": 5178.005987190226, + "y": 3496.7214582697484 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "description": "Generates text using OpenAI LLMs.", + "display_name": "OpenAI", + "id": "OpenAIModel-XHfFc", + "node": { + "base_classes": ["LanguageModel", "Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Generates text using OpenAI LLMs.", + "display_name": "OpenAI", + "documentation": "", + "edited": false, + "field_order": [ + "input_value", + "system_message", + "stream", + "max_tokens", + "model_kwargs", + "json_mode", + "output_schema", + "model_name", + "openai_api_base", + "api_key", + "temperature", + "seed", + "output_parser" + ], + "frozen": false, + "icon": "OpenAI", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Text", + "method": "text_response", + "name": "text_output", + "required_inputs": [], + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + }, + { + "cache": true, + "display_name": "Language Model", + "method": "build_model", + "name": "model_output", + "required_inputs": [], + "selected": "LanguageModel", + "types": ["LanguageModel"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "api_key": { + "_input_type": "SecretStrInput", + "advanced": false, + "display_name": "OpenAI API Key", + "dynamic": false, + "info": "The OpenAI API Key to use for the OpenAI model.", + "input_types": ["Message"], + "load_from_db": true, + "name": "api_key", + "password": true, + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "OPENAI_API_KEY" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "import operator\nfrom functools import reduce\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langflow.inputs import BoolInput, DictInput, DropdownInput, FloatInput, IntInput, SecretStrInput, StrInput\nfrom langflow.inputs.inputs import HandleInput\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = [\n *LCModelComponent._base_inputs,\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(\n name=\"model_kwargs\",\n display_name=\"Model Kwargs\",\n advanced=True,\n info=\"Additional keyword arguments to pass to the model.\",\n ),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. \"\n \"You must pass the word JSON in the prompt. \"\n \"If left blank, JSON mode will be disabled. [DEPRECATED]\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. \"\n \"Defaults to https://api.openai.com/v1. \"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n HandleInput(\n name=\"output_parser\",\n display_name=\"Output Parser\",\n info=\"The parser to use to parse the output of the model\",\n advanced=True,\n input_types=[\"OutputParser\"],\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n api_key = SecretStr(openai_api_key).get_secret_value() if openai_api_key else None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature if temperature is not None else 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\")\n else:\n output = output.bind(response_format={\"type\": \"json_object\"})\n\n return output\n\n def _get_exception_message(self, e: Exception):\n \"\"\"Get a message from an OpenAI exception.\n\n Args:\n e (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n try:\n from openai import BadRequestError\n except ImportError:\n return None\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\")\n if message:\n return message\n return None\n" + }, + "input_value": { + "_input_type": "MessageInput", + "advanced": false, + "display_name": "Input", + "dynamic": false, + "info": "", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + 
"trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "json_mode": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "JSON Mode", + "dynamic": false, + "info": "If True, it will output JSON regardless of passing a schema.", + "list": false, + "name": "json_mode", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + }, + "max_tokens": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Max Tokens", + "dynamic": false, + "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.", + "list": false, + "name": "max_tokens", + "placeholder": "", + "range_spec": { + "max": 128000, + "min": 0, + "step": 0.1, + "step_type": "float" + }, + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": "" + }, + "model_kwargs": { + "_input_type": "DictInput", + "advanced": true, + "display_name": "Model Kwargs", + "dynamic": false, + "info": "Additional keyword arguments to pass to the model.", + "list": false, + "name": "model_kwargs", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "model_name": { + "_input_type": "DropdownInput", + "advanced": false, + "combobox": false, + "display_name": "Model Name", + "dynamic": false, + "info": "", + "name": "model_name", + "options": [ + "gpt-4o-mini", + "gpt-4o", + "gpt-4-turbo", + "gpt-4-turbo-preview", + "gpt-4", + "gpt-3.5-turbo", + "gpt-3.5-turbo-0125" + ], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "gpt-4o-mini" + }, + "openai_api_base": { + "_input_type": "StrInput", + "advanced": true, + "display_name": "OpenAI API Base", + "dynamic": false, + "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.", + "list": false, + "load_from_db": false, + "name": "openai_api_base", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "output_parser": { + "_input_type": "HandleInput", + "advanced": true, + "display_name": "Output Parser", + "dynamic": false, + "info": "The parser to use to parse the output of the model", + "input_types": ["OutputParser"], + "list": false, + "name": "output_parser", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "output_schema": { + "_input_type": "DictInput", + "advanced": true, + "display_name": "Schema", + "dynamic": false, + "info": "The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled. 
[DEPRECATED]", + "list": true, + "name": "output_schema", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "seed": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Seed", + "dynamic": false, + "info": "The seed controls the reproducibility of the job.", + "list": false, + "name": "seed", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 1 + }, + "stream": { + "_input_type": "BoolInput", + "advanced": false, + "display_name": "Stream", + "dynamic": false, + "info": "Stream the response from the model. Streaming works only in Chat.", + "list": false, + "name": "stream", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + }, + "system_message": { + "_input_type": "MessageTextInput", + "advanced": false, + "display_name": "System Message", + "dynamic": false, + "info": "System message to pass to the model.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "system_message", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "temperature": { + "_input_type": "FloatInput", + "advanced": false, + "display_name": "Temperature", + "dynamic": false, + "info": "", + "list": false, + "name": "temperature", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "float", + "value": 0.1 + } + }, + "tool_mode": false + }, + "type": "OpenAIModel" + }, + "dragging": false, + "height": 543, + "id": "OpenAIModel-XHfFc", + "position": { + "x": 7211.829041441037, + "y": 2938.1023670807563 + }, + "positionAbsolute": { + "x": 7211.829041441037, + "y": 2938.1023670807563 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt", + "id": "Prompt-Z1RBN", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": { + "template": ["post", "image_description"] + }, + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt", + "documentation": "", + "edited": false, + "field_order": ["template"], + "frozen": false, + "icon": "prompts", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Prompt Message", + "method": "build_prompt", + "name": "prompt", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.inputs.inputs import DefaultPromptField\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import 
update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(self) -> Message:\n prompt = Message.from_template(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def _update_template(self, frontend_node: dict):\n prompt_template = frontend_node[\"template\"][\"template\"][\"value\"]\n custom_fields = frontend_node[\"custom_fields\"]\n frontend_node_template = frontend_node[\"template\"]\n _ = process_prompt_template(\n template=prompt_template,\n name=\"template\",\n custom_fields=custom_fields,\n frontend_node_template=frontend_node_template,\n )\n return frontend_node\n\n def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"This function is called after the code validation is done.\"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n # Kept it duplicated for backwards compatibility\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n\n def _get_fallback_input(self, **kwargs):\n return DefaultPromptField(**kwargs)\n" + }, + "image_description": { + "advanced": false, + "display_name": "image_description", + "dynamic": false, + "field_type": "str", + "fileTypes": [], + "file_path": "", + "info": "", + "input_types": ["Message", "Text"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "image_description", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "" + }, + "post": { + "advanced": false, + "display_name": "post", + "dynamic": false, + "field_type": "str", + "fileTypes": [], + "file_path": "", + "info": "", + "input_types": ["Message", "Text"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "post", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "" + }, + "template": { + "_input_type": "PromptInput", + "advanced": false, + "display_name": "Template", + "dynamic": false, + "info": "", + "list": false, + "load_from_db": false, + "name": "template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "type": "prompt", + "value": "{post}\n \n\n{image_description} " + } + }, + "tool_mode": false + }, + "type": "Prompt" + }, + "dragging": false, + "height": 433, + "id": "Prompt-Z1RBN", + "position": { + "x": 7613.837241084599, + "y": 3139.8282595890087 + }, + "positionAbsolute": { + "x": 7613.837241084599, + "y": 3139.8282595890087 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "id": "note-Jr7lC", + "node": { + "description": "# Instagram Copywriter 
\n\nWelcome to the Instagram Copywriter! This flow helps you create compelling Instagram posts with AI-generated content and image prompts.\n\n## Instructions\n1. Enter Your Topic\n - In the Chat Input, enter a brief description of the topic you want to post about.\n - Example: \"Create a post about meditation and its benefits\"\n\n2. Review the Generated Content\n - The flow will use AI to research your topic and generate a formatted Instagram post.\n - The post will include an opening line, main content, emojis, a call-to-action, and hashtags.\n\n3. Check the Image Prompt\n - The flow will also generate a detailed image prompt based on your post content.\n - This prompt can be used with image generation tools to create a matching visual.\n\n4. Copy the Final Output\n - The Chat Output will display the complete Instagram post text followed by the image generation prompt.\n - Copy this output to use in your Instagram content creation process.\n\n5. Refine if Needed\n - If you're not satisfied with the result, you can adjust the input or modify the OpenAI model settings for different outputs.\n\nRemember: Keep your initial topic input clear and concise for best results! 🎨✨", + "display_name": "", + "documentation": "", + "template": { + "backgroundColor": "amber" + } + }, + "type": "note" + }, + "dragging": false, + "height": 648, + "id": "note-Jr7lC", + "position": { + "x": 4492.051129290571, + "y": 2746.336592524463 + }, + "positionAbsolute": { + "x": 4560.051129290571, + "y": 2746.336592524463 + }, + "resizing": false, + "selected": false, + "style": { + "height": 648, + "width": 554 + }, + "type": "noteNode", + "width": 554 + }, + { + "data": { + "id": "note-tWoba", + "node": { + "description": "**Text Input (Guidelines Prompt)**\n - NOTE: \"Contains Instagram post formatting rules. Don't modify this component as it maintains format consistency.\"\n - Maintains fixed guidelines for:\n * Opening structure\n * Main content\n * Emoji usage\n * Call to Action (CTA)\n * Hashtags\n\n4. 
**First Prompt + OpenAI Sequence**\n - NOTE: \"Generates initial post content following Instagram guidelines\"\n - Settings:\n * Temperature: 0.7 (good balance between creativity and consistency)\n * Input: Receives research context\n * Output: Generates formatted post text\n\n", + "display_name": "", + "documentation": "", + "template": { + "backgroundColor": "blue" + } + }, + "type": "note" + }, + "dragging": false, + "height": 325, + "id": "note-tWoba", + "position": { + "x": 5667.476249937603, + "y": 3644.9055828357396 + }, + "positionAbsolute": { + "x": 5667.476249937603, + "y": 3644.9055828357396 + }, + "resizing": false, + "selected": false, + "type": "noteNode", + "width": 325 + }, + { + "data": { + "id": "note-9ABdX", + "node": { + "description": "**Second Prompt + OpenAI Sequence**\n - NOTE: \"Transforms the generated post into a prompt for image generation\"\n - Settings:\n * Temperature: 0.7\n * Input: Receives generated post\n * Output: Creates detailed description for image generation\n\n", + "display_name": "", + "documentation": "", + "template": { + "backgroundColor": "blue" + } + }, + "type": "note" + }, + "dragging": false, + "height": 325, + "id": "note-9ABdX", + "position": { + "x": 6786.375917286389, + "y": 3393.8522072000146 + }, + "positionAbsolute": { + "x": 6786.375917286389, + "y": 3393.8522072000146 + }, + "selected": false, + "type": "noteNode", + "width": 325 + }, + { + "data": { + "id": "note-aplGD", + "node": { + "description": "**Final Prompt**\n - NOTE: \"Combines Instagram post with image prompt in a final format\"\n - Structure:\n * First part: Complete Instagram post\n * Second part: Image generation prompt\n * Separator: Uses \"**Prompt:**\" to divide sections\n\n7. **Chat Output (Final Output)**\n - NOTE: \"Presents the combined final result that can be copied and used directly\"\n\nGENERAL USAGE TIPS:\n- Keep initial inputs clear and specific\n- Don't modify pre-defined Instagram guidelines\n- If style adjustments are needed, only modify the OpenAI models' temperature\n- Verify all connections are correct before running\n- Final result will always have two parts: post + image prompt\n\nFLOW CONSIDERATIONS:\n- All tools connect only to the Tool Calling Agent\n- The flow is unidirectional (no loops)\n- Each prompt template maintains specific formatting\n- Temperatures are set for optimal creativity/consistency balance\n\nTROUBLESHOOTING NOTES:\n- If output is too creative: Lower temperature", + "display_name": "", + "documentation": "", + "template": { + "backgroundColor": "blue" + } + }, + "type": "note" + }, + "dragging": false, + "height": 325, + "id": "note-aplGD", + "position": { + "x": 7606.419013912975, + "y": 3612.8149429707646 + }, + "positionAbsolute": { + "x": 7606.419013912975, + "y": 3612.8149429707646 + }, + "selected": false, + "type": "noteNode", + "width": 325 + }, + { + "data": { + "id": "note-AKIwx", + "node": { + "description": "# 🔑 Tavily AI Search Needs API Key\n\nYou can get 1000 searches/month free [here](https://tavily.com/) ", + "display_name": "", + "documentation": "", + "template": { + "backgroundColor": "lime" + } + }, + "type": "note" + }, + "dragging": false, + "height": 325, + "id": "note-AKIwx", + "position": { + "x": 5174.678177457385, + "y": 3339.6628854203204 + }, + "positionAbsolute": { + "x": 5174.678177457385, + "y": 3339.6628854203204 + }, + "selected": false, + "type": "noteNode", + "width": 325 + } + ], + "viewport": { + "x": -1826.8109854875006, + "y": -1023.2049813606079, + "zoom": 0.4417458816510278 + } + 
}, + "description": " Create engaging Instagram posts with AI-generated content and image prompts, streamlining social media content creation.", + "endpoint_name": null, + "icon": "InstagramIcon", + "id": "4bb309e6-42b4-4565-b960-8bd0f7e431f2", + "gradient": "0", + "is_component": false, + "last_tested_version": "1.0.19.post2", + "name": "Instagram Copywriter", + "tags": ["content-generation", "chatbots", "agents"] +} diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Market Research.json b/src/backend/base/langflow/initial_setup/starter_projects/Market Research.json new file mode 100644 index 000000000000..01d8fa9e722c --- /dev/null +++ b/src/backend/base/langflow/initial_setup/starter_projects/Market Research.json @@ -0,0 +1,2340 @@ +{ + "data": { + "edges": [ + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "OpenAIModel", + "id": "OpenAIModel-1WzgM", + "name": "model_output", + "output_types": ["LanguageModel"] + }, + "targetHandle": { + "fieldName": "llm", + "id": "StructuredOutputComponent-421WY", + "inputTypes": ["LanguageModel"], + "type": "other" + } + }, + "id": "reactflow__edge-OpenAIModel-1WzgM{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-1WzgMœ,œnameœ:œmodel_outputœ,œoutput_typesœ:[œLanguageModelœ]}-StructuredOutputComponent-421WY{œfieldNameœ:œllmœ,œidœ:œStructuredOutputComponent-421WYœ,œinputTypesœ:[œLanguageModelœ],œtypeœ:œotherœ}", + "selected": false, + "source": "OpenAIModel-1WzgM", + "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-1WzgMœ, œnameœ: œmodel_outputœ, œoutput_typesœ: [œLanguageModelœ]}", + "target": "StructuredOutputComponent-421WY", + "targetHandle": "{œfieldNameœ: œllmœ, œidœ: œStructuredOutputComponent-421WYœ, œinputTypesœ: [œLanguageModelœ], œtypeœ: œotherœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "StructuredOutputComponent", + "id": "StructuredOutputComponent-421WY", + "name": "structured_output", + "output_types": ["Data"] + }, + "targetHandle": { + "fieldName": "data", + "id": "ParseData-rO6Qs", + "inputTypes": ["Data"], + "type": "other" + } + }, + "id": "reactflow__edge-StructuredOutputComponent-421WY{œdataTypeœ:œStructuredOutputComponentœ,œidœ:œStructuredOutputComponent-421WYœ,œnameœ:œstructured_outputœ,œoutput_typesœ:[œDataœ]}-ParseData-rO6Qs{œfieldNameœ:œdataœ,œidœ:œParseData-rO6Qsœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}", + "selected": false, + "source": "StructuredOutputComponent-421WY", + "sourceHandle": "{œdataTypeœ: œStructuredOutputComponentœ, œidœ: œStructuredOutputComponent-421WYœ, œnameœ: œstructured_outputœ, œoutput_typesœ: [œDataœ]}", + "target": "ParseData-rO6Qs", + "targetHandle": "{œfieldNameœ: œdataœ, œidœ: œParseData-rO6Qsœ, œinputTypesœ: [œDataœ], œtypeœ: œotherœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "ParseData", + "id": "ParseData-rO6Qs", + "name": "text", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "input_value", + "id": "ChatOutput-hBRXA", + "inputTypes": ["Message"], + "type": "str" + } + }, + "id": "reactflow__edge-ParseData-rO6Qs{œdataTypeœ:œParseDataœ,œidœ:œParseData-rO6Qsœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-hBRXA{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-hBRXAœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "selected": false, + "source": "ParseData-rO6Qs", + "sourceHandle": "{œdataTypeœ: œParseDataœ, œidœ: œParseData-rO6Qsœ, œnameœ: œtextœ, œoutput_typesœ: [œMessageœ]}", + "target": 
"ChatOutput-hBRXA", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-hBRXAœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "TavilyAISearch", + "id": "TavilyAISearch-ghguc", + "name": "api_build_tool", + "output_types": ["Tool"] + }, + "targetHandle": { + "fieldName": "tools", + "id": "Agent-QSS16", + "inputTypes": ["Tool", "BaseTool", "StructuredTool"], + "type": "other" + } + }, + "id": "reactflow__edge-TavilyAISearch-ghguc{œdataTypeœ:œTavilyAISearchœ,œidœ:œTavilyAISearch-ghgucœ,œnameœ:œapi_build_toolœ,œoutput_typesœ:[œToolœ]}-Agent-QSS16{œfieldNameœ:œtoolsœ,œidœ:œAgent-QSS16œ,œinputTypesœ:[œToolœ,œBaseToolœ,œStructuredToolœ],œtypeœ:œotherœ}", + "selected": false, + "source": "TavilyAISearch-ghguc", + "sourceHandle": "{œdataTypeœ: œTavilyAISearchœ, œidœ: œTavilyAISearch-ghgucœ, œnameœ: œapi_build_toolœ, œoutput_typesœ: [œToolœ]}", + "target": "Agent-QSS16", + "targetHandle": "{œfieldNameœ: œtoolsœ, œidœ: œAgent-QSS16œ, œinputTypesœ: [œToolœ, œBaseToolœ, œStructuredToolœ], œtypeœ: œotherœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "ChatInput", + "id": "ChatInput-1iaFN", + "name": "message", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "input_value", + "id": "Agent-QSS16", + "inputTypes": ["Message"], + "type": "str" + } + }, + "id": "reactflow__edge-ChatInput-1iaFN{œdataTypeœ:œChatInputœ,œidœ:œChatInput-1iaFNœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Agent-QSS16{œfieldNameœ:œinput_valueœ,œidœ:œAgent-QSS16œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "selected": false, + "source": "ChatInput-1iaFN", + "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-1iaFNœ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}", + "target": "Agent-QSS16", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œAgent-QSS16œ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "Agent", + "id": "Agent-QSS16", + "name": "response", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "input_value", + "id": "StructuredOutputComponent-421WY", + "inputTypes": ["Message"], + "type": "str" + } + }, + "id": "reactflow__edge-Agent-QSS16{œdataTypeœ:œAgentœ,œidœ:œAgent-QSS16œ,œnameœ:œresponseœ,œoutput_typesœ:[œMessageœ]}-StructuredOutputComponent-421WY{œfieldNameœ:œinput_valueœ,œidœ:œStructuredOutputComponent-421WYœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "selected": false, + "source": "Agent-QSS16", + "sourceHandle": "{œdataTypeœ: œAgentœ, œidœ: œAgent-QSS16œ, œnameœ: œresponseœ, œoutput_typesœ: [œMessageœ]}", + "target": "StructuredOutputComponent-421WY", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œStructuredOutputComponent-421WYœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + } + ], + "nodes": [ + { + "data": { + "description": "Get chat inputs from the Playground.", + "display_name": "Chat Input", + "id": "ChatInput-1iaFN", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Get chat inputs from the Playground.", + "display_name": "Chat Input", + "documentation": "", + "edited": false, + "field_order": [ + "input_value", + "should_store_message", + "sender", + "sender_name", + "session_id", + "files", + "background_color", + "chat_icon", + "text_color" + ], + "frozen": false, + "icon": "MessagesSquare", + "legacy": false, + 
"lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Message", + "method": "message_response", + "name": "message", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "background_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Background Color", + "dynamic": false, + "info": "The background color of the icon.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "background_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "chat_icon": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Icon", + "dynamic": false, + "info": "The icon of the message.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "chat_icon", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_USER, MESSAGE_SENDER_USER\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n _background_color = self.background_color\n _text_color = self.text_color\n _icon = self.chat_icon\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\"background_color\": _background_color, \"text_color\": _text_color, \"icon\": _icon},\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n" + }, + "files": { + "_input_type": "FileInput", + "advanced": true, + "display_name": "Files", + "dynamic": false, + "fileTypes": [ + "txt", + "md", + "mdx", + "csv", + "json", + "yaml", + "yml", + "xml", + "html", + "htm", + "pdf", + "docx", + "py", + "sh", + "sql", + "js", + "ts", + "tsx", + "jpg", + "jpeg", + "png", + "bmp", + "image" + ], + "file_path": "", + "info": "Files to be sent with the message.", + "list": true, + "name": "files", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "file", + "value": "" + }, + "input_value": { + "_input_type": "MultilineInput", + "advanced": false, + "display_name": "Text", + "dynamic": false, + "info": "Message to be passed as input.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "Amazon" + }, + "sender": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Sender Type", + "dynamic": false, + "info": "Type of sender.", + "name": "sender", + "options": ["Machine", "User"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "str", + "value": "User" + }, + "sender_name": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Sender Name", + "dynamic": false, + "info": "Name of the sender.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "sender_name", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "User" + }, + "session_id": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Session ID", + "dynamic": false, + "info": "The session ID of the chat. 
If empty, the current session ID parameter will be used.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "session_id", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "should_store_message": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Store Messages", + "dynamic": false, + "info": "Store the message in the history.", + "list": false, + "name": "should_store_message", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "text_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Text Color", + "dynamic": false, + "info": "The text color of the name", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "text_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + } + } + }, + "type": "ChatInput" + }, + "dragging": false, + "height": 234, + "id": "ChatInput-1iaFN", + "position": { + "x": 472.38251755471583, + "y": 889.8398446936101 + }, + "positionAbsolute": { + "x": 472.38251755471583, + "y": 889.8398446936101 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "description": "Display a chat message in the Playground.", + "display_name": "Chat Output", + "id": "ChatOutput-hBRXA", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Display a chat message in the Playground.", + "display_name": "Chat Output", + "documentation": "", + "edited": false, + "field_order": [ + "input_value", + "should_store_message", + "sender", + "sender_name", + "session_id", + "data_template", + "background_color", + "chat_icon", + "text_color" + ], + "frozen": false, + "icon": "MessagesSquare", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Message", + "method": "message_response", + "name": "message", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "background_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Background Color", + "dynamic": false, + "info": "The background color of the icon.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "background_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "chat_icon": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Icon", + "dynamic": false, + "info": "The icon of the message.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "chat_icon", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + 
"password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageInput, MessageTextInput, Output\nfrom langflow.schema.message import Message\nfrom langflow.schema.properties import Source\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.\",\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(\n display_name=\"Message\",\n name=\"message\",\n method=\"message_response\",\n ),\n ]\n\n def _build_source(self, _id: str | None, display_name: str | None, source: str | None) -> Source:\n source_dict = {}\n if _id:\n source_dict[\"id\"] = _id\n if display_name:\n source_dict[\"display_name\"] = display_name\n if source:\n source_dict[\"source\"] = source\n return Source(**source_dict)\n\n def message_response(self) -> Message:\n _source, _icon, _display_name, _source_id = self.get_properties_from_source_component()\n _background_color = self.background_color\n _text_color = self.text_color\n if self.chat_icon:\n _icon = self.chat_icon\n message = self.input_value if isinstance(self.input_value, Message) else Message(text=self.input_value)\n message.sender = self.sender\n message.sender_name = self.sender_name\n message.session_id = self.session_id\n message.flow_id = self.graph.flow_id if hasattr(self, \"graph\") else None\n message.properties.source = self._build_source(_source_id, _display_name, _source)\n message.properties.icon = _icon\n message.properties.background_color = _background_color\n message.properties.text_color = _text_color\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = self.send_message(\n message,\n )\n self.message.value = stored_message\n message = 
stored_message\n\n self.status = message\n return message\n" + }, + "data_template": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Data Template", + "dynamic": false, + "info": "Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "data_template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "{text}" + }, + "input_value": { + "_input_type": "MessageInput", + "advanced": false, + "display_name": "Text", + "dynamic": false, + "info": "Message to be passed as output.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "sender": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Sender Type", + "dynamic": false, + "info": "Type of sender.", + "name": "sender", + "options": ["Machine", "User"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "Machine" + }, + "sender_name": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Sender Name", + "dynamic": false, + "info": "Name of the sender.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "sender_name", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "AI" + }, + "session_id": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Session ID", + "dynamic": false, + "info": "The session ID of the chat. 
If empty, the current session ID parameter will be used.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "session_id", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "should_store_message": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Store Messages", + "dynamic": false, + "info": "Store the message in the history.", + "list": false, + "name": "should_store_message", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "text_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Text Color", + "dynamic": false, + "info": "The text color of the name", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "text_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + } + }, + "tool_mode": false + }, + "type": "ChatOutput" + }, + "dragging": false, + "height": 234, + "id": "ChatOutput-hBRXA", + "position": { + "x": 2518.282039019285, + "y": 855.3686932779933 + }, + "positionAbsolute": { + "x": 2518.282039019285, + "y": 855.3686932779933 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "description": "**Tavily AI** is a search engine optimized for LLMs and RAG, aimed at efficient, quick, and persistent search results. It can be used independently or as an agent tool.\n\nNote: Check 'Advanced' for all options.\n", + "display_name": "Tavily AI Search", + "id": "TavilyAISearch-ghguc", + "node": { + "base_classes": ["Data", "Tool"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "**Tavily AI** is a search engine optimized for LLMs and RAG, aimed at efficient, quick, and persistent search results. 
It can be used independently or as an agent tool.\n\nNote: Check 'Advanced' for all options.\n", + "display_name": "Tavily AI Search", + "documentation": "https://docs.tavily.com/", + "edited": false, + "field_order": [ + "api_key", + "query", + "search_depth", + "topic", + "max_results", + "include_images", + "include_answer" + ], + "frozen": false, + "icon": "TavilyIcon", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Data", + "method": "run_model", + "name": "api_run_model", + "required_inputs": ["api_key"], + "selected": "Data", + "types": ["Data"], + "value": "__UNDEFINED__" + }, + { + "cache": true, + "display_name": "Tool", + "method": "build_tool", + "name": "api_build_tool", + "required_inputs": ["api_key"], + "selected": "Tool", + "types": ["Tool"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "api_key": { + "_input_type": "SecretStrInput", + "advanced": false, + "display_name": "Tavily API Key", + "dynamic": false, + "info": "Your Tavily API Key.", + "input_types": ["Message"], + "load_from_db": true, + "name": "api_key", + "password": true, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "str", + "value": "" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from enum import Enum\n\nimport httpx\nfrom langchain.tools import StructuredTool\nfrom langchain_core.tools import ToolException\nfrom loguru import logger\nfrom pydantic import BaseModel, Field\n\nfrom langflow.base.langchain_utilities.model import LCToolComponent\nfrom langflow.field_typing import Tool\nfrom langflow.inputs import BoolInput, DropdownInput, IntInput, MessageTextInput, SecretStrInput\nfrom langflow.schema import Data\n\n\nclass TavilySearchDepth(Enum):\n    BASIC = \"basic\"\n    ADVANCED = \"advanced\"\n\n\nclass TavilySearchTopic(Enum):\n    GENERAL = \"general\"\n    NEWS = \"news\"\n\n\nclass TavilySearchSchema(BaseModel):\n    query: str = Field(..., description=\"The search query you want to execute with Tavily.\")\n    search_depth: TavilySearchDepth = Field(TavilySearchDepth.BASIC, description=\"The depth of the search.\")\n    topic: TavilySearchTopic = Field(TavilySearchTopic.GENERAL, description=\"The category of the search.\")\n    max_results: int = Field(5, description=\"The maximum number of search results to return.\")\n    include_images: bool = Field(default=False, description=\"Include a list of query-related images in the response.\")\n    include_answer: bool = Field(default=False, description=\"Include a short answer to the original query.\")\n\n\nclass TavilySearchToolComponent(LCToolComponent):\n    display_name = \"Tavily AI Search\"\n    description = \"\"\"**Tavily AI** is a search engine optimized for LLMs and RAG, \\\n        aimed at efficient, quick, and persistent search results. 
It can be used independently or as an agent tool.\n\nNote: Check 'Advanced' for all options.\n\"\"\"\n    icon = \"TavilyIcon\"\n    name = \"TavilyAISearch\"\n    documentation = \"https://docs.tavily.com/\"\n\n    inputs = [\n        SecretStrInput(\n            name=\"api_key\",\n            display_name=\"Tavily API Key\",\n            required=True,\n            info=\"Your Tavily API Key.\",\n        ),\n        MessageTextInput(\n            name=\"query\",\n            display_name=\"Search Query\",\n            info=\"The search query you want to execute with Tavily.\",\n        ),\n        DropdownInput(\n            name=\"search_depth\",\n            display_name=\"Search Depth\",\n            info=\"The depth of the search.\",\n            options=list(TavilySearchDepth),\n            value=TavilySearchDepth.ADVANCED,\n            advanced=True,\n        ),\n        DropdownInput(\n            name=\"topic\",\n            display_name=\"Search Topic\",\n            info=\"The category of the search.\",\n            options=list(TavilySearchTopic),\n            value=TavilySearchTopic.GENERAL,\n            advanced=True,\n        ),\n        IntInput(\n            name=\"max_results\",\n            display_name=\"Max Results\",\n            info=\"The maximum number of search results to return.\",\n            value=5,\n            advanced=True,\n        ),\n        BoolInput(\n            name=\"include_images\",\n            display_name=\"Include Images\",\n            info=\"Include a list of query-related images in the response.\",\n            value=True,\n            advanced=True,\n        ),\n        BoolInput(\n            name=\"include_answer\",\n            display_name=\"Include Answer\",\n            info=\"Include a short answer to the original query.\",\n            value=True,\n            advanced=True,\n        ),\n    ]\n\n    def run_model(self) -> list[Data]:\n        # Convert string values to enum instances with validation\n        try:\n            search_depth_enum = (\n                self.search_depth\n                if isinstance(self.search_depth, TavilySearchDepth)\n                else TavilySearchDepth(str(self.search_depth).lower())\n            )\n        except ValueError as e:\n            error_message = f\"Invalid search depth value: {e!s}\"\n            self.status = error_message\n            return [Data(data={\"error\": error_message})]\n\n        try:\n            topic_enum = (\n                self.topic if isinstance(self.topic, TavilySearchTopic) else TavilySearchTopic(str(self.topic).lower())\n            )\n        except ValueError as e:\n            error_message = f\"Invalid topic value: {e!s}\"\n            self.status = error_message\n            return [Data(data={\"error\": error_message})]\n\n        return self._tavily_search(\n            self.query,\n            search_depth=search_depth_enum,\n            topic=topic_enum,\n            max_results=self.max_results,\n            include_images=self.include_images,\n            include_answer=self.include_answer,\n        )\n\n    def build_tool(self) -> Tool:\n        return StructuredTool.from_function(\n            name=\"tavily_search\",\n            description=\"Perform a web search using the Tavily API.\",\n            func=self._tavily_search,\n            args_schema=TavilySearchSchema,\n        )\n\n    def _tavily_search(\n        self,\n        query: str,\n        *,\n        search_depth: TavilySearchDepth = TavilySearchDepth.BASIC,\n        topic: TavilySearchTopic = TavilySearchTopic.GENERAL,\n        max_results: int = 5,\n        include_images: bool = False,\n        include_answer: bool = False,\n    ) -> list[Data]:\n        # Validate enum values\n        if not isinstance(search_depth, TavilySearchDepth):\n            msg = f\"Invalid search_depth value: {search_depth}\"\n            raise TypeError(msg)\n        if not isinstance(topic, TavilySearchTopic):\n            msg = f\"Invalid topic value: {topic}\"\n            raise TypeError(msg)\n\n        try:\n            url = \"https://api.tavily.com/search\"\n            headers = {\n                \"content-type\": \"application/json\",\n                \"accept\": \"application/json\",\n            }\n            payload = {\n                \"api_key\": self.api_key,\n                \"query\": query,\n                \"search_depth\": search_depth.value,\n                \"topic\": topic.value,\n                \"max_results\": max_results,\n                \"include_images\": include_images,\n                \"include_answer\": include_answer,\n            }\n\n            with httpx.Client() as client:\n                response = client.post(url, 
json=payload, headers=headers)\n\n        response.raise_for_status()\n            search_results = response.json()\n\n            data_results = [\n                Data(\n                    data={\n                        \"title\": result.get(\"title\"),\n                        \"url\": result.get(\"url\"),\n                        \"content\": result.get(\"content\"),\n                        \"score\": result.get(\"score\"),\n                    }\n                )\n                for result in search_results.get(\"results\", [])\n            ]\n\n            if include_answer and search_results.get(\"answer\"):\n                data_results.insert(0, Data(data={\"answer\": search_results[\"answer\"]}))\n\n            if include_images and search_results.get(\"images\"):\n                data_results.append(Data(data={\"images\": search_results[\"images\"]}))\n\n            self.status = data_results  # type: ignore[assignment]\n\n        except httpx.HTTPStatusError as e:\n            error_message = f\"HTTP error: {e.response.status_code} - {e.response.text}\"\n            logger.debug(error_message)\n            self.status = error_message\n            raise ToolException(error_message) from e\n        except Exception as e:\n            error_message = f\"Unexpected error: {e}\"\n            logger.opt(exception=True).debug(\"Error running Tavily Search\")\n            self.status = error_message\n            raise ToolException(error_message) from e\n        return data_results\n" + }, + "include_answer": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Include Answer", + "dynamic": false, + "info": "Include a short answer to the original query.", + "list": false, + "name": "include_answer", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "include_images": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Include Images", + "dynamic": false, + "info": "Include a list of query-related images in the response.", + "list": false, + "name": "include_images", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "max_results": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Max Results", + "dynamic": false, + "info": "The maximum number of search results to return.", + "list": false, + "name": "max_results", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 5 + }, + "query": { + "_input_type": "MessageTextInput", + "advanced": false, + "display_name": "Search Query", + "dynamic": false, + "info": "The search query you want to execute with Tavily.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "query", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "search_depth": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Search Depth", + "dynamic": false, + "info": "The depth of the search.", + "load_from_db": false, + "name": "search_depth", + "options": ["basic", "advanced"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "advanced" + }, + "topic": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Search Topic", + "dynamic": false, + "info": "The category of the search.", + "load_from_db": false, + "name": "topic", + "options": ["general", "news"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": 
false, + "trace_as_metadata": true, + "type": "str", + "value": "general" + } + }, + "tool_mode": false + }, + "type": "TavilyAISearch" + }, + "dragging": false, + "height": 481, + "id": "TavilyAISearch-ghguc", + "position": { + "x": 928.3794352018558, + "y": 797.2042255200732 + }, + "positionAbsolute": { + "x": 928.3794352018558, + "y": 797.2042255200732 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "id": "note-Fs2b3", + "node": { + "description": "The StructuredOutputComponent, when utilized with our company information schema, performs the following functions:\n\n1. Accepts an input query regarding a company.\n2. Employs a Language Model (LLM) to analyze the query.\n3. Instructs the LLM to generate a structured response adhering to the predefined schema:\n - Domain\n - LinkedIn URL\n - Cheapest Plan\n - Has Free Trial\n - Has Enterprise Plan\n - Has API\n - Market\n - Pricing Tiers\n - Key Features\n - Target Industries\n\n4. Validates the LLM output against this schema.\n5. Returns a Data object containing the company information structured according to the schema.\n\nIn essence, this component transforms a free-text query about a company into a structured, consistent dataset, facilitating subsequent analysis and application of the information.", + "display_name": "", + "documentation": "", + "template": { + "backgroundColor": "blue" + } + }, + "type": "note" + }, + "dragging": false, + "height": 403, + "id": "note-Fs2b3", + "position": { + "x": 2089.5869930853464, + "y": 311.41660832449514 + }, + "positionAbsolute": { + "x": 2089.5869930853464, + "y": 311.41660832449514 + }, + "resizing": false, + "selected": false, + "style": { + "height": 403, + "width": 461 + }, + "type": "noteNode", + "width": 461 + }, + { + "data": { + "id": "note-BfWWs", + "node": { + "description": "PURPOSE:\nConverts unstructured company research into standardized JSON format\n\nKEY FUNCTIONS:\n- Extracts specific business data points\n- Validates and formats information\n- Ensures data consistency\n\nINPUT:\n- Raw company research data\n\nOUTPUT:\nStructured JSON with:\n- Domain information\n- Social links\n- Pricing details\n- Feature availability\n- Market classification\n- Product features\n- Industry focus\n\nRULES:\n1. Uses strict boolean values\n2. Standardizes pricing formats\n3. Validates market categories\n4. Handles missing data consistently", + "display_name": "", + "documentation": "", + "template": { + "backgroundColor": "blue" + } + }, + "type": "note" + }, + "dragging": false, + "height": 382, + "id": "note-BfWWs", + "position": { + "x": 1237.6627823432912, + "y": 111.53860932079613 + }, + "positionAbsolute": { + "x": 1237.6627823432912, + "y": 169.53860932079613 + }, + "resizing": false, + "selected": false, + "style": { + "height": 382, + "width": 398 + }, + "type": "noteNode", + "width": 398 + }, + { + "data": { + "id": "note-iNLPA", + "node": { + "description": "# Market Research\nThis flow helps you gather comprehensive information about companies for sales and business intelligence purposes.\n\n## Instructions\n\n1. Enter Company Name\n - In the Chat Input node, type the name of the company you want to research\n - Example inputs: \"Salesforce.com\", \"Shopify\", \"Zoom Video Communications\"\n\n2. Initiate Research\n - The Agent will use the Tavily AI Search tool to gather information\n - It will focus on key areas like pricing, features, and market positioning\n\n3. 
Review Structured Output\n - The flow will generate a structured JSON output with standardized fields\n - This includes domain, LinkedIn URL, pricing details, and key features\n\n4. Examine Formatted Results\n - The Parse Data component will convert the JSON into a readable format\n - You'll see a comprehensive company profile with organized sections\n\n5. Analyze and Use Data\n - Use the generated information for sales prospecting, competitive analysis, or market research\n - The structured format allows for easy comparison between different companies\n\nRemember: Always verify critical information from official sources before making business decisions! 🔍💼", + "display_name": "", + "documentation": "", + "template": { + "backgroundColor": "emerald" + } + }, + "type": "note" + }, + "dragging": false, + "height": 513, + "id": "note-iNLPA", + "position": { + "x": 244.92297036777086, + "y": 340.99805740871204 + }, + "positionAbsolute": { + "x": 244.92297036777086, + "y": 340.99805740871204 + }, + "resizing": false, + "selected": true, + "style": { + "height": 513, + "width": 567 + }, + "type": "noteNode", + "width": 567 + }, + { + "data": { + "description": "Transforms LLM responses into **structured data formats**. Ideal for extracting specific information or creating consistent outputs.", + "display_name": "Structured Output", + "id": "StructuredOutputComponent-421WY", + "node": { + "base_classes": ["Data"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Transforms LLM responses into **structured data formats**. Ideal for extracting specific information or creating consistent outputs.", + "display_name": "Structured Output", + "documentation": "", + "edited": false, + "field_order": [ + "llm", + "input_value", + "schema_name", + "output_schema", + "multiple" + ], + "frozen": false, + "icon": "braces", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Structured Output", + "method": "build_structured_output", + "name": "structured_output", + "selected": "Data", + "types": ["Data"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from typing import cast\n\nfrom pydantic import BaseModel, Field, create_model\n\nfrom langflow.base.models.chat_result import get_chat_result\nfrom langflow.custom import Component\nfrom langflow.field_typing.constants import LanguageModel\nfrom langflow.helpers.base_model import build_model_from_schema\nfrom langflow.io import BoolInput, HandleInput, MessageTextInput, Output, StrInput, TableInput\nfrom langflow.schema.data import Data\n\n\nclass StructuredOutputComponent(Component):\n display_name = \"Structured Output\"\n description = (\n \"Transforms LLM responses into **structured data formats**. 
Ideal for extracting specific information \"\n \"or creating consistent outputs.\"\n )\n icon = \"braces\"\n\n inputs = [\n HandleInput(\n name=\"llm\",\n display_name=\"Language Model\",\n info=\"The language model to use to generate the structured output.\",\n input_types=[\"LanguageModel\"],\n ),\n MessageTextInput(name=\"input_value\", display_name=\"Input message\"),\n StrInput(\n name=\"schema_name\",\n display_name=\"Schema Name\",\n info=\"Provide a name for the output data schema.\",\n ),\n TableInput(\n name=\"output_schema\",\n display_name=\"Output Schema\",\n info=\"Define the structure and data types for the model's output.\",\n table_schema=[\n {\n \"name\": \"name\",\n \"display_name\": \"Name\",\n \"type\": \"str\",\n \"description\": \"Specify the name of the output field.\",\n },\n {\n \"name\": \"description\",\n \"display_name\": \"Description\",\n \"type\": \"str\",\n \"description\": \"Describe the purpose of the output field.\",\n },\n {\n \"name\": \"type\",\n \"display_name\": \"Type\",\n \"type\": \"str\",\n \"description\": (\n \"Indicate the data type of the output field \" \"(e.g., str, int, float, bool, list, dict).\"\n ),\n \"default\": \"text\",\n },\n {\n \"name\": \"multiple\",\n \"display_name\": \"Multiple\",\n \"type\": \"boolean\",\n \"description\": \"Set to True if this output field should be a list of the specified type.\",\n \"default\": \"False\",\n },\n ],\n ),\n BoolInput(\n name=\"multiple\",\n display_name=\"Generate Multiple\",\n info=\"Set to True if the model should generate a list of outputs instead of a single output.\",\n ),\n ]\n\n outputs = [\n Output(name=\"structured_output\", display_name=\"Structured Output\", method=\"build_structured_output\"),\n ]\n\n def build_structured_output(self) -> Data:\n if not hasattr(self.llm, \"with_structured_output\"):\n msg = \"Language model does not support structured output.\"\n raise TypeError(msg)\n if not self.output_schema:\n msg = \"Output schema cannot be empty\"\n raise ValueError(msg)\n\n _output_model = build_model_from_schema(self.output_schema)\n if self.multiple:\n output_model = create_model(\n self.schema_name,\n objects=(list[_output_model], Field(description=f\"A list of {self.schema_name}.\")), # type: ignore[valid-type]\n )\n else:\n output_model = _output_model\n try:\n llm_with_structured_output = cast(LanguageModel, self.llm).with_structured_output(schema=output_model) # type: ignore[valid-type, attr-defined]\n\n except NotImplementedError as exc:\n msg = f\"{self.llm.__class__.__name__} does not support structured output.\"\n raise TypeError(msg) from exc\n config_dict = {\n \"run_name\": self.display_name,\n \"project_name\": self.get_project_name(),\n \"callbacks\": self.get_langchain_callbacks(),\n }\n output = get_chat_result(runnable=llm_with_structured_output, input_value=self.input_value, config=config_dict)\n if isinstance(output, BaseModel):\n output_dict = output.model_dump()\n else:\n msg = f\"Output should be a Pydantic BaseModel, got {type(output)} ({output})\"\n raise TypeError(msg)\n return Data(data=output_dict)\n" + }, + "input_value": { + "_input_type": "MessageTextInput", + "advanced": false, + "display_name": "Input message", + "dynamic": false, + "info": "", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "llm": 
{ + "_input_type": "HandleInput", + "advanced": false, + "display_name": "Language Model", + "dynamic": false, + "info": "The language model to use to generate the structured output.", + "input_types": ["LanguageModel"], + "list": false, + "name": "llm", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "multiple": { + "_input_type": "BoolInput", + "advanced": false, + "display_name": "Generate Multiple", + "dynamic": false, + "info": "Set to True if the model should generate a list of outputs instead of a single output.", + "list": false, + "name": "multiple", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + }, + "output_schema": { + "_input_type": "TableInput", + "advanced": false, + "display_name": "Output Schema", + "dynamic": false, + "info": "Define the structure and data types for the model's output.", + "is_list": true, + "load_from_db": false, + "name": "output_schema", + "placeholder": "", + "required": false, + "show": true, + "table_schema": { + "columns": [ + { + "description": "Specify the name of the output field.", + "display_name": "Name", + "filterable": true, + "formatter": "text", + "name": "name", + "sortable": true, + "type": "text" + }, + { + "description": "Describe the purpose of the output field.", + "display_name": "Description", + "filterable": true, + "formatter": "text", + "name": "description", + "sortable": true, + "type": "text" + }, + { + "default": "text", + "description": "Indicate the data type of the output field (e.g., str, int, float, bool, list, dict).", + "display_name": "Type", + "filterable": true, + "formatter": "text", + "name": "type", + "sortable": true, + "type": "text" + }, + { + "default": "False", + "description": "Set to True if this output field should be a list of the specified type.", + "display_name": "Multiple", + "filterable": true, + "formatter": "text", + "name": "multiple", + "sortable": true, + "type": "boolean" + } + ] + }, + "title_case": false, + "trace_as_metadata": true, + "type": "table", + "value": [ + { + "description": "Primary company domain name", + "multiple": "False", + "name": "domain", + "type": "text" + }, + { + "description": "Company's LinkedIn URL", + "multiple": "False", + "name": "linkedinUrl", + "type": "text" + }, + { + "description": "Lowest priced plan in USD (number only)", + "multiple": "False", + "name": "cheapestPlan", + "type": "text" + }, + { + "description": "Boolean indicating if they offer a free trial", + "multiple": "False", + "name": "hasFreeTrial", + "type": "bool" + }, + { + "description": "Boolean indicating if they have enterprise options", + "multiple": "False", + "name": "hasEnterprisePlan", + "type": "bool" + }, + { + "description": "Boolean indicating if they offer API access", + "multiple": "False", + "name": "hasAPI", + "type": "bool" + }, + { + "description": "Either 'B2B' or 'B2C' or 'Both", + "multiple": "False", + "name": "market", + "type": "text" + }, + { + "description": "List of available pricing tiers", + "multiple": "True", + "name": "pricingTiers", + "type": "text" + }, + { + "description": "List of main features", + "multiple": "True", + "name": "KeyFeatures", + "type": "text" + }, + { + "description": "List of target industries", + "multiple": "True", + "name": "targetIndustries", + "type": "text" + } + ] + }, + "schema_name": { + "_input_type": "StrInput", + "advanced": 
false, + "display_name": "Schema Name", + "dynamic": false, + "info": "Provide a name for the output data schema.", + "list": false, + "load_from_db": false, + "name": "schema_name", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "str", + "value": "output_schema" + } + }, + "tool_mode": false + }, + "type": "StructuredOutputComponent" + }, + "dragging": false, + "height": 541, + "id": "StructuredOutputComponent-421WY", + "position": { + "x": 1770.7096106546323, + "y": 518.8182475390113 + }, + "positionAbsolute": { + "x": 1770.7096106546323, + "y": 518.8182475390113 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "description": "Generates text using OpenAI LLMs.", + "display_name": "OpenAI", + "id": "OpenAIModel-1WzgM", + "node": { + "base_classes": ["LanguageModel", "Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Generates text using OpenAI LLMs.", + "display_name": "OpenAI", + "documentation": "", + "edited": false, + "field_order": [ + "input_value", + "system_message", + "stream", + "max_tokens", + "model_kwargs", + "json_mode", + "output_schema", + "model_name", + "openai_api_base", + "api_key", + "temperature", + "seed", + "output_parser" + ], + "frozen": false, + "icon": "OpenAI", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Text", + "method": "text_response", + "name": "text_output", + "required_inputs": [], + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + }, + { + "cache": true, + "display_name": "Language Model", + "method": "build_model", + "name": "model_output", + "required_inputs": [], + "selected": "LanguageModel", + "types": ["LanguageModel"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "api_key": { + "_input_type": "SecretStrInput", + "advanced": false, + "display_name": "OpenAI API Key", + "dynamic": false, + "info": "The OpenAI API Key to use for the OpenAI model.", + "input_types": ["Message"], + "load_from_db": true, + "name": "api_key", + "password": true, + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "OPENAI_API_KEY" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "import operator\nfrom functools import reduce\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langflow.inputs import BoolInput, DictInput, DropdownInput, FloatInput, IntInput, SecretStrInput, StrInput\nfrom langflow.inputs.inputs import HandleInput\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = [\n *LCModelComponent._base_inputs,\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum 
number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(\n name=\"model_kwargs\",\n display_name=\"Model Kwargs\",\n advanced=True,\n info=\"Additional keyword arguments to pass to the model.\",\n ),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. \"\n \"You must pass the word JSON in the prompt. \"\n \"If left blank, JSON mode will be disabled. [DEPRECATED]\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. \"\n \"Defaults to https://api.openai.com/v1. \"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n HandleInput(\n name=\"output_parser\",\n display_name=\"Output Parser\",\n info=\"The parser to use to parse the output of the model\",\n advanced=True,\n input_types=[\"OutputParser\"],\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n api_key = SecretStr(openai_api_key).get_secret_value() if openai_api_key else None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature if temperature is not None else 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\")\n else:\n output = output.bind(response_format={\"type\": \"json_object\"})\n\n return output\n\n def _get_exception_message(self, e: Exception):\n \"\"\"Get a message from an OpenAI exception.\n\n Args:\n e (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n try:\n from openai import BadRequestError\n except ImportError:\n return None\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\")\n if message:\n return message\n return None\n" + }, + "input_value": { + "_input_type": "MessageInput", + "advanced": false, + "display_name": "Input", + "dynamic": false, + "info": "", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "input_value", + "placeholder": "", + "required": false, + "show": 
true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "json_mode": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "JSON Mode", + "dynamic": false, + "info": "If True, it will output JSON regardless of passing a schema.", + "list": false, + "name": "json_mode", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + }, + "max_tokens": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Max Tokens", + "dynamic": false, + "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.", + "list": false, + "name": "max_tokens", + "placeholder": "", + "range_spec": { + "max": 128000, + "min": 0, + "step": 0.1, + "step_type": "float" + }, + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": "" + }, + "model_kwargs": { + "_input_type": "DictInput", + "advanced": true, + "display_name": "Model Kwargs", + "dynamic": false, + "info": "Additional keyword arguments to pass to the model.", + "list": false, + "name": "model_kwargs", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "model_name": { + "_input_type": "DropdownInput", + "advanced": false, + "combobox": false, + "display_name": "Model Name", + "dynamic": false, + "info": "", + "name": "model_name", + "options": [ + "gpt-4o-mini", + "gpt-4o", + "gpt-4-turbo", + "gpt-4-turbo-preview", + "gpt-4", + "gpt-3.5-turbo", + "gpt-3.5-turbo-0125" + ], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "gpt-4o-mini" + }, + "openai_api_base": { + "_input_type": "StrInput", + "advanced": true, + "display_name": "OpenAI API Base", + "dynamic": false, + "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.", + "list": false, + "load_from_db": false, + "name": "openai_api_base", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "output_parser": { + "_input_type": "HandleInput", + "advanced": true, + "display_name": "Output Parser", + "dynamic": false, + "info": "The parser to use to parse the output of the model", + "input_types": ["OutputParser"], + "list": false, + "name": "output_parser", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "output_schema": { + "_input_type": "DictInput", + "advanced": true, + "display_name": "Schema", + "dynamic": false, + "info": "The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled. 
[DEPRECATED]", + "list": true, + "name": "output_schema", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "seed": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Seed", + "dynamic": false, + "info": "The seed controls the reproducibility of the job.", + "list": false, + "name": "seed", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 1 + }, + "stream": { + "_input_type": "BoolInput", + "advanced": false, + "display_name": "Stream", + "dynamic": false, + "info": "Stream the response from the model. Streaming works only in Chat.", + "list": false, + "name": "stream", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + }, + "system_message": { + "_input_type": "MessageTextInput", + "advanced": false, + "display_name": "System Message", + "dynamic": false, + "info": "System message to pass to the model.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "system_message", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "temperature": { + "_input_type": "FloatInput", + "advanced": false, + "display_name": "Temperature", + "dynamic": false, + "info": "", + "list": false, + "name": "temperature", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "float", + "value": 0.1 + } + }, + "tool_mode": false + }, + "type": "OpenAIModel" + }, + "dragging": false, + "height": 543, + "id": "OpenAIModel-1WzgM", + "position": { + "x": 1631.501592565594, + "y": 1086.5741650125892 + }, + "positionAbsolute": { + "x": 1631.501592565594, + "y": 1086.5741650125892 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "id": "ParseData-rO6Qs", + "node": { + "base_classes": ["Message"], + "beta": false, + "category": "helpers", + "conditional_paths": [], + "custom_fields": {}, + "description": "Convert Data into plain text following a specified template.", + "display_name": "Parse Data", + "documentation": "", + "edited": false, + "field_order": ["data", "template", "sep"], + "frozen": false, + "icon": "braces", + "key": "ParseData", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Text", + "method": "parse_data", + "name": "text", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.custom import Component\nfrom langflow.helpers.data import data_to_text\nfrom langflow.io import DataInput, MultilineInput, Output, StrInput\nfrom langflow.schema.message import Message\n\n\nclass ParseDataComponent(Component):\n display_name = \"Parse Data\"\n description = \"Convert Data into plain text following a specified template.\"\n icon = 
\"braces\"\n name = \"ParseData\"\n\n inputs = [\n DataInput(name=\"data\", display_name=\"Data\", info=\"The data to convert to text.\"),\n MultilineInput(\n name=\"template\",\n display_name=\"Template\",\n info=\"The template to use for formatting the data. \"\n \"It can contain the keys {text}, {data} or any other key in the Data.\",\n value=\"{text}\",\n ),\n StrInput(name=\"sep\", display_name=\"Separator\", advanced=True, value=\"\\n\"),\n ]\n\n outputs = [\n Output(display_name=\"Text\", name=\"text\", method=\"parse_data\"),\n ]\n\n def parse_data(self) -> Message:\n data = self.data if isinstance(self.data, list) else [self.data]\n template = self.template\n\n result_string = data_to_text(template, data, sep=self.sep)\n self.status = result_string\n return Message(text=result_string)\n" + }, + "data": { + "_input_type": "DataInput", + "advanced": false, + "display_name": "Data", + "dynamic": false, + "info": "The data to convert to text.", + "input_types": ["Data"], + "list": false, + "name": "data", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "sep": { + "_input_type": "StrInput", + "advanced": true, + "display_name": "Separator", + "dynamic": false, + "info": "", + "list": false, + "load_from_db": false, + "name": "sep", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "str", + "value": "\n" + }, + "template": { + "_input_type": "MultilineInput", + "advanced": false, + "display_name": "Template", + "dynamic": false, + "info": "The template to use for formatting the data. It can contain the keys {text}, {data} or any other key in the Data.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "# Company Profile\n\n## Basic Information\n- **Domain:** {domain}\n- **LinkedIn URL:** {linkedinUrl}\n\n## Pricing and Plans\n- **Cheapest Plan:** {cheapestPlan}\n- **Has Free Trial:** {hasFreeTrial}\n- **Has Enterprise Plan:** {hasEnterprisePlan}\n\n## Technical Capabilities\n- **Has API:** {hasAPI}\n\n## Market and Target Audience\n- **Market:** {market}\n- **Target Industries:** {targetIndustries}\n\n## Pricing Structure\n{pricingTiers}\n\n## Key Features\n{KeyFeatures}\n" + } + } + }, + "type": "ParseData" + }, + "dragging": false, + "height": 302, + "id": "ParseData-rO6Qs", + "position": { + "x": 2139.05558520377, + "y": 780.6849187394922 + }, + "positionAbsolute": { + "x": 2139.05558520377, + "y": 780.6849187394922 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "description": "Define the agent's instructions, then enter a task to complete using tools.", + "display_name": "Agent", + "id": "Agent-QSS16", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Define the agent's instructions, then enter a task to complete using tools.", + "display_name": "Agent", + "documentation": "", + "edited": false, + "field_order": [ + "agent_llm", + "max_tokens", + "model_kwargs", + "json_mode", + "output_schema", + "model_name", + "openai_api_base", + "api_key", + "temperature", + "seed", + "output_parser", + "system_prompt", + "tools", + "input_value", + 
"handle_parsing_errors", + "verbose", + "max_iterations", + "agent_description", + "memory", + "sender", + "sender_name", + "n_messages", + "session_id", + "order", + "template", + "add_current_date_tool" + ], + "frozen": false, + "icon": "bot", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Response", + "method": "message_response", + "name": "response", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "add_current_date_tool": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Add tool Current Date", + "dynamic": false, + "info": "If true, will add a tool to the agent that returns the current date.", + "list": false, + "name": "add_current_date_tool", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "agent_description": { + "_input_type": "MultilineInput", + "advanced": true, + "display_name": "Agent Description", + "dynamic": false, + "info": "The description of the agent. This is only used when in Tool Mode. Defaults to 'A helpful assistant with access to the following tools:' and tools are added dynamically.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "agent_description", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "A helpful assistant with access to the following tools:" + }, + "agent_llm": { + "_input_type": "DropdownInput", + "advanced": false, + "combobox": false, + "display_name": "Model Provider", + "dynamic": false, + "info": "The provider of the language model that the agent will use to generate responses.", + "input_types": [], + "name": "agent_llm", + "options": [ + "Amazon Bedrock", + "Anthropic", + "Azure OpenAI", + "Groq", + "NVIDIA", + "OpenAI", + "Custom" + ], + "placeholder": "", + "real_time_refresh": true, + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "OpenAI" + }, + "api_key": { + "_input_type": "SecretStrInput", + "advanced": false, + "display_name": "OpenAI API Key", + "dynamic": false, + "info": "The OpenAI API Key to use for the OpenAI model.", + "input_types": ["Message"], + "load_from_db": true, + "name": "api_key", + "password": true, + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "OPENAI_API_KEY" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langchain_core.tools import StructuredTool\n\nfrom langflow.base.agents.agent import LCToolsAgentComponent\nfrom langflow.base.models.model_input_constants import ALL_PROVIDER_FIELDS, MODEL_PROVIDERS_DICT\nfrom langflow.base.models.model_utils import get_model_name\nfrom langflow.components.helpers import CurrentDateComponent\nfrom langflow.components.helpers.memory import MemoryComponent\nfrom langflow.components.langchain_utilities.tool_calling import ToolCallingAgentComponent\nfrom 
langflow.io import BoolInput, DropdownInput, MultilineInput, Output\nfrom langflow.schema.dotdict import dotdict\nfrom langflow.schema.message import Message\n\n\ndef set_advanced_true(component_input):\n component_input.advanced = True\n return component_input\n\n\nclass AgentComponent(ToolCallingAgentComponent):\n display_name: str = \"Agent\"\n description: str = \"Define the agent's instructions, then enter a task to complete using tools.\"\n icon = \"bot\"\n beta = False\n name = \"Agent\"\n\n memory_inputs = [set_advanced_true(component_input) for component_input in MemoryComponent().inputs]\n\n inputs = [\n DropdownInput(\n name=\"agent_llm\",\n display_name=\"Model Provider\",\n info=\"The provider of the language model that the agent will use to generate responses.\",\n options=[*sorted(MODEL_PROVIDERS_DICT.keys()), \"Custom\"],\n value=\"OpenAI\",\n real_time_refresh=True,\n input_types=[],\n ),\n *MODEL_PROVIDERS_DICT[\"OpenAI\"][\"inputs\"],\n MultilineInput(\n name=\"system_prompt\",\n display_name=\"Agent Instructions\",\n info=\"System Prompt: Initial instructions and context provided to guide the agent's behavior.\",\n value=\"You are a helpful assistant that can use tools to answer questions and perform tasks.\",\n advanced=False,\n ),\n *LCToolsAgentComponent._base_inputs,\n *memory_inputs,\n BoolInput(\n name=\"add_current_date_tool\",\n display_name=\"Add tool Current Date\",\n advanced=True,\n info=\"If true, will add a tool to the agent that returns the current date.\",\n value=True,\n ),\n ]\n outputs = [Output(name=\"response\", display_name=\"Response\", method=\"message_response\")]\n\n async def message_response(self) -> Message:\n llm_model, display_name = self.get_llm()\n self.model_name = get_model_name(llm_model, display_name=display_name)\n if llm_model is None:\n msg = \"No language model selected\"\n raise ValueError(msg)\n self.chat_history = self.get_memory_data()\n\n if self.add_current_date_tool:\n if not isinstance(self.tools, list): # type: ignore[has-type]\n self.tools = []\n # Convert CurrentDateComponent to a StructuredTool\n current_date_tool = CurrentDateComponent().to_toolkit()[0]\n if isinstance(current_date_tool, StructuredTool):\n self.tools.append(current_date_tool)\n else:\n msg = \"CurrentDateComponent must be converted to a StructuredTool\"\n raise ValueError(msg)\n\n if not self.tools:\n msg = \"Tools are required to run the agent.\"\n raise ValueError(msg)\n self.set(\n llm=llm_model,\n tools=self.tools,\n chat_history=self.chat_history,\n input_value=self.input_value,\n system_prompt=self.system_prompt,\n )\n agent = self.create_agent_runnable()\n return await self.run_agent(agent)\n\n def get_memory_data(self):\n memory_kwargs = {\n component_input.name: getattr(self, f\"{component_input.name}\") for component_input in self.memory_inputs\n }\n\n return MemoryComponent().set(**memory_kwargs).retrieve_messages()\n\n def get_llm(self):\n if isinstance(self.agent_llm, str):\n try:\n provider_info = MODEL_PROVIDERS_DICT.get(self.agent_llm)\n if provider_info:\n component_class = provider_info.get(\"component_class\")\n display_name = component_class.display_name\n inputs = provider_info.get(\"inputs\")\n prefix = provider_info.get(\"prefix\", \"\")\n return self._build_llm_model(component_class, inputs, prefix), display_name\n except Exception as e:\n msg = f\"Error building {self.agent_llm} language model\"\n raise ValueError(msg) from e\n return self.agent_llm, None\n\n def _build_llm_model(self, component, inputs, prefix=\"\"):\n 
model_kwargs = {input_.name: getattr(self, f\"{prefix}{input_.name}\") for input_ in inputs}\n return component.set(**model_kwargs).build_model()\n\n def delete_fields(self, build_config: dotdict, fields: dict | list[str]) -> None:\n \"\"\"Delete specified fields from build_config.\"\"\"\n for field in fields:\n build_config.pop(field, None)\n\n def update_input_types(self, build_config: dotdict) -> dotdict:\n \"\"\"Update input types for all fields in build_config.\"\"\"\n for key, value in build_config.items():\n if isinstance(value, dict):\n if value.get(\"input_types\") is None:\n build_config[key][\"input_types\"] = []\n elif hasattr(value, \"input_types\") and value.input_types is None:\n value.input_types = []\n return build_config\n\n def update_build_config(self, build_config: dotdict, field_value: str, field_name: str | None = None) -> dotdict:\n # Iterate over all providers in the MODEL_PROVIDERS_DICT\n # Existing logic for updating build_config\n if field_name == \"agent_llm\":\n provider_info = MODEL_PROVIDERS_DICT.get(field_value)\n if provider_info:\n component_class = provider_info.get(\"component_class\")\n if component_class and hasattr(component_class, \"update_build_config\"):\n # Call the component class's update_build_config method\n build_config = component_class.update_build_config(build_config, field_value, field_name)\n\n provider_configs: dict[str, tuple[dict, list[dict]]] = {\n provider: (\n MODEL_PROVIDERS_DICT[provider][\"fields\"],\n [\n MODEL_PROVIDERS_DICT[other_provider][\"fields\"]\n for other_provider in MODEL_PROVIDERS_DICT\n if other_provider != provider\n ],\n )\n for provider in MODEL_PROVIDERS_DICT\n }\n if field_value in provider_configs:\n fields_to_add, fields_to_delete = provider_configs[field_value]\n\n # Delete fields from other providers\n for fields in fields_to_delete:\n self.delete_fields(build_config, fields)\n\n # Add provider-specific fields\n if field_value == \"OpenAI\" and not any(field in build_config for field in fields_to_add):\n build_config.update(fields_to_add)\n else:\n build_config.update(fields_to_add)\n # Reset input types for agent_llm\n build_config[\"agent_llm\"][\"input_types\"] = []\n elif field_value == \"Custom\":\n # Delete all provider fields\n self.delete_fields(build_config, ALL_PROVIDER_FIELDS)\n # Update with custom component\n custom_component = DropdownInput(\n name=\"agent_llm\",\n display_name=\"Language Model\",\n options=[*sorted(MODEL_PROVIDERS_DICT.keys()), \"Custom\"],\n value=\"Custom\",\n real_time_refresh=True,\n input_types=[\"LanguageModel\"],\n )\n build_config.update({\"agent_llm\": custom_component.to_dict()})\n # Update input types for all fields\n build_config = self.update_input_types(build_config)\n\n # Validate required keys\n default_keys = [\n \"code\",\n \"_type\",\n \"agent_llm\",\n \"tools\",\n \"input_value\",\n \"add_current_date_tool\",\n \"system_prompt\",\n \"agent_description\",\n \"max_iterations\",\n \"handle_parsing_errors\",\n \"verbose\",\n ]\n missing_keys = [key for key in default_keys if key not in build_config]\n if missing_keys:\n msg = f\"Missing required keys in build_config: {missing_keys}\"\n raise ValueError(msg)\n if isinstance(self.agent_llm, str) and self.agent_llm in MODEL_PROVIDERS_DICT:\n provider_info = MODEL_PROVIDERS_DICT.get(self.agent_llm)\n if provider_info:\n component_class = provider_info.get(\"component_class\")\n prefix = provider_info.get(\"prefix\")\n if component_class and hasattr(component_class, \"update_build_config\"):\n # Call each 
component class's update_build_config method\n # remove the prefix from the field_name\n if isinstance(field_name, str) and isinstance(prefix, str):\n field_name = field_name.replace(prefix, \"\")\n build_config = component_class.update_build_config(build_config, field_value, field_name)\n\n return build_config\n" + }, + "handle_parsing_errors": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Handle Parse Errors", + "dynamic": false, + "info": "Should the Agent fix errors when reading user input for better processing?", + "list": false, + "name": "handle_parsing_errors", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "input_value": { + "_input_type": "MessageTextInput", + "advanced": false, + "display_name": "Input", + "dynamic": false, + "info": "The input provided by the user for the agent to process.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": true, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "json_mode": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "JSON Mode", + "dynamic": false, + "info": "If True, it will output JSON regardless of passing a schema.", + "list": false, + "name": "json_mode", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + }, + "max_iterations": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Max Iterations", + "dynamic": false, + "info": "The maximum number of attempts the agent can make to complete its task before it stops.", + "list": false, + "name": "max_iterations", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 15 + }, + "max_tokens": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Max Tokens", + "dynamic": false, + "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.", + "list": false, + "name": "max_tokens", + "placeholder": "", + "range_spec": { + "max": 128000, + "min": 0, + "step": 0.1, + "step_type": "float" + }, + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": "" + }, + "memory": { + "_input_type": "HandleInput", + "advanced": true, + "display_name": "External Memory", + "dynamic": false, + "info": "Retrieve messages from an external memory. 
If empty, it will use the Langflow tables.", + "input_types": ["BaseChatMessageHistory"], + "list": false, + "name": "memory", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "model_kwargs": { + "_input_type": "DictInput", + "advanced": true, + "display_name": "Model Kwargs", + "dynamic": false, + "info": "Additional keyword arguments to pass to the model.", + "list": false, + "name": "model_kwargs", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "model_name": { + "_input_type": "DropdownInput", + "advanced": false, + "combobox": false, + "display_name": "Model Name", + "dynamic": false, + "info": "", + "name": "model_name", + "options": [ + "gpt-4o-mini", + "gpt-4o", + "gpt-4-turbo", + "gpt-4-turbo-preview", + "gpt-4", + "gpt-3.5-turbo", + "gpt-3.5-turbo-0125" + ], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "gpt-4o-mini" + }, + "n_messages": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Number of Messages", + "dynamic": false, + "info": "Number of messages to retrieve.", + "list": false, + "name": "n_messages", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 100 + }, + "openai_api_base": { + "_input_type": "StrInput", + "advanced": true, + "display_name": "OpenAI API Base", + "dynamic": false, + "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.", + "list": false, + "load_from_db": false, + "name": "openai_api_base", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "order": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Order", + "dynamic": false, + "info": "Order of the messages.", + "name": "order", + "options": ["Ascending", "Descending"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "Ascending" + }, + "output_parser": { + "_input_type": "HandleInput", + "advanced": true, + "display_name": "Output Parser", + "dynamic": false, + "info": "The parser to use to parse the output of the model", + "input_types": ["OutputParser"], + "list": false, + "name": "output_parser", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "output_schema": { + "_input_type": "DictInput", + "advanced": true, + "display_name": "Schema", + "dynamic": false, + "info": "The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled. 
[DEPRECATED]", + "list": true, + "name": "output_schema", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "seed": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Seed", + "dynamic": false, + "info": "The seed controls the reproducibility of the job.", + "list": false, + "name": "seed", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 1 + }, + "sender": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Sender Type", + "dynamic": false, + "info": "Filter by sender type.", + "name": "sender", + "options": ["Machine", "User", "Machine and User"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "Machine and User" + }, + "sender_name": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Sender Name", + "dynamic": false, + "info": "Filter by sender name.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "sender_name", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "session_id": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Session ID", + "dynamic": false, + "info": "The session ID of the chat. If empty, the current session ID parameter will be used.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "session_id", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "system_prompt": { + "_input_type": "MultilineInput", + "advanced": false, + "display_name": "Agent Instructions", + "dynamic": false, + "info": "System Prompt: Initial instructions and context provided to guide the agent's behavior.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "system_prompt", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "You are an expert business research agent. Your task is to gather comprehensive information about companies. When researching a company, focus on the following key areas: 1. Basic company information (website, domain, social presence) 2. Product and pricing information 3. Technical capabilities and integrations 4. Market positioning and target audience 5. Key features and offerings For the company/domain provided, search thoroughly and provide detailed information about: - Their main website and domain - Their pricing structure - Product features and capabilities - Market presence and focus - Technical offerings like APIs - Social media presence, especially LinkedIn Search comprehensively and provide detailed, factual information that will help determine: - Pricing tiers and structure - Whether they offer free trials - If they have enterprise solutions - Their technical capabilities - Their primary market (B2B/B2C) INPUT: {input} Respond with detailed, factual information about these aspects, avoiding speculation. 
Include direct quotes or specific information you find." + }, + "temperature": { + "_input_type": "FloatInput", + "advanced": true, + "display_name": "Temperature", + "dynamic": false, + "info": "", + "list": false, + "name": "temperature", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "float", + "value": 0.1 + }, + "template": { + "_input_type": "MultilineInput", + "advanced": true, + "display_name": "Template", + "dynamic": false, + "info": "The template to use for formatting the data. It can contain the keys {text}, {sender} or any other key in the message data.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "{sender_name}: {text}" + }, + "tools": { + "_input_type": "HandleInput", + "advanced": false, + "display_name": "Tools", + "dynamic": false, + "info": "These are the tools that the agent can use to help with tasks.", + "input_types": ["Tool", "BaseTool", "StructuredTool"], + "list": true, + "name": "tools", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "verbose": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Verbose", + "dynamic": false, + "info": "", + "list": false, + "name": "verbose", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + } + }, + "tool_mode": false + }, + "type": "Agent" + }, + "dragging": false, + "height": 650, + "id": "Agent-QSS16", + "position": { + "x": 1287.5681517817056, + "y": 519.8701526087884 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "id": "note-uNkbX", + "node": { + "description": "# 🔑 Tavily AI Search Needs API Key\n\nYou can get 1000 searches/month free [here](https://tavily.com/) ", + "display_name": "", + "documentation": "", + "template": { + "backgroundColor": "lime" + } + }, + "type": "note" + }, + "dragging": false, + "height": 325, + "id": "note-uNkbX", + "position": { + "x": 921.6062384772317, + "y": 642.1140062279873 + }, + "positionAbsolute": { + "x": 921.6062384772317, + "y": 642.1140062279873 + }, + "selected": false, + "type": "noteNode", + "width": 325 + } + ], + "viewport": { + "x": -56.31509213451284, + "y": 23.784977680322072, + "zoom": 0.5068195797812174 + } + }, + "description": "Researches companies, extracts key business data, and presents structured information for efficient analysis. 
", + "endpoint_name": null, + "icon": "PieChart", + "id": "153a05e5-86bd-4de8-b159-2cb4f9f94de5", + "is_component": false, + "gradient": "1", + "last_tested_version": "1.0.19.post2", + "name": "Market Research", + "tags": ["assistants", "agents"] +} diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Memory Chatbot.json b/src/backend/base/langflow/initial_setup/starter_projects/Memory Chatbot.json index 089b84c536ea..407e46ddf71f 100644 --- a/src/backend/base/langflow/initial_setup/starter_projects/Memory Chatbot.json +++ b/src/backend/base/langflow/initial_setup/starter_projects/Memory Chatbot.json @@ -2,300 +2,182 @@ "data": { "edges": [ { + "animated": false, "className": "", "data": { "sourceHandle": { - "dataType": "ChatInput", - "id": "ChatInput-6yuNd", - "name": "message", - "output_types": [ - "Message" - ] + "dataType": "OpenAIModel", + "id": "OpenAIModel-4aid4", + "name": "text_output", + "output_types": ["Message"] }, "targetHandle": { - "fieldName": "user_message", - "id": "Prompt-tifRl", - "inputTypes": [ - "Message", - "Text" - ], + "fieldName": "input_value", + "id": "ChatOutput-CL8qC", + "inputTypes": ["Message"], "type": "str" } }, - "id": "reactflow__edge-ChatInput-6yuNd{œdataTypeœ:œChatInputœ,œidœ:œChatInput-6yuNdœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Prompt-tifRl{œfieldNameœ:œuser_messageœ,œidœ:œPrompt-tifRlœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", - "source": "ChatInput-6yuNd", - "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-6yuNdœ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}", - "target": "Prompt-tifRl", - "targetHandle": "{œfieldNameœ: œuser_messageœ, œidœ: œPrompt-tifRlœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-OpenAIModel-4aid4{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-4aid4œ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-CL8qC{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-CL8qCœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "selected": false, + "source": "OpenAIModel-4aid4", + "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-4aid4œ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}", + "target": "ChatOutput-CL8qC", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-CL8qCœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" }, { + "animated": false, "className": "", "data": { "sourceHandle": { - "dataType": "Prompt", - "id": "Prompt-tifRl", - "name": "prompt", - "output_types": [ - "Message" - ] + "dataType": "ChatInput", + "id": "ChatInput-wnAdG", + "name": "message", + "output_types": ["Message"] }, "targetHandle": { "fieldName": "input_value", - "id": "OpenAIModel-ZIeE0", - "inputTypes": [ - "Message" - ], + "id": "OpenAIModel-4aid4", + "inputTypes": ["Message"], "type": "str" } }, - "id": "reactflow__edge-Prompt-tifRl{œdataTypeœ:œPromptœ,œidœ:œPrompt-tifRlœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-ZIeE0{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-ZIeE0œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", - "source": "Prompt-tifRl", - "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-tifRlœ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", - "target": "OpenAIModel-ZIeE0", - "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œOpenAIModel-ZIeE0œ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-ChatInput-wnAdG{œdataTypeœ:œChatInputœ,œidœ:œChatInput-wnAdGœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-4aid4{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-4aid4œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + 
"selected": false, + "source": "ChatInput-wnAdG", + "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-wnAdGœ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}", + "target": "OpenAIModel-4aid4", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œOpenAIModel-4aid4œ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" }, { "className": "", "data": { "sourceHandle": { - "dataType": "OpenAIModel", - "id": "OpenAIModel-ZIeE0", - "name": "text_output", - "output_types": [ - "Message" - ] + "dataType": "Memory", + "id": "Memory-dsJro", + "name": "messages_text", + "output_types": ["Message"] }, "targetHandle": { - "fieldName": "input_value", - "id": "ChatOutput-c3v9q", - "inputTypes": [ - "Message" - ], + "fieldName": "memory", + "id": "Prompt-JvUF7", + "inputTypes": ["Message", "Text"], "type": "str" } }, - "id": "reactflow__edge-OpenAIModel-ZIeE0{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-ZIeE0œ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-c3v9q{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-c3v9qœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", - "source": "OpenAIModel-ZIeE0", - "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-ZIeE0œ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}", - "target": "ChatOutput-c3v9q", - "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-c3v9qœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-Memory-dsJro{œdataTypeœ:œMemoryœ,œidœ:œMemory-dsJroœ,œnameœ:œmessages_textœ,œoutput_typesœ:[œMessageœ]}-Prompt-JvUF7{œfieldNameœ:œmemoryœ,œidœ:œPrompt-JvUF7œ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", + "source": "Memory-dsJro", + "sourceHandle": "{œdataTypeœ: œMemoryœ, œidœ: œMemory-dsJroœ, œnameœ: œmessages_textœ, œoutput_typesœ: [œMessageœ]}", + "target": "Prompt-JvUF7", + "targetHandle": "{œfieldNameœ: œmemoryœ, œidœ: œPrompt-JvUF7œ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" }, { "className": "", "data": { "sourceHandle": { - "dataType": "Memory", - "id": "Memory-6s5g1", - "name": "messages_text", - "output_types": [ - "Message" - ] + "dataType": "Prompt", + "id": "Prompt-JvUF7", + "name": "prompt", + "output_types": ["Message"] }, "targetHandle": { - "fieldName": "context", - "id": "Prompt-tifRl", - "inputTypes": [ - "Message", - "Text" - ], + "fieldName": "system_message", + "id": "OpenAIModel-4aid4", + "inputTypes": ["Message"], "type": "str" } }, - "id": "reactflow__edge-Memory-6s5g1{œdataTypeœ:œMemoryœ,œidœ:œMemory-6s5g1œ,œnameœ:œmessages_textœ,œoutput_typesœ:[œMessageœ]}-Prompt-tifRl{œfieldNameœ:œcontextœ,œidœ:œPrompt-tifRlœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", - "source": "Memory-6s5g1", - "sourceHandle": "{œdataTypeœ: œMemoryœ, œidœ: œMemory-6s5g1œ, œnameœ: œmessages_textœ, œoutput_typesœ: [œMessageœ]}", - "target": "Prompt-tifRl", - "targetHandle": "{œfieldNameœ: œcontextœ, œidœ: œPrompt-tifRlœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-Prompt-JvUF7{œdataTypeœ:œPromptœ,œidœ:œPrompt-JvUF7œ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-4aid4{œfieldNameœ:œsystem_messageœ,œidœ:œOpenAIModel-4aid4œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "Prompt-JvUF7", + "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-JvUF7œ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", + "target": "OpenAIModel-4aid4", + "targetHandle": "{œfieldNameœ: œsystem_messageœ, œidœ: œOpenAIModel-4aid4œ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" } ], "nodes": [ { "data": { - "description": "Create a prompt template with dynamic variables.", - "display_name": 
"Prompt", - "id": "Prompt-tifRl", + "id": "ChatInput-wnAdG", "node": { - "base_classes": [ - "Message" - ], + "base_classes": ["Message"], "beta": false, "conditional_paths": [], - "custom_fields": { - "template": [ - "context", - "user_message" - ] - }, - "description": "Create a prompt template with dynamic variables.", - "display_name": "Prompt", + "custom_fields": {}, + "description": "Get chat inputs from the Playground.", + "display_name": "Chat Input", "documentation": "", "edited": false, "field_order": [ - "template" + "input_value", + "should_store_message", + "sender", + "sender_name", + "session_id", + "files", + "background_color", + "chat_icon", + "text_color" ], "frozen": false, - "icon": "prompts", + "icon": "MessagesSquare", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, "output_types": [], "outputs": [ { "cache": true, - "display_name": "Prompt Message", - "method": "build_prompt", - "name": "prompt", + "display_name": "Message", + "method": "message_response", + "name": "message", "selected": "Message", - "types": [ - "Message" - ], + "types": ["Message"], "value": "__UNDEFINED__" } ], "pinned": false, "template": { "_type": "Component", - "code": { + "background_color": { + "_input_type": "MessageTextInput", "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", - "info": "", - "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, - "placeholder": "", - "required": true, - "show": true, - "title_case": false, - "type": "code", - "value": "from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.inputs.inputs import DefaultPromptField\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(\n self,\n ) -> Message:\n prompt = await Message.from_template_and_variables(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def _update_template(self, frontend_node: dict):\n prompt_template = frontend_node[\"template\"][\"template\"][\"value\"]\n custom_fields = frontend_node[\"custom_fields\"]\n frontend_node_template = frontend_node[\"template\"]\n _ = process_prompt_template(\n template=prompt_template,\n name=\"template\",\n custom_fields=custom_fields,\n frontend_node_template=frontend_node_template,\n )\n return frontend_node\n\n def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"\n This function is called after the code validation is done.\n \"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n # Kept it duplicated for backwards compatibility\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the 
frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n\n def _get_fallback_input(self, **kwargs):\n return DefaultPromptField(**kwargs)\n" - }, - "context": { - "advanced": false, - "display_name": "context", + "display_name": "Background Color", "dynamic": false, - "field_type": "str", - "fileTypes": [], - "file_path": "", - "info": "", - "input_types": [ - "Message", - "Text" - ], + "info": "The background color of the icon.", + "input_types": ["Message"], "list": false, "load_from_db": false, - "multiline": true, - "name": "context", - "password": false, + "name": "background_color", "placeholder": "", "required": false, "show": true, "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, "type": "str", "value": "" }, - "template": { - "advanced": false, - "display_name": "Template", + "chat_icon": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Icon", "dynamic": false, - "info": "", + "info": "The icon of the message.", + "input_types": ["Message"], "list": false, "load_from_db": false, - "name": "template", + "name": "chat_icon", "placeholder": "", "required": false, "show": true, "title_case": false, + "tool_mode": false, "trace_as_input": true, - "type": "prompt", - "value": "{context}\n\nUser: {user_message}\nAI: " - }, - "user_message": { - "advanced": false, - "display_name": "user_message", - "dynamic": false, - "field_type": "str", - "fileTypes": [], - "file_path": "", - "info": "", - "input_types": [ - "Message", - "Text" - ], - "list": false, - "load_from_db": false, - "multiline": true, - "name": "user_message", - "password": false, - "placeholder": "", - "required": false, - "show": true, - "title_case": false, + "trace_as_metadata": true, "type": "str", "value": "" - } - } - }, - "type": "Prompt" - }, - "dragging": false, - "height": 517, - "id": "Prompt-tifRl", - "position": { - "x": 1880.8227904110583, - "y": 625.8049209882275 - }, - "positionAbsolute": { - "x": 1880.8227904110583, - "y": 625.8049209882275 - }, - "selected": false, - "type": "genericNode", - "width": 384 - }, - { - "data": { - "description": "Get chat inputs from the Playground.", - "display_name": "Chat Input", - "id": "ChatInput-6yuNd", - "node": { - "base_classes": [ - "Message" - ], - "beta": false, - "conditional_paths": [], - "custom_fields": {}, - "description": "Get chat inputs from the Playground.", - "display_name": "Chat Input", - "documentation": "", - "edited": false, - "field_order": [ - "input_value", - "store_message", - "sender", - "sender_name", - "session_id", - "files" - ], - "frozen": false, - "icon": "ChatInput", - "output_types": [], - "outputs": [ - { - "cache": true, - "display_name": "Message", - "method": "message_response", - "name": "message", - "selected": "Message", - "types": [ - "Message" - ], - "value": "__UNDEFINED__" - } - ], - "pinned": false, - "template": { - "_type": "Component", + }, "code": { "advanced": true, "dynamic": true, @@ -312,9 +194,10 @@ "show": true, "title_case": false, "type": "code", - "value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import 
MESSAGE_SENDER_AI, MESSAGE_SENDER_USER, MESSAGE_SENDER_NAME_USER\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n )\n\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n" + "value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_USER, MESSAGE_SENDER_USER\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n _background_color = self.background_color\n _text_color = self.text_color\n _icon = self.chat_icon\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\"background_color\": _background_color, \"text_color\": _text_color, \"icon\": _icon},\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n" }, "files": { + "_input_type": "FileInput", "advanced": true, "display_name": "Files", "dynamic": false, @@ -356,13 +239,12 @@ "value": "" }, "input_value": { + "_input_type": "MultilineInput", "advanced": false, "display_name": "Text", "dynamic": false, "info": "Message to be passed as input.", - "input_types": [ - "Message" - ], + "input_types": ["Message"], "list": false, "load_from_db": false, "multiline": true, @@ -371,37 +253,37 @@ "required": false, "show": true, "title_case": false, + "tool_mode": false, "trace_as_input": true, "trace_as_metadata": true, "type": "str", - "value": "" + "value": "what is my name" }, "sender": { + "_input_type": "DropdownInput", "advanced": true, + "combobox": false, "display_name": "Sender Type", "dynamic": false, "info": "Type of sender.", "name": "sender", - "options": [ - "Machine", - "User" - ], + "options": ["Machine", "User"], "placeholder": "", "required": false, "show": true, "title_case": false, + "tool_mode": false, "trace_as_metadata": true, "type": "str", "value": "User" }, "sender_name": { + "_input_type": "MessageTextInput", "advanced": true, "display_name": "Sender Name", "dynamic": false, "info": "Name of the sender.", - "input_types": [ - "Message" - ], + "input_types": ["Message"], "list": false, "load_from_db": false, "name": "sender_name", @@ -409,19 +291,19 @@ "required": false, "show": true, "title_case": false, + "tool_mode": false, "trace_as_input": true, "trace_as_metadata": true, "type": "str", "value": "User" }, "session_id": { + "_input_type": "MessageTextInput", "advanced": true, "display_name": "Session ID", "dynamic": false, "info": "The session ID of the chat. 
If empty, the current session ID parameter will be used.", - "input_types": [ - "Message" - ], + "input_types": ["Message"], "list": false, "load_from_db": false, "name": "session_id", @@ -429,6 +311,7 @@ "required": false, "show": true, "title_case": false, + "tool_mode": false, "trace_as_input": true, "trace_as_metadata": true, "type": "str", @@ -449,105 +332,131 @@ "trace_as_metadata": true, "type": "bool", "value": true + }, + "text_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Text Color", + "dynamic": false, + "info": "The text color of the name", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "text_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" } - } + }, + "tool_mode": false }, "type": "ChatInput" }, "dragging": false, - "height": 309, - "id": "ChatInput-6yuNd", + "height": 234, + "id": "ChatInput-wnAdG", "position": { - "x": 1275.9262193671882, - "y": 836.1228056896347 + "x": 2321.5543981677606, + "y": 374.0457826421628 }, "positionAbsolute": { - "x": 1275.9262193671882, - "y": 836.1228056896347 + "x": 2321.5543981677606, + "y": 374.0457826421628 }, "selected": false, "type": "genericNode", - "width": 384 + "width": 320 }, { "data": { - "description": "Generates text using OpenAI LLMs.", - "display_name": "OpenAI", - "id": "OpenAIModel-ZIeE0", + "description": "Display a chat message in the Playground.", + "display_name": "Chat Output", + "id": "ChatOutput-CL8qC", "node": { - "base_classes": [ - "LanguageModel", - "Message" - ], + "base_classes": ["Message"], "beta": false, "conditional_paths": [], "custom_fields": {}, - "description": "Generates text using OpenAI LLMs.", - "display_name": "OpenAI", + "description": "Display a chat message in the Playground.", + "display_name": "Chat Output", "documentation": "", "edited": false, "field_order": [ "input_value", - "max_tokens", - "model_kwargs", - "json_mode", - "output_schema", - "model_name", - "openai_api_base", - "openai_api_key", - "temperature", - "stream", - "system_message", - "seed" + "should_store_message", + "sender", + "sender_name", + "session_id", + "data_template", + "background_color", + "chat_icon", + "text_color" ], "frozen": false, - "icon": "OpenAI", + "icon": "MessagesSquare", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, "output_types": [], "outputs": [ { "cache": true, - "display_name": "Text", - "method": "text_response", - "name": "text_output", + "display_name": "Message", + "method": "message_response", + "name": "message", "selected": "Message", - "types": [ - "Message" - ], - "value": "__UNDEFINED__" - }, - { - "cache": true, - "display_name": "Language Model", - "method": "build_model", - "name": "model_output", - "selected": "LanguageModel", - "types": [ - "LanguageModel" - ], + "types": ["Message"], "value": "__UNDEFINED__" } ], "pinned": false, "template": { "_type": "Component", - "api_key": { - "_input_type": "SecretStrInput", - "advanced": false, - "display_name": "OpenAI API Key", + "background_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Background Color", "dynamic": false, - "info": "The OpenAI API Key to use for the OpenAI model.", - "input_types": [ - "Message" - ], - "load_from_db": true, - "name": "api_key", - "password": true, + "info": "The background color of the icon.", + "input_types": ["Message"], + 
"list": false, + "load_from_db": false, + "name": "background_color", "placeholder": "", "required": false, "show": true, "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, "type": "str", - "value": "OPENAI_API_KEY" + "value": "" + }, + "chat_icon": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Icon", + "dynamic": false, + "info": "The icon of the message.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "chat_icon", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" }, "code": { "advanced": true, @@ -565,250 +474,295 @@ "show": true, "title_case": false, "type": "code", - "value": "import operator\nfrom functools import reduce\n\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. 
You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature if temperature is not None else 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output # type: ignore\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" + "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageInput, MessageTextInput, Output\nfrom langflow.schema.message import Message\nfrom langflow.schema.properties import Source\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.\",\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(\n display_name=\"Message\",\n name=\"message\",\n method=\"message_response\",\n ),\n ]\n\n def _build_source(self, _id: str | None, display_name: str | None, source: str | None) -> Source:\n source_dict = {}\n if _id:\n source_dict[\"id\"] = _id\n if display_name:\n source_dict[\"display_name\"] = display_name\n if source:\n source_dict[\"source\"] = source\n return Source(**source_dict)\n\n def message_response(self) -> Message:\n _source, _icon, _display_name, _source_id = self.get_properties_from_source_component()\n _background_color = self.background_color\n _text_color = self.text_color\n if self.chat_icon:\n _icon = self.chat_icon\n message = self.input_value if isinstance(self.input_value, Message) else Message(text=self.input_value)\n message.sender = self.sender\n message.sender_name = self.sender_name\n message.session_id = self.session_id\n message.flow_id = self.graph.flow_id if hasattr(self, \"graph\") else None\n message.properties.source = self._build_source(_source_id, _display_name, _source)\n message.properties.icon = _icon\n message.properties.background_color = _background_color\n message.properties.text_color = _text_color\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n" }, - "input_value": { - "advanced": false, - "display_name": "Input", + "data_template": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Data Template", "dynamic": false, - "info": "", - "input_types": [ - "Message" - ], + "info": "Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.", + "input_types": ["Message"], "list": false, "load_from_db": false, - "name": "input_value", + "name": "data_template", "placeholder": "", "required": false, "show": true, "title_case": false, + "tool_mode": false, "trace_as_input": true, "trace_as_metadata": true, "type": "str", - "value": "" + "value": "{text}" }, - "json_mode": { - "advanced": true, - "display_name": "JSON Mode", + "input_value": { + "_input_type": "MessageInput", + "advanced": false, + "display_name": "Text", "dynamic": false, - "info": "If True, it will output JSON regardless of passing a schema.", + "info": "Message to be passed as output.", + "input_types": ["Message"], "list": false, - "name": "json_mode", + "load_from_db": false, + "name": "input_value", "placeholder": "", "required": false, "show": true, "title_case": false, + "trace_as_input": true, "trace_as_metadata": true, - "type": "bool", - "value": false - }, - "max_tokens": { - "advanced": true, - "display_name": "Max Tokens", - "dynamic": false, - "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.", - "list": false, - "name": "max_tokens", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "int", + "type": "str", "value": "" }, - "model_kwargs": { + "sender": { + "_input_type": "DropdownInput", "advanced": true, - "display_name": "Model Kwargs", - "dynamic": false, - "info": "", - "list": false, - "name": "model_kwargs", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "type": "dict", - "value": {} - }, - "model_name": { - "advanced": false, - "display_name": "Model Name", + "combobox": false, + "display_name": "Sender Type", "dynamic": false, - "info": "", - "name": "model_name", - "options": [ - "gpt-4o-mini", - "gpt-4o", - "gpt-4-turbo", - "gpt-4-turbo-preview", - "gpt-4", - "gpt-3.5-turbo", - "gpt-3.5-turbo-0125" - ], + "info": "Type of sender.", + "name": "sender", + "options": ["Machine", "User"], "placeholder": "", "required": false, "show": true, "title_case": false, + "tool_mode": false, "trace_as_metadata": true, "type": "str", - "value": "gpt-4o" + "value": "Machine" }, - "openai_api_base": { + "sender_name": { + "_input_type": "MessageTextInput", "advanced": true, - "display_name": "OpenAI API Base", + "display_name": "Sender Name", "dynamic": false, - "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.", + "info": "Name of the sender.", + "input_types": ["Message"], "list": false, "load_from_db": false, - "name": "openai_api_base", + "name": "sender_name", "placeholder": "", "required": false, "show": true, "title_case": false, + "tool_mode": false, + "trace_as_input": true, "trace_as_metadata": true, "type": "str", - "value": "" - }, - "output_schema": { - "advanced": true, - "display_name": "Schema", - "dynamic": false, - "info": "The schema for the Output of the model. You must pass the word JSON in the prompt. 
If left blank, JSON mode will be disabled.", - "list": true, - "name": "output_schema", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "type": "dict", - "value": {} + "value": "AI" }, - "seed": { + "session_id": { + "_input_type": "MessageTextInput", "advanced": true, - "display_name": "Seed", + "display_name": "Session ID", "dynamic": false, - "info": "The seed controls the reproducibility of the job.", + "info": "The session ID of the chat. If empty, the current session ID parameter will be used.", + "input_types": ["Message"], "list": false, - "name": "seed", + "load_from_db": false, + "name": "session_id", "placeholder": "", "required": false, "show": true, "title_case": false, + "tool_mode": false, + "trace_as_input": true, "trace_as_metadata": true, - "type": "int", - "value": 1 + "type": "str", + "value": "" }, - "stream": { + "should_store_message": { + "_input_type": "BoolInput", "advanced": true, - "display_name": "Stream", + "display_name": "Store Messages", "dynamic": false, - "info": "Stream the response from the model. Streaming works only in Chat.", + "info": "Store the message in the history.", "list": false, - "name": "stream", + "name": "should_store_message", "placeholder": "", "required": false, "show": true, "title_case": false, "trace_as_metadata": true, "type": "bool", - "value": false + "value": true }, - "system_message": { + "text_color": { + "_input_type": "MessageTextInput", "advanced": true, - "display_name": "System Message", + "display_name": "Text Color", "dynamic": false, - "info": "System message to pass to the model.", + "info": "The text color of the name", + "input_types": ["Message"], "list": false, "load_from_db": false, - "name": "system_message", + "name": "text_color", "placeholder": "", "required": false, "show": true, "title_case": false, + "tool_mode": false, + "trace_as_input": true, "trace_as_metadata": true, "type": "str", "value": "" - }, - "temperature": { - "advanced": false, - "display_name": "Temperature", - "dynamic": false, - "info": "", - "list": false, - "name": "temperature", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "float", - "value": 0.1 } - } + }, + "tool_mode": false }, - "type": "OpenAIModel" + "type": "ChatOutput" }, "dragging": false, - "height": 623, - "id": "OpenAIModel-ZIeE0", + "height": 234, + "id": "ChatOutput-CL8qC", "position": { - "x": 2468.968379487559, - "y": 560.0689522326683 + "x": 3101.965731391458, + "y": 776.4408905693839 }, "positionAbsolute": { - "x": 2468.968379487559, - "y": 560.0689522326683 + "x": 3101.965731391458, + "y": 776.4408905693839 }, "selected": false, "type": "genericNode", - "width": 384 + "width": 320 }, { "data": { - "description": "Display a chat message in the Playground.", - "display_name": "Chat Output", - "id": "ChatOutput-c3v9q", + "id": "note-4FQUh", "node": { - "base_classes": [ - "Message" - ], + "description": "# Memory Chatbot\n\nA flexible chatbot implementation featuring advanced conversation memory capabilities. This serves as a foundational tool for building chat experiences with persistent context.\n\n## Core Components\n\n1. **Chat Input**\n - Accepts user messages\n - Configures conversation storage\n - Tracks session identity\n\n2. **Chat Memory**\n - Stores and retrieves up to 100 previous messages\n - Maintains conversation context\n - Tracks separate chat sessions\n - Preserves sender information and message order\n\n3. 
**Prompt**\n - Creates dynamic prompt templates\n - Integrates memory into conversation flow\n\n4. **OpenAI**\n - Processes user input with context\n - Accesses conversation history\n - Includes options for model configuration and API key setup\n\n5. **Chat Output**\n - Displays formatted responses\n - Maintains conversation flow\n - Syncs with memory storage\n\n## Memory Features\n\n- Stores message history\n- Plans conversation trajectory\n- Differentiates between chat sessions\n- Preserves sender and message metadata\n\n## Quick Start\n\n1. **Initialize** with a clear session ID\n2. **Enter** message in Chat Input\n3. **AI Processes** with context from memory\n4. **Response** appears in Chat Output\n5. Context remains available for follow-ups\n\nThis robust system demonstrates thorough memory integration with minimal complexity. 🧠💬\n", + "display_name": "", + "documentation": "", + "template": {} + }, + "type": "note" + }, + "dragging": false, + "height": 736, + "id": "note-4FQUh", + "position": { + "x": 1512.8976594415833, + "y": 312.9558305744385 + }, + "positionAbsolute": { + "x": 1512.8976594415833, + "y": 312.9558305744385 + }, + "resizing": false, + "selected": false, + "style": { + "height": 736, + "width": 382 + }, + "type": "noteNode", + "width": 382 + }, + { + "data": { + "id": "note-5ZvaH", + "node": { + "description": "## Get Your OpenAI API Key\n\n**Steps**:\n\n1. **Visit** [OpenAI's API Key Page](https://platform.openai.com/api-keys).\n\n2. **Log In/Sign Up**:\n - Log in or create a new OpenAI account.\n\n3. **Generate API Key**:\n - Click \"Create New Secret Key\" to obtain your key.\n\n4. **Store Your Key Securely**:\n - Note it down as it will only display once.\n\n5. **Enter API Key**:\n - Input your key in the OpenAI API Key field within the component setup.\n\nKeep your key safe and manage it responsibly!", + "display_name": "", + "documentation": "", + "template": { + "backgroundColor": "rose" + } + }, + "type": "note" + }, + "dragging": false, + "height": 325, + "id": "note-5ZvaH", + "position": { + "x": 2727.7060397092964, + "y": 115.42518754847691 + }, + "positionAbsolute": { + "x": 2727.7060397092964, + "y": 115.42518754847691 + }, + "selected": false, + "type": "noteNode", + "width": 325 + }, + { + "data": { + "id": "OpenAIModel-4aid4", + "node": { + "base_classes": ["LanguageModel", "Message"], "beta": false, "conditional_paths": [], "custom_fields": {}, - "description": "Display a chat message in the Playground.", - "display_name": "Chat Output", + "description": "Generates text using OpenAI LLMs.", + "display_name": "OpenAI", "documentation": "", "edited": false, "field_order": [ "input_value", - "store_message", - "sender", - "sender_name", - "session_id", - "data_template" + "system_message", + "stream", + "max_tokens", + "model_kwargs", + "json_mode", + "output_schema", + "model_name", + "openai_api_base", + "api_key", + "temperature", + "seed", + "output_parser" ], "frozen": false, - "icon": "ChatOutput", + "icon": "OpenAI", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, "output_types": [], "outputs": [ { "cache": true, - "display_name": "Message", - "method": "message_response", - "name": "message", + "display_name": "Text", + "method": "text_response", + "name": "text_output", + "required_inputs": [], "selected": "Message", - "types": [ - "Message" - ], + "types": ["Message"], + "value": "__UNDEFINED__" + }, + { + "cache": true, + "display_name": "Language Model", + "method": "build_model", + "name": "model_output", + 
"required_inputs": [], + "selected": "LanguageModel", + "types": ["LanguageModel"], "value": "__UNDEFINED__" } ], "pinned": false, "template": { "_type": "Component", + "api_key": { + "_input_type": "SecretStrInput", + "advanced": false, + "display_name": "OpenAI API Key", + "dynamic": false, + "info": "The OpenAI API Key to use for the OpenAI model.", + "input_types": ["Message"], + "load_from_db": true, + "name": "api_key", + "password": true, + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "OPENAI_API_KEY" + }, "code": { "advanced": true, "dynamic": true, @@ -825,19 +779,18 @@ "show": true, "title_case": false, "type": "code", - "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER, MESSAGE_SENDER_AI\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n" + "value": "import operator\nfrom functools import reduce\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langflow.inputs import BoolInput, DictInput, DropdownInput, FloatInput, IntInput, SecretStrInput, StrInput\nfrom langflow.inputs.inputs import HandleInput\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = [\n *LCModelComponent._base_inputs,\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(\n name=\"model_kwargs\",\n display_name=\"Model Kwargs\",\n advanced=True,\n info=\"Additional keyword arguments to pass to the model.\",\n ),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. \"\n \"You must pass the word JSON in the prompt. \"\n \"If left blank, JSON mode will be disabled. [DEPRECATED]\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. \"\n \"Defaults to https://api.openai.com/v1. 
\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n HandleInput(\n name=\"output_parser\",\n display_name=\"Output Parser\",\n info=\"The parser to use to parse the output of the model\",\n advanced=True,\n input_types=[\"OutputParser\"],\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n api_key = SecretStr(openai_api_key).get_secret_value() if openai_api_key else None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature if temperature is not None else 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\")\n else:\n output = output.bind(response_format={\"type\": \"json_object\"})\n\n return output\n\n def _get_exception_message(self, e: Exception):\n \"\"\"Get a message from an OpenAI exception.\n\n Args:\n e (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n try:\n from openai import BadRequestError\n except ImportError:\n return None\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\")\n if message:\n return message\n return None\n" }, - "data_template": { - "advanced": true, - "display_name": "Data Template", + "input_value": { + "_input_type": "MessageInput", + "advanced": false, + "display_name": "Input", "dynamic": false, - "info": "Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.", - "input_types": [ - "Message" - ], + "info": "", + "input_types": ["Message"], "list": false, "load_from_db": false, - "name": "data_template", + "name": "input_value", "placeholder": "", "required": false, "show": true, @@ -845,127 +798,231 @@ "trace_as_input": true, "trace_as_metadata": true, "type": "str", - "value": "{text}" + "value": "" }, - "input_value": { - "advanced": false, - "display_name": "Text", + "json_mode": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "JSON Mode", "dynamic": false, - "info": "Message to be passed as output.", - "input_types": [ - "Message" - ], + "info": "If True, it will output JSON regardless of passing a schema.", "list": false, - "load_from_db": false, - "name": "input_value", + "name": "json_mode", "placeholder": "", "required": false, "show": true, "title_case": false, - "trace_as_input": true, "trace_as_metadata": true, - "type": "str", + "type": "bool", + "value": false + }, + "max_tokens": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Max Tokens", + "dynamic": false, + "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.", + "list": false, + "name": "max_tokens", + "placeholder": "", + "range_spec": { + "max": 128000, + "min": 0, + "step": 0.1, + "step_type": "float" + }, + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", "value": "" }, - "sender": { + "model_kwargs": { + "_input_type": "DictInput", "advanced": true, - "display_name": "Sender Type", + "display_name": "Model Kwargs", "dynamic": false, - "info": "Type of sender.", - "name": "sender", + "info": "Additional keyword arguments to pass to the model.", + "list": false, + "name": "model_kwargs", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "model_name": { + "_input_type": "DropdownInput", + "advanced": false, + "combobox": false, + "display_name": "Model Name", + "dynamic": false, + "info": "", + "name": "model_name", "options": [ - "Machine", - "User" + "gpt-4o-mini", + "gpt-4o", + "gpt-4-turbo", + "gpt-4-turbo-preview", + "gpt-4", + "gpt-3.5-turbo", + "gpt-3.5-turbo-0125" ], "placeholder": "", "required": false, "show": true, "title_case": false, + "tool_mode": false, "trace_as_metadata": true, "type": "str", - "value": "Machine" + "value": "gpt-4o-mini" }, - "sender_name": { + "openai_api_base": { + "_input_type": "StrInput", "advanced": true, - "display_name": "Sender Name", + "display_name": "OpenAI API Base", "dynamic": false, - "info": "Name of the sender.", - "input_types": [ - "Message" - ], + "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.", "list": false, "load_from_db": false, - "name": "sender_name", + "name": "openai_api_base", "placeholder": "", "required": false, "show": true, "title_case": false, - "trace_as_input": true, "trace_as_metadata": true, "type": "str", - "value": "AI" + "value": "" }, - "session_id": { + "output_parser": { + "_input_type": "HandleInput", "advanced": true, - "display_name": "Session ID", + "display_name": "Output Parser", "dynamic": false, - "info": "The session ID of the chat. 
If empty, the current session ID parameter will be used.", - "input_types": [ - "Message" - ], + "info": "The parser to use to parse the output of the model", + "input_types": ["OutputParser"], + "list": false, + "name": "output_parser", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "output_schema": { + "_input_type": "DictInput", + "advanced": true, + "display_name": "Schema", + "dynamic": false, + "info": "The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled. [DEPRECATED]", + "list": true, + "name": "output_schema", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "seed": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Seed", + "dynamic": false, + "info": "The seed controls the reproducibility of the job.", + "list": false, + "name": "seed", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 1 + }, + "stream": { + "_input_type": "BoolInput", + "advanced": false, + "display_name": "Stream", + "dynamic": false, + "info": "Stream the response from the model. Streaming works only in Chat.", + "list": false, + "name": "stream", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + }, + "system_message": { + "_input_type": "MessageTextInput", + "advanced": false, + "display_name": "System Message", + "dynamic": false, + "info": "System message to pass to the model.", + "input_types": ["Message"], "list": false, "load_from_db": false, - "name": "session_id", + "name": "system_message", "placeholder": "", "required": false, "show": true, "title_case": false, + "tool_mode": false, "trace_as_input": true, "trace_as_metadata": true, "type": "str", "value": "" }, - "should_store_message": { - "_input_type": "BoolInput", - "advanced": true, - "display_name": "Store Messages", + "temperature": { + "_input_type": "FloatInput", + "advanced": false, + "display_name": "Temperature", "dynamic": false, - "info": "Store the message in the history.", + "info": "", "list": false, - "name": "should_store_message", + "name": "temperature", "placeholder": "", "required": false, "show": true, "title_case": false, "trace_as_metadata": true, - "type": "bool", - "value": true + "type": "float", + "value": 0.1 } - } + }, + "tool_mode": false }, - "type": "ChatOutput" + "type": "OpenAIModel" }, - "height": 385, - "id": "ChatOutput-c3v9q", + "dragging": false, + "height": 630, + "id": "OpenAIModel-4aid4", "position": { - "x": 3083.1710516244116, - "y": 701.521688846004 + "x": 2730.3374000311414, + "y": 465.98757723618604 + }, + "positionAbsolute": { + "x": 2730.3374000311414, + "y": 465.98757723618604 }, "selected": false, "type": "genericNode", - "width": 384 + "width": 320 }, { "data": { - "description": "Retrieves stored chat messages from Langflow tables or an external memory.", - "display_name": "Chat Memory", - "id": "Memory-6s5g1", + "id": "Memory-dsJro", "node": { - "base_classes": [ - "BaseChatMemory", - "Data", - "Message" - ], + "base_classes": ["Data", "Message"], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -984,39 +1041,27 @@ ], "frozen": false, "icon": "message-square-more", + "legacy": false, + "lf_version": 
"1.0.19.post2", + "metadata": {}, "output_types": [], "outputs": [ { "cache": true, - "display_name": "Messages (Data)", + "display_name": "Data", "method": "retrieve_messages", "name": "messages", "selected": "Data", - "types": [ - "Data" - ], + "types": ["Data"], "value": "__UNDEFINED__" }, { "cache": true, - "display_name": "Messages (Text)", + "display_name": "Text", "method": "retrieve_messages_as_text", "name": "messages_text", "selected": "Message", - "types": [ - "Message" - ], - "value": "__UNDEFINED__" - }, - { - "cache": true, - "display_name": "Memory", - "method": "build_lc_memory", - "name": "lc_memory", - "selected": "BaseChatMemory", - "types": [ - "BaseChatMemory" - ], + "types": ["Message"], "value": "__UNDEFINED__" } ], @@ -1039,16 +1084,15 @@ "show": true, "title_case": false, "type": "code", - "value": "from langchain.memory import ConversationBufferMemory\n\nfrom langflow.custom import Component\nfrom langflow.field_typing import BaseChatMemory\nfrom langflow.helpers.data import data_to_text\nfrom langflow.inputs import HandleInput\nfrom langflow.io import DropdownInput, IntInput, MessageTextInput, MultilineInput, Output\nfrom langflow.memory import LCBuiltinChatMemory, get_messages\nfrom langflow.schema import Data\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_USER\n\n\nclass MemoryComponent(Component):\n display_name = \"Chat Memory\"\n description = \"Retrieves stored chat messages from Langflow tables or an external memory.\"\n icon = \"message-square-more\"\n name = \"Memory\"\n\n inputs = [\n HandleInput(\n name=\"memory\",\n display_name=\"External Memory\",\n input_types=[\"BaseChatMessageHistory\"],\n info=\"Retrieve messages from an external memory. If empty, it will use the Langflow tables.\",\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER, \"Machine and User\"],\n value=\"Machine and User\",\n info=\"Filter by sender type.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Filter by sender name.\",\n advanced=True,\n ),\n IntInput(\n name=\"n_messages\",\n display_name=\"Number of Messages\",\n value=100,\n info=\"Number of messages to retrieve.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n DropdownInput(\n name=\"order\",\n display_name=\"Order\",\n options=[\"Ascending\", \"Descending\"],\n value=\"Ascending\",\n info=\"Order of the messages.\",\n advanced=True,\n ),\n MultilineInput(\n name=\"template\",\n display_name=\"Template\",\n info=\"The template to use for formatting the data. 
It can contain the keys {text}, {sender} or any other key in the message data.\",\n value=\"{sender_name}: {text}\",\n advanced=True,\n ),\n ]\n\n outputs = [\n Output(display_name=\"Messages (Data)\", name=\"messages\", method=\"retrieve_messages\"),\n Output(display_name=\"Messages (Text)\", name=\"messages_text\", method=\"retrieve_messages_as_text\"),\n Output(display_name=\"Memory\", name=\"lc_memory\", method=\"build_lc_memory\"),\n ]\n\n def retrieve_messages(self) -> Data:\n sender = self.sender\n sender_name = self.sender_name\n session_id = self.session_id\n n_messages = self.n_messages\n order = \"DESC\" if self.order == \"Descending\" else \"ASC\"\n\n if sender == \"Machine and User\":\n sender = None\n\n if self.memory:\n # override session_id\n self.memory.session_id = session_id\n\n stored = self.memory.messages\n # langchain memories are supposed to return messages in ascending order\n if order == \"DESC\":\n stored = stored[::-1]\n if n_messages:\n stored = stored[:n_messages]\n stored = [Message.from_lc_message(m) for m in stored]\n if sender:\n expected_type = MESSAGE_SENDER_AI if sender == MESSAGE_SENDER_AI else MESSAGE_SENDER_USER\n stored = [m for m in stored if m.type == expected_type]\n else:\n stored = get_messages(\n sender=sender,\n sender_name=sender_name,\n session_id=session_id,\n limit=n_messages,\n order=order,\n )\n self.status = stored\n return stored\n\n def retrieve_messages_as_text(self) -> Message:\n stored_text = data_to_text(self.template, self.retrieve_messages())\n self.status = stored_text\n return Message(text=stored_text)\n\n def build_lc_memory(self) -> BaseChatMemory:\n if self.memory:\n chat_memory = self.memory\n else:\n chat_memory = LCBuiltinChatMemory(flow_id=self.flow_id, session_id=self.session_id)\n return ConversationBufferMemory(chat_memory=chat_memory)\n" + "value": "from langchain.memory import ConversationBufferMemory\n\nfrom langflow.custom import Component\nfrom langflow.field_typing import BaseChatMemory\nfrom langflow.helpers.data import data_to_text\nfrom langflow.inputs import HandleInput\nfrom langflow.io import DropdownInput, IntInput, MessageTextInput, MultilineInput, Output\nfrom langflow.memory import LCBuiltinChatMemory, get_messages\nfrom langflow.schema import Data\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_USER\n\n\nclass MemoryComponent(Component):\n display_name = \"Message History\"\n description = \"Retrieves stored chat messages from Langflow tables or an external memory.\"\n icon = \"message-square-more\"\n name = \"Memory\"\n\n inputs = [\n HandleInput(\n name=\"memory\",\n display_name=\"External Memory\",\n input_types=[\"BaseChatMessageHistory\"],\n info=\"Retrieve messages from an external memory. If empty, it will use the Langflow tables.\",\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER, \"Machine and User\"],\n value=\"Machine and User\",\n info=\"Filter by sender type.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Filter by sender name.\",\n advanced=True,\n ),\n IntInput(\n name=\"n_messages\",\n display_name=\"Number of Messages\",\n value=100,\n info=\"Number of messages to retrieve.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n DropdownInput(\n name=\"order\",\n display_name=\"Order\",\n options=[\"Ascending\", \"Descending\"],\n value=\"Ascending\",\n info=\"Order of the messages.\",\n advanced=True,\n ),\n MultilineInput(\n name=\"template\",\n display_name=\"Template\",\n info=\"The template to use for formatting the data. \"\n \"It can contain the keys {text}, {sender} or any other key in the message data.\",\n value=\"{sender_name}: {text}\",\n advanced=True,\n ),\n ]\n\n outputs = [\n Output(display_name=\"Data\", name=\"messages\", method=\"retrieve_messages\"),\n Output(display_name=\"Text\", name=\"messages_text\", method=\"retrieve_messages_as_text\"),\n ]\n\n def retrieve_messages(self) -> Data:\n sender = self.sender\n sender_name = self.sender_name\n session_id = self.session_id\n n_messages = self.n_messages\n order = \"DESC\" if self.order == \"Descending\" else \"ASC\"\n\n if sender == \"Machine and User\":\n sender = None\n\n if self.memory:\n # override session_id\n self.memory.session_id = session_id\n\n stored = self.memory.messages\n # langchain memories are supposed to return messages in ascending order\n if order == \"DESC\":\n stored = stored[::-1]\n if n_messages:\n stored = stored[:n_messages]\n stored = [Message.from_lc_message(m) for m in stored]\n if sender:\n expected_type = MESSAGE_SENDER_AI if sender == MESSAGE_SENDER_AI else MESSAGE_SENDER_USER\n stored = [m for m in stored if m.type == expected_type]\n else:\n stored = get_messages(\n sender=sender,\n sender_name=sender_name,\n session_id=session_id,\n limit=n_messages,\n order=order,\n )\n self.status = stored\n return stored\n\n def retrieve_messages_as_text(self) -> Message:\n stored_text = data_to_text(self.template, self.retrieve_messages())\n self.status = stored_text\n return Message(text=stored_text)\n\n def build_lc_memory(self) -> BaseChatMemory:\n chat_memory = self.memory or LCBuiltinChatMemory(flow_id=self.flow_id, session_id=self.session_id)\n return ConversationBufferMemory(chat_memory=chat_memory)\n" }, "memory": { + "_input_type": "HandleInput", "advanced": false, "display_name": "External Memory", "dynamic": false, "info": "Retrieve messages from an external memory. 
If empty, it will use the Langflow tables.", - "input_types": [ - "BaseChatMessageHistory" - ], + "input_types": ["BaseChatMessageHistory"], "list": false, "name": "memory", "placeholder": "", @@ -1060,6 +1104,7 @@ "value": "" }, "n_messages": { + "_input_type": "IntInput", "advanced": true, "display_name": "Number of Messages", "dynamic": false, @@ -1075,50 +1120,48 @@ "value": 100 }, "order": { + "_input_type": "DropdownInput", "advanced": true, + "combobox": false, "display_name": "Order", "dynamic": false, "info": "Order of the messages.", "name": "order", - "options": [ - "Ascending", - "Descending" - ], + "options": ["Ascending", "Descending"], "placeholder": "", "required": false, "show": true, "title_case": false, + "tool_mode": false, "trace_as_metadata": true, "type": "str", "value": "Ascending" }, "sender": { + "_input_type": "DropdownInput", "advanced": true, + "combobox": false, "display_name": "Sender Type", "dynamic": false, "info": "Filter by sender type.", "name": "sender", - "options": [ - "Machine", - "User", - "Machine and User" - ], + "options": ["Machine", "User", "Machine and User"], "placeholder": "", "required": false, "show": true, "title_case": false, + "tool_mode": false, "trace_as_metadata": true, "type": "str", "value": "Machine and User" }, "sender_name": { + "_input_type": "MessageTextInput", "advanced": true, "display_name": "Sender Name", "dynamic": false, "info": "Filter by sender name.", - "input_types": [ - "Message" - ], + "input_types": ["Message"], "list": false, "load_from_db": false, "name": "sender_name", @@ -1126,19 +1169,19 @@ "required": false, "show": true, "title_case": false, + "tool_mode": false, "trace_as_input": true, "trace_as_metadata": true, "type": "str", "value": "" }, "session_id": { + "_input_type": "MessageTextInput", "advanced": true, "display_name": "Session ID", "dynamic": false, "info": "The session ID of the chat. If empty, the current session ID parameter will be used.", - "input_types": [ - "Message" - ], + "input_types": ["Message"], "list": false, "load_from_db": false, "name": "session_id", @@ -1146,19 +1189,19 @@ "required": false, "show": true, "title_case": false, + "tool_mode": false, "trace_as_input": true, "trace_as_metadata": true, "type": "str", "value": "" }, "template": { + "_input_type": "MultilineInput", "advanced": true, "display_name": "Template", "dynamic": false, "info": "The template to use for formatting the data. 
It can contain the keys {text}, {sender} or any other key in the message data.", - "input_types": [ - "Message" - ], + "input_types": ["Message"], "list": false, "load_from_db": false, "multiline": true, @@ -1167,41 +1210,162 @@ "required": false, "show": true, "title_case": false, + "tool_mode": false, "trace_as_input": true, "trace_as_metadata": true, "type": "str", "value": "{sender_name}: {text}" } - } + }, + "tool_mode": false }, "type": "Memory" }, "dragging": false, - "height": 387, - "id": "Memory-6s5g1", + "height": 264, + "id": "Memory-dsJro", + "position": { + "x": 1947.7805399474369, + "y": 766.1115984799474 + }, + "positionAbsolute": { + "x": 1947.7805399474369, + "y": 766.1115984799474 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "id": "Prompt-JvUF7", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": { + "template": ["memory"] + }, + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt", + "documentation": "", + "edited": false, + "error": null, + "field_order": ["template"], + "frozen": false, + "full_path": null, + "icon": "prompts", + "is_composition": null, + "is_input": null, + "is_output": null, + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "name": "", + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Prompt Message", + "method": "build_prompt", + "name": "prompt", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.inputs.inputs import DefaultPromptField\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(self) -> Message:\n prompt = Message.from_template(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def _update_template(self, frontend_node: dict):\n prompt_template = frontend_node[\"template\"][\"template\"][\"value\"]\n custom_fields = frontend_node[\"custom_fields\"]\n frontend_node_template = frontend_node[\"template\"]\n _ = process_prompt_template(\n template=prompt_template,\n name=\"template\",\n custom_fields=custom_fields,\n frontend_node_template=frontend_node_template,\n )\n return frontend_node\n\n def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"This function is called after the code validation is done.\"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n # Kept 
it duplicated for backwards compatibility\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n\n def _get_fallback_input(self, **kwargs):\n return DefaultPromptField(**kwargs)\n" + }, + "memory": { + "advanced": false, + "display_name": "memory", + "dynamic": false, + "field_type": "str", + "fileTypes": [], + "file_path": "", + "info": "", + "input_types": ["Message", "Text"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "memory", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "" + }, + "template": { + "_input_type": "PromptInput", + "advanced": false, + "display_name": "Template", + "dynamic": false, + "info": "", + "list": false, + "name": "template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "type": "prompt", + "value": "You are a helpful assistant that answer questions.\n\nUse markdown to format your answer, properly embedding images and urls.\n\nHistory: \n\n{memory}\n" + } + }, + "tool_mode": false + }, + "type": "Prompt" + }, + "dragging": false, + "height": 347, + "id": "Prompt-JvUF7", "position": { - "x": 1301.98330242754, - "y": 422.33865605652574 + "x": 2327.422938009026, + "y": 675.992123914672 }, "positionAbsolute": { - "x": 1301.98330242754, - "y": 422.33865605652574 + "x": 2327.422938009026, + "y": 675.992123914672 }, "selected": false, "type": "genericNode", - "width": 384 + "width": 320 } ], "viewport": { - "x": -377.45799796990354, - "y": 18.161555190942522, - "zoom": 0.45494095964690673 + "x": -982.1139274661589, + "y": -37.00115573852611, + "zoom": 0.6716071677128489 } }, - "description": "This project can be used as a starting point for building a Chat experience with user specific memory. 
You can set a different Session ID to start a new message history.", + "description": "Create a chatbot that saves and references previous messages, enabling the model to maintain context throughout the conversation.", "endpoint_name": null, - "id": "ff6810d0-1d7b-4455-9b35-c9f54f7d63b6", + "icon": "MessagesSquare", + "id": "7d334df6-6cf5-4d09-b6bf-169247b20446", + "gradient": "4", "is_component": false, - "last_tested_version": "1.0.9", - "name": "Memory Chatbot" -} \ No newline at end of file + "last_tested_version": "1.0.19.post2", + "name": "Memory Chatbot", + "tags": ["chatbots", "openai", "assistants"] +} diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Research Agent.json b/src/backend/base/langflow/initial_setup/starter_projects/Research Agent.json new file mode 100644 index 000000000000..10a33260dcb7 --- /dev/null +++ b/src/backend/base/langflow/initial_setup/starter_projects/Research Agent.json @@ -0,0 +1,2739 @@ +{ + "data": { + "edges": [ + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "OpenAIModel", + "id": "OpenAIModel-Rc3MO", + "name": "text_output", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "previous_response", + "id": "Prompt-u7GZR", + "inputTypes": ["Message", "Text"], + "type": "str" + } + }, + "id": "reactflow__edge-OpenAIModel-Rc3MO{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-Rc3MOœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-Prompt-u7GZR{œfieldNameœ:œprevious_responseœ,œidœ:œPrompt-u7GZRœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", + "selected": false, + "source": "OpenAIModel-Rc3MO", + "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-Rc3MOœ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}", + "target": "Prompt-u7GZR", + "targetHandle": "{œfieldNameœ: œprevious_responseœ, œidœ: œPrompt-u7GZRœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "Prompt", + "id": "Prompt-yDDjW", + "name": "prompt", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "input_value", + "id": "OpenAIModel-zhgF5", + "inputTypes": ["Message"], + "type": "str" + } + }, + "id": "reactflow__edge-Prompt-yDDjW{œdataTypeœ:œPromptœ,œidœ:œPrompt-yDDjWœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-zhgF5{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-zhgF5œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "selected": false, + "source": "Prompt-yDDjW", + "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-yDDjWœ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", + "target": "OpenAIModel-zhgF5", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œOpenAIModel-zhgF5œ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "ChatInput", + "id": "ChatInput-Mzp4f", + "name": "message", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "input_value", + "id": "Prompt-yDDjW", + "inputTypes": ["Message", "Text"], + "type": "str" + } + }, + "id": "reactflow__edge-ChatInput-Mzp4f{œdataTypeœ:œChatInputœ,œidœ:œChatInput-Mzp4fœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Prompt-yDDjW{œfieldNameœ:œinput_valueœ,œidœ:œPrompt-yDDjWœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", + "selected": false, + "source": "ChatInput-Mzp4f", + "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-Mzp4fœ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}", + "target": "Prompt-yDDjW", + 
"targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œPrompt-yDDjWœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "OpenAIModel", + "id": "OpenAIModel-zhgF5", + "name": "text_output", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "input_value", + "id": "ChatOutput-mWv8X", + "inputTypes": ["Message"], + "type": "str" + } + }, + "id": "reactflow__edge-OpenAIModel-zhgF5{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-zhgF5œ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-mWv8X{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-mWv8Xœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "selected": false, + "source": "OpenAIModel-zhgF5", + "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-zhgF5œ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}", + "target": "ChatOutput-mWv8X", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-mWv8Xœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "TavilyAISearch", + "id": "TavilyAISearch-rI4aD", + "name": "api_build_tool", + "output_types": ["Tool"] + }, + "targetHandle": { + "fieldName": "tools", + "id": "Agent-9E8IU", + "inputTypes": ["Tool", "BaseTool", "StructuredTool"], + "type": "other" + } + }, + "id": "reactflow__edge-TavilyAISearch-rI4aD{œdataTypeœ:œTavilyAISearchœ,œidœ:œTavilyAISearch-rI4aDœ,œnameœ:œapi_build_toolœ,œoutput_typesœ:[œToolœ]}-Agent-9E8IU{œfieldNameœ:œtoolsœ,œidœ:œAgent-9E8IUœ,œinputTypesœ:[œToolœ,œBaseToolœ,œStructuredToolœ],œtypeœ:œotherœ}", + "selected": false, + "source": "TavilyAISearch-rI4aD", + "sourceHandle": "{œdataTypeœ: œTavilyAISearchœ, œidœ: œTavilyAISearch-rI4aDœ, œnameœ: œapi_build_toolœ, œoutput_typesœ: [œToolœ]}", + "target": "Agent-9E8IU", + "targetHandle": "{œfieldNameœ: œtoolsœ, œidœ: œAgent-9E8IUœ, œinputTypesœ: [œToolœ, œBaseToolœ, œStructuredToolœ], œtypeœ: œotherœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "Prompt", + "id": "Prompt-u7GZR", + "name": "prompt", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "input_value", + "id": "Agent-9E8IU", + "inputTypes": ["Message"], + "type": "str" + } + }, + "id": "reactflow__edge-Prompt-u7GZR{œdataTypeœ:œPromptœ,œidœ:œPrompt-u7GZRœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-Agent-9E8IU{œfieldNameœ:œinput_valueœ,œidœ:œAgent-9E8IUœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "selected": false, + "source": "Prompt-u7GZR", + "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-u7GZRœ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", + "target": "Agent-9E8IU", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œAgent-9E8IUœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "Agent", + "id": "Agent-9E8IU", + "name": "response", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "search_results", + "id": "Prompt-yDDjW", + "inputTypes": ["Message", "Text"], + "type": "str" + } + }, + "id": "reactflow__edge-Agent-9E8IU{œdataTypeœ:œAgentœ,œidœ:œAgent-9E8IUœ,œnameœ:œresponseœ,œoutput_typesœ:[œMessageœ]}-Prompt-yDDjW{œfieldNameœ:œsearch_resultsœ,œidœ:œPrompt-yDDjWœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", + "selected": false, + "source": "Agent-9E8IU", + "sourceHandle": "{œdataTypeœ: œAgentœ, œidœ: œAgent-9E8IUœ, œnameœ: œresponseœ, œoutput_typesœ: 
[œMessageœ]}", + "target": "Prompt-yDDjW", + "targetHandle": "{œfieldNameœ: œsearch_resultsœ, œidœ: œPrompt-yDDjWœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "Prompt", + "id": "Prompt-T4lL6", + "name": "prompt", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "system_message", + "id": "OpenAIModel-Rc3MO", + "inputTypes": ["Message"], + "type": "str" + } + }, + "id": "reactflow__edge-Prompt-T4lL6{œdataTypeœ:œPromptœ,œidœ:œPrompt-T4lL6œ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-Rc3MO{œfieldNameœ:œsystem_messageœ,œidœ:œOpenAIModel-Rc3MOœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "Prompt-T4lL6", + "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-T4lL6œ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", + "target": "OpenAIModel-Rc3MO", + "targetHandle": "{œfieldNameœ: œsystem_messageœ, œidœ: œOpenAIModel-Rc3MOœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "ChatInput", + "id": "ChatInput-Mzp4f", + "name": "message", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "input_value", + "id": "OpenAIModel-Rc3MO", + "inputTypes": ["Message"], + "type": "str" + } + }, + "id": "reactflow__edge-ChatInput-Mzp4f{œdataTypeœ:œChatInputœ,œidœ:œChatInput-Mzp4fœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-Rc3MO{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-Rc3MOœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "ChatInput-Mzp4f", + "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-Mzp4fœ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}", + "target": "OpenAIModel-Rc3MO", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œOpenAIModel-Rc3MOœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "Prompt", + "id": "Prompt-f4xQ5", + "name": "prompt", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "system_message", + "id": "OpenAIModel-zhgF5", + "inputTypes": ["Message"], + "type": "str" + } + }, + "id": "reactflow__edge-Prompt-f4xQ5{œdataTypeœ:œPromptœ,œidœ:œPrompt-f4xQ5œ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-zhgF5{œfieldNameœ:œsystem_messageœ,œidœ:œOpenAIModel-zhgF5œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "Prompt-f4xQ5", + "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-f4xQ5œ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", + "target": "OpenAIModel-zhgF5", + "targetHandle": "{œfieldNameœ: œsystem_messageœ, œidœ: œOpenAIModel-zhgF5œ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + } + ], + "nodes": [ + { + "data": { + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt", + "id": "Prompt-u7GZR", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": { + "template": ["previous_response"] + }, + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt", + "documentation": "", + "edited": false, + "field_order": ["template"], + "frozen": false, + "icon": "prompts", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Prompt Message", + "method": "build_prompt", + "name": "prompt", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + 
"pinned": false, + "template": { + "_type": "Component", + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.inputs.inputs import DefaultPromptField\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(self) -> Message:\n prompt = Message.from_template(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def _update_template(self, frontend_node: dict):\n prompt_template = frontend_node[\"template\"][\"template\"][\"value\"]\n custom_fields = frontend_node[\"custom_fields\"]\n frontend_node_template = frontend_node[\"template\"]\n _ = process_prompt_template(\n template=prompt_template,\n name=\"template\",\n custom_fields=custom_fields,\n frontend_node_template=frontend_node_template,\n )\n return frontend_node\n\n def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"This function is called after the code validation is done.\"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n # Kept it duplicated for backwards compatibility\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n\n def _get_fallback_input(self, **kwargs):\n return DefaultPromptField(**kwargs)\n" + }, + "previous_response": { + "advanced": false, + "display_name": "previous_response", + "dynamic": false, + "field_type": "str", + "fileTypes": [], + "file_path": "", + "info": "", + "input_types": ["Message", "Text"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "previous_response", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "" + }, + "template": { + "_input_type": "PromptInput", + "advanced": false, + "display_name": "Template", + "dynamic": false, + "info": "", + "list": false, + "load_from_db": false, + "name": "template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "type": "prompt", + "value": "\n\nRESEARCH PLAN: {previous_response}\n\nUse Tavily Search to investigate the queries and analyze the findings.\nFocus on academic and reliable sources.\n\nSteps:\n1. 
Search using provided queries\n2. Analyze search results\n3. Verify source credibility\n4. Extract key findings\n\nFormat findings as:\n\nSEARCH RESULTS:\n[Key findings from searches]\n\nSOURCE ANALYSIS:\n[Credibility assessment]\n\nMAIN INSIGHTS:\n[Critical discoveries]\n\nEVIDENCE QUALITY:\n[Evaluation of findings]" + } + }, + "tool_mode": false + }, + "type": "Prompt" + }, + "dragging": false, + "height": 347, + "id": "Prompt-u7GZR", + "position": { + "x": 1803.2315476328304, + "y": 839.0423490089254 + }, + "positionAbsolute": { + "x": 1803.2315476328304, + "y": 839.0423490089254 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "id": "ChatInput-Mzp4f", + "node": { + "base_classes": ["Message"], + "beta": false, + "category": "inputs", + "conditional_paths": [], + "custom_fields": {}, + "description": "Get chat inputs from the Playground.", + "display_name": "Chat Input", + "documentation": "", + "edited": false, + "field_order": [ + "input_value", + "should_store_message", + "sender", + "sender_name", + "session_id", + "files", + "background_color", + "chat_icon", + "text_color" + ], + "frozen": false, + "icon": "MessagesSquare", + "key": "ChatInput", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Message", + "method": "message_response", + "name": "message", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "background_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Background Color", + "dynamic": false, + "info": "The background color of the icon.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "background_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "chat_icon": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Icon", + "dynamic": false, + "info": "The icon of the message.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "chat_icon", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_USER, MESSAGE_SENDER_USER\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store 
Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n _background_color = self.background_color\n _text_color = self.text_color\n _icon = self.chat_icon\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\"background_color\": _background_color, \"text_color\": _text_color, \"icon\": _icon},\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n" + }, + "files": { + "_input_type": "FileInput", + "advanced": true, + "display_name": "Files", + "dynamic": false, + "fileTypes": [ + "txt", + "md", + "mdx", + "csv", + "json", + "yaml", + "yml", + "xml", + "html", + "htm", + "pdf", + "docx", + "py", + "sh", + "sql", + "js", + "ts", + "tsx", + "jpg", + "jpeg", + "png", + "bmp", + "image" + ], + "file_path": "", + "info": "Files to be sent with the message.", + "list": true, + "name": "files", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "file", + "value": "" + }, + "input_value": { + "_input_type": "MultilineInput", + "advanced": false, + "display_name": "Text", + "dynamic": false, + "info": "Message to be passed as input.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "Research the effectiveness of different prompt engineering techniques in controlling AI hallucinations, with focus on real-world applications and empirical studies." 
+ }, + "sender": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Sender Type", + "dynamic": false, + "info": "Type of sender.", + "name": "sender", + "options": ["Machine", "User"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "str", + "value": "User" + }, + "sender_name": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Sender Name", + "dynamic": false, + "info": "Name of the sender.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "sender_name", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "User" + }, + "session_id": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Session ID", + "dynamic": false, + "info": "The session ID of the chat. If empty, the current session ID parameter will be used.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "session_id", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "should_store_message": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Store Messages", + "dynamic": false, + "info": "Store the message in the history.", + "list": false, + "name": "should_store_message", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "text_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Text Color", + "dynamic": false, + "info": "The text color of the name", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "text_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + } + } + }, + "type": "ChatInput" + }, + "dragging": false, + "height": 234, + "id": "ChatInput-Mzp4f", + "position": { + "x": 756.0075981758582, + "y": 756.7423476254241 + }, + "positionAbsolute": { + "x": 756.0075981758582, + "y": 756.7423476254241 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "description": "Display a chat message in the Playground.", + "display_name": "Chat Output", + "id": "ChatOutput-mWv8X", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Display a chat message in the Playground.", + "display_name": "Chat Output", + "documentation": "", + "edited": false, + "field_order": [ + "input_value", + "should_store_message", + "sender", + "sender_name", + "session_id", + "data_template", + "background_color", + "chat_icon", + "text_color" + ], + "frozen": false, + "icon": "MessagesSquare", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Message", + "method": "message_response", + "name": "message", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "background_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Background Color", + "dynamic": false, + 
"info": "The background color of the icon.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "background_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "chat_icon": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Icon", + "dynamic": false, + "info": "The icon of the message.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "chat_icon", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageInput, MessageTextInput, Output\nfrom langflow.schema.message import Message\nfrom langflow.schema.properties import Source\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.\",\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(\n display_name=\"Message\",\n name=\"message\",\n method=\"message_response\",\n ),\n ]\n\n def _build_source(self, _id: str | None, display_name: str | None, source: str | None) -> Source:\n source_dict = {}\n if _id:\n source_dict[\"id\"] = _id\n if display_name:\n source_dict[\"display_name\"] = display_name\n if source:\n source_dict[\"source\"] = source\n return Source(**source_dict)\n\n def message_response(self) -> Message:\n _source, _icon, _display_name, _source_id = self.get_properties_from_source_component()\n _background_color = self.background_color\n _text_color = self.text_color\n if self.chat_icon:\n _icon = self.chat_icon\n message = self.input_value if isinstance(self.input_value, Message) else Message(text=self.input_value)\n message.sender = self.sender\n message.sender_name = self.sender_name\n message.session_id = self.session_id\n message.flow_id = self.graph.flow_id if hasattr(self, \"graph\") else None\n message.properties.source = self._build_source(_source_id, _display_name, _source)\n message.properties.icon = _icon\n message.properties.background_color = _background_color\n message.properties.text_color = _text_color\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n" + }, + "data_template": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Data Template", + "dynamic": false, + "info": "Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "data_template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "{text}" + }, + "input_value": { + "_input_type": "MessageInput", + "advanced": false, + "display_name": "Text", + "dynamic": false, + "info": "Message to be passed as output.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "sender": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Sender Type", + "dynamic": false, + "info": "Type of sender.", + "name": "sender", + "options": ["Machine", "User"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "Machine" + }, + "sender_name": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Sender Name", + "dynamic": false, + "info": "Name of the sender.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "sender_name", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "AI" + }, + "session_id": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Session ID", + "dynamic": false, + "info": "The session ID of the chat. 
If empty, the current session ID parameter will be used.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "session_id", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "should_store_message": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Store Messages", + "dynamic": false, + "info": "Store the message in the history.", + "list": false, + "name": "should_store_message", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "text_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Text Color", + "dynamic": false, + "info": "The text color of the name", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "text_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + } + }, + "tool_mode": false + }, + "type": "ChatOutput" + }, + "dragging": false, + "height": 234, + "id": "ChatOutput-mWv8X", + "position": { + "x": 3200.774558432761, + "y": 853.9881404769172 + }, + "positionAbsolute": { + "x": 3200.774558432761, + "y": 853.9881404769172 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt", + "id": "Prompt-yDDjW", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": { + "template": ["search_results", "input_value"] + }, + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt", + "documentation": "", + "edited": false, + "field_order": ["template"], + "frozen": false, + "icon": "prompts", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Prompt Message", + "method": "build_prompt", + "name": "prompt", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.inputs.inputs import DefaultPromptField\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(self) -> Message:\n prompt = Message.from_template(**self._attributes)\n self.status = prompt.text\n return 
prompt\n\n def _update_template(self, frontend_node: dict):\n prompt_template = frontend_node[\"template\"][\"template\"][\"value\"]\n custom_fields = frontend_node[\"custom_fields\"]\n frontend_node_template = frontend_node[\"template\"]\n _ = process_prompt_template(\n template=prompt_template,\n name=\"template\",\n custom_fields=custom_fields,\n frontend_node_template=frontend_node_template,\n )\n return frontend_node\n\n def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"This function is called after the code validation is done.\"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n # Kept it duplicated for backwards compatibility\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n\n def _get_fallback_input(self, **kwargs):\n return DefaultPromptField(**kwargs)\n" + }, + "input_value": { + "advanced": false, + "display_name": "input_value", + "dynamic": false, + "field_type": "str", + "fileTypes": [], + "file_path": "", + "info": "", + "input_types": ["Message", "Text"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "" + }, + "search_results": { + "advanced": false, + "display_name": "search_results", + "dynamic": false, + "field_type": "str", + "fileTypes": [], + "file_path": "", + "info": "", + "input_types": ["Message", "Text"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "search_results", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "" + }, + "template": { + "_input_type": "PromptInput", + "advanced": false, + "display_name": "Template", + "dynamic": false, + "info": "", + "list": false, + "load_from_db": false, + "name": "template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "type": "prompt", + "value": "RESEARCH FINDINGS: {search_results}\nORIGINAL QUERY: {input_value}\n" + } + }, + "tool_mode": false + }, + "type": "Prompt" + }, + "dragging": false, + "height": 433, + "id": "Prompt-yDDjW", + "position": { + "x": 2504.138359606453, + "y": 434.061360540584 + }, + "positionAbsolute": { + "x": 2504.138359606453, + "y": 434.061360540584 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "description": "**Tavily AI** is a search engine optimized for LLMs and RAG, aimed at efficient, quick, and persistent search results. It can be used independently or as an agent tool.\n\nNote: Check 'Advanced' for all options.\n", + "display_name": "Tavily AI Search", + "id": "TavilyAISearch-rI4aD", + "node": { + "base_classes": ["Data", "Tool"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "**Tavily AI** is a search engine optimized for LLMs and RAG, aimed at efficient, quick, and persistent search results. 
It can be used independently or as an agent tool.\n\nNote: Check 'Advanced' for all options.\n", + "display_name": "Tavily AI Search", + "documentation": "https://docs.tavily.com/", + "edited": false, + "field_order": [ + "api_key", + "query", + "search_depth", + "topic", + "max_results", + "include_images", + "include_answer" + ], + "frozen": false, + "icon": "TavilyIcon", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Data", + "method": "run_model", + "name": "api_run_model", + "required_inputs": ["api_key"], + "selected": "Data", + "types": ["Data"], + "value": "__UNDEFINED__" + }, + { + "cache": true, + "display_name": "Tool", + "method": "build_tool", + "name": "api_build_tool", + "required_inputs": ["api_key"], + "selected": "Tool", + "types": ["Tool"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "api_key": { + "_input_type": "SecretStrInput", + "advanced": false, + "display_name": "Tavily API Key", + "dynamic": false, + "info": "Your Tavily API Key.", + "input_types": ["Message"], + "load_from_db": true, + "name": "api_key", + "password": true, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "str", + "value": "" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from enum import Enum\n\nimport httpx\nfrom langchain.tools import StructuredTool\nfrom langchain_core.tools import ToolException\nfrom loguru import logger\nfrom pydantic import BaseModel, Field\n\nfrom langflow.base.langchain_utilities.model import LCToolComponent\nfrom langflow.field_typing import Tool\nfrom langflow.inputs import BoolInput, DropdownInput, IntInput, MessageTextInput, SecretStrInput\nfrom langflow.schema import Data\n\n\nclass TavilySearchDepth(Enum):\n BASIC = \"basic\"\n ADVANCED = \"advanced\"\n\n\nclass TavilySearchTopic(Enum):\n GENERAL = \"general\"\n NEWS = \"news\"\n\n\nclass TavilySearchSchema(BaseModel):\n query: str = Field(..., description=\"The search query you want to execute with Tavily.\")\n search_depth: TavilySearchDepth = Field(TavilySearchDepth.BASIC, description=\"The depth of the search.\")\n topic: TavilySearchTopic = Field(TavilySearchTopic.GENERAL, description=\"The category of the search.\")\n max_results: int = Field(5, description=\"The maximum number of search results to return.\")\n include_images: bool = Field(default=False, description=\"Include a list of query-related images in the response.\")\n include_answer: bool = Field(default=False, description=\"Include a short answer to original query.\")\n\n\nclass TavilySearchToolComponent(LCToolComponent):\n display_name = \"Tavily AI Search\"\n description = \"\"\"**Tavily AI** is a search engine optimized for LLMs and RAG, \\\n aimed at efficient, quick, and persistent search results. 
It can be used independently or as an agent tool.\n\nNote: Check 'Advanced' for all options.\n\"\"\"\n icon = \"TavilyIcon\"\n name = \"TavilyAISearch\"\n documentation = \"https://docs.tavily.com/\"\n\n inputs = [\n SecretStrInput(\n name=\"api_key\",\n display_name=\"Tavily API Key\",\n required=True,\n info=\"Your Tavily API Key.\",\n ),\n MessageTextInput(\n name=\"query\",\n display_name=\"Search Query\",\n info=\"The search query you want to execute with Tavily.\",\n ),\n DropdownInput(\n name=\"search_depth\",\n display_name=\"Search Depth\",\n info=\"The depth of the search.\",\n options=list(TavilySearchDepth),\n value=TavilySearchDepth.ADVANCED,\n advanced=True,\n ),\n DropdownInput(\n name=\"topic\",\n display_name=\"Search Topic\",\n info=\"The category of the search.\",\n options=list(TavilySearchTopic),\n value=TavilySearchTopic.GENERAL,\n advanced=True,\n ),\n IntInput(\n name=\"max_results\",\n display_name=\"Max Results\",\n info=\"The maximum number of search results to return.\",\n value=5,\n advanced=True,\n ),\n BoolInput(\n name=\"include_images\",\n display_name=\"Include Images\",\n info=\"Include a list of query-related images in the response.\",\n value=True,\n advanced=True,\n ),\n BoolInput(\n name=\"include_answer\",\n display_name=\"Include Answer\",\n info=\"Include a short answer to original query.\",\n value=True,\n advanced=True,\n ),\n ]\n\n def run_model(self) -> list[Data]:\n # Convert string values to enum instances with validation\n try:\n search_depth_enum = (\n self.search_depth\n if isinstance(self.search_depth, TavilySearchDepth)\n else TavilySearchDepth(str(self.search_depth).lower())\n )\n except ValueError as e:\n error_message = f\"Invalid search depth value: {e!s}\"\n self.status = error_message\n return [Data(data={\"error\": error_message})]\n\n try:\n topic_enum = (\n self.topic if isinstance(self.topic, TavilySearchTopic) else TavilySearchTopic(str(self.topic).lower())\n )\n except ValueError as e:\n error_message = f\"Invalid topic value: {e!s}\"\n self.status = error_message\n return [Data(data={\"error\": error_message})]\n\n return self._tavily_search(\n self.query,\n search_depth=search_depth_enum,\n topic=topic_enum,\n max_results=self.max_results,\n include_images=self.include_images,\n include_answer=self.include_answer,\n )\n\n def build_tool(self) -> Tool:\n return StructuredTool.from_function(\n name=\"tavily_search\",\n description=\"Perform a web search using the Tavily API.\",\n func=self._tavily_search,\n args_schema=TavilySearchSchema,\n )\n\n def _tavily_search(\n self,\n query: str,\n *,\n search_depth: TavilySearchDepth = TavilySearchDepth.BASIC,\n topic: TavilySearchTopic = TavilySearchTopic.GENERAL,\n max_results: int = 5,\n include_images: bool = False,\n include_answer: bool = False,\n ) -> list[Data]:\n # Validate enum values\n if not isinstance(search_depth, TavilySearchDepth):\n msg = f\"Invalid search_depth value: {search_depth}\"\n raise TypeError(msg)\n if not isinstance(topic, TavilySearchTopic):\n msg = f\"Invalid topic value: {topic}\"\n raise TypeError(msg)\n\n try:\n url = \"https://api.tavily.com/search\"\n headers = {\n \"content-type\": \"application/json\",\n \"accept\": \"application/json\",\n }\n payload = {\n \"api_key\": self.api_key,\n \"query\": query,\n \"search_depth\": search_depth.value,\n \"topic\": topic.value,\n \"max_results\": max_results,\n \"include_images\": include_images,\n \"include_answer\": include_answer,\n }\n\n with httpx.Client() as client:\n response = client.post(url, 
json=payload, headers=headers)\n\n response.raise_for_status()\n search_results = response.json()\n\n data_results = [\n Data(\n data={\n \"title\": result.get(\"title\"),\n \"url\": result.get(\"url\"),\n \"content\": result.get(\"content\"),\n \"score\": result.get(\"score\"),\n }\n )\n for result in search_results.get(\"results\", [])\n ]\n\n if include_answer and search_results.get(\"answer\"):\n data_results.insert(0, Data(data={\"answer\": search_results[\"answer\"]}))\n\n if include_images and search_results.get(\"images\"):\n data_results.append(Data(data={\"images\": search_results[\"images\"]}))\n\n self.status = data_results # type: ignore[assignment]\n\n except httpx.HTTPStatusError as e:\n error_message = f\"HTTP error: {e.response.status_code} - {e.response.text}\"\n logger.debug(error_message)\n self.status = error_message\n raise ToolException(error_message) from e\n except Exception as e:\n error_message = f\"Unexpected error: {e}\"\n logger.opt(exception=True).debug(\"Error running Tavily Search\")\n self.status = error_message\n raise ToolException(error_message) from e\n return data_results\n" + }, + "include_answer": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Include Answer", + "dynamic": false, + "info": "Include a short answer to original query.", + "list": false, + "name": "include_answer", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "include_images": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Include Images", + "dynamic": false, + "info": "Include a list of query-related images in the response.", + "list": false, + "name": "include_images", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "max_results": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Max Results", + "dynamic": false, + "info": "The maximum number of search results to return.", + "list": false, + "name": "max_results", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 5 + }, + "query": { + "_input_type": "MessageTextInput", + "advanced": false, + "display_name": "Search Query", + "dynamic": false, + "info": "The search query you want to execute with Tavily.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "query", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "search_depth": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Search Depth", + "dynamic": false, + "info": "The depth of the search.", + "load_from_db": false, + "name": "search_depth", + "options": ["basic", "advanced"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "advanced" + }, + "topic": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Search Topic", + "dynamic": false, + "info": "The category of the search.", + "load_from_db": false, + "name": "topic", + "options": ["general", "news"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": 
false, + "trace_as_metadata": true, + "type": "str", + "value": "general" + } + }, + "tool_mode": false + }, + "type": "TavilyAISearch" + }, + "dragging": false, + "height": 481, + "id": "TavilyAISearch-rI4aD", + "position": { + "x": 1802.2291194402355, + "y": 381.88177151343945 + }, + "positionAbsolute": { + "x": 1802.2291194402355, + "y": 381.88177151343945 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "description": "Generates text using OpenAI LLMs.", + "display_name": "OpenAI", + "id": "OpenAIModel-Rc3MO", + "node": { + "base_classes": ["LanguageModel", "Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Generates text using OpenAI LLMs.", + "display_name": "OpenAI", + "documentation": "", + "edited": false, + "field_order": [ + "input_value", + "system_message", + "stream", + "max_tokens", + "model_kwargs", + "json_mode", + "output_schema", + "model_name", + "openai_api_base", + "api_key", + "temperature", + "seed", + "output_parser" + ], + "frozen": false, + "icon": "OpenAI", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Text", + "method": "text_response", + "name": "text_output", + "required_inputs": [], + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + }, + { + "cache": true, + "display_name": "Language Model", + "method": "build_model", + "name": "model_output", + "required_inputs": [], + "selected": "LanguageModel", + "types": ["LanguageModel"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "api_key": { + "_input_type": "SecretStrInput", + "advanced": false, + "display_name": "OpenAI API Key", + "dynamic": false, + "info": "The OpenAI API Key to use for the OpenAI model.", + "input_types": ["Message"], + "load_from_db": true, + "name": "api_key", + "password": true, + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "OPENAI_API_KEY" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "import operator\nfrom functools import reduce\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langflow.inputs import BoolInput, DictInput, DropdownInput, FloatInput, IntInput, SecretStrInput, StrInput\nfrom langflow.inputs.inputs import HandleInput\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = [\n *LCModelComponent._base_inputs,\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(\n name=\"model_kwargs\",\n display_name=\"Model Kwargs\",\n advanced=True,\n info=\"Additional keyword arguments to pass to the model.\",\n ),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. \"\n \"You must pass the word JSON in the prompt. \"\n \"If left blank, JSON mode will be disabled. [DEPRECATED]\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. \"\n \"Defaults to https://api.openai.com/v1. \"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n HandleInput(\n name=\"output_parser\",\n display_name=\"Output Parser\",\n info=\"The parser to use to parse the output of the model\",\n advanced=True,\n input_types=[\"OutputParser\"],\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n api_key = SecretStr(openai_api_key).get_secret_value() if openai_api_key else None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature if temperature is not None else 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\")\n else:\n output = output.bind(response_format={\"type\": \"json_object\"})\n\n return output\n\n def _get_exception_message(self, e: Exception):\n \"\"\"Get a message from an OpenAI exception.\n\n Args:\n e (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n try:\n from openai import BadRequestError\n except ImportError:\n return None\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\")\n if message:\n return message\n return None\n" + }, + "input_value": { + "_input_type": "MessageInput", + "advanced": false, + "display_name": "Input", + "dynamic": false, + "info": "", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + 
"trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "json_mode": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "JSON Mode", + "dynamic": false, + "info": "If True, it will output JSON regardless of passing a schema.", + "list": false, + "name": "json_mode", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + }, + "max_tokens": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Max Tokens", + "dynamic": false, + "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.", + "list": false, + "name": "max_tokens", + "placeholder": "", + "range_spec": { + "max": 128000, + "min": 0, + "step": 0.1, + "step_type": "float" + }, + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": "" + }, + "model_kwargs": { + "_input_type": "DictInput", + "advanced": true, + "display_name": "Model Kwargs", + "dynamic": false, + "info": "Additional keyword arguments to pass to the model.", + "list": false, + "name": "model_kwargs", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "model_name": { + "_input_type": "DropdownInput", + "advanced": false, + "combobox": false, + "display_name": "Model Name", + "dynamic": false, + "info": "", + "name": "model_name", + "options": [ + "gpt-4o-mini", + "gpt-4o", + "gpt-4-turbo", + "gpt-4-turbo-preview", + "gpt-4", + "gpt-3.5-turbo", + "gpt-3.5-turbo-0125" + ], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "gpt-4o-mini" + }, + "openai_api_base": { + "_input_type": "StrInput", + "advanced": true, + "display_name": "OpenAI API Base", + "dynamic": false, + "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.", + "list": false, + "load_from_db": false, + "name": "openai_api_base", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "output_parser": { + "_input_type": "HandleInput", + "advanced": true, + "display_name": "Output Parser", + "dynamic": false, + "info": "The parser to use to parse the output of the model", + "input_types": ["OutputParser"], + "list": false, + "name": "output_parser", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "output_schema": { + "_input_type": "DictInput", + "advanced": true, + "display_name": "Schema", + "dynamic": false, + "info": "The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled. 
[DEPRECATED]", + "list": true, + "name": "output_schema", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "seed": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Seed", + "dynamic": false, + "info": "The seed controls the reproducibility of the job.", + "list": false, + "name": "seed", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 1 + }, + "stream": { + "_input_type": "BoolInput", + "advanced": false, + "display_name": "Stream", + "dynamic": false, + "info": "Stream the response from the model. Streaming works only in Chat.", + "list": false, + "name": "stream", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + }, + "system_message": { + "_input_type": "MessageTextInput", + "advanced": false, + "display_name": "System Message", + "dynamic": false, + "info": "System message to pass to the model.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "system_message", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "temperature": { + "_input_type": "FloatInput", + "advanced": false, + "display_name": "Temperature", + "dynamic": false, + "info": "", + "list": false, + "name": "temperature", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "float", + "value": 0.1 + } + }, + "tool_mode": false + }, + "type": "OpenAIModel" + }, + "dragging": false, + "height": 630, + "id": "OpenAIModel-Rc3MO", + "position": { + "x": 1457.8987895868838, + "y": 543.8838473503562 + }, + "positionAbsolute": { + "x": 1457.8987895868838, + "y": 543.8838473503562 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "description": "Generates text using OpenAI LLMs.", + "display_name": "OpenAI", + "id": "OpenAIModel-zhgF5", + "node": { + "base_classes": ["LanguageModel", "Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Generates text using OpenAI LLMs.", + "display_name": "OpenAI", + "documentation": "", + "edited": false, + "field_order": [ + "input_value", + "system_message", + "stream", + "max_tokens", + "model_kwargs", + "json_mode", + "output_schema", + "model_name", + "openai_api_base", + "api_key", + "temperature", + "seed", + "output_parser" + ], + "frozen": false, + "icon": "OpenAI", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Text", + "method": "text_response", + "name": "text_output", + "required_inputs": [], + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + }, + { + "cache": true, + "display_name": "Language Model", + "method": "build_model", + "name": "model_output", + "required_inputs": [], + "selected": "LanguageModel", + "types": ["LanguageModel"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "api_key": { + "_input_type": "SecretStrInput", + "advanced": false, + "display_name": "OpenAI API Key", + "dynamic": false, + "info": "The OpenAI API Key to use for the OpenAI model.", + "input_types": 
["Message"], + "load_from_db": true, + "name": "api_key", + "password": true, + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "OPENAI_API_KEY" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "import operator\nfrom functools import reduce\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langflow.inputs import BoolInput, DictInput, DropdownInput, FloatInput, IntInput, SecretStrInput, StrInput\nfrom langflow.inputs.inputs import HandleInput\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = [\n *LCModelComponent._base_inputs,\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(\n name=\"model_kwargs\",\n display_name=\"Model Kwargs\",\n advanced=True,\n info=\"Additional keyword arguments to pass to the model.\",\n ),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. \"\n \"You must pass the word JSON in the prompt. \"\n \"If left blank, JSON mode will be disabled. [DEPRECATED]\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. \"\n \"Defaults to https://api.openai.com/v1. 
\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n HandleInput(\n name=\"output_parser\",\n display_name=\"Output Parser\",\n info=\"The parser to use to parse the output of the model\",\n advanced=True,\n input_types=[\"OutputParser\"],\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n api_key = SecretStr(openai_api_key).get_secret_value() if openai_api_key else None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature if temperature is not None else 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\")\n else:\n output = output.bind(response_format={\"type\": \"json_object\"})\n\n return output\n\n def _get_exception_message(self, e: Exception):\n \"\"\"Get a message from an OpenAI exception.\n\n Args:\n e (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n try:\n from openai import BadRequestError\n except ImportError:\n return None\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\")\n if message:\n return message\n return None\n" + }, + "input_value": { + "_input_type": "MessageInput", + "advanced": false, + "display_name": "Input", + "dynamic": false, + "info": "", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "json_mode": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "JSON Mode", + "dynamic": false, + "info": "If True, it will output JSON regardless of passing a schema.", + "list": false, + "name": "json_mode", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + }, + "max_tokens": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Max Tokens", + "dynamic": false, + "info": "The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.", + "list": false, + "name": "max_tokens", + "placeholder": "", + "range_spec": { + "max": 128000, + "min": 0, + "step": 0.1, + "step_type": "float" + }, + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": "" + }, + "model_kwargs": { + "_input_type": "DictInput", + "advanced": true, + "display_name": "Model Kwargs", + "dynamic": false, + "info": "Additional keyword arguments to pass to the model.", + "list": false, + "name": "model_kwargs", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "model_name": { + "_input_type": "DropdownInput", + "advanced": false, + "combobox": false, + "display_name": "Model Name", + "dynamic": false, + "info": "", + "name": "model_name", + "options": [ + "gpt-4o-mini", + "gpt-4o", + "gpt-4-turbo", + "gpt-4-turbo-preview", + "gpt-4", + "gpt-3.5-turbo", + "gpt-3.5-turbo-0125" + ], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "gpt-4o-mini" + }, + "openai_api_base": { + "_input_type": "StrInput", + "advanced": true, + "display_name": "OpenAI API Base", + "dynamic": false, + "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.", + "list": false, + "load_from_db": false, + "name": "openai_api_base", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "output_parser": { + "_input_type": "HandleInput", + "advanced": true, + "display_name": "Output Parser", + "dynamic": false, + "info": "The parser to use to parse the output of the model", + "input_types": ["OutputParser"], + "list": false, + "name": "output_parser", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "output_schema": { + "_input_type": "DictInput", + "advanced": true, + "display_name": "Schema", + "dynamic": false, + "info": "The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled. [DEPRECATED]", + "list": true, + "name": "output_schema", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "seed": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Seed", + "dynamic": false, + "info": "The seed controls the reproducibility of the job.", + "list": false, + "name": "seed", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 1 + }, + "stream": { + "_input_type": "BoolInput", + "advanced": false, + "display_name": "Stream", + "dynamic": false, + "info": "Stream the response from the model. 
Streaming works only in Chat.", + "list": false, + "name": "stream", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + }, + "system_message": { + "_input_type": "MessageTextInput", + "advanced": false, + "display_name": "System Message", + "dynamic": false, + "info": "System message to pass to the model.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "system_message", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "temperature": { + "_input_type": "FloatInput", + "advanced": false, + "display_name": "Temperature", + "dynamic": false, + "info": "", + "list": false, + "name": "temperature", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "float", + "value": 0.1 + } + }, + "tool_mode": false + }, + "type": "OpenAIModel" + }, + "dragging": false, + "height": 630, + "id": "OpenAIModel-zhgF5", + "position": { + "x": 2860.2941186979524, + "y": 561.8661152181708 + }, + "positionAbsolute": { + "x": 2860.2941186979524, + "y": 561.8661152181708 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "id": "note-gBUJ9", + "node": { + "description": "# Research Agent \n\nWelcome to the Research Agent! This flow helps you conduct in-depth research on various topics using AI-powered tools and analysis.\n\n## Instructions\n1. Enter Your Research Query\n - Type your research question or topic into the Chat Input node.\n - Be specific and clear about what you want to investigate.\n\n2. Generate Research Plan\n - The system will create a focused research plan based on your query.\n - This plan includes key search queries and priorities.\n\n3. Conduct Web Search\n - The Tavily AI Search tool will perform web searches using the generated queries.\n - It focuses on finding academic and reliable sources.\n\n4. Analyze and Synthesize\n - The AI agent will review the search results and create a comprehensive synthesis.\n - The report includes an executive summary, methodology, findings, and conclusions.\n\n5. Review the Output\n - Read the final report in the Chat Output node.\n - Use this information as a starting point for further research or decision-making.\n\nRemember: You can refine your initial query for more specific results! 
🔍📊", + "display_name": "", + "documentation": "", + "template": { + "backgroundColor": "emerald" + } + }, + "type": "note" + }, + "dragging": false, + "height": 765, + "id": "note-gBUJ9", + "position": { + "x": 471.4335708918645, + "y": -9.732869247334605 + }, + "positionAbsolute": { + "x": 471.4335708918645, + "y": -9.732869247334605 + }, + "resizing": false, + "selected": false, + "style": { + "height": 765, + "width": 600 + }, + "type": "noteNode", + "width": 600 + }, + { + "data": { + "description": "Define the agent's instructions, then enter a task to complete using tools.", + "display_name": "Agent", + "id": "Agent-9E8IU", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Define the agent's instructions, then enter a task to complete using tools.", + "display_name": "Agent", + "documentation": "", + "edited": false, + "field_order": [ + "agent_llm", + "max_tokens", + "model_kwargs", + "json_mode", + "output_schema", + "model_name", + "openai_api_base", + "api_key", + "temperature", + "seed", + "output_parser", + "system_prompt", + "tools", + "input_value", + "handle_parsing_errors", + "verbose", + "max_iterations", + "agent_description", + "memory", + "sender", + "sender_name", + "n_messages", + "session_id", + "order", + "template", + "add_current_date_tool" + ], + "frozen": false, + "icon": "bot", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Response", + "method": "message_response", + "name": "response", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "add_current_date_tool": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Add tool Current Date", + "dynamic": false, + "info": "If true, will add a tool to the agent that returns the current date.", + "list": false, + "name": "add_current_date_tool", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "agent_description": { + "_input_type": "MultilineInput", + "advanced": true, + "display_name": "Agent Description", + "dynamic": false, + "info": "The description of the agent. This is only used when in Tool Mode. 
Defaults to 'A helpful assistant with access to the following tools:' and tools are added dynamically.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "agent_description", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "A helpful assistant with access to the following tools:" + }, + "agent_llm": { + "_input_type": "DropdownInput", + "advanced": false, + "combobox": false, + "display_name": "Model Provider", + "dynamic": false, + "info": "The provider of the language model that the agent will use to generate responses.", + "input_types": [], + "name": "agent_llm", + "options": [ + "Amazon Bedrock", + "Anthropic", + "Azure OpenAI", + "Groq", + "NVIDIA", + "OpenAI", + "Custom" + ], + "placeholder": "", + "real_time_refresh": true, + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "OpenAI" + }, + "api_key": { + "_input_type": "SecretStrInput", + "advanced": false, + "display_name": "OpenAI API Key", + "dynamic": false, + "info": "The OpenAI API Key to use for the OpenAI model.", + "input_types": ["Message"], + "load_from_db": true, + "name": "api_key", + "password": true, + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "OPENAI_API_KEY" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langchain_core.tools import StructuredTool\n\nfrom langflow.base.agents.agent import LCToolsAgentComponent\nfrom langflow.base.models.model_input_constants import ALL_PROVIDER_FIELDS, MODEL_PROVIDERS_DICT\nfrom langflow.base.models.model_utils import get_model_name\nfrom langflow.components.helpers import CurrentDateComponent\nfrom langflow.components.helpers.memory import MemoryComponent\nfrom langflow.components.langchain_utilities.tool_calling import ToolCallingAgentComponent\nfrom langflow.io import BoolInput, DropdownInput, MultilineInput, Output\nfrom langflow.schema.dotdict import dotdict\nfrom langflow.schema.message import Message\n\n\ndef set_advanced_true(component_input):\n component_input.advanced = True\n return component_input\n\n\nclass AgentComponent(ToolCallingAgentComponent):\n display_name: str = \"Agent\"\n description: str = \"Define the agent's instructions, then enter a task to complete using tools.\"\n icon = \"bot\"\n beta = False\n name = \"Agent\"\n\n memory_inputs = [set_advanced_true(component_input) for component_input in MemoryComponent().inputs]\n\n inputs = [\n DropdownInput(\n name=\"agent_llm\",\n display_name=\"Model Provider\",\n info=\"The provider of the language model that the agent will use to generate responses.\",\n options=[*sorted(MODEL_PROVIDERS_DICT.keys()), \"Custom\"],\n value=\"OpenAI\",\n real_time_refresh=True,\n input_types=[],\n ),\n *MODEL_PROVIDERS_DICT[\"OpenAI\"][\"inputs\"],\n MultilineInput(\n name=\"system_prompt\",\n display_name=\"Agent Instructions\",\n info=\"System Prompt: Initial instructions and context provided to guide the agent's behavior.\",\n value=\"You are a helpful assistant that can use tools to answer questions and perform 
tasks.\",\n advanced=False,\n ),\n *LCToolsAgentComponent._base_inputs,\n *memory_inputs,\n BoolInput(\n name=\"add_current_date_tool\",\n display_name=\"Add tool Current Date\",\n advanced=True,\n info=\"If true, will add a tool to the agent that returns the current date.\",\n value=True,\n ),\n ]\n outputs = [Output(name=\"response\", display_name=\"Response\", method=\"message_response\")]\n\n async def message_response(self) -> Message:\n llm_model, display_name = self.get_llm()\n self.model_name = get_model_name(llm_model, display_name=display_name)\n if llm_model is None:\n msg = \"No language model selected\"\n raise ValueError(msg)\n self.chat_history = self.get_memory_data()\n\n if self.add_current_date_tool:\n if not isinstance(self.tools, list): # type: ignore[has-type]\n self.tools = []\n # Convert CurrentDateComponent to a StructuredTool\n current_date_tool = CurrentDateComponent().to_toolkit()[0]\n if isinstance(current_date_tool, StructuredTool):\n self.tools.append(current_date_tool)\n else:\n msg = \"CurrentDateComponent must be converted to a StructuredTool\"\n raise ValueError(msg)\n\n if not self.tools:\n msg = \"Tools are required to run the agent.\"\n raise ValueError(msg)\n self.set(\n llm=llm_model,\n tools=self.tools,\n chat_history=self.chat_history,\n input_value=self.input_value,\n system_prompt=self.system_prompt,\n )\n agent = self.create_agent_runnable()\n return await self.run_agent(agent)\n\n def get_memory_data(self):\n memory_kwargs = {\n component_input.name: getattr(self, f\"{component_input.name}\") for component_input in self.memory_inputs\n }\n\n return MemoryComponent().set(**memory_kwargs).retrieve_messages()\n\n def get_llm(self):\n if isinstance(self.agent_llm, str):\n try:\n provider_info = MODEL_PROVIDERS_DICT.get(self.agent_llm)\n if provider_info:\n component_class = provider_info.get(\"component_class\")\n display_name = component_class.display_name\n inputs = provider_info.get(\"inputs\")\n prefix = provider_info.get(\"prefix\", \"\")\n return self._build_llm_model(component_class, inputs, prefix), display_name\n except Exception as e:\n msg = f\"Error building {self.agent_llm} language model\"\n raise ValueError(msg) from e\n return self.agent_llm, None\n\n def _build_llm_model(self, component, inputs, prefix=\"\"):\n model_kwargs = {input_.name: getattr(self, f\"{prefix}{input_.name}\") for input_ in inputs}\n return component.set(**model_kwargs).build_model()\n\n def delete_fields(self, build_config: dotdict, fields: dict | list[str]) -> None:\n \"\"\"Delete specified fields from build_config.\"\"\"\n for field in fields:\n build_config.pop(field, None)\n\n def update_input_types(self, build_config: dotdict) -> dotdict:\n \"\"\"Update input types for all fields in build_config.\"\"\"\n for key, value in build_config.items():\n if isinstance(value, dict):\n if value.get(\"input_types\") is None:\n build_config[key][\"input_types\"] = []\n elif hasattr(value, \"input_types\") and value.input_types is None:\n value.input_types = []\n return build_config\n\n def update_build_config(self, build_config: dotdict, field_value: str, field_name: str | None = None) -> dotdict:\n # Iterate over all providers in the MODEL_PROVIDERS_DICT\n # Existing logic for updating build_config\n if field_name == \"agent_llm\":\n provider_info = MODEL_PROVIDERS_DICT.get(field_value)\n if provider_info:\n component_class = provider_info.get(\"component_class\")\n if component_class and hasattr(component_class, \"update_build_config\"):\n # Call the component 
class's update_build_config method\n build_config = component_class.update_build_config(build_config, field_value, field_name)\n\n provider_configs: dict[str, tuple[dict, list[dict]]] = {\n provider: (\n MODEL_PROVIDERS_DICT[provider][\"fields\"],\n [\n MODEL_PROVIDERS_DICT[other_provider][\"fields\"]\n for other_provider in MODEL_PROVIDERS_DICT\n if other_provider != provider\n ],\n )\n for provider in MODEL_PROVIDERS_DICT\n }\n if field_value in provider_configs:\n fields_to_add, fields_to_delete = provider_configs[field_value]\n\n # Delete fields from other providers\n for fields in fields_to_delete:\n self.delete_fields(build_config, fields)\n\n # Add provider-specific fields\n if field_value == \"OpenAI\" and not any(field in build_config for field in fields_to_add):\n build_config.update(fields_to_add)\n else:\n build_config.update(fields_to_add)\n # Reset input types for agent_llm\n build_config[\"agent_llm\"][\"input_types\"] = []\n elif field_value == \"Custom\":\n # Delete all provider fields\n self.delete_fields(build_config, ALL_PROVIDER_FIELDS)\n # Update with custom component\n custom_component = DropdownInput(\n name=\"agent_llm\",\n display_name=\"Language Model\",\n options=[*sorted(MODEL_PROVIDERS_DICT.keys()), \"Custom\"],\n value=\"Custom\",\n real_time_refresh=True,\n input_types=[\"LanguageModel\"],\n )\n build_config.update({\"agent_llm\": custom_component.to_dict()})\n # Update input types for all fields\n build_config = self.update_input_types(build_config)\n\n # Validate required keys\n default_keys = [\n \"code\",\n \"_type\",\n \"agent_llm\",\n \"tools\",\n \"input_value\",\n \"add_current_date_tool\",\n \"system_prompt\",\n \"agent_description\",\n \"max_iterations\",\n \"handle_parsing_errors\",\n \"verbose\",\n ]\n missing_keys = [key for key in default_keys if key not in build_config]\n if missing_keys:\n msg = f\"Missing required keys in build_config: {missing_keys}\"\n raise ValueError(msg)\n if isinstance(self.agent_llm, str) and self.agent_llm in MODEL_PROVIDERS_DICT:\n provider_info = MODEL_PROVIDERS_DICT.get(self.agent_llm)\n if provider_info:\n component_class = provider_info.get(\"component_class\")\n prefix = provider_info.get(\"prefix\")\n if component_class and hasattr(component_class, \"update_build_config\"):\n # Call each component class's update_build_config method\n # remove the prefix from the field_name\n if isinstance(field_name, str) and isinstance(prefix, str):\n field_name = field_name.replace(prefix, \"\")\n build_config = component_class.update_build_config(build_config, field_value, field_name)\n\n return build_config\n" + }, + "handle_parsing_errors": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Handle Parse Errors", + "dynamic": false, + "info": "Should the Agent fix errors when reading user input for better processing?", + "list": false, + "name": "handle_parsing_errors", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "input_value": { + "_input_type": "MessageTextInput", + "advanced": false, + "display_name": "Input", + "dynamic": false, + "info": "The input provided by the user for the agent to process.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": true, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + 
"json_mode": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "JSON Mode", + "dynamic": false, + "info": "If True, it will output JSON regardless of passing a schema.", + "list": false, + "name": "json_mode", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + }, + "max_iterations": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Max Iterations", + "dynamic": false, + "info": "The maximum number of attempts the agent can make to complete its task before it stops.", + "list": false, + "name": "max_iterations", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 15 + }, + "max_tokens": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Max Tokens", + "dynamic": false, + "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.", + "list": false, + "name": "max_tokens", + "placeholder": "", + "range_spec": { + "max": 128000, + "min": 0, + "step": 0.1, + "step_type": "float" + }, + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": "" + }, + "memory": { + "_input_type": "HandleInput", + "advanced": true, + "display_name": "External Memory", + "dynamic": false, + "info": "Retrieve messages from an external memory. If empty, it will use the Langflow tables.", + "input_types": ["BaseChatMessageHistory"], + "list": false, + "name": "memory", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "model_kwargs": { + "_input_type": "DictInput", + "advanced": true, + "display_name": "Model Kwargs", + "dynamic": false, + "info": "Additional keyword arguments to pass to the model.", + "list": false, + "name": "model_kwargs", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "model_name": { + "_input_type": "DropdownInput", + "advanced": false, + "combobox": false, + "display_name": "Model Name", + "dynamic": false, + "info": "", + "name": "model_name", + "options": [ + "gpt-4o-mini", + "gpt-4o", + "gpt-4-turbo", + "gpt-4-turbo-preview", + "gpt-4", + "gpt-3.5-turbo", + "gpt-3.5-turbo-0125" + ], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "gpt-4o-mini" + }, + "n_messages": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Number of Messages", + "dynamic": false, + "info": "Number of messages to retrieve.", + "list": false, + "name": "n_messages", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 100 + }, + "openai_api_base": { + "_input_type": "StrInput", + "advanced": true, + "display_name": "OpenAI API Base", + "dynamic": false, + "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. 
You can change this to use other APIs like JinaChat, LocalAI and Prem.", + "list": false, + "load_from_db": false, + "name": "openai_api_base", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "order": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Order", + "dynamic": false, + "info": "Order of the messages.", + "name": "order", + "options": ["Ascending", "Descending"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "Ascending" + }, + "output_parser": { + "_input_type": "HandleInput", + "advanced": true, + "display_name": "Output Parser", + "dynamic": false, + "info": "The parser to use to parse the output of the model", + "input_types": ["OutputParser"], + "list": false, + "name": "output_parser", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "output_schema": { + "_input_type": "DictInput", + "advanced": true, + "display_name": "Schema", + "dynamic": false, + "info": "The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled. [DEPRECATED]", + "list": true, + "name": "output_schema", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "seed": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Seed", + "dynamic": false, + "info": "The seed controls the reproducibility of the job.", + "list": false, + "name": "seed", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 1 + }, + "sender": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Sender Type", + "dynamic": false, + "info": "Filter by sender type.", + "name": "sender", + "options": ["Machine", "User", "Machine and User"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "Machine and User" + }, + "sender_name": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Sender Name", + "dynamic": false, + "info": "Filter by sender name.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "sender_name", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "session_id": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Session ID", + "dynamic": false, + "info": "The session ID of the chat. 
If empty, the current session ID parameter will be used.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "session_id", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "system_prompt": { + "_input_type": "MultilineInput", + "advanced": false, + "display_name": "Agent Instructions", + "dynamic": false, + "info": "System Prompt: Initial instructions and context provided to guide the agent's behavior.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "system_prompt", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "You are a research analyst with access to Tavily Search." + }, + "temperature": { + "_input_type": "FloatInput", + "advanced": true, + "display_name": "Temperature", + "dynamic": false, + "info": "", + "list": false, + "name": "temperature", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "float", + "value": 0.1 + }, + "template": { + "_input_type": "MultilineInput", + "advanced": true, + "display_name": "Template", + "dynamic": false, + "info": "The template to use for formatting the data. It can contain the keys {text}, {sender} or any other key in the message data.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "{sender_name}: {text}" + }, + "tools": { + "_input_type": "HandleInput", + "advanced": false, + "display_name": "Tools", + "dynamic": false, + "info": "These are the tools that the agent can use to help with tasks.", + "input_types": ["Tool", "BaseTool", "StructuredTool"], + "list": true, + "name": "tools", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "verbose": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Verbose", + "dynamic": false, + "info": "", + "list": false, + "name": "verbose", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + } + }, + "tool_mode": false + }, + "type": "Agent" + }, + "dragging": false, + "height": 658, + "id": "Agent-9E8IU", + "position": { + "x": 2156.60686936856, + "y": 439.4579572266066 + }, + "positionAbsolute": { + "x": 2156.60686936856, + "y": 439.4579572266066 + }, + "selected": true, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt", + "id": "Prompt-T4lL6", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": { + "template": [] + }, + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt", + "documentation": "", + "edited": false, + "field_order": ["template"], + "frozen": false, + "icon": "prompts", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + 
"display_name": "Prompt Message", + "method": "build_prompt", + "name": "prompt", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.inputs.inputs import DefaultPromptField\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(self) -> Message:\n prompt = Message.from_template(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def _update_template(self, frontend_node: dict):\n prompt_template = frontend_node[\"template\"][\"template\"][\"value\"]\n custom_fields = frontend_node[\"custom_fields\"]\n frontend_node_template = frontend_node[\"template\"]\n _ = process_prompt_template(\n template=prompt_template,\n name=\"template\",\n custom_fields=custom_fields,\n frontend_node_template=frontend_node_template,\n )\n return frontend_node\n\n def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"This function is called after the code validation is done.\"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n # Kept it duplicated for backwards compatibility\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n\n def _get_fallback_input(self, **kwargs):\n return DefaultPromptField(**kwargs)\n" + }, + "template": { + "_input_type": "PromptInput", + "advanced": false, + "display_name": "Template", + "dynamic": false, + "info": "", + "list": false, + "load_from_db": false, + "name": "template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "type": "prompt", + "value": "You are an expert research assistant.\n\nCreate a focused research plan that will guide our search.\n\nFormat your response exactly as:\n\nRESEARCH OBJECTIVE:\n[Clear statement of research goal]\n\nKEY SEARCH QUERIES:\n1. [Primary academic search query]\n2. [Secondary search query]\n3. 
[Alternative search approach]\n\nSEARCH PRIORITIES:\n- [What types of sources to focus on]\n- [Key aspects to investigate]\n- [Specific areas to explore]" + } + }, + "tool_mode": false + }, + "type": "Prompt" + }, + "dragging": false, + "height": 260, + "id": "Prompt-T4lL6", + "position": { + "x": 1102.6079408836365, + "y": 550.2148817052229 + }, + "positionAbsolute": { + "x": 1102.6079408836365, + "y": 550.2148817052229 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt", + "id": "Prompt-f4xQ5", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": { + "template": [] + }, + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt", + "documentation": "", + "edited": false, + "field_order": ["template"], + "frozen": false, + "icon": "prompts", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Prompt Message", + "method": "build_prompt", + "name": "prompt", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.inputs.inputs import DefaultPromptField\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(self) -> Message:\n prompt = Message.from_template(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def _update_template(self, frontend_node: dict):\n prompt_template = frontend_node[\"template\"][\"template\"][\"value\"]\n custom_fields = frontend_node[\"custom_fields\"]\n frontend_node_template = frontend_node[\"template\"]\n _ = process_prompt_template(\n template=prompt_template,\n name=\"template\",\n custom_fields=custom_fields,\n frontend_node_template=frontend_node_template,\n )\n return frontend_node\n\n def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"This function is called after the code validation is done.\"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n # Kept it duplicated for backwards compatibility\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the 
current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n\n def _get_fallback_input(self, **kwargs):\n return DefaultPromptField(**kwargs)\n" + }, + "template": { + "_input_type": "PromptInput", + "advanced": false, + "display_name": "Template", + "dynamic": false, + "info": "", + "list": false, + "load_from_db": false, + "name": "template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "type": "prompt", + "value": "You are a research synthesis expert.\n\nCreate a comprehensive synthesis and report of our findings.\n\nFormat your response as:\n\nEXECUTIVE SUMMARY:\n[Key findings and implications]\n\nMETHODOLOGY:\n- Search Strategy Used\n- Sources Analyzed\n- Quality Assessment\n\nFINDINGS & ANALYSIS:\n[Detailed discussion of discoveries]\n\nCONCLUSIONS:\n[Main takeaways and insights]\n\nFUTURE DIRECTIONS:\n[Suggested next steps]\n\nIMPORTANT: For each major point or finding, include the relevant source link in square brackets at the end of the sentence or paragraph. For example: \"Harvard has developed a solid-state battery that charges in minutes. [Source: https://example.com/article]\"\n" + } + }, + "tool_mode": false + }, + "type": "Prompt" + }, + "dragging": false, + "height": 260, + "id": "Prompt-f4xQ5", + "position": { + "x": 2498.9482347755306, + "y": 889.7491088138673 + }, + "positionAbsolute": { + "x": 2498.9482347755306, + "y": 889.7491088138673 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "id": "note-86Le6", + "node": { + "description": "# 🔑 Tavily AI Search Needs API Key\n\nYou can get 1000 searches/month free [here](https://tavily.com/) ", + "display_name": "", + "documentation": "", + "template": { + "backgroundColor": "lime" + } + }, + "type": "note" + }, + "dragging": false, + "height": 325, + "id": "note-86Le6", + "position": { + "x": 1797.5781951055678, + "y": 206.30509875543274 + }, + "positionAbsolute": { + "x": 1797.5781951055678, + "y": 206.30509875543274 + }, + "selected": false, + "type": "noteNode", + "width": 325 + } + ], + "viewport": { + "x": -1627.3021649072248, + "y": -234.56097308583958, + "zoom": 0.9538000505518524 + } + }, + "description": "Agent that generates focused plans, conducts web searches, and synthesizes findings into comprehensive reports.", + "endpoint_name": null, + "icon": "TextSearchIcon", + "id": "67b16861-1344-465b-963a-c1c338623438", + "gradient": "5", + "is_component": false, + "last_tested_version": "1.0.19.post2", + "name": "Research Agent", + "tags": ["assistants", "agents"] +} diff --git a/src/backend/base/langflow/initial_setup/starter_projects/SEO Keyword Generator.json b/src/backend/base/langflow/initial_setup/starter_projects/SEO Keyword Generator.json new file mode 100644 index 000000000000..4d00d3ca5597 --- /dev/null +++ b/src/backend/base/langflow/initial_setup/starter_projects/SEO Keyword Generator.json @@ -0,0 +1,1052 @@ +{ + "data": { + "edges": [ + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "Prompt", + "id": "Prompt-FXOhu", + "name": "prompt", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "input_value", + "id": "OpenAIModel-E96kR", + "inputTypes": ["Message"], + "type": "str" + } + }, + "id": 
"reactflow__edge-Prompt-FXOhu{œdataTypeœ:œPromptœ,œidœ:œPrompt-FXOhuœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-E96kR{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-E96kRœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "selected": false, + "source": "Prompt-FXOhu", + "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-FXOhuœ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", + "target": "OpenAIModel-E96kR", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œOpenAIModel-E96kRœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "Prompt", + "id": "Prompt-aMAQ2", + "name": "prompt", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "system_message", + "id": "OpenAIModel-E96kR", + "inputTypes": ["Message"], + "type": "str" + } + }, + "id": "reactflow__edge-Prompt-aMAQ2{œdataTypeœ:œPromptœ,œidœ:œPrompt-aMAQ2œ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-E96kR{œfieldNameœ:œsystem_messageœ,œidœ:œOpenAIModel-E96kRœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "selected": false, + "source": "Prompt-aMAQ2", + "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-aMAQ2œ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", + "target": "OpenAIModel-E96kR", + "targetHandle": "{œfieldNameœ: œsystem_messageœ, œidœ: œOpenAIModel-E96kRœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "OpenAIModel", + "id": "OpenAIModel-E96kR", + "name": "text_output", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "input_value", + "id": "ChatOutput-aCpBy", + "inputTypes": ["Message"], + "type": "str" + } + }, + "id": "reactflow__edge-OpenAIModel-E96kR{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-E96kRœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-aCpBy{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-aCpByœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "selected": false, + "source": "OpenAIModel-E96kR", + "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-E96kRœ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}", + "target": "ChatOutput-aCpBy", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-aCpByœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + } + ], + "nodes": [ + { + "data": { + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt", + "id": "Prompt-FXOhu", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": { + "template": [ + "product", + "pain_points", + "goals", + "current_solutions", + "target_audience", + "expertise_level" + ] + }, + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt", + "documentation": "", + "edited": false, + "field_order": ["template"], + "frozen": false, + "icon": "prompts", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Prompt Message", + "method": "build_prompt", + "name": "prompt", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + 
"title_case": false, + "type": "code", + "value": "from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.inputs.inputs import DefaultPromptField\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(self) -> Message:\n prompt = Message.from_template(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def _update_template(self, frontend_node: dict):\n prompt_template = frontend_node[\"template\"][\"template\"][\"value\"]\n custom_fields = frontend_node[\"custom_fields\"]\n frontend_node_template = frontend_node[\"template\"]\n _ = process_prompt_template(\n template=prompt_template,\n name=\"template\",\n custom_fields=custom_fields,\n frontend_node_template=frontend_node_template,\n )\n return frontend_node\n\n def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"This function is called after the code validation is done.\"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n # Kept it duplicated for backwards compatibility\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n\n def _get_fallback_input(self, **kwargs):\n return DefaultPromptField(**kwargs)\n" + }, + "current_solutions": { + "advanced": false, + "display_name": "current_solutions", + "dynamic": false, + "field_type": "str", + "fileTypes": [], + "file_path": "", + "info": "", + "input_types": ["Message", "Text"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "current_solutions", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "- Basic website blockers\n- Manual \"Do Not Disturb\" modes\n- Traditional time management apps\n- Paper planners and to-do lists\n- Pomodoro timer apps\n- Calendar blocking\n" + }, + "expertise_level": { + "advanced": false, + "display_name": "expertise_level", + "dynamic": false, + "field_type": "str", + "fileTypes": [], + "file_path": "", + "info": "", + "input_types": ["Message", "Text"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "expertise_level", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "Intermediate to Advanced - Users are familiar with basic productivity tools and concepts but seek more sophisticated solutions. 
They understand terms like \"deep work\" and \"time blocking\" and are comfortable adopting new technology that promises meaningful improvements to their workflow." + }, + "goals": { + "advanced": false, + "display_name": "goals", + "dynamic": false, + "field_type": "str", + "fileTypes": [], + "file_path": "", + "info": "", + "input_types": ["Message", "Text"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "goals", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "- Achieve longer periods of uninterrupted focus\n- Improve work efficiency and output quality\n- Develop sustainable productivity habits\n- Better manage time and energy levels\n- Reduce stress from digital overwhelm\n- Create more balanced workdays" + }, + "pain_points": { + "advanced": false, + "display_name": "pain_points", + "dynamic": false, + "field_type": "str", + "fileTypes": [], + "file_path": "", + "info": "", + "input_types": ["Message", "Text"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "pain_points", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "- Constant interruptions from notifications and social media\n- Difficulty maintaining sustained focus during deep work\n- Inconsistent productivity levels throughout the day\n- Struggle to build effective work routines\n- Time wasted switching between tasks\n- Burnout from poor work-life balance\n" + }, + "product": { + "advanced": false, + "display_name": "product", + "dynamic": false, + "field_type": "str", + "fileTypes": [], + "file_path": "", + "info": "", + "input_types": ["Message", "Text"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "product", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "FocusFlow - An AI-powered productivity app that automatically detects and blocks digital distractions while learning from user behavior to create personalized focus schedules. Features include smart notification management, work pattern analysis, and adaptive focus modes." + }, + "target_audience": { + "advanced": false, + "display_name": "target_audience", + "dynamic": false, + "field_type": "str", + "fileTypes": [], + "file_path": "", + "info": "", + "input_types": ["Message", "Text"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "target_audience", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "Knowledge workers aged 25-45, primarily working in tech, creative, or professional services. They are tech-savvy professionals who work remotely or in hybrid settings, earning $75,000+ annually. They value work-life balance and are willing to invest in tools that boost their productivity. Many are active on LinkedIn and tech-focused platforms, regularly consuming content about personal development and productivity." 
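[Editor's note] Each of the fields above (`product`, `pain_points`, `goals`, `current_solutions`, `target_audience`, `expertise_level`) is a custom field that becomes a variable in the prompt template defined just below. The PromptComponent builds its output with `Message.from_template(**self._attributes)`, which in effect formats the template string with those field values. A rough standalone approximation, with `Message.from_template` simplified to `str.format` (the real class also handles metadata and tracing) and the values truncated:

# Rough approximation of PromptComponent.build_prompt for this node:
# the template (see the "template" field below) is formatted with the
# custom-field values shown above.
template = "Product:\n{product}\n\nPain Points:\n{pain_points}"
attributes = {
    "product": "FocusFlow - An AI-powered productivity app ...",
    "pain_points": "- Constant interruptions from notifications ...",
}
prompt_text = template.format(**attributes)
print(prompt_text)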
+ }, + "template": { + "_input_type": "PromptInput", + "advanced": false, + "display_name": "Template", + "dynamic": false, + "info": "", + "list": false, + "load_from_db": false, + "name": "template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "type": "prompt", + "value": "Product:\n{product}\n \nPain Points:\n{pain_points}\n \nGoals:\n{goals}\n \nCurrent Solutions:\n{current_solutions}\n \nSpecific Target Audience:\n{target_audience}\n\nExpertise Level:\n{expertise_level}\n" + } + }, + "tool_mode": false + }, + "type": "Prompt" + }, + "dragging": false, + "height": 779, + "id": "Prompt-FXOhu", + "position": { + "x": 816.9328565352126, + "y": 189.70442453076902 + }, + "positionAbsolute": { + "x": 816.9328565352126, + "y": 189.70442453076902 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "id": "note-5yMAo", + "node": { + "description": "# SEO Keyword Generator\nWelcome to the SEO Keywords Generator - an AI tool to create strategic keywords based on your product and audience profile!\n\n## Instructions\n\n1. **Fill Product Information**\n - Enter your product name and description\n - Keep it clear and specific\n - Highlight unique features and benefits\n\n2. **Define Pain Points**\n - List customer problems and challenges\n - Be specific about what frustrations they face\n - Include both practical and emotional pain points\n\n3. **Set Goals & Solutions**\n - Specify customer objectives\n - Detail how they currently solve problems\n - Outline desired outcomes\n\n4. **Target Audience Details**\n - Define demographics and characteristics\n - Include expertise level\n - Describe behavior patterns and preferences\n\n5. **Review Output**\n - Examine generated keywords\n - Check relevance and search intent\n - Use insights for SEO strategy planning\n\nRemember: The more detailed your input, the more targeted and effective your keywords will be! 
🎯🔍✨", + "display_name": "", + "documentation": "", + "template": {} + }, + "type": "note" + }, + "dragging": false, + "height": 607, + "id": "note-5yMAo", + "position": { + "x": 221.74248905040588, + "y": 363.5469410934121 + }, + "positionAbsolute": { + "x": 221.74248905040588, + "y": 363.5469410934121 + }, + "resizing": false, + "selected": false, + "style": { + "height": 607, + "width": 489 + }, + "type": "noteNode", + "width": 489 + }, + { + "data": { + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt", + "id": "Prompt-aMAQ2", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": { + "template": [] + }, + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt", + "documentation": "", + "edited": false, + "field_order": ["template"], + "frozen": false, + "icon": "prompts", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Prompt Message", + "method": "build_prompt", + "name": "prompt", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.inputs.inputs import DefaultPromptField\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(self) -> Message:\n prompt = Message.from_template(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def _update_template(self, frontend_node: dict):\n prompt_template = frontend_node[\"template\"][\"template\"][\"value\"]\n custom_fields = frontend_node[\"custom_fields\"]\n frontend_node_template = frontend_node[\"template\"]\n _ = process_prompt_template(\n template=prompt_template,\n name=\"template\",\n custom_fields=custom_fields,\n frontend_node_template=frontend_node_template,\n )\n return frontend_node\n\n def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"This function is called after the code validation is done.\"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n # Kept it duplicated for backwards compatibility\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with 
those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n\n def _get_fallback_input(self, **kwargs):\n return DefaultPromptField(**kwargs)\n" + }, + "template": { + "_input_type": "PromptInput", + "advanced": false, + "display_name": "Template", + "dynamic": false, + "info": "", + "list": false, + "load_from_db": false, + "name": "template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "type": "prompt", + "value": "You are a digital marketing strategist specialized in generating highly relevant, optimized keywords for a product’s specific target audience. Your task is to create a list of keywords that are not only attractive and impactful but also resonate with the needs and desires of the customers, capturing the core motivations driving them to seek a solution.\n\nBelow are details about the product, including its target audience, the pain points faced by this audience, and the current solutions they consider or use. Use this information to generate precise keywords that connect directly with the unique value of the product and with the customers' goals. Consider factors like the customer’s level of expertise and major market trends to create a powerful and well-grounded keyword list.\n\n### Product Information:\n- **Product:** – A brief description of the product, including what sets it apart in the market.\n- **Customer Pain Points:** – Specific pain points that the audience faces and that the product aims to address.\n- **Customer Goals:** – The primary goals and aspirations of the target audience that the product helps to achieve.\n- **Current Solutions Used:** – How the audience currently tries to address these pain points, including competitor solutions or alternatives.\n- **Specific Target Audience:** – A detailed description of the target audience, including demographics, interests, lifestyle, and behavioral profile.\n- **Customer Expertise Level:**– The level of familiarity or experience the audience has with similar or related solutions.\n\n### Guidelines for Keyword Generation:\n1. **Focus on Pain Points and Solutions**: Generate keywords that accurately reflect the customers’ pain points, clearly conveying how the product offers an effective and unique solution.\n2. **Emphasize Goals and Benefits**: Highlight keywords aligned with customer goals, emphasizing the positive impact and achievable results of the product.\n3. **Consider Competition and Differentiators**: Think about existing solutions and how the product stands out. Create keywords that emphasize differentiators and help the product stand out in a competitive landscape.\n4. **Tailor to Target Audience**: Use terms and phrases that resonate directly with the target audience’s profile, utilizing language and themes most appealing to this segment.\n5. **Customize to Expertise Level**: Adjust the complexity of the keywords according to the audience’s experience level, ensuring they are appealing and accessible.\n6. 
**Incorporate Market Trends**: Where possible, include keywords that reflect the latest trends in the sector, increasing the content’s relevance and timeliness.\n\n### Example Keyword Suggestions:\n- **For customer pain points:** – Use keywords that reinforce customer pain points, making it clear how the product can be a solution.\n- **For goals and aspirations:** – Keywords that symbolize the outcomes and goals desired by customers, such as ‘stress relief,’ ‘productivity boost.’\n- **For product differentiators:** – Keywords that contrast the product with current solutions, highlighting its unique advantages.\n\nFor each keyword generated, provide a brief explanation of how it connects with the product details and the target audience, ensuring the final list is powerful, strategic, and well-founded for maximum market impact." + } + }, + "tool_mode": false + }, + "type": "Prompt" + }, + "dragging": false, + "height": 260, + "id": "Prompt-aMAQ2", + "position": { + "x": 813.5727530934735, + "y": 991.0702563306074 + }, + "positionAbsolute": { + "x": 813.5727530934735, + "y": 991.0702563306074 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "description": "Display a chat message in the Playground.", + "display_name": "Chat Output", + "id": "ChatOutput-aCpBy", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Display a chat message in the Playground.", + "display_name": "Chat Output", + "documentation": "", + "edited": false, + "field_order": [ + "input_value", + "should_store_message", + "sender", + "sender_name", + "session_id", + "data_template", + "background_color", + "chat_icon", + "text_color" + ], + "frozen": false, + "icon": "MessagesSquare", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Message", + "method": "message_response", + "name": "message", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "background_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Background Color", + "dynamic": false, + "info": "The background color of the icon.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "background_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "chat_icon": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Icon", + "dynamic": false, + "info": "The icon of the message.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "chat_icon", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageInput, MessageTextInput, 
Output\nfrom langflow.schema.message import Message\nfrom langflow.schema.properties import Source\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.\",\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(\n display_name=\"Message\",\n name=\"message\",\n method=\"message_response\",\n ),\n ]\n\n def _build_source(self, _id: str | None, display_name: str | None, source: str | None) -> Source:\n source_dict = {}\n if _id:\n source_dict[\"id\"] = _id\n if display_name:\n source_dict[\"display_name\"] = display_name\n if source:\n source_dict[\"source\"] = source\n return Source(**source_dict)\n\n def message_response(self) -> Message:\n _source, _icon, _display_name, _source_id = self.get_properties_from_source_component()\n _background_color = self.background_color\n _text_color = self.text_color\n if self.chat_icon:\n _icon = self.chat_icon\n message = self.input_value if isinstance(self.input_value, Message) else Message(text=self.input_value)\n message.sender = self.sender\n message.sender_name = self.sender_name\n message.session_id = self.session_id\n message.flow_id = self.graph.flow_id if hasattr(self, \"graph\") else None\n message.properties.source = self._build_source(_source_id, _display_name, _source)\n message.properties.icon = _icon\n message.properties.background_color = _background_color\n message.properties.text_color = _text_color\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n" + }, + "data_template": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Data Template", + "dynamic": false, + "info": "Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "data_template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "{text}" + }, + "input_value": { + "_input_type": "MessageInput", + "advanced": false, + "display_name": "Text", + "dynamic": false, + "info": "Message to be passed as output.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "sender": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Sender Type", + "dynamic": false, + "info": "Type of sender.", + "name": "sender", + "options": ["Machine", "User"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "Machine" + }, + "sender_name": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Sender Name", + "dynamic": false, + "info": "Name of the sender.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "sender_name", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "AI" + }, + "session_id": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Session ID", + "dynamic": false, + "info": "The session ID of the chat. 
If empty, the current session ID parameter will be used.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "session_id", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "should_store_message": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Store Messages", + "dynamic": false, + "info": "Store the message in the history.", + "list": false, + "name": "should_store_message", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "text_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Text Color", + "dynamic": false, + "info": "The text color of the name", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "text_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + } + }, + "tool_mode": false + }, + "type": "ChatOutput" + }, + "dragging": false, + "height": 234, + "id": "ChatOutput-aCpBy", + "position": { + "x": 1598.2529634286327, + "y": 623.4799714496987 + }, + "positionAbsolute": { + "x": 1598.2529634286327, + "y": 623.4799714496987 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "description": "Generates text using OpenAI LLMs.", + "display_name": "OpenAI", + "id": "OpenAIModel-E96kR", + "node": { + "base_classes": ["LanguageModel", "Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Generates text using OpenAI LLMs.", + "display_name": "OpenAI", + "documentation": "", + "edited": false, + "field_order": [ + "input_value", + "system_message", + "stream", + "max_tokens", + "model_kwargs", + "json_mode", + "output_schema", + "model_name", + "openai_api_base", + "api_key", + "temperature", + "seed", + "output_parser" + ], + "frozen": false, + "icon": "OpenAI", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Text", + "method": "text_response", + "name": "text_output", + "required_inputs": [], + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + }, + { + "cache": true, + "display_name": "Language Model", + "method": "build_model", + "name": "model_output", + "required_inputs": [], + "selected": "LanguageModel", + "types": ["LanguageModel"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "api_key": { + "_input_type": "SecretStrInput", + "advanced": false, + "display_name": "OpenAI API Key", + "dynamic": false, + "info": "The OpenAI API Key to use for the OpenAI model.", + "input_types": ["Message"], + "load_from_db": true, + "name": "api_key", + "password": true, + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "OPENAI_API_KEY" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "import operator\nfrom functools 
import reduce\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langflow.inputs import BoolInput, DictInput, DropdownInput, FloatInput, IntInput, SecretStrInput, StrInput\nfrom langflow.inputs.inputs import HandleInput\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = [\n *LCModelComponent._base_inputs,\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(\n name=\"model_kwargs\",\n display_name=\"Model Kwargs\",\n advanced=True,\n info=\"Additional keyword arguments to pass to the model.\",\n ),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. \"\n \"You must pass the word JSON in the prompt. \"\n \"If left blank, JSON mode will be disabled. [DEPRECATED]\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. \"\n \"Defaults to https://api.openai.com/v1. 
\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n HandleInput(\n name=\"output_parser\",\n display_name=\"Output Parser\",\n info=\"The parser to use to parse the output of the model\",\n advanced=True,\n input_types=[\"OutputParser\"],\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n api_key = SecretStr(openai_api_key).get_secret_value() if openai_api_key else None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature if temperature is not None else 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\")\n else:\n output = output.bind(response_format={\"type\": \"json_object\"})\n\n return output\n\n def _get_exception_message(self, e: Exception):\n \"\"\"Get a message from an OpenAI exception.\n\n Args:\n e (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n try:\n from openai import BadRequestError\n except ImportError:\n return None\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\")\n if message:\n return message\n return None\n" + }, + "input_value": { + "_input_type": "MessageInput", + "advanced": false, + "display_name": "Input", + "dynamic": false, + "info": "", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "json_mode": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "JSON Mode", + "dynamic": false, + "info": "If True, it will output JSON regardless of passing a schema.", + "list": false, + "name": "json_mode", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + }, + "max_tokens": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Max Tokens", + "dynamic": false, + "info": "The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.", + "list": false, + "name": "max_tokens", + "placeholder": "", + "range_spec": { + "max": 128000, + "min": 0, + "step": 0.1, + "step_type": "float" + }, + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": "" + }, + "model_kwargs": { + "_input_type": "DictInput", + "advanced": true, + "display_name": "Model Kwargs", + "dynamic": false, + "info": "Additional keyword arguments to pass to the model.", + "list": false, + "name": "model_kwargs", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "model_name": { + "_input_type": "DropdownInput", + "advanced": false, + "combobox": false, + "display_name": "Model Name", + "dynamic": false, + "info": "", + "name": "model_name", + "options": [ + "gpt-4o-mini", + "gpt-4o", + "gpt-4-turbo", + "gpt-4-turbo-preview", + "gpt-4", + "gpt-3.5-turbo", + "gpt-3.5-turbo-0125" + ], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "gpt-4o-mini" + }, + "openai_api_base": { + "_input_type": "StrInput", + "advanced": true, + "display_name": "OpenAI API Base", + "dynamic": false, + "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.", + "list": false, + "load_from_db": false, + "name": "openai_api_base", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "output_parser": { + "_input_type": "HandleInput", + "advanced": true, + "display_name": "Output Parser", + "dynamic": false, + "info": "The parser to use to parse the output of the model", + "input_types": ["OutputParser"], + "list": false, + "name": "output_parser", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "output_schema": { + "_input_type": "DictInput", + "advanced": true, + "display_name": "Schema", + "dynamic": false, + "info": "The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled. [DEPRECATED]", + "list": true, + "name": "output_schema", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "seed": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Seed", + "dynamic": false, + "info": "The seed controls the reproducibility of the job.", + "list": false, + "name": "seed", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 1 + }, + "stream": { + "_input_type": "BoolInput", + "advanced": false, + "display_name": "Stream", + "dynamic": false, + "info": "Stream the response from the model. 
Streaming works only in Chat.", + "list": false, + "name": "stream", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + }, + "system_message": { + "_input_type": "MessageTextInput", + "advanced": false, + "display_name": "System Message", + "dynamic": false, + "info": "System message to pass to the model.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "system_message", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "temperature": { + "_input_type": "FloatInput", + "advanced": false, + "display_name": "Temperature", + "dynamic": false, + "info": "", + "list": false, + "name": "temperature", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "float", + "value": 0.1 + } + }, + "tool_mode": false + }, + "type": "OpenAIModel" + }, + "dragging": false, + "height": 543, + "id": "OpenAIModel-E96kR", + "position": { + "x": 1207.6180121002271, + "y": 400.34849438035565 + }, + "positionAbsolute": { + "x": 1207.6180121002271, + "y": 400.34849438035565 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "id": "note-qiq3C", + "node": { + "description": "## Make sure to add your OpenAI API key from [platform.openai.com](https://platform.openai.com). ", + "display_name": "", + "documentation": "", + "template": {} + }, + "type": "note" + }, + "dragging": false, + "height": 325, + "id": "note-qiq3C", + "position": { + "x": 1207.1996899547116, + "y": 260.0148704431837 + }, + "positionAbsolute": { + "x": 1207.1996899547116, + "y": 260.0148704431837 + }, + "selected": false, + "type": "noteNode", + "width": 325 + } + ], + "viewport": { + "x": -106.23719516361678, + "y": -93.96601486484758, + "zoom": 0.7759242611491008 + } + }, + "description": "Generates targeted SEO keywords based on product information, pain points, and customer profiles for strategic marketing.", + "endpoint_name": null, + "icon": "List", + "id": "3432cbdf-c4af-43b2-96ef-5f8155cf24d1", + "gradient": "2", + "is_component": false, + "last_tested_version": "1.0.19.post2", + "name": "SEO Keyword Generator", + "tags": ["chatbots", "assistants"] +} diff --git a/src/backend/base/langflow/initial_setup/starter_projects/SaaS Pricing.json b/src/backend/base/langflow/initial_setup/starter_projects/SaaS Pricing.json new file mode 100644 index 000000000000..9ad76c5b2e29 --- /dev/null +++ b/src/backend/base/langflow/initial_setup/starter_projects/SaaS Pricing.json @@ -0,0 +1,1259 @@ +{ + "data": { + "edges": [ + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "CalculatorTool", + "id": "CalculatorTool-DF8xQ", + "name": "api_build_tool", + "output_types": ["Tool"] + }, + "targetHandle": { + "fieldName": "tools", + "id": "Agent-5e01q", + "inputTypes": ["Tool", "BaseTool", "StructuredTool"], + "type": "other" + } + }, + "id": "reactflow__edge-CalculatorTool-DF8xQ{œdataTypeœ:œCalculatorToolœ,œidœ:œCalculatorTool-DF8xQœ,œnameœ:œapi_build_toolœ,œoutput_typesœ:[œToolœ]}-Agent-5e01q{œfieldNameœ:œtoolsœ,œidœ:œAgent-5e01qœ,œinputTypesœ:[œToolœ,œBaseToolœ,œStructuredToolœ],œtypeœ:œotherœ}", + "source": "CalculatorTool-DF8xQ", + "sourceHandle": "{œdataTypeœ: œCalculatorToolœ, œidœ: œCalculatorTool-DF8xQœ, œnameœ: œapi_build_toolœ, œoutput_typesœ: 
[œToolœ]}", + "target": "Agent-5e01q", + "targetHandle": "{œfieldNameœ: œtoolsœ, œidœ: œAgent-5e01qœ, œinputTypesœ: [œToolœ, œBaseToolœ, œStructuredToolœ], œtypeœ: œotherœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "Agent", + "id": "Agent-5e01q", + "name": "response", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "input_value", + "id": "ChatOutput-s1eJK", + "inputTypes": ["Message"], + "type": "str" + } + }, + "id": "reactflow__edge-Agent-5e01q{œdataTypeœ:œAgentœ,œidœ:œAgent-5e01qœ,œnameœ:œresponseœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-s1eJK{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-s1eJKœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "Agent-5e01q", + "sourceHandle": "{œdataTypeœ: œAgentœ, œidœ: œAgent-5e01qœ, œnameœ: œresponseœ, œoutput_typesœ: [œMessageœ]}", + "target": "ChatOutput-s1eJK", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-s1eJKœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "Prompt", + "id": "Prompt-KkcsZ", + "name": "prompt", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "input_value", + "id": "Agent-5e01q", + "inputTypes": ["Message"], + "type": "str" + } + }, + "id": "reactflow__edge-Prompt-KkcsZ{œdataTypeœ:œPromptœ,œidœ:œPrompt-KkcsZœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-Agent-5e01q{œfieldNameœ:œinput_valueœ,œidœ:œAgent-5e01qœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "Prompt-KkcsZ", + "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-KkcsZœ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", + "target": "Agent-5e01q", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œAgent-5e01qœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + } + ], + "nodes": [ + { + "data": { + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt", + "id": "Prompt-KkcsZ", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": { + "template": [ + "monthly_infrastructure_costs", + "customer_support_cost", + "continuous_development_cost", + "desired_profit_margin", + "estimated_subscribers" + ] + }, + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt", + "documentation": "", + "edited": false, + "field_order": ["template"], + "frozen": false, + "icon": "prompts", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Prompt Message", + "method": "build_prompt", + "name": "prompt", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.inputs.inputs import DefaultPromptField\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a 
prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(self) -> Message:\n prompt = Message.from_template(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def _update_template(self, frontend_node: dict):\n prompt_template = frontend_node[\"template\"][\"template\"][\"value\"]\n custom_fields = frontend_node[\"custom_fields\"]\n frontend_node_template = frontend_node[\"template\"]\n _ = process_prompt_template(\n template=prompt_template,\n name=\"template\",\n custom_fields=custom_fields,\n frontend_node_template=frontend_node_template,\n )\n return frontend_node\n\n def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"This function is called after the code validation is done.\"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n # Kept it duplicated for backwards compatibility\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n\n def _get_fallback_input(self, **kwargs):\n return DefaultPromptField(**kwargs)\n" + }, + "continuous_development_cost": { + "advanced": false, + "display_name": "continuous_development_cost", + "dynamic": false, + "field_type": "str", + "fileTypes": [], + "file_path": "", + "info": "", + "input_types": ["Message", "Text"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "continuous_development_cost", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "3000" + }, + "customer_support_cost": { + "advanced": false, + "display_name": "customer_support_cost", + "dynamic": false, + "field_type": "str", + "fileTypes": [], + "file_path": "", + "info": "", + "input_types": ["Message", "Text"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "customer_support_cost", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "1000" + }, + "desired_profit_margin": { + "advanced": false, + "display_name": "desired_profit_margin", + "dynamic": false, + "field_type": "str", + "fileTypes": [], + "file_path": "", + "info": "", + "input_types": ["Message", "Text"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "desired_profit_margin", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "30" + }, + "estimated_subscribers": { + "advanced": false, + "display_name": "estimated_subscribers", + "dynamic": false, + "field_type": "str", + "fileTypes": [], + "file_path": "", + "info": "", + "input_types": ["Message", "Text"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "estimated_subscribers", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", 
+ "value": "200" + }, + "monthly_infrastructure_costs": { + "advanced": false, + "display_name": "monthly_infrastructure_costs", + "dynamic": false, + "field_type": "str", + "fileTypes": [], + "file_path": "", + "info": "", + "input_types": ["Message", "Text"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "monthly_infrastructure_costs", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "2000" + }, + "template": { + "_input_type": "PromptInput", + "advanced": false, + "display_name": "Template", + "dynamic": false, + "info": "", + "list": false, + "load_from_db": false, + "name": "template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "type": "prompt", + "value": "To calculate the monthly subscription price of the software based on the following data:\n\nMonthly infrastructure costs: ${monthly_infrastructure_costs}\nCustomer support: ${customer_support_cost}\nContinuous development: {continuous_development_cost}\nDesired profit margin: {desired_profit_margin}%\nEstimated number of subscribers: {estimated_subscribers}\n\nFollow the step to formulate the answer:\nFixed costs:\nProfit margin:\nTotal amount needed:\nPrice per subscriber:\nThe minimum subscription price per subscriber is:" + } + }, + "tool_mode": false + }, + "type": "Prompt" + }, + "dragging": false, + "height": 693, + "id": "Prompt-KkcsZ", + "position": { + "x": 1349.861745038984, + "y": 347.90475109976467 + }, + "positionAbsolute": { + "x": 1349.861745038984, + "y": 347.90475109976467 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "description": "Display a chat message in the Playground.", + "display_name": "Chat Output", + "id": "ChatOutput-s1eJK", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Display a chat message in the Playground.", + "display_name": "Chat Output", + "documentation": "", + "edited": false, + "field_order": [ + "input_value", + "should_store_message", + "sender", + "sender_name", + "session_id", + "data_template", + "background_color", + "chat_icon", + "text_color" + ], + "frozen": false, + "icon": "MessagesSquare", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Message", + "method": "message_response", + "name": "message", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "background_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Background Color", + "dynamic": false, + "info": "The background color of the icon.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "background_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "chat_icon": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Icon", + "dynamic": false, + "info": "The icon of the message.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "chat_icon", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + 
"trace_as_metadata": true, + "type": "str", + "value": "" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageInput, MessageTextInput, Output\nfrom langflow.schema.message import Message\nfrom langflow.schema.properties import Source\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.\",\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(\n display_name=\"Message\",\n name=\"message\",\n method=\"message_response\",\n ),\n ]\n\n def _build_source(self, _id: str | None, display_name: str | None, source: str | None) -> Source:\n source_dict = {}\n if _id:\n source_dict[\"id\"] = _id\n if display_name:\n source_dict[\"display_name\"] = display_name\n if source:\n source_dict[\"source\"] = source\n return Source(**source_dict)\n\n def message_response(self) -> Message:\n _source, _icon, _display_name, _source_id = self.get_properties_from_source_component()\n _background_color = self.background_color\n _text_color = self.text_color\n if self.chat_icon:\n _icon = self.chat_icon\n message = self.input_value if isinstance(self.input_value, Message) else Message(text=self.input_value)\n message.sender = self.sender\n message.sender_name = self.sender_name\n message.session_id = self.session_id\n message.flow_id = self.graph.flow_id if hasattr(self, \"graph\") else None\n message.properties.source = self._build_source(_source_id, _display_name, _source)\n message.properties.icon = _icon\n message.properties.background_color = _background_color\n message.properties.text_color = _text_color\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n" + }, + "data_template": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Data Template", + "dynamic": false, + "info": "Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "data_template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "{text}" + }, + "input_value": { + "_input_type": "MessageInput", + "advanced": false, + "display_name": "Text", + "dynamic": false, + "info": "Message to be passed as output.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "sender": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Sender Type", + "dynamic": false, + "info": "Type of sender.", + "name": "sender", + "options": ["Machine", "User"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "Machine" + }, + "sender_name": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Sender Name", + "dynamic": false, + "info": "Name of the sender.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "sender_name", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "AI" + }, + "session_id": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Session ID", + "dynamic": false, + "info": "The session ID of the chat. If empty, the current session ID parameter will be used.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "session_id", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "should_store_message": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Store Messages", + "dynamic": false, + "info": "Store the message in the history.", + "list": false, + "name": "should_store_message", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "text_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Text Color", + "dynamic": false, + "info": "The text color of the name", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "text_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + } + }, + "tool_mode": false + }, + "type": "ChatOutput" + }, + "dragging": false, + "height": 234, + "id": "ChatOutput-s1eJK", + "position": { + "x": 2240.3625274769397, + "y": 355.16302699218204 + }, + "positionAbsolute": { + "x": 2240.3625274769397, + "y": 355.16302699218204 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "id": "note-H6OpG", + "node": { + "description": "# SaaS Pricing Calculator\n\nWelcome to the SaaS Pricing Calculator! 
This flow helps you determine the optimal monthly subscription price for your software service.\n\n## Instructions\n\n1. Prepare Your Data\n - Gather information on monthly infrastructure costs\n - Calculate customer support expenses\n - Estimate continuous development costs\n - Decide on your desired profit margin\n - Determine the estimated number of subscribers\n\n2. Input Values\n - Enter the gathered data into the respective fields in the Prompt node\n - Double-check the accuracy of your inputs\n\n3. Run the Flow\n - Click the \"Run\" button to start the calculation process\n - The flow will use Chain-of-Thought prompting to guide the AI through the steps\n\n4. Review the Results\n - Examine the output in the Chat Output node\n - The result will show a breakdown of costs and the final subscription price\n\n5. Adjust and Refine\n - If needed, modify your inputs to explore different pricing scenarios\n - Re-run the flow to see how changes affect the final price\n\nRemember: Regularly update your costs and subscriber estimates to keep your pricing model accurate and competitive! 💼📊", + "display_name": "", + "documentation": "", + "template": {} + }, + "type": "note" + }, + "dragging": false, + "height": 800, + "id": "note-H6OpG", + "position": { + "x": 689.7659055360411, + "y": 68.95847391680593 + }, + "positionAbsolute": { + "x": 689.7659055360411, + "y": 68.95847391680593 + }, + "resizing": false, + "selected": false, + "style": { + "height": 800, + "width": 600 + }, + "type": "noteNode", + "width": 600 + }, + { + "data": { + "description": "Define the agent's instructions, then enter a task to complete using tools.", + "display_name": "Agent", + "id": "Agent-5e01q", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Define the agent's instructions, then enter a task to complete using tools.", + "display_name": "Agent", + "documentation": "", + "edited": false, + "field_order": [ + "agent_llm", + "max_tokens", + "model_kwargs", + "json_mode", + "output_schema", + "model_name", + "openai_api_base", + "api_key", + "temperature", + "seed", + "output_parser", + "system_prompt", + "tools", + "input_value", + "handle_parsing_errors", + "verbose", + "max_iterations", + "agent_description", + "memory", + "sender", + "sender_name", + "n_messages", + "session_id", + "order", + "template", + "add_current_date_tool" + ], + "frozen": false, + "icon": "bot", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Response", + "method": "message_response", + "name": "response", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "add_current_date_tool": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Add tool Current Date", + "dynamic": false, + "info": "If true, will add a tool to the agent that returns the current date.", + "list": false, + "name": "add_current_date_tool", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "agent_description": { + "_input_type": "MultilineInput", + "advanced": true, + "display_name": "Agent Description", + "dynamic": false, + "info": "The description of the agent. This is only used when in Tool Mode. 
Defaults to 'A helpful assistant with access to the following tools:' and tools are added dynamically.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "agent_description", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "A helpful assistant with access to the following tools:" + }, + "agent_llm": { + "_input_type": "DropdownInput", + "advanced": false, + "combobox": false, + "display_name": "Model Provider", + "dynamic": false, + "info": "The provider of the language model that the agent will use to generate responses.", + "input_types": [], + "name": "agent_llm", + "options": [ + "Amazon Bedrock", + "Anthropic", + "Azure OpenAI", + "Groq", + "NVIDIA", + "OpenAI", + "Custom" + ], + "placeholder": "", + "real_time_refresh": true, + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "OpenAI" + }, + "api_key": { + "_input_type": "SecretStrInput", + "advanced": false, + "display_name": "OpenAI API Key", + "dynamic": false, + "info": "The OpenAI API Key to use for the OpenAI model.", + "input_types": ["Message"], + "load_from_db": true, + "name": "api_key", + "password": true, + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "OPENAI_API_KEY" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langchain_core.tools import StructuredTool\n\nfrom langflow.base.agents.agent import LCToolsAgentComponent\nfrom langflow.base.models.model_input_constants import ALL_PROVIDER_FIELDS, MODEL_PROVIDERS_DICT\nfrom langflow.base.models.model_utils import get_model_name\nfrom langflow.components.helpers import CurrentDateComponent\nfrom langflow.components.helpers.memory import MemoryComponent\nfrom langflow.components.langchain_utilities.tool_calling import ToolCallingAgentComponent\nfrom langflow.io import BoolInput, DropdownInput, MultilineInput, Output\nfrom langflow.schema.dotdict import dotdict\nfrom langflow.schema.message import Message\n\n\ndef set_advanced_true(component_input):\n component_input.advanced = True\n return component_input\n\n\nclass AgentComponent(ToolCallingAgentComponent):\n display_name: str = \"Agent\"\n description: str = \"Define the agent's instructions, then enter a task to complete using tools.\"\n icon = \"bot\"\n beta = False\n name = \"Agent\"\n\n memory_inputs = [set_advanced_true(component_input) for component_input in MemoryComponent().inputs]\n\n inputs = [\n DropdownInput(\n name=\"agent_llm\",\n display_name=\"Model Provider\",\n info=\"The provider of the language model that the agent will use to generate responses.\",\n options=[*sorted(MODEL_PROVIDERS_DICT.keys()), \"Custom\"],\n value=\"OpenAI\",\n real_time_refresh=True,\n input_types=[],\n ),\n *MODEL_PROVIDERS_DICT[\"OpenAI\"][\"inputs\"],\n MultilineInput(\n name=\"system_prompt\",\n display_name=\"Agent Instructions\",\n info=\"System Prompt: Initial instructions and context provided to guide the agent's behavior.\",\n value=\"You are a helpful assistant that can use tools to answer questions and perform 
tasks.\",\n advanced=False,\n ),\n *LCToolsAgentComponent._base_inputs,\n *memory_inputs,\n BoolInput(\n name=\"add_current_date_tool\",\n display_name=\"Add tool Current Date\",\n advanced=True,\n info=\"If true, will add a tool to the agent that returns the current date.\",\n value=True,\n ),\n ]\n outputs = [Output(name=\"response\", display_name=\"Response\", method=\"message_response\")]\n\n async def message_response(self) -> Message:\n llm_model, display_name = self.get_llm()\n self.model_name = get_model_name(llm_model, display_name=display_name)\n if llm_model is None:\n msg = \"No language model selected\"\n raise ValueError(msg)\n self.chat_history = self.get_memory_data()\n\n if self.add_current_date_tool:\n if not isinstance(self.tools, list): # type: ignore[has-type]\n self.tools = []\n # Convert CurrentDateComponent to a StructuredTool\n current_date_tool = CurrentDateComponent().to_toolkit()[0]\n if isinstance(current_date_tool, StructuredTool):\n self.tools.append(current_date_tool)\n else:\n msg = \"CurrentDateComponent must be converted to a StructuredTool\"\n raise ValueError(msg)\n\n if not self.tools:\n msg = \"Tools are required to run the agent.\"\n raise ValueError(msg)\n self.set(\n llm=llm_model,\n tools=self.tools,\n chat_history=self.chat_history,\n input_value=self.input_value,\n system_prompt=self.system_prompt,\n )\n agent = self.create_agent_runnable()\n return await self.run_agent(agent)\n\n def get_memory_data(self):\n memory_kwargs = {\n component_input.name: getattr(self, f\"{component_input.name}\") for component_input in self.memory_inputs\n }\n\n return MemoryComponent().set(**memory_kwargs).retrieve_messages()\n\n def get_llm(self):\n if isinstance(self.agent_llm, str):\n try:\n provider_info = MODEL_PROVIDERS_DICT.get(self.agent_llm)\n if provider_info:\n component_class = provider_info.get(\"component_class\")\n display_name = component_class.display_name\n inputs = provider_info.get(\"inputs\")\n prefix = provider_info.get(\"prefix\", \"\")\n return self._build_llm_model(component_class, inputs, prefix), display_name\n except Exception as e:\n msg = f\"Error building {self.agent_llm} language model\"\n raise ValueError(msg) from e\n return self.agent_llm, None\n\n def _build_llm_model(self, component, inputs, prefix=\"\"):\n model_kwargs = {input_.name: getattr(self, f\"{prefix}{input_.name}\") for input_ in inputs}\n return component.set(**model_kwargs).build_model()\n\n def delete_fields(self, build_config: dotdict, fields: dict | list[str]) -> None:\n \"\"\"Delete specified fields from build_config.\"\"\"\n for field in fields:\n build_config.pop(field, None)\n\n def update_input_types(self, build_config: dotdict) -> dotdict:\n \"\"\"Update input types for all fields in build_config.\"\"\"\n for key, value in build_config.items():\n if isinstance(value, dict):\n if value.get(\"input_types\") is None:\n build_config[key][\"input_types\"] = []\n elif hasattr(value, \"input_types\") and value.input_types is None:\n value.input_types = []\n return build_config\n\n def update_build_config(self, build_config: dotdict, field_value: str, field_name: str | None = None) -> dotdict:\n # Iterate over all providers in the MODEL_PROVIDERS_DICT\n # Existing logic for updating build_config\n if field_name == \"agent_llm\":\n provider_info = MODEL_PROVIDERS_DICT.get(field_value)\n if provider_info:\n component_class = provider_info.get(\"component_class\")\n if component_class and hasattr(component_class, \"update_build_config\"):\n # Call the component 
class's update_build_config method\n build_config = component_class.update_build_config(build_config, field_value, field_name)\n\n provider_configs: dict[str, tuple[dict, list[dict]]] = {\n provider: (\n MODEL_PROVIDERS_DICT[provider][\"fields\"],\n [\n MODEL_PROVIDERS_DICT[other_provider][\"fields\"]\n for other_provider in MODEL_PROVIDERS_DICT\n if other_provider != provider\n ],\n )\n for provider in MODEL_PROVIDERS_DICT\n }\n if field_value in provider_configs:\n fields_to_add, fields_to_delete = provider_configs[field_value]\n\n # Delete fields from other providers\n for fields in fields_to_delete:\n self.delete_fields(build_config, fields)\n\n # Add provider-specific fields\n if field_value == \"OpenAI\" and not any(field in build_config for field in fields_to_add):\n build_config.update(fields_to_add)\n else:\n build_config.update(fields_to_add)\n # Reset input types for agent_llm\n build_config[\"agent_llm\"][\"input_types\"] = []\n elif field_value == \"Custom\":\n # Delete all provider fields\n self.delete_fields(build_config, ALL_PROVIDER_FIELDS)\n # Update with custom component\n custom_component = DropdownInput(\n name=\"agent_llm\",\n display_name=\"Language Model\",\n options=[*sorted(MODEL_PROVIDERS_DICT.keys()), \"Custom\"],\n value=\"Custom\",\n real_time_refresh=True,\n input_types=[\"LanguageModel\"],\n )\n build_config.update({\"agent_llm\": custom_component.to_dict()})\n # Update input types for all fields\n build_config = self.update_input_types(build_config)\n\n # Validate required keys\n default_keys = [\n \"code\",\n \"_type\",\n \"agent_llm\",\n \"tools\",\n \"input_value\",\n \"add_current_date_tool\",\n \"system_prompt\",\n \"agent_description\",\n \"max_iterations\",\n \"handle_parsing_errors\",\n \"verbose\",\n ]\n missing_keys = [key for key in default_keys if key not in build_config]\n if missing_keys:\n msg = f\"Missing required keys in build_config: {missing_keys}\"\n raise ValueError(msg)\n if isinstance(self.agent_llm, str) and self.agent_llm in MODEL_PROVIDERS_DICT:\n provider_info = MODEL_PROVIDERS_DICT.get(self.agent_llm)\n if provider_info:\n component_class = provider_info.get(\"component_class\")\n prefix = provider_info.get(\"prefix\")\n if component_class and hasattr(component_class, \"update_build_config\"):\n # Call each component class's update_build_config method\n # remove the prefix from the field_name\n if isinstance(field_name, str) and isinstance(prefix, str):\n field_name = field_name.replace(prefix, \"\")\n build_config = component_class.update_build_config(build_config, field_value, field_name)\n\n return build_config\n" + }, + "handle_parsing_errors": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Handle Parse Errors", + "dynamic": false, + "info": "Should the Agent fix errors when reading user input for better processing?", + "list": false, + "name": "handle_parsing_errors", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "input_value": { + "_input_type": "MessageTextInput", + "advanced": false, + "display_name": "Input", + "dynamic": false, + "info": "The input provided by the user for the agent to process.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": true, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + 
"json_mode": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "JSON Mode", + "dynamic": false, + "info": "If True, it will output JSON regardless of passing a schema.", + "list": false, + "name": "json_mode", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + }, + "max_iterations": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Max Iterations", + "dynamic": false, + "info": "The maximum number of attempts the agent can make to complete its task before it stops.", + "list": false, + "name": "max_iterations", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 15 + }, + "max_tokens": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Max Tokens", + "dynamic": false, + "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.", + "list": false, + "name": "max_tokens", + "placeholder": "", + "range_spec": { + "max": 128000, + "min": 0, + "step": 0.1, + "step_type": "float" + }, + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": "" + }, + "memory": { + "_input_type": "HandleInput", + "advanced": true, + "display_name": "External Memory", + "dynamic": false, + "info": "Retrieve messages from an external memory. If empty, it will use the Langflow tables.", + "input_types": ["BaseChatMessageHistory"], + "list": false, + "name": "memory", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "model_kwargs": { + "_input_type": "DictInput", + "advanced": true, + "display_name": "Model Kwargs", + "dynamic": false, + "info": "Additional keyword arguments to pass to the model.", + "list": false, + "name": "model_kwargs", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "model_name": { + "_input_type": "DropdownInput", + "advanced": false, + "combobox": false, + "display_name": "Model Name", + "dynamic": false, + "info": "", + "name": "model_name", + "options": [ + "gpt-4o-mini", + "gpt-4o", + "gpt-4-turbo", + "gpt-4-turbo-preview", + "gpt-4", + "gpt-3.5-turbo", + "gpt-3.5-turbo-0125" + ], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "gpt-4o-mini" + }, + "n_messages": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Number of Messages", + "dynamic": false, + "info": "Number of messages to retrieve.", + "list": false, + "name": "n_messages", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 100 + }, + "openai_api_base": { + "_input_type": "StrInput", + "advanced": true, + "display_name": "OpenAI API Base", + "dynamic": false, + "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. 
You can change this to use other APIs like JinaChat, LocalAI and Prem.", + "list": false, + "load_from_db": false, + "name": "openai_api_base", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "order": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Order", + "dynamic": false, + "info": "Order of the messages.", + "name": "order", + "options": ["Ascending", "Descending"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "Ascending" + }, + "output_parser": { + "_input_type": "HandleInput", + "advanced": true, + "display_name": "Output Parser", + "dynamic": false, + "info": "The parser to use to parse the output of the model", + "input_types": ["OutputParser"], + "list": false, + "name": "output_parser", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "output_schema": { + "_input_type": "DictInput", + "advanced": true, + "display_name": "Schema", + "dynamic": false, + "info": "The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled. [DEPRECATED]", + "list": true, + "name": "output_schema", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "seed": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Seed", + "dynamic": false, + "info": "The seed controls the reproducibility of the job.", + "list": false, + "name": "seed", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 1 + }, + "sender": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Sender Type", + "dynamic": false, + "info": "Filter by sender type.", + "name": "sender", + "options": ["Machine", "User", "Machine and User"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "Machine and User" + }, + "sender_name": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Sender Name", + "dynamic": false, + "info": "Filter by sender name.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "sender_name", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "session_id": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Session ID", + "dynamic": false, + "info": "The session ID of the chat. 
If empty, the current session ID parameter will be used.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "session_id", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "system_prompt": { + "_input_type": "MultilineInput", + "advanced": false, + "display_name": "Agent Instructions", + "dynamic": false, + "info": "System Prompt: Initial instructions and context provided to guide the agent's behavior.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "system_prompt", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "# Subscription Pricing Calculator\n\n## Purpose\nCalculate the optimal monthly subscription price for a software product based on operational costs, desired profit margin, and estimated subscriber base.\n\n## Input Variables\nThe system requires the following inputs:\n- Monthly infrastructure costs (numeric)\n- Customer support costs (numeric)\n- Continuous development costs (numeric)\n- Desired profit margin (percentage)\n- Estimated number of subscribers (numeric)\n\n## Calculation Process\nFollow these steps to determine the subscription price:\n\n### Step 1: Total Monthly Costs\nCalculate the sum of all fixed operational costs:\n```\ntotal_monthly_costs = infrastructure_costs + support_costs + development_costs\n```\n\n### Step 2: Profit Margin Calculation\nCalculate the profit margin amount based on total costs:\n```\nprofit_amount = total_monthly_costs × (profit_margin_percentage / 100)\n```\n\n### Step 3: Total Revenue Required\nCalculate the total monthly revenue needed:\n```\ntotal_revenue_needed = total_monthly_costs + profit_amount\n```\n\n### Step 4: Per-Subscriber Price\nCalculate the minimum price per subscriber:\n```\nsubscription_price = total_revenue_needed ÷ estimated_subscribers\n```\n\n## Output Format\nPresent the results in the following structure:\n\nFixed costs: [sum of all costs]\nProfit margin: [calculated profit amount]\nTotal amount needed: [total revenue required]\nPrice per subscriber: [calculated subscription price]\n\nFinal recommendation: \"The minimum subscription price per subscriber should be [price] to achieve the desired profit margin of [percentage]%\"\n\n## Notes\n- All monetary values should be rounded to 2 decimal places\n- Ensure all input values are positive numbers\n- Validate that the estimated subscribers count is greater than zero\n- The profit margin percentage should be between 0 and 100" + }, + "temperature": { + "_input_type": "FloatInput", + "advanced": true, + "display_name": "Temperature", + "dynamic": false, + "info": "", + "list": false, + "name": "temperature", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "float", + "value": 0.1 + }, + "template": { + "_input_type": "MultilineInput", + "advanced": true, + "display_name": "Template", + "dynamic": false, + "info": "The template to use for formatting the data. 
It can contain the keys {text}, {sender} or any other key in the message data.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "{sender_name}: {text}" + }, + "tools": { + "_input_type": "HandleInput", + "advanced": false, + "display_name": "Tools", + "dynamic": false, + "info": "These are the tools that the agent can use to help with tasks.", + "input_types": ["Tool", "BaseTool", "StructuredTool"], + "list": true, + "name": "tools", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "verbose": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Verbose", + "dynamic": false, + "info": "", + "list": false, + "name": "verbose", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + } + }, + "tool_mode": false + }, + "type": "Agent" + }, + "dragging": false, + "height": 650, + "id": "Agent-5e01q", + "position": { + "x": 1819.2633856623966, + "y": 138.32023808479687 + }, + "positionAbsolute": { + "x": 1819.2633856623966, + "y": 138.32023808479687 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "id": "CalculatorTool-DF8xQ", + "node": { + "base_classes": ["Data", "Tool"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Perform basic arithmetic operations on a given expression.", + "display_name": "Calculator", + "documentation": "", + "edited": false, + "field_order": ["expression"], + "frozen": false, + "icon": "calculator", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Data", + "method": "run_model", + "name": "api_run_model", + "required_inputs": [], + "selected": "Data", + "types": ["Data"], + "value": "__UNDEFINED__" + }, + { + "cache": true, + "display_name": "Tool", + "method": "build_tool", + "name": "api_build_tool", + "required_inputs": [], + "selected": "Tool", + "types": ["Tool"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "import ast\nimport operator\n\nfrom langchain.tools import StructuredTool\nfrom langchain_core.tools import ToolException\nfrom loguru import logger\nfrom pydantic import BaseModel, Field\n\nfrom langflow.base.langchain_utilities.model import LCToolComponent\nfrom langflow.field_typing import Tool\nfrom langflow.inputs import MessageTextInput\nfrom langflow.schema import Data\n\n\nclass CalculatorToolComponent(LCToolComponent):\n display_name = \"Calculator\"\n description = \"Perform basic arithmetic operations on a given expression.\"\n icon = \"calculator\"\n name = \"CalculatorTool\"\n\n inputs = [\n MessageTextInput(\n name=\"expression\",\n display_name=\"Expression\",\n info=\"The arithmetic expression to evaluate (e.g., '4*4*(33/22)+12-20').\",\n tool_mode=True,\n ),\n 
]\n\n class CalculatorToolSchema(BaseModel):\n expression: str = Field(..., description=\"The arithmetic expression to evaluate.\")\n\n def run_model(self) -> list[Data]:\n return self._evaluate_expression(self.expression)\n\n def build_tool(self) -> Tool:\n return StructuredTool.from_function(\n name=\"calculator\",\n description=\"Evaluate basic arithmetic expressions. Input should be a string containing the expression.\",\n func=self._eval_expr_with_error,\n args_schema=self.CalculatorToolSchema,\n )\n\n def _eval_expr(self, node):\n # Define the allowed operators\n operators = {\n ast.Add: operator.add,\n ast.Sub: operator.sub,\n ast.Mult: operator.mul,\n ast.Div: operator.truediv,\n ast.Pow: operator.pow,\n }\n if isinstance(node, ast.Num):\n return node.n\n if isinstance(node, ast.BinOp):\n return operators[type(node.op)](self._eval_expr(node.left), self._eval_expr(node.right))\n if isinstance(node, ast.UnaryOp):\n return operators[type(node.op)](self._eval_expr(node.operand))\n if isinstance(node, ast.Call):\n msg = (\n \"Function calls like sqrt(), sin(), cos() etc. are not supported. \"\n \"Only basic arithmetic operations (+, -, *, /, **) are allowed.\"\n )\n raise TypeError(msg)\n msg = f\"Unsupported operation or expression type: {type(node).__name__}\"\n raise TypeError(msg)\n\n def _eval_expr_with_error(self, expression: str) -> list[Data]:\n try:\n return self._evaluate_expression(expression)\n except Exception as e:\n raise ToolException(str(e)) from e\n\n def _evaluate_expression(self, expression: str) -> list[Data]:\n try:\n # Parse the expression and evaluate it\n tree = ast.parse(expression, mode=\"eval\")\n result = self._eval_expr(tree.body)\n\n # Format the result to a reasonable number of decimal places\n formatted_result = f\"{result:.6f}\".rstrip(\"0\").rstrip(\".\")\n\n self.status = formatted_result\n return [Data(data={\"result\": formatted_result})]\n\n except (SyntaxError, TypeError, KeyError) as e:\n error_message = f\"Invalid expression: {e}\"\n self.status = error_message\n return [Data(data={\"error\": error_message, \"input\": expression})]\n except ZeroDivisionError:\n error_message = \"Error: Division by zero\"\n self.status = error_message\n return [Data(data={\"error\": error_message, \"input\": expression})]\n except Exception as e: # noqa: BLE001\n logger.opt(exception=True).debug(\"Error evaluating expression\")\n error_message = f\"Error: {e}\"\n self.status = error_message\n return [Data(data={\"error\": error_message, \"input\": expression})]\n" + }, + "expression": { + "_input_type": "MessageTextInput", + "advanced": false, + "display_name": "Expression", + "dynamic": false, + "info": "The arithmetic expression to evaluate (e.g., '4*4*(33/22)+12-20').", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "expression", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": true, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + } + }, + "tool_mode": false + }, + "type": "CalculatorTool" + }, + "dragging": false, + "height": 167, + "id": "CalculatorTool-DF8xQ", + "position": { + "x": 1347.154214046272, + "y": 28.770424745017564 + }, + "positionAbsolute": { + "x": 1347.154214046272, + "y": 28.770424745017564 + }, + "selected": false, + "type": "genericNode", + "width": 320 + } + ], + "viewport": { + "x": -251.17743782763955, + "y": 134.52045967838717, + "zoom": 0.6368650431844803 + } + }, + "description": "Calculate SaaS subscription price 
based on costs, profit margin, and subscribers using step-by-step method and Chain-of-Thought prompting. ", + "endpoint_name": null, + "icon": "calculator", + "id": "9357f72e-2121-4541-8e7d-74b7ba2ada2b", + "gradient": "3", + "is_component": false, + "last_tested_version": "1.0.19.post2", + "name": "SaaS Pricing", + "tags": ["agents", "assistants"] +} diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Sequential Agent.json b/src/backend/base/langflow/initial_setup/starter_projects/Sequential Agent.json deleted file mode 100644 index 258af3ae731b..000000000000 --- a/src/backend/base/langflow/initial_setup/starter_projects/Sequential Agent.json +++ /dev/null @@ -1,2632 +0,0 @@ -{ - "data": { - "edges": [ - { - "className": "", - "data": { - "sourceHandle": { - "dataType": "SequentialCrewComponent", - "id": "SequentialCrewComponent-3dbbB", - "name": "output", - "output_types": [ - "Message" - ] - }, - "targetHandle": { - "fieldName": "input_value", - "id": "ChatOutput-nwCjg", - "inputTypes": [ - "Message" - ], - "type": "str" - } - }, - "id": "reactflow__edge-SequentialCrewComponent-3dbbB{œdataTypeœ:œSequentialCrewComponentœ,œidœ:œSequentialCrewComponent-3dbbBœ,œnameœ:œoutputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-nwCjg{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-nwCjgœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", - "source": "SequentialCrewComponent-3dbbB", - "sourceHandle": "{œdataTypeœ: œSequentialCrewComponentœ, œidœ: œSequentialCrewComponent-3dbbBœ, œnameœ: œoutputœ, œoutput_typesœ: [œMessageœ]}", - "target": "ChatOutput-nwCjg", - "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-nwCjgœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" - }, - { - "className": "", - "data": { - "sourceHandle": { - "dataType": "TextInput", - "id": "TextInput-6QUGr", - "name": "text", - "output_types": [ - "Message" - ] - }, - "targetHandle": { - "fieldName": "topic", - "id": "Prompt-GOdlL", - "inputTypes": [ - "Message", - "Text" - ], - "type": "str" - } - }, - "id": "reactflow__edge-TextInput-6QUGr{œdataTypeœ:œTextInputœ,œidœ:œTextInput-6QUGrœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-Prompt-GOdlL{œfieldNameœ:œtopicœ,œidœ:œPrompt-GOdlLœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", - "source": "TextInput-6QUGr", - "sourceHandle": "{œdataTypeœ: œTextInputœ, œidœ: œTextInput-6QUGrœ, œnameœ: œtextœ, œoutput_typesœ: [œMessageœ]}", - "target": "Prompt-GOdlL", - "targetHandle": "{œfieldNameœ: œtopicœ, œidœ: œPrompt-GOdlLœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" - }, - { - "className": "", - "data": { - "sourceHandle": { - "dataType": "TextInput", - "id": "TextInput-6QUGr", - "name": "text", - "output_types": [ - "Message" - ] - }, - "targetHandle": { - "fieldName": "topic", - "id": "Prompt-824D7", - "inputTypes": [ - "Message", - "Text" - ], - "type": "str" - } - }, - "id": "reactflow__edge-TextInput-6QUGr{œdataTypeœ:œTextInputœ,œidœ:œTextInput-6QUGrœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-Prompt-824D7{œfieldNameœ:œtopicœ,œidœ:œPrompt-824D7œ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", - "source": "TextInput-6QUGr", - "sourceHandle": "{œdataTypeœ: œTextInputœ, œidœ: œTextInput-6QUGrœ, œnameœ: œtextœ, œoutput_typesœ: [œMessageœ]}", - "target": "Prompt-824D7", - "targetHandle": "{œfieldNameœ: œtopicœ, œidœ: œPrompt-824D7œ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" - }, - { - "className": "", - "data": { - "sourceHandle": { - "dataType": "TextInput", - "id": "TextInput-6QUGr", - "name": "text", - "output_types": [ - "Message" - ] - }, - "targetHandle": { - 
"fieldName": "topic", - "id": "Prompt-0vHob", - "inputTypes": [ - "Message", - "Text" - ], - "type": "str" - } - }, - "id": "reactflow__edge-TextInput-6QUGr{œdataTypeœ:œTextInputœ,œidœ:œTextInput-6QUGrœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-Prompt-0vHob{œfieldNameœ:œtopicœ,œidœ:œPrompt-0vHobœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", - "source": "TextInput-6QUGr", - "sourceHandle": "{œdataTypeœ: œTextInputœ, œidœ: œTextInput-6QUGrœ, œnameœ: œtextœ, œoutput_typesœ: [œMessageœ]}", - "target": "Prompt-0vHob", - "targetHandle": "{œfieldNameœ: œtopicœ, œidœ: œPrompt-0vHobœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" - }, - { - "className": "", - "data": { - "sourceHandle": { - "dataType": "Prompt", - "id": "Prompt-GOdlL", - "name": "prompt", - "output_types": [ - "Message" - ] - }, - "targetHandle": { - "fieldName": "task_description", - "id": "SequentialTaskAgentComponent-GWMA1", - "inputTypes": [ - "Message" - ], - "type": "str" - } - }, - "id": "reactflow__edge-Prompt-GOdlL{œdataTypeœ:œPromptœ,œidœ:œPrompt-GOdlLœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-SequentialTaskAgentComponent-GWMA1{œfieldNameœ:œtask_descriptionœ,œidœ:œSequentialTaskAgentComponent-GWMA1œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", - "source": "Prompt-GOdlL", - "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-GOdlLœ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", - "target": "SequentialTaskAgentComponent-GWMA1", - "targetHandle": "{œfieldNameœ: œtask_descriptionœ, œidœ: œSequentialTaskAgentComponent-GWMA1œ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" - }, - { - "className": "", - "data": { - "sourceHandle": { - "dataType": "OpenAIModel", - "id": "OpenAIModel-lQ5HF", - "name": "model_output", - "output_types": [ - "LanguageModel" - ] - }, - "targetHandle": { - "fieldName": "llm", - "id": "SequentialTaskAgentComponent-GWMA1", - "inputTypes": [ - "LanguageModel" - ], - "type": "other" - } - }, - "id": "reactflow__edge-OpenAIModel-lQ5HF{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-lQ5HFœ,œnameœ:œmodel_outputœ,œoutput_typesœ:[œLanguageModelœ]}-SequentialTaskAgentComponent-GWMA1{œfieldNameœ:œllmœ,œidœ:œSequentialTaskAgentComponent-GWMA1œ,œinputTypesœ:[œLanguageModelœ],œtypeœ:œotherœ}", - "source": "OpenAIModel-lQ5HF", - "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-lQ5HFœ, œnameœ: œmodel_outputœ, œoutput_typesœ: [œLanguageModelœ]}", - "target": "SequentialTaskAgentComponent-GWMA1", - "targetHandle": "{œfieldNameœ: œllmœ, œidœ: œSequentialTaskAgentComponent-GWMA1œ, œinputTypesœ: [œLanguageModelœ], œtypeœ: œotherœ}" - }, - { - "data": { - "sourceHandle": { - "dataType": "OpenAIModel", - "id": "OpenAIModel-lQ5HF", - "name": "model_output", - "output_types": [ - "LanguageModel" - ] - }, - "targetHandle": { - "fieldName": "llm", - "id": "SequentialTaskAgentComponent-5i4Wg", - "inputTypes": [ - "LanguageModel" - ], - "type": "other" - } - }, - "id": "reactflow__edge-OpenAIModel-lQ5HF{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-lQ5HFœ,œnameœ:œmodel_outputœ,œoutput_typesœ:[œLanguageModelœ]}-SequentialTaskAgentComponent-5i4Wg{œfieldNameœ:œllmœ,œidœ:œSequentialTaskAgentComponent-5i4Wgœ,œinputTypesœ:[œLanguageModelœ],œtypeœ:œotherœ}", - "source": "OpenAIModel-lQ5HF", - "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-lQ5HFœ, œnameœ: œmodel_outputœ, œoutput_typesœ: [œLanguageModelœ]}", - "target": "SequentialTaskAgentComponent-5i4Wg", - "targetHandle": "{œfieldNameœ: œllmœ, œidœ: œSequentialTaskAgentComponent-5i4Wgœ, œinputTypesœ: [œLanguageModelœ], œtypeœ: œotherœ}" - }, - { - "data": 
{ - "sourceHandle": { - "dataType": "Prompt", - "id": "Prompt-824D7", - "name": "prompt", - "output_types": [ - "Message" - ] - }, - "targetHandle": { - "fieldName": "task_description", - "id": "SequentialTaskAgentComponent-5i4Wg", - "inputTypes": [ - "Message" - ], - "type": "str" - } - }, - "id": "reactflow__edge-Prompt-824D7{œdataTypeœ:œPromptœ,œidœ:œPrompt-824D7œ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-SequentialTaskAgentComponent-5i4Wg{œfieldNameœ:œtask_descriptionœ,œidœ:œSequentialTaskAgentComponent-5i4Wgœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", - "source": "Prompt-824D7", - "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-824D7œ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", - "target": "SequentialTaskAgentComponent-5i4Wg", - "targetHandle": "{œfieldNameœ: œtask_descriptionœ, œidœ: œSequentialTaskAgentComponent-5i4Wgœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" - }, - { - "data": { - "sourceHandle": { - "dataType": "SequentialTaskAgentComponent", - "id": "SequentialTaskAgentComponent-GWMA1", - "name": "task_output", - "output_types": [ - "SequentialTask" - ] - }, - "targetHandle": { - "fieldName": "previous_task", - "id": "SequentialTaskAgentComponent-5i4Wg", - "inputTypes": [ - "SequentialTask" - ], - "type": "other" - } - }, - "id": "reactflow__edge-SequentialTaskAgentComponent-GWMA1{œdataTypeœ:œSequentialTaskAgentComponentœ,œidœ:œSequentialTaskAgentComponent-GWMA1œ,œnameœ:œtask_outputœ,œoutput_typesœ:[œSequentialTaskœ]}-SequentialTaskAgentComponent-5i4Wg{œfieldNameœ:œprevious_taskœ,œidœ:œSequentialTaskAgentComponent-5i4Wgœ,œinputTypesœ:[œSequentialTaskœ],œtypeœ:œotherœ}", - "source": "SequentialTaskAgentComponent-GWMA1", - "sourceHandle": "{œdataTypeœ: œSequentialTaskAgentComponentœ, œidœ: œSequentialTaskAgentComponent-GWMA1œ, œnameœ: œtask_outputœ, œoutput_typesœ: [œSequentialTaskœ]}", - "target": "SequentialTaskAgentComponent-5i4Wg", - "targetHandle": "{œfieldNameœ: œprevious_taskœ, œidœ: œSequentialTaskAgentComponent-5i4Wgœ, œinputTypesœ: [œSequentialTaskœ], œtypeœ: œotherœ}" - }, - { - "data": { - "sourceHandle": { - "dataType": "SequentialTaskAgentComponent", - "id": "SequentialTaskAgentComponent-5i4Wg", - "name": "task_output", - "output_types": [ - "SequentialTask" - ] - }, - "targetHandle": { - "fieldName": "previous_task", - "id": "SequentialTaskAgentComponent-TPEWE", - "inputTypes": [ - "SequentialTask" - ], - "type": "other" - } - }, - "id": "reactflow__edge-SequentialTaskAgentComponent-5i4Wg{œdataTypeœ:œSequentialTaskAgentComponentœ,œidœ:œSequentialTaskAgentComponent-5i4Wgœ,œnameœ:œtask_outputœ,œoutput_typesœ:[œSequentialTaskœ]}-SequentialTaskAgentComponent-TPEWE{œfieldNameœ:œprevious_taskœ,œidœ:œSequentialTaskAgentComponent-TPEWEœ,œinputTypesœ:[œSequentialTaskœ],œtypeœ:œotherœ}", - "source": "SequentialTaskAgentComponent-5i4Wg", - "sourceHandle": "{œdataTypeœ: œSequentialTaskAgentComponentœ, œidœ: œSequentialTaskAgentComponent-5i4Wgœ, œnameœ: œtask_outputœ, œoutput_typesœ: [œSequentialTaskœ]}", - "target": "SequentialTaskAgentComponent-TPEWE", - "targetHandle": "{œfieldNameœ: œprevious_taskœ, œidœ: œSequentialTaskAgentComponent-TPEWEœ, œinputTypesœ: [œSequentialTaskœ], œtypeœ: œotherœ}" - }, - { - "data": { - "sourceHandle": { - "dataType": "Prompt", - "id": "Prompt-0vHob", - "name": "prompt", - "output_types": [ - "Message" - ] - }, - "targetHandle": { - "fieldName": "task_description", - "id": "SequentialTaskAgentComponent-TPEWE", - "inputTypes": [ - "Message" - ], - "type": "str" - } - }, - "id": 
"reactflow__edge-Prompt-0vHob{œdataTypeœ:œPromptœ,œidœ:œPrompt-0vHobœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-SequentialTaskAgentComponent-TPEWE{œfieldNameœ:œtask_descriptionœ,œidœ:œSequentialTaskAgentComponent-TPEWEœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", - "source": "Prompt-0vHob", - "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-0vHobœ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", - "target": "SequentialTaskAgentComponent-TPEWE", - "targetHandle": "{œfieldNameœ: œtask_descriptionœ, œidœ: œSequentialTaskAgentComponent-TPEWEœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" - }, - { - "data": { - "sourceHandle": { - "dataType": "SequentialTaskAgentComponent", - "id": "SequentialTaskAgentComponent-TPEWE", - "name": "task_output", - "output_types": [ - "SequentialTask" - ] - }, - "targetHandle": { - "fieldName": "tasks", - "id": "SequentialCrewComponent-3dbbB", - "inputTypes": [ - "SequentialTask" - ], - "type": "other" - } - }, - "id": "reactflow__edge-SequentialTaskAgentComponent-TPEWE{œdataTypeœ:œSequentialTaskAgentComponentœ,œidœ:œSequentialTaskAgentComponent-TPEWEœ,œnameœ:œtask_outputœ,œoutput_typesœ:[œSequentialTaskœ]}-SequentialCrewComponent-3dbbB{œfieldNameœ:œtasksœ,œidœ:œSequentialCrewComponent-3dbbBœ,œinputTypesœ:[œSequentialTaskœ],œtypeœ:œotherœ}", - "source": "SequentialTaskAgentComponent-TPEWE", - "sourceHandle": "{œdataTypeœ: œSequentialTaskAgentComponentœ, œidœ: œSequentialTaskAgentComponent-TPEWEœ, œnameœ: œtask_outputœ, œoutput_typesœ: [œSequentialTaskœ]}", - "target": "SequentialCrewComponent-3dbbB", - "targetHandle": "{œfieldNameœ: œtasksœ, œidœ: œSequentialCrewComponent-3dbbBœ, œinputTypesœ: [œSequentialTaskœ], œtypeœ: œotherœ}" - }, - { - "data": { - "sourceHandle": { - "dataType": "OpenAIModel", - "id": "OpenAIModel-lQ5HF", - "name": "model_output", - "output_types": [ - "LanguageModel" - ] - }, - "targetHandle": { - "fieldName": "llm", - "id": "SequentialTaskAgentComponent-TPEWE", - "inputTypes": [ - "LanguageModel" - ], - "type": "other" - } - }, - "id": "reactflow__edge-OpenAIModel-lQ5HF{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-lQ5HFœ,œnameœ:œmodel_outputœ,œoutput_typesœ:[œLanguageModelœ]}-SequentialTaskAgentComponent-TPEWE{œfieldNameœ:œllmœ,œidœ:œSequentialTaskAgentComponent-TPEWEœ,œinputTypesœ:[œLanguageModelœ],œtypeœ:œotherœ}", - "source": "OpenAIModel-lQ5HF", - "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-lQ5HFœ, œnameœ: œmodel_outputœ, œoutput_typesœ: [œLanguageModelœ]}", - "target": "SequentialTaskAgentComponent-TPEWE", - "targetHandle": "{œfieldNameœ: œllmœ, œidœ: œSequentialTaskAgentComponent-TPEWEœ, œinputTypesœ: [œLanguageModelœ], œtypeœ: œotherœ}" - }, - { - "data": { - "sourceHandle": { - "dataType": "YFinanceTool", - "id": "YFinanceTool-Asoka", - "name": "tool", - "output_types": [ - "Tool" - ] - }, - "targetHandle": { - "fieldName": "tools", - "id": "SequentialTaskAgentComponent-GWMA1", - "inputTypes": [ - "Tool" - ], - "type": "other" - } - }, - "id": "reactflow__edge-YFinanceTool-Asoka{œdataTypeœ:œYFinanceToolœ,œidœ:œYFinanceTool-Asokaœ,œnameœ:œtoolœ,œoutput_typesœ:[œToolœ]}-SequentialTaskAgentComponent-GWMA1{œfieldNameœ:œtoolsœ,œidœ:œSequentialTaskAgentComponent-GWMA1œ,œinputTypesœ:[œToolœ],œtypeœ:œotherœ}", - "source": "YFinanceTool-Asoka", - "sourceHandle": "{œdataTypeœ: œYFinanceToolœ, œidœ: œYFinanceTool-Asokaœ, œnameœ: œtoolœ, œoutput_typesœ: [œToolœ]}", - "target": "SequentialTaskAgentComponent-GWMA1", - "targetHandle": "{œfieldNameœ: œtoolsœ, œidœ: œSequentialTaskAgentComponent-GWMA1œ, œinputTypesœ: [œToolœ], 
œtypeœ: œotherœ}" - } - ], - "nodes": [ - { - "data": { - "description": "Represents a group of agents, defining how they should collaborate and the tasks they should perform.", - "display_name": "Sequential Crew", - "id": "SequentialCrewComponent-3dbbB", - "node": { - "base_classes": [ - "Message" - ], - "beta": false, - "conditional_paths": [], - "custom_fields": {}, - "description": "Represents a group of agents with tasks that are executed sequentially.", - "display_name": "Sequential Crew", - "documentation": "https://docs.crewai.com/how-to/LLM-Connections/", - "edited": false, - "field_order": [ - "verbose", - "memory", - "use_cache", - "max_rpm", - "share_crew", - "function_calling_llm", - "tasks" - ], - "frozen": false, - "icon": "CrewAI", - "lf_version": "1.0.15", - "output_types": [], - "outputs": [ - { - "cache": true, - "display_name": "Output", - "method": "build_output", - "name": "output", - "selected": "Message", - "types": [ - "Message" - ], - "value": "__UNDEFINED__" - } - ], - "pinned": false, - "template": { - "_type": "Component", - "code": { - "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", - "info": "", - "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, - "placeholder": "", - "required": true, - "show": true, - "title_case": false, - "type": "code", - "value": "from crewai import Agent, Crew, Process, Task # type: ignore\n\nfrom langflow.base.agents.crewai.crew import BaseCrewComponent\nfrom langflow.io import HandleInput\nfrom langflow.schema.message import Message\n\n\nclass SequentialCrewComponent(BaseCrewComponent):\n display_name: str = \"Sequential Crew\"\n description: str = \"Represents a group of agents with tasks that are executed sequentially.\"\n documentation: str = \"https://docs.crewai.com/how-to/Sequential/\"\n icon = \"CrewAI\"\n\n inputs = BaseCrewComponent._base_inputs + [\n HandleInput(name=\"tasks\", display_name=\"Tasks\", input_types=[\"SequentialTask\"], is_list=True),\n ]\n\n def get_tasks_and_agents(self) -> tuple[list[Task], list[Agent]]:\n return self.tasks, [task.agent for task in self.tasks]\n\n def build_crew(self) -> Message:\n tasks, agents = self.get_tasks_and_agents()\n crew = Crew(\n agents=agents,\n tasks=tasks,\n process=Process.sequential,\n verbose=self.verbose,\n memory=self.memory,\n cache=self.use_cache,\n max_rpm=self.max_rpm,\n share_crew=self.share_crew,\n function_calling_llm=self.function_calling_llm,\n step_callback=self.get_step_callback(),\n task_callback=self.get_task_callback(),\n )\n return crew\n" - }, - "function_calling_llm": { - "advanced": true, - "display_name": "Function Calling LLM", - "dynamic": false, - "info": "Turns the ReAct CrewAI agent into a function-calling agent", - "input_types": [ - "LanguageModel" - ], - "list": false, - "name": "function_calling_llm", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "other", - "value": "" - }, - "max_rpm": { - "advanced": true, - "display_name": "Max RPM", - "dynamic": false, - "info": "", - "list": false, - "name": "max_rpm", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "int", - "value": 100 - }, - "memory": { - "advanced": true, - "display_name": "Memory", - "dynamic": false, - "info": "", - "list": false, - "name": "memory", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - 
"type": "bool", - "value": false - }, - "share_crew": { - "advanced": true, - "display_name": "Share Crew", - "dynamic": false, - "info": "", - "list": false, - "name": "share_crew", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": false - }, - "tasks": { - "advanced": false, - "display_name": "Tasks", - "dynamic": false, - "info": "", - "input_types": [ - "SequentialTask" - ], - "list": true, - "name": "tasks", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "other", - "value": "" - }, - "use_cache": { - "advanced": true, - "display_name": "Cache", - "dynamic": false, - "info": "", - "list": false, - "name": "use_cache", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": true - }, - "verbose": { - "advanced": true, - "display_name": "Verbose", - "dynamic": false, - "info": "", - "list": false, - "name": "verbose", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "int", - "value": 0 - } - } - }, - "type": "SequentialCrewComponent" - }, - "dragging": false, - "height": 284, - "id": "SequentialCrewComponent-3dbbB", - "position": { - "x": 1452.9740869513873, - "y": 217.5447804074488 - }, - "positionAbsolute": { - "x": 1452.9740869513873, - "y": 217.5447804074488 - }, - "selected": false, - "type": "genericNode", - "width": 384 - }, - { - "data": { - "id": "OpenAIModel-lQ5HF", - "node": { - "base_classes": [ - "LanguageModel", - "Message" - ], - "beta": false, - "conditional_paths": [], - "custom_fields": {}, - "description": "Generates text using OpenAI LLMs.", - "display_name": "OpenAI", - "documentation": "", - "edited": false, - "field_order": [ - "input_value", - "max_tokens", - "model_kwargs", - "json_mode", - "output_schema", - "model_name", - "openai_api_base", - "openai_api_key", - "temperature", - "stream", - "system_message", - "seed" - ], - "frozen": false, - "icon": "OpenAI", - "lf_version": "1.0.15", - "output_types": [], - "outputs": [ - { - "cache": true, - "display_name": "Text", - "method": "text_response", - "name": "text_output", - "selected": "Message", - "types": [ - "Message" - ], - "value": "__UNDEFINED__" - }, - { - "cache": true, - "display_name": "Language Model", - "method": "build_model", - "name": "model_output", - "selected": "LanguageModel", - "types": [ - "LanguageModel" - ], - "value": "__UNDEFINED__" - } - ], - "pinned": false, - "template": { - "_type": "Component", - "api_key": { - "_input_type": "SecretStrInput", - "advanced": false, - "display_name": "OpenAI API Key", - "dynamic": false, - "info": "The OpenAI API Key to use for the OpenAI model.", - "input_types": [ - "Message" - ], - "load_from_db": true, - "name": "api_key", - "password": true, - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "type": "str", - "value": "" - }, - "code": { - "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", - "info": "", - "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, - "placeholder": "", - "required": true, - "show": true, - "title_case": false, - "type": "code", - "value": "import operator\nfrom functools import reduce\n\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import 
SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature if temperature is not None else 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output # type: ignore\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if 
isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" - }, - "input_value": { - "advanced": false, - "display_name": "Input", - "dynamic": false, - "info": "", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "name": "input_value", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "json_mode": { - "advanced": true, - "display_name": "JSON Mode", - "dynamic": false, - "info": "If True, it will output JSON regardless of passing a schema.", - "list": false, - "name": "json_mode", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": false - }, - "max_tokens": { - "advanced": true, - "display_name": "Max Tokens", - "dynamic": false, - "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.", - "list": false, - "name": "max_tokens", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "int", - "value": "" - }, - "model_kwargs": { - "advanced": true, - "display_name": "Model Kwargs", - "dynamic": false, - "info": "", - "list": false, - "name": "model_kwargs", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "type": "dict", - "value": {} - }, - "model_name": { - "advanced": false, - "display_name": "Model Name", - "dynamic": false, - "info": "", - "name": "model_name", - "options": [ - "gpt-4o-mini", - "gpt-4o", - "gpt-4-turbo", - "gpt-4-turbo-preview", - "gpt-4", - "gpt-3.5-turbo", - "gpt-3.5-turbo-0125" - ], - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "str", - "value": "gpt-3.5-turbo" - }, - "openai_api_base": { - "advanced": true, - "display_name": "OpenAI API Base", - "dynamic": false, - "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.", - "list": false, - "load_from_db": false, - "name": "openai_api_base", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "output_schema": { - "advanced": true, - "display_name": "Schema", - "dynamic": false, - "info": "The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.", - "list": true, - "name": "output_schema", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "type": "dict", - "value": {} - }, - "seed": { - "advanced": true, - "display_name": "Seed", - "dynamic": false, - "info": "The seed controls the reproducibility of the job.", - "list": false, - "name": "seed", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "int", - "value": 1 - }, - "stream": { - "advanced": true, - "display_name": "Stream", - "dynamic": false, - "info": "Stream the response from the model. 
Streaming works only in Chat.", - "list": false, - "name": "stream", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": false - }, - "system_message": { - "advanced": true, - "display_name": "System Message", - "dynamic": false, - "info": "System message to pass to the model.", - "list": false, - "load_from_db": false, - "name": "system_message", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "temperature": { - "advanced": false, - "display_name": "Temperature", - "dynamic": false, - "info": "", - "list": false, - "name": "temperature", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "float", - "value": 0.1 - } - } - }, - "type": "OpenAIModel" - }, - "dragging": false, - "height": 601, - "id": "OpenAIModel-lQ5HF", - "position": { - "x": -2046.2369515771168, - "y": -396.97559934517443 - }, - "positionAbsolute": { - "x": -2046.2369515771168, - "y": -396.97559934517443 - }, - "selected": false, - "type": "genericNode", - "width": 384 - }, - { - "data": { - "description": "Display a chat message in the Playground.", - "display_name": "Chat Output", - "id": "ChatOutput-nwCjg", - "node": { - "base_classes": [ - "Message" - ], - "beta": false, - "conditional_paths": [], - "custom_fields": {}, - "description": "Display a chat message in the Playground.", - "display_name": "Chat Output", - "documentation": "", - "edited": false, - "field_order": [ - "input_value", - "should_store_message", - "sender", - "sender_name", - "session_id", - "data_template" - ], - "frozen": false, - "icon": "ChatOutput", - "lf_version": "1.0.15", - "output_types": [], - "outputs": [ - { - "cache": true, - "display_name": "Message", - "method": "message_response", - "name": "message", - "selected": "Message", - "types": [ - "Message" - ], - "value": "__UNDEFINED__" - } - ], - "pinned": false, - "template": { - "_type": "Component", - "code": { - "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", - "info": "", - "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, - "placeholder": "", - "required": true, - "show": true, - "title_case": false, - "type": "code", - "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER, MESSAGE_SENDER_AI\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n 
value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n" - }, - "data_template": { - "advanced": true, - "display_name": "Data Template", - "dynamic": false, - "info": "Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "name": "data_template", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "{text}" - }, - "input_value": { - "advanced": false, - "display_name": "Text", - "dynamic": false, - "info": "Message to be passed as output.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "name": "input_value", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "sender": { - "advanced": true, - "display_name": "Sender Type", - "dynamic": false, - "info": "Type of sender.", - "name": "sender", - "options": [ - "Machine", - "User" - ], - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "str", - "value": "Machine" - }, - "sender_name": { - "advanced": true, - "display_name": "Sender Name", - "dynamic": false, - "info": "Name of the sender.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "name": "sender_name", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "AI" - }, - "session_id": { - "advanced": true, - "display_name": "Session ID", - "dynamic": false, - "info": "The session ID of the chat. 
If empty, the current session ID parameter will be used.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "name": "session_id", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "should_store_message": { - "advanced": true, - "display_name": "Store Messages", - "dynamic": false, - "info": "Store the message in the history.", - "list": false, - "name": "should_store_message", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": true - } - } - }, - "type": "ChatOutput" - }, - "dragging": false, - "height": 298, - "id": "ChatOutput-nwCjg", - "position": { - "x": 1938.1856451557874, - "y": 227.4117341237682 - }, - "selected": false, - "type": "genericNode", - "width": 384 - }, - { - "data": { - "id": "TextInput-6QUGr", - "node": { - "base_classes": [ - "Message" - ], - "beta": false, - "conditional_paths": [], - "custom_fields": {}, - "description": "Get text inputs from the Playground.", - "display_name": "Topic", - "documentation": "", - "edited": false, - "field_order": [ - "input_value" - ], - "frozen": false, - "icon": "type", - "lf_version": "1.0.15", - "output_types": [], - "outputs": [ - { - "cache": true, - "display_name": "Text", - "method": "text_response", - "name": "text", - "selected": "Message", - "types": [ - "Message" - ], - "value": "__UNDEFINED__" - } - ], - "pinned": false, - "template": { - "_type": "Component", - "code": { - "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", - "info": "", - "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, - "placeholder": "", - "required": true, - "show": true, - "title_case": false, - "type": "code", - "value": "from langflow.base.io.text import TextComponent\nfrom langflow.io import MessageTextInput, Output\nfrom langflow.schema.message import Message\n\n\nclass TextInputComponent(TextComponent):\n display_name = \"Text Input\"\n description = \"Get text inputs from the Playground.\"\n icon = \"type\"\n name = \"TextInput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Text to be passed as input.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Text\", name=\"text\", method=\"text_response\"),\n ]\n\n def text_response(self) -> Message:\n message = Message(\n text=self.input_value,\n )\n return message\n" - }, - "input_value": { - "advanced": false, - "display_name": "Text", - "dynamic": false, - "info": "Text to be passed as input.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "name": "input_value", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "Agile" - } - } - }, - "type": "TextInput" - }, - "dragging": false, - "height": 298, - "id": "TextInput-6QUGr", - "position": { - "x": -2044.1039646921665, - "y": 291.77565151149054 - }, - "positionAbsolute": { - "x": -2044.1039646921665, - "y": 291.77565151149054 - }, - "selected": false, - "type": "genericNode", - "width": 384 - }, - { - "data": { - "description": "Create a prompt template with dynamic variables.", - "display_name": "Prompt", - "id": "Prompt-GOdlL", - "node": { - "base_classes": [ - "Message" - ], - "beta": false, - "conditional_paths": [], - "custom_fields": { - 
"template": [ - "topic" - ] - }, - "description": "Create a prompt template with dynamic variables.", - "display_name": "Prompt", - "documentation": "", - "edited": false, - "field_order": [ - "template" - ], - "frozen": false, - "icon": "prompts", - "lf_version": "1.0.15", - "output_types": [], - "outputs": [ - { - "cache": true, - "display_name": "Prompt Message", - "method": "build_prompt", - "name": "prompt", - "selected": "Message", - "types": [ - "Message" - ], - "value": "__UNDEFINED__" - } - ], - "pinned": false, - "template": { - "_type": "Component", - "code": { - "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", - "info": "", - "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, - "placeholder": "", - "required": true, - "show": true, - "title_case": false, - "type": "code", - "value": "from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.inputs.inputs import DefaultPromptField\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(\n self,\n ) -> Message:\n prompt = await Message.from_template_and_variables(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def _update_template(self, frontend_node: dict):\n prompt_template = frontend_node[\"template\"][\"template\"][\"value\"]\n custom_fields = frontend_node[\"custom_fields\"]\n frontend_node_template = frontend_node[\"template\"]\n _ = process_prompt_template(\n template=prompt_template,\n name=\"template\",\n custom_fields=custom_fields,\n frontend_node_template=frontend_node_template,\n )\n return frontend_node\n\n def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"\n This function is called after the code validation is done.\n \"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n # Kept it duplicated for backwards compatibility\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n\n def _get_fallback_input(self, **kwargs):\n return DefaultPromptField(**kwargs)\n" - }, - "template": { - "advanced": false, - "display_name": "Template", - "dynamic": false, - "info": "", - "list": false, - "load_from_db": false, - "name": "template", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "type": "prompt", - "value": "Topic: {topic}\n\nBuild a document about this document." 
- }, - "topic": { - "advanced": false, - "display_name": "topic", - "dynamic": false, - "field_type": "str", - "fileTypes": [], - "file_path": "", - "info": "", - "input_types": [ - "Message", - "Text" - ], - "list": false, - "load_from_db": false, - "multiline": true, - "name": "topic", - "password": false, - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "type": "str", - "value": "" - } - } - }, - "type": "Prompt" - }, - "dragging": false, - "height": 412, - "id": "Prompt-GOdlL", - "position": { - "x": -1154.4124217561132, - "y": 813.2475923059123 - }, - "positionAbsolute": { - "x": -1154.4124217561132, - "y": 813.2475923059123 - }, - "selected": false, - "type": "genericNode", - "width": 384 - }, - { - "data": { - "description": "Create a prompt template with dynamic variables.", - "display_name": "Prompt", - "id": "Prompt-824D7", - "node": { - "base_classes": [ - "Message" - ], - "beta": false, - "conditional_paths": [], - "custom_fields": { - "template": [ - "topic" - ] - }, - "description": "Create a prompt template with dynamic variables.", - "display_name": "Prompt", - "documentation": "", - "edited": false, - "field_order": [ - "template" - ], - "frozen": false, - "icon": "prompts", - "lf_version": "1.0.15", - "output_types": [], - "outputs": [ - { - "cache": true, - "display_name": "Prompt Message", - "method": "build_prompt", - "name": "prompt", - "selected": "Message", - "types": [ - "Message" - ], - "value": "__UNDEFINED__" - } - ], - "pinned": false, - "template": { - "_type": "Component", - "code": { - "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", - "info": "", - "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, - "placeholder": "", - "required": true, - "show": true, - "title_case": false, - "type": "code", - "value": "from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.inputs.inputs import DefaultPromptField\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(\n self,\n ) -> Message:\n prompt = await Message.from_template_and_variables(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def _update_template(self, frontend_node: dict):\n prompt_template = frontend_node[\"template\"][\"template\"][\"value\"]\n custom_fields = frontend_node[\"custom_fields\"]\n frontend_node_template = frontend_node[\"template\"]\n _ = process_prompt_template(\n template=prompt_template,\n name=\"template\",\n custom_fields=custom_fields,\n frontend_node_template=frontend_node_template,\n )\n return frontend_node\n\n def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"\n This function is called after the code validation is done.\n \"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n # Kept it duplicated for backwards compatibility\n _ 
= process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n\n def _get_fallback_input(self, **kwargs):\n return DefaultPromptField(**kwargs)\n" - }, - "template": { - "advanced": false, - "display_name": "Template", - "dynamic": false, - "info": "", - "list": false, - "load_from_db": false, - "name": "template", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "type": "prompt", - "value": "Topic: {topic}\n\nRevise this document." - }, - "topic": { - "advanced": false, - "display_name": "topic", - "dynamic": false, - "field_type": "str", - "fileTypes": [], - "file_path": "", - "info": "", - "input_types": [ - "Message", - "Text" - ], - "list": false, - "load_from_db": false, - "multiline": true, - "name": "topic", - "password": false, - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "type": "str", - "value": "" - } - } - }, - "type": "Prompt" - }, - "dragging": false, - "height": 412, - "id": "Prompt-824D7", - "position": { - "x": -369.56336473301, - "y": 790.2887357303061 - }, - "positionAbsolute": { - "x": -369.56336473301, - "y": 790.2887357303061 - }, - "selected": false, - "type": "genericNode", - "width": 384 - }, - { - "data": { - "description": "Create a prompt template with dynamic variables.", - "display_name": "Prompt", - "id": "Prompt-0vHob", - "node": { - "base_classes": [ - "Message" - ], - "beta": false, - "conditional_paths": [], - "custom_fields": { - "template": [ - "topic" - ] - }, - "description": "Create a prompt template with dynamic variables.", - "display_name": "Prompt", - "documentation": "", - "edited": false, - "field_order": [ - "template" - ], - "frozen": false, - "icon": "prompts", - "lf_version": "1.0.15", - "output_types": [], - "outputs": [ - { - "cache": true, - "display_name": "Prompt Message", - "method": "build_prompt", - "name": "prompt", - "selected": "Message", - "types": [ - "Message" - ], - "value": "__UNDEFINED__" - } - ], - "pinned": false, - "template": { - "_type": "Component", - "code": { - "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", - "info": "", - "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, - "placeholder": "", - "required": true, - "show": true, - "title_case": false, - "type": "code", - "value": "from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.inputs.inputs import DefaultPromptField\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(\n self,\n ) -> Message:\n prompt = await 
Message.from_template_and_variables(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def _update_template(self, frontend_node: dict):\n prompt_template = frontend_node[\"template\"][\"template\"][\"value\"]\n custom_fields = frontend_node[\"custom_fields\"]\n frontend_node_template = frontend_node[\"template\"]\n _ = process_prompt_template(\n template=prompt_template,\n name=\"template\",\n custom_fields=custom_fields,\n frontend_node_template=frontend_node_template,\n )\n return frontend_node\n\n def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"\n This function is called after the code validation is done.\n \"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n # Kept it duplicated for backwards compatibility\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n\n def _get_fallback_input(self, **kwargs):\n return DefaultPromptField(**kwargs)\n" - }, - "template": { - "advanced": false, - "display_name": "Template", - "dynamic": false, - "info": "", - "list": false, - "load_from_db": false, - "name": "template", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "type": "prompt", - "value": "Topic: {topic}\n\nBuild a fun blog post about this topic." 
- }, - "topic": { - "advanced": false, - "display_name": "topic", - "dynamic": false, - "field_type": "str", - "fileTypes": [], - "file_path": "", - "info": "", - "input_types": [ - "Message", - "Text" - ], - "list": false, - "load_from_db": false, - "multiline": true, - "name": "topic", - "password": false, - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "type": "str", - "value": "" - } - } - }, - "type": "Prompt" - }, - "dragging": false, - "height": 412, - "id": "Prompt-0vHob", - "position": { - "x": 383.48176594858205, - "y": 804.7835051646966 - }, - "positionAbsolute": { - "x": 383.48176594858205, - "y": 804.7835051646966 - }, - "selected": false, - "type": "genericNode", - "width": 384 - }, - { - "data": { - "id": "SequentialTaskAgentComponent-GWMA1", - "node": { - "base_classes": [ - "SequentialTask" - ], - "beta": false, - "conditional_paths": [], - "custom_fields": {}, - "description": "Creates a CrewAI Task and its associated Agent.", - "display_name": "Sequential Task Agent", - "documentation": "https://docs.crewai.com/how-to/LLM-Connections/", - "edited": false, - "field_order": [ - "role", - "goal", - "backstory", - "tools", - "llm", - "memory", - "verbose", - "allow_delegation", - "allow_code_execution", - "agent_kwargs", - "task_description", - "expected_output", - "async_execution", - "previous_task" - ], - "frozen": false, - "icon": "CrewAI", - "lf_version": "1.0.15", - "output_types": [], - "outputs": [ - { - "cache": true, - "display_name": "Sequential Task", - "method": "build_agent_and_task", - "name": "task_output", - "selected": "SequentialTask", - "types": [ - "SequentialTask" - ], - "value": "__UNDEFINED__" - } - ], - "pinned": false, - "template": { - "_type": "Component", - "agent_kwargs": { - "_input_type": "DictInput", - "advanced": true, - "display_name": "Agent kwargs", - "dynamic": false, - "info": "Additional kwargs for the agent.", - "list": true, - "name": "agent_kwargs", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "type": "dict", - "value": {} - }, - "allow_code_execution": { - "_input_type": "BoolInput", - "advanced": true, - "display_name": "Allow Code Execution", - "dynamic": false, - "info": "Whether the agent is allowed to execute code.", - "list": false, - "name": "allow_code_execution", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": false - }, - "allow_delegation": { - "_input_type": "BoolInput", - "advanced": true, - "display_name": "Allow Delegation", - "dynamic": false, - "info": "Whether the agent is allowed to delegate tasks to other agents.", - "list": false, - "name": "allow_delegation", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": false - }, - "async_execution": { - "_input_type": "BoolInput", - "advanced": true, - "display_name": "Async Execution", - "dynamic": false, - "info": "Boolean flag indicating asynchronous task execution.", - "list": false, - "name": "async_execution", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": false - }, - "backstory": { - "_input_type": "MultilineInput", - "advanced": false, - "display_name": "Backstory", - "dynamic": false, - "info": "The backstory of the agent.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": 
false, - "multiline": true, - "name": "backstory", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "Research has always been your thing. You can quickly find things on the web because of your skills." - }, - "code": { - "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", - "info": "", - "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, - "placeholder": "", - "required": true, - "show": true, - "title_case": false, - "type": "code", - "value": "from crewai import Agent, Task\n\nfrom langflow.base.agents.crewai.tasks import SequentialTask\nfrom langflow.custom import Component\nfrom langflow.io import BoolInput, DictInput, HandleInput, MultilineInput, Output\n\n\nclass SequentialTaskAgentComponent(Component):\n display_name = \"Sequential Task Agent\"\n description = \"Creates a CrewAI Task and its associated Agent.\"\n documentation = \"https://docs.crewai.com/how-to/LLM-Connections/\"\n icon = \"CrewAI\"\n\n inputs = [\n # Agent inputs\n MultilineInput(name=\"role\", display_name=\"Role\", info=\"The role of the agent.\"),\n MultilineInput(name=\"goal\", display_name=\"Goal\", info=\"The objective of the agent.\"),\n MultilineInput(\n name=\"backstory\",\n display_name=\"Backstory\",\n info=\"The backstory of the agent.\",\n ),\n HandleInput(\n name=\"tools\",\n display_name=\"Tools\",\n input_types=[\"Tool\"],\n is_list=True,\n info=\"Tools at agent's disposal\",\n value=[],\n ),\n HandleInput(\n name=\"llm\",\n display_name=\"Language Model\",\n info=\"Language model that will run the agent.\",\n input_types=[\"LanguageModel\"],\n ),\n BoolInput(\n name=\"memory\",\n display_name=\"Memory\",\n info=\"Whether the agent should have memory or not\",\n advanced=True,\n value=True,\n ),\n BoolInput(\n name=\"verbose\",\n display_name=\"Verbose\",\n advanced=True,\n value=True,\n ),\n BoolInput(\n name=\"allow_delegation\",\n display_name=\"Allow Delegation\",\n info=\"Whether the agent is allowed to delegate tasks to other agents.\",\n value=False,\n advanced=True,\n ),\n BoolInput(\n name=\"allow_code_execution\",\n display_name=\"Allow Code Execution\",\n info=\"Whether the agent is allowed to execute code.\",\n value=False,\n advanced=True,\n ),\n DictInput(\n name=\"agent_kwargs\",\n display_name=\"Agent kwargs\",\n info=\"Additional kwargs for the agent.\",\n is_list=True,\n advanced=True,\n ),\n # Task inputs\n MultilineInput(\n name=\"task_description\",\n display_name=\"Task Description\",\n info=\"Descriptive text detailing task's purpose and execution.\",\n ),\n MultilineInput(\n name=\"expected_output\",\n display_name=\"Expected Task Output\",\n info=\"Clear definition of expected task outcome.\",\n ),\n BoolInput(\n name=\"async_execution\",\n display_name=\"Async Execution\",\n value=False,\n advanced=True,\n info=\"Boolean flag indicating asynchronous task execution.\",\n ),\n # Chaining input\n HandleInput(\n name=\"previous_task\",\n display_name=\"Previous Task\",\n input_types=[\"SequentialTask\"],\n info=\"The previous task in the sequence (for chaining).\",\n required=False,\n ),\n ]\n\n outputs = [\n Output(\n display_name=\"Sequential Task\",\n name=\"task_output\",\n method=\"build_agent_and_task\",\n ),\n ]\n\n def build_agent_and_task(self) -> list[SequentialTask]:\n # Build the agent\n agent_kwargs = self.agent_kwargs or {}\n agent = Agent(\n role=self.role,\n goal=self.goal,\n 
backstory=self.backstory,\n llm=self.llm,\n verbose=self.verbose,\n memory=self.memory,\n tools=self.tools if self.tools else [],\n allow_delegation=self.allow_delegation,\n allow_code_execution=self.allow_code_execution,\n **agent_kwargs,\n )\n\n # Build the task\n task = Task(\n description=self.task_description,\n expected_output=self.expected_output,\n agent=agent,\n async_execution=self.async_execution,\n )\n\n # If there's a previous task, create a list of tasks\n if self.previous_task:\n if isinstance(self.previous_task, list):\n tasks = self.previous_task + [task]\n else:\n tasks = [self.previous_task, task]\n else:\n tasks = [task]\n\n self.status = f\"Agent: {repr(agent)}\\nTask: {repr(task)}\"\n return tasks\n" - }, - "expected_output": { - "_input_type": "MultilineInput", - "advanced": false, - "display_name": "Expected Task Output", - "dynamic": false, - "info": "Clear definition of expected task outcome.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "multiline": true, - "name": "expected_output", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "Bullet points and small phrases about the research topic." - }, - "goal": { - "_input_type": "MultilineInput", - "advanced": false, - "display_name": "Goal", - "dynamic": false, - "info": "The objective of the agent.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "multiline": true, - "name": "goal", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "Search Google to find information to complete the task." 
- }, - "llm": { - "_input_type": "HandleInput", - "advanced": false, - "display_name": "Language Model", - "dynamic": false, - "info": "Language model that will run the agent.", - "input_types": [ - "LanguageModel" - ], - "list": false, - "name": "llm", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "other", - "value": "" - }, - "memory": { - "_input_type": "BoolInput", - "advanced": true, - "display_name": "Memory", - "dynamic": false, - "info": "Whether the agent should have memory or not", - "list": false, - "name": "memory", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": true - }, - "previous_task": { - "_input_type": "HandleInput", - "advanced": false, - "display_name": "Previous Task", - "dynamic": false, - "info": "The previous task in the sequence (for chaining).", - "input_types": [ - "SequentialTask" - ], - "list": false, - "name": "previous_task", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "other", - "value": "" - }, - "role": { - "_input_type": "MultilineInput", - "advanced": false, - "display_name": "Role", - "dynamic": false, - "info": "The role of the agent.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "multiline": true, - "name": "role", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "Researcher" - }, - "task_description": { - "_input_type": "MultilineInput", - "advanced": false, - "display_name": "Task Description", - "dynamic": false, - "info": "Descriptive text detailing task's purpose and execution.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "multiline": true, - "name": "task_description", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "tools": { - "_input_type": "HandleInput", - "advanced": false, - "display_name": "Tools", - "dynamic": false, - "info": "Tools at agent's disposal", - "input_types": [ - "Tool" - ], - "list": true, - "name": "tools", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "other", - "value": [] - }, - "verbose": { - "_input_type": "BoolInput", - "advanced": true, - "display_name": "Verbose", - "dynamic": false, - "info": "", - "list": false, - "name": "verbose", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": true - } - } - }, - "type": "SequentialTaskAgentComponent" - }, - "dragging": false, - "height": 810, - "id": "SequentialTaskAgentComponent-GWMA1", - "position": { - "x": -742.6676461208307, - "y": -336.7987303380612 - }, - "positionAbsolute": { - "x": -742.6676461208307, - "y": -336.7987303380612 - }, - "selected": false, - "type": "genericNode", - "width": 384 - }, - { - "data": { - "id": "SequentialTaskAgentComponent-5i4Wg", - "node": { - "base_classes": [ - "SequentialTask" - ], - "beta": false, - "conditional_paths": [], - "custom_fields": {}, - "description": "Creates a CrewAI Task and its associated Agent.", - "display_name": "Sequential Task Agent", - "documentation": "https://docs.crewai.com/how-to/LLM-Connections/", - 
"edited": false, - "field_order": [ - "role", - "goal", - "backstory", - "tools", - "llm", - "memory", - "verbose", - "allow_delegation", - "allow_code_execution", - "agent_kwargs", - "task_description", - "expected_output", - "async_execution", - "previous_task" - ], - "frozen": false, - "icon": "CrewAI", - "lf_version": "1.0.15", - "output_types": [], - "outputs": [ - { - "cache": true, - "display_name": "Sequential Task", - "method": "build_agent_and_task", - "name": "task_output", - "selected": "SequentialTask", - "types": [ - "SequentialTask" - ], - "value": "__UNDEFINED__" - } - ], - "pinned": false, - "template": { - "_type": "Component", - "agent_kwargs": { - "_input_type": "DictInput", - "advanced": true, - "display_name": "Agent kwargs", - "dynamic": false, - "info": "Additional kwargs for the agent.", - "list": true, - "name": "agent_kwargs", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "type": "dict", - "value": {} - }, - "allow_code_execution": { - "_input_type": "BoolInput", - "advanced": true, - "display_name": "Allow Code Execution", - "dynamic": false, - "info": "Whether the agent is allowed to execute code.", - "list": false, - "name": "allow_code_execution", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": false - }, - "allow_delegation": { - "_input_type": "BoolInput", - "advanced": true, - "display_name": "Allow Delegation", - "dynamic": false, - "info": "Whether the agent is allowed to delegate tasks to other agents.", - "list": false, - "name": "allow_delegation", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": false - }, - "async_execution": { - "_input_type": "BoolInput", - "advanced": true, - "display_name": "Async Execution", - "dynamic": false, - "info": "Boolean flag indicating asynchronous task execution.", - "list": false, - "name": "async_execution", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": false - }, - "backstory": { - "_input_type": "MultilineInput", - "advanced": false, - "display_name": "Backstory", - "dynamic": false, - "info": "The backstory of the agent.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "multiline": true, - "name": "backstory", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "You are the editor of the most reputable journal in the world." 
- }, - "code": { - "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", - "info": "", - "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, - "placeholder": "", - "required": true, - "show": true, - "title_case": false, - "type": "code", - "value": "from crewai import Agent, Task\n\nfrom langflow.base.agents.crewai.tasks import SequentialTask\nfrom langflow.custom import Component\nfrom langflow.io import BoolInput, DictInput, HandleInput, MultilineInput, Output\n\n\nclass SequentialTaskAgentComponent(Component):\n display_name = \"Sequential Task Agent\"\n description = \"Creates a CrewAI Task and its associated Agent.\"\n documentation = \"https://docs.crewai.com/how-to/LLM-Connections/\"\n icon = \"CrewAI\"\n\n inputs = [\n # Agent inputs\n MultilineInput(name=\"role\", display_name=\"Role\", info=\"The role of the agent.\"),\n MultilineInput(name=\"goal\", display_name=\"Goal\", info=\"The objective of the agent.\"),\n MultilineInput(\n name=\"backstory\",\n display_name=\"Backstory\",\n info=\"The backstory of the agent.\",\n ),\n HandleInput(\n name=\"tools\",\n display_name=\"Tools\",\n input_types=[\"Tool\"],\n is_list=True,\n info=\"Tools at agent's disposal\",\n value=[],\n ),\n HandleInput(\n name=\"llm\",\n display_name=\"Language Model\",\n info=\"Language model that will run the agent.\",\n input_types=[\"LanguageModel\"],\n ),\n BoolInput(\n name=\"memory\",\n display_name=\"Memory\",\n info=\"Whether the agent should have memory or not\",\n advanced=True,\n value=True,\n ),\n BoolInput(\n name=\"verbose\",\n display_name=\"Verbose\",\n advanced=True,\n value=True,\n ),\n BoolInput(\n name=\"allow_delegation\",\n display_name=\"Allow Delegation\",\n info=\"Whether the agent is allowed to delegate tasks to other agents.\",\n value=False,\n advanced=True,\n ),\n BoolInput(\n name=\"allow_code_execution\",\n display_name=\"Allow Code Execution\",\n info=\"Whether the agent is allowed to execute code.\",\n value=False,\n advanced=True,\n ),\n DictInput(\n name=\"agent_kwargs\",\n display_name=\"Agent kwargs\",\n info=\"Additional kwargs for the agent.\",\n is_list=True,\n advanced=True,\n ),\n # Task inputs\n MultilineInput(\n name=\"task_description\",\n display_name=\"Task Description\",\n info=\"Descriptive text detailing task's purpose and execution.\",\n ),\n MultilineInput(\n name=\"expected_output\",\n display_name=\"Expected Task Output\",\n info=\"Clear definition of expected task outcome.\",\n ),\n BoolInput(\n name=\"async_execution\",\n display_name=\"Async Execution\",\n value=False,\n advanced=True,\n info=\"Boolean flag indicating asynchronous task execution.\",\n ),\n # Chaining input\n HandleInput(\n name=\"previous_task\",\n display_name=\"Previous Task\",\n input_types=[\"SequentialTask\"],\n info=\"The previous task in the sequence (for chaining).\",\n required=False,\n ),\n ]\n\n outputs = [\n Output(\n display_name=\"Sequential Task\",\n name=\"task_output\",\n method=\"build_agent_and_task\",\n ),\n ]\n\n def build_agent_and_task(self) -> list[SequentialTask]:\n # Build the agent\n agent_kwargs = self.agent_kwargs or {}\n agent = Agent(\n role=self.role,\n goal=self.goal,\n backstory=self.backstory,\n llm=self.llm,\n verbose=self.verbose,\n memory=self.memory,\n tools=self.tools if self.tools else [],\n allow_delegation=self.allow_delegation,\n allow_code_execution=self.allow_code_execution,\n **agent_kwargs,\n )\n\n # Build the task\n task = Task(\n description=self.task_description,\n 
expected_output=self.expected_output,\n agent=agent,\n async_execution=self.async_execution,\n )\n\n # If there's a previous task, create a list of tasks\n if self.previous_task:\n if isinstance(self.previous_task, list):\n tasks = self.previous_task + [task]\n else:\n tasks = [self.previous_task, task]\n else:\n tasks = [task]\n\n self.status = f\"Agent: {repr(agent)}\\nTask: {repr(task)}\"\n return tasks\n" - }, - "expected_output": { - "_input_type": "MultilineInput", - "advanced": false, - "display_name": "Expected Task Output", - "dynamic": false, - "info": "Clear definition of expected task outcome.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "multiline": true, - "name": "expected_output", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "Small paragraphs and bullet points with the corrected content." - }, - "goal": { - "_input_type": "MultilineInput", - "advanced": false, - "display_name": "Goal", - "dynamic": false, - "info": "The objective of the agent.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "multiline": true, - "name": "goal", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "You should edit the Information provided by the Researcher to make it more palatable and to not contain misleading information." - }, - "llm": { - "_input_type": "HandleInput", - "advanced": false, - "display_name": "Language Model", - "dynamic": false, - "info": "Language model that will run the agent.", - "input_types": [ - "LanguageModel" - ], - "list": false, - "name": "llm", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "other", - "value": "" - }, - "memory": { - "_input_type": "BoolInput", - "advanced": true, - "display_name": "Memory", - "dynamic": false, - "info": "Whether the agent should have memory or not", - "list": false, - "name": "memory", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": true - }, - "previous_task": { - "_input_type": "HandleInput", - "advanced": false, - "display_name": "Previous Task", - "dynamic": false, - "info": "The previous task in the sequence (for chaining).", - "input_types": [ - "SequentialTask" - ], - "list": false, - "name": "previous_task", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "other", - "value": "" - }, - "role": { - "_input_type": "MultilineInput", - "advanced": false, - "display_name": "Role", - "dynamic": false, - "info": "The role of the agent.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "multiline": true, - "name": "role", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "Editor" - }, - "task_description": { - "_input_type": "MultilineInput", - "advanced": false, - "display_name": "Task Description", - "dynamic": false, - "info": "Descriptive text detailing task's purpose and execution.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "multiline": true, - "name": "task_description", - "placeholder": "", - "required": false, - 
"show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "tools": { - "_input_type": "HandleInput", - "advanced": false, - "display_name": "Tools", - "dynamic": false, - "info": "Tools at agent's disposal", - "input_types": [ - "Tool" - ], - "list": true, - "name": "tools", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "other", - "value": [] - }, - "verbose": { - "_input_type": "BoolInput", - "advanced": true, - "display_name": "Verbose", - "dynamic": false, - "info": "", - "list": false, - "name": "verbose", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": true - } - } - }, - "type": "SequentialTaskAgentComponent" - }, - "dragging": false, - "height": 810, - "id": "SequentialTaskAgentComponent-5i4Wg", - "position": { - "x": 62.10105154443647, - "y": -336.82282969954827 - }, - "positionAbsolute": { - "x": 62.10105154443647, - "y": -336.82282969954827 - }, - "selected": false, - "type": "genericNode", - "width": 384 - }, - { - "data": { - "id": "SequentialTaskAgentComponent-TPEWE", - "node": { - "base_classes": [ - "SequentialTask" - ], - "beta": false, - "conditional_paths": [], - "custom_fields": {}, - "description": "Creates a CrewAI Task and its associated Agent.", - "display_name": "Sequential Task Agent", - "documentation": "https://docs.crewai.com/how-to/LLM-Connections/", - "edited": false, - "field_order": [ - "role", - "goal", - "backstory", - "tools", - "llm", - "memory", - "verbose", - "allow_delegation", - "allow_code_execution", - "agent_kwargs", - "task_description", - "expected_output", - "async_execution", - "previous_task" - ], - "frozen": false, - "icon": "CrewAI", - "lf_version": "1.0.15", - "output_types": [], - "outputs": [ - { - "cache": true, - "display_name": "Sequential Task", - "method": "build_agent_and_task", - "name": "task_output", - "selected": "SequentialTask", - "types": [ - "SequentialTask" - ], - "value": "__UNDEFINED__" - } - ], - "pinned": false, - "template": { - "_type": "Component", - "agent_kwargs": { - "_input_type": "DictInput", - "advanced": true, - "display_name": "Agent kwargs", - "dynamic": false, - "info": "Additional kwargs for the agent.", - "list": true, - "name": "agent_kwargs", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "type": "dict", - "value": {} - }, - "allow_code_execution": { - "_input_type": "BoolInput", - "advanced": true, - "display_name": "Allow Code Execution", - "dynamic": false, - "info": "Whether the agent is allowed to execute code.", - "list": false, - "name": "allow_code_execution", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": false - }, - "allow_delegation": { - "_input_type": "BoolInput", - "advanced": true, - "display_name": "Allow Delegation", - "dynamic": false, - "info": "Whether the agent is allowed to delegate tasks to other agents.", - "list": false, - "name": "allow_delegation", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": false - }, - "async_execution": { - "_input_type": "BoolInput", - "advanced": true, - "display_name": "Async Execution", - "dynamic": false, - "info": "Boolean flag indicating asynchronous task execution.", - 
"list": false, - "name": "async_execution", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": false - }, - "backstory": { - "_input_type": "MultilineInput", - "advanced": false, - "display_name": "Backstory", - "dynamic": false, - "info": "The backstory of the agent.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "multiline": true, - "name": "backstory", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "Your formal occupation is Comedian-in-Chief. You write jokes, do standup comedy and write funny articles." - }, - "code": { - "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", - "info": "", - "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, - "placeholder": "", - "required": true, - "show": true, - "title_case": false, - "type": "code", - "value": "from crewai import Agent, Task\n\nfrom langflow.base.agents.crewai.tasks import SequentialTask\nfrom langflow.custom import Component\nfrom langflow.io import BoolInput, DictInput, HandleInput, MultilineInput, Output\n\n\nclass SequentialTaskAgentComponent(Component):\n display_name = \"Sequential Task Agent\"\n description = \"Creates a CrewAI Task and its associated Agent.\"\n documentation = \"https://docs.crewai.com/how-to/LLM-Connections/\"\n icon = \"CrewAI\"\n\n inputs = [\n # Agent inputs\n MultilineInput(name=\"role\", display_name=\"Role\", info=\"The role of the agent.\"),\n MultilineInput(name=\"goal\", display_name=\"Goal\", info=\"The objective of the agent.\"),\n MultilineInput(\n name=\"backstory\",\n display_name=\"Backstory\",\n info=\"The backstory of the agent.\",\n ),\n HandleInput(\n name=\"tools\",\n display_name=\"Tools\",\n input_types=[\"Tool\"],\n is_list=True,\n info=\"Tools at agent's disposal\",\n value=[],\n ),\n HandleInput(\n name=\"llm\",\n display_name=\"Language Model\",\n info=\"Language model that will run the agent.\",\n input_types=[\"LanguageModel\"],\n ),\n BoolInput(\n name=\"memory\",\n display_name=\"Memory\",\n info=\"Whether the agent should have memory or not\",\n advanced=True,\n value=True,\n ),\n BoolInput(\n name=\"verbose\",\n display_name=\"Verbose\",\n advanced=True,\n value=True,\n ),\n BoolInput(\n name=\"allow_delegation\",\n display_name=\"Allow Delegation\",\n info=\"Whether the agent is allowed to delegate tasks to other agents.\",\n value=False,\n advanced=True,\n ),\n BoolInput(\n name=\"allow_code_execution\",\n display_name=\"Allow Code Execution\",\n info=\"Whether the agent is allowed to execute code.\",\n value=False,\n advanced=True,\n ),\n DictInput(\n name=\"agent_kwargs\",\n display_name=\"Agent kwargs\",\n info=\"Additional kwargs for the agent.\",\n is_list=True,\n advanced=True,\n ),\n # Task inputs\n MultilineInput(\n name=\"task_description\",\n display_name=\"Task Description\",\n info=\"Descriptive text detailing task's purpose and execution.\",\n ),\n MultilineInput(\n name=\"expected_output\",\n display_name=\"Expected Task Output\",\n info=\"Clear definition of expected task outcome.\",\n ),\n BoolInput(\n name=\"async_execution\",\n display_name=\"Async Execution\",\n value=False,\n advanced=True,\n info=\"Boolean flag indicating asynchronous task execution.\",\n ),\n # Chaining input\n HandleInput(\n name=\"previous_task\",\n display_name=\"Previous 
Task\",\n input_types=[\"SequentialTask\"],\n info=\"The previous task in the sequence (for chaining).\",\n required=False,\n ),\n ]\n\n outputs = [\n Output(\n display_name=\"Sequential Task\",\n name=\"task_output\",\n method=\"build_agent_and_task\",\n ),\n ]\n\n def build_agent_and_task(self) -> list[SequentialTask]:\n # Build the agent\n agent_kwargs = self.agent_kwargs or {}\n agent = Agent(\n role=self.role,\n goal=self.goal,\n backstory=self.backstory,\n llm=self.llm,\n verbose=self.verbose,\n memory=self.memory,\n tools=self.tools if self.tools else [],\n allow_delegation=self.allow_delegation,\n allow_code_execution=self.allow_code_execution,\n **agent_kwargs,\n )\n\n # Build the task\n task = Task(\n description=self.task_description,\n expected_output=self.expected_output,\n agent=agent,\n async_execution=self.async_execution,\n )\n\n # If there's a previous task, create a list of tasks\n if self.previous_task:\n if isinstance(self.previous_task, list):\n tasks = self.previous_task + [task]\n else:\n tasks = [self.previous_task, task]\n else:\n tasks = [task]\n\n self.status = f\"Agent: {repr(agent)}\\nTask: {repr(task)}\"\n return tasks\n" - }, - "expected_output": { - "_input_type": "MultilineInput", - "advanced": false, - "display_name": "Expected Task Output", - "dynamic": false, - "info": "Clear definition of expected task outcome.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "multiline": true, - "name": "expected_output", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "A small blog about the topic." - }, - "goal": { - "_input_type": "MultilineInput", - "advanced": false, - "display_name": "Goal", - "dynamic": false, - "info": "The objective of the agent.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "multiline": true, - "name": "goal", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "You write comedic content based on the information provided by the editor." 
- }, - "llm": { - "_input_type": "HandleInput", - "advanced": false, - "display_name": "Language Model", - "dynamic": false, - "info": "Language model that will run the agent.", - "input_types": [ - "LanguageModel" - ], - "list": false, - "name": "llm", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "other", - "value": "" - }, - "memory": { - "_input_type": "BoolInput", - "advanced": true, - "display_name": "Memory", - "dynamic": false, - "info": "Whether the agent should have memory or not", - "list": false, - "name": "memory", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": true - }, - "previous_task": { - "_input_type": "HandleInput", - "advanced": false, - "display_name": "Previous Task", - "dynamic": false, - "info": "The previous task in the sequence (for chaining).", - "input_types": [ - "SequentialTask" - ], - "list": false, - "name": "previous_task", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "other", - "value": "" - }, - "role": { - "_input_type": "MultilineInput", - "advanced": false, - "display_name": "Role", - "dynamic": false, - "info": "The role of the agent.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "multiline": true, - "name": "role", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "Comedian" - }, - "task_description": { - "_input_type": "MultilineInput", - "advanced": false, - "display_name": "Task Description", - "dynamic": false, - "info": "Descriptive text detailing task's purpose and execution.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "multiline": true, - "name": "task_description", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "tools": { - "_input_type": "HandleInput", - "advanced": false, - "display_name": "Tools", - "dynamic": false, - "info": "Tools at agent's disposal", - "input_types": [ - "Tool" - ], - "list": true, - "name": "tools", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "other", - "value": [] - }, - "verbose": { - "_input_type": "BoolInput", - "advanced": true, - "display_name": "Verbose", - "dynamic": false, - "info": "", - "list": false, - "name": "verbose", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": true - } - } - }, - "type": "SequentialTaskAgentComponent" - }, - "dragging": false, - "height": 810, - "id": "SequentialTaskAgentComponent-TPEWE", - "position": { - "x": 800.6536575540351, - "y": -345.48877618011403 - }, - "positionAbsolute": { - "x": 800.6536575540351, - "y": -345.48877618011403 - }, - "selected": false, - "type": "genericNode", - "width": 384 - }, - { - "data": { - "id": "YFinanceTool-Asoka", - "node": { - "base_classes": [ - "Tool" - ], - "beta": false, - "conditional_paths": [], - "custom_fields": {}, - "description": "Tool for interacting with Yahoo Finance News.", - "display_name": "Yahoo Finance News Tool", - "documentation": "", - "edited": false, - "field_order": [], - "frozen": false, - "lf_version": 
"1.0.15", - "output_types": [], - "outputs": [ - { - "cache": true, - "display_name": "Tool", - "method": "build_tool", - "name": "tool", - "selected": "Tool", - "types": [ - "Tool" - ], - "value": "__UNDEFINED__" - } - ], - "pinned": false, - "template": { - "_type": "Component", - "code": { - "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", - "info": "", - "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, - "placeholder": "", - "required": true, - "show": true, - "title_case": false, - "type": "code", - "value": "from typing import cast\n\nfrom langchain_community.tools.yahoo_finance_news import YahooFinanceNewsTool\n\nfrom langflow.custom import Component\nfrom langflow.field_typing import Tool\nfrom langflow.io import Output\n\n\nclass YfinanceToolComponent(Component):\n display_name = \"Yahoo Finance News Tool\"\n description = \"Tool for interacting with Yahoo Finance News.\"\n name = \"YFinanceTool\"\n\n outputs = [\n Output(display_name=\"Tool\", name=\"tool\", method=\"build_tool\"),\n ]\n\n def build_tool(self) -> Tool:\n return cast(Tool, YahooFinanceNewsTool())\n" - } - } - }, - "type": "YFinanceTool" - }, - "dragging": false, - "height": 216, - "id": "YFinanceTool-Asoka", - "position": { - "x": -1395.322640611045, - "y": -397.3732940326163 - }, - "positionAbsolute": { - "x": -1395.322640611045, - "y": -397.3732940326163 - }, - "selected": false, - "type": "genericNode", - "width": 384 - } - ], - "viewport": { - "x": 573.0114453101413, - "y": 469.2263376930288, - "zoom": 0.29771421234125156 - } - }, - "description": "This Agent runs tasks in a predefined sequence.", - "endpoint_name": null, - "id": "c2e3229c-813e-43eb-9041-cdfcfab90e08", - "is_component": false, - "last_tested_version": "1.0.15", - "name": "Sequential Tasks Agent" -} \ No newline at end of file diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Sequential Tasks Agents .json b/src/backend/base/langflow/initial_setup/starter_projects/Sequential Tasks Agents .json new file mode 100644 index 000000000000..aa3d79bf4ca4 --- /dev/null +++ b/src/backend/base/langflow/initial_setup/starter_projects/Sequential Tasks Agents .json @@ -0,0 +1,3495 @@ +{ + "data": { + "edges": [ + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "Prompt", + "id": "Prompt-rPwbg", + "name": "prompt", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "system_prompt", + "id": "Agent-rH74C", + "inputTypes": ["Message"], + "type": "str" + } + }, + "id": "reactflow__edge-Prompt-rPwbg{œdataTypeœ:œPromptœ,œidœ:œPrompt-rPwbgœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-Agent-rH74C{œfieldNameœ:œsystem_promptœ,œidœ:œAgent-rH74Cœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "Prompt-rPwbg", + "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-rPwbgœ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", + "target": "Agent-rH74C", + "targetHandle": "{œfieldNameœ: œsystem_promptœ, œidœ: œAgent-rH74Cœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "Prompt", + "id": "Prompt-DGXf4", + "name": "prompt", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "system_prompt", + "id": "Agent-vIPAK", + "inputTypes": ["Message"], + "type": "str" + } + }, + "id": 
"reactflow__edge-Prompt-DGXf4{œdataTypeœ:œPromptœ,œidœ:œPrompt-DGXf4œ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-Agent-vIPAK{œfieldNameœ:œsystem_promptœ,œidœ:œAgent-vIPAKœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "Prompt-DGXf4", + "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-DGXf4œ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", + "target": "Agent-vIPAK", + "targetHandle": "{œfieldNameœ: œsystem_promptœ, œidœ: œAgent-vIPAKœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "Agent", + "id": "Agent-rH74C", + "name": "response", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "input_value", + "id": "ChatOutput-oAzS1", + "inputTypes": ["Message"], + "type": "str" + } + }, + "id": "reactflow__edge-Agent-rH74C{œdataTypeœ:œAgentœ,œidœ:œAgent-rH74Cœ,œnameœ:œresponseœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-oAzS1{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-oAzS1œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "Agent-rH74C", + "sourceHandle": "{œdataTypeœ: œAgentœ, œidœ: œAgent-rH74Cœ, œnameœ: œresponseœ, œoutput_typesœ: [œMessageœ]}", + "target": "ChatOutput-oAzS1", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-oAzS1œ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "CalculatorTool", + "id": "CalculatorTool-xo5ux", + "name": "api_build_tool", + "output_types": ["Tool"] + }, + "targetHandle": { + "fieldName": "tools", + "id": "Agent-rH74C", + "inputTypes": ["Tool", "BaseTool", "StructuredTool"], + "type": "other" + } + }, + "id": "reactflow__edge-CalculatorTool-xo5ux{œdataTypeœ:œCalculatorToolœ,œidœ:œCalculatorTool-xo5uxœ,œnameœ:œapi_build_toolœ,œoutput_typesœ:[œToolœ]}-Agent-rH74C{œfieldNameœ:œtoolsœ,œidœ:œAgent-rH74Cœ,œinputTypesœ:[œToolœ,œBaseToolœ,œStructuredToolœ],œtypeœ:œotherœ}", + "source": "CalculatorTool-xo5ux", + "sourceHandle": "{œdataTypeœ: œCalculatorToolœ, œidœ: œCalculatorTool-xo5uxœ, œnameœ: œapi_build_toolœ, œoutput_typesœ: [œToolœ]}", + "target": "Agent-rH74C", + "targetHandle": "{œfieldNameœ: œtoolsœ, œidœ: œAgent-rH74Cœ, œinputTypesœ: [œToolœ, œBaseToolœ, œStructuredToolœ], œtypeœ: œotherœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "YahooFinanceTool", + "id": "YahooFinanceTool-YmOKx", + "name": "api_build_tool", + "output_types": ["Tool"] + }, + "targetHandle": { + "fieldName": "tools", + "id": "Agent-vIPAK", + "inputTypes": ["Tool", "BaseTool", "StructuredTool"], + "type": "other" + } + }, + "id": "reactflow__edge-YahooFinanceTool-YmOKx{œdataTypeœ:œYahooFinanceToolœ,œidœ:œYahooFinanceTool-YmOKxœ,œnameœ:œapi_build_toolœ,œoutput_typesœ:[œToolœ]}-Agent-vIPAK{œfieldNameœ:œtoolsœ,œidœ:œAgent-vIPAKœ,œinputTypesœ:[œToolœ,œBaseToolœ,œStructuredToolœ],œtypeœ:œotherœ}", + "source": "YahooFinanceTool-YmOKx", + "sourceHandle": "{œdataTypeœ: œYahooFinanceToolœ, œidœ: œYahooFinanceTool-YmOKxœ, œnameœ: œapi_build_toolœ, œoutput_typesœ: [œToolœ]}", + "target": "Agent-vIPAK", + "targetHandle": "{œfieldNameœ: œtoolsœ, œidœ: œAgent-vIPAKœ, œinputTypesœ: [œToolœ, œBaseToolœ, œStructuredToolœ], œtypeœ: œotherœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "Agent", + "id": "Agent-vIPAK", + "name": "response", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "finance_agent_output", + "id": "Prompt-rPwbg", + "inputTypes": ["Message", 
"Text"], + "type": "str" + } + }, + "id": "reactflow__edge-Agent-vIPAK{œdataTypeœ:œAgentœ,œidœ:œAgent-vIPAKœ,œnameœ:œresponseœ,œoutput_typesœ:[œMessageœ]}-Prompt-rPwbg{œfieldNameœ:œfinance_agent_outputœ,œidœ:œPrompt-rPwbgœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", + "source": "Agent-vIPAK", + "sourceHandle": "{œdataTypeœ: œAgentœ, œidœ: œAgent-vIPAKœ, œnameœ: œresponseœ, œoutput_typesœ: [œMessageœ]}", + "target": "Prompt-rPwbg", + "targetHandle": "{œfieldNameœ: œfinance_agent_outputœ, œidœ: œPrompt-rPwbgœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "ChatInput", + "id": "ChatInput-3mEtf", + "name": "message", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "input_value", + "id": "Agent-uaR2o", + "inputTypes": ["Message"], + "type": "str" + } + }, + "id": "reactflow__edge-ChatInput-3mEtf{œdataTypeœ:œChatInputœ,œidœ:œChatInput-3mEtfœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Agent-uaR2o{œfieldNameœ:œinput_valueœ,œidœ:œAgent-uaR2oœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "ChatInput-3mEtf", + "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-3mEtfœ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}", + "target": "Agent-uaR2o", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œAgent-uaR2oœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "Prompt", + "id": "Prompt-BS8ii", + "name": "prompt", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "system_prompt", + "id": "Agent-uaR2o", + "inputTypes": ["Message"], + "type": "str" + } + }, + "id": "reactflow__edge-Prompt-BS8ii{œdataTypeœ:œPromptœ,œidœ:œPrompt-BS8iiœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-Agent-uaR2o{œfieldNameœ:œsystem_promptœ,œidœ:œAgent-uaR2oœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "Prompt-BS8ii", + "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-BS8iiœ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", + "target": "Agent-uaR2o", + "targetHandle": "{œfieldNameœ: œsystem_promptœ, œidœ: œAgent-uaR2oœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "TavilyAISearch", + "id": "TavilyAISearch-YfG8u", + "name": "api_build_tool", + "output_types": ["Tool"] + }, + "targetHandle": { + "fieldName": "tools", + "id": "Agent-uaR2o", + "inputTypes": ["Tool", "BaseTool", "StructuredTool"], + "type": "other" + } + }, + "id": "reactflow__edge-TavilyAISearch-YfG8u{œdataTypeœ:œTavilyAISearchœ,œidœ:œTavilyAISearch-YfG8uœ,œnameœ:œapi_build_toolœ,œoutput_typesœ:[œToolœ]}-Agent-uaR2o{œfieldNameœ:œtoolsœ,œidœ:œAgent-uaR2oœ,œinputTypesœ:[œToolœ,œBaseToolœ,œStructuredToolœ],œtypeœ:œotherœ}", + "source": "TavilyAISearch-YfG8u", + "sourceHandle": "{œdataTypeœ: œTavilyAISearchœ, œidœ: œTavilyAISearch-YfG8uœ, œnameœ: œapi_build_toolœ, œoutput_typesœ: [œToolœ]}", + "target": "Agent-uaR2o", + "targetHandle": "{œfieldNameœ: œtoolsœ, œidœ: œAgent-uaR2oœ, œinputTypesœ: [œToolœ, œBaseToolœ, œStructuredToolœ], œtypeœ: œotherœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "Agent", + "id": "Agent-uaR2o", + "name": "response", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "input_value", + "id": "Agent-vIPAK", + "inputTypes": ["Message"], + "type": "str" + } + }, + "id": 
"reactflow__edge-Agent-uaR2o{œdataTypeœ:œAgentœ,œidœ:œAgent-uaR2oœ,œnameœ:œresponseœ,œoutput_typesœ:[œMessageœ]}-Agent-vIPAK{œfieldNameœ:œinput_valueœ,œidœ:œAgent-vIPAKœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "Agent-uaR2o", + "sourceHandle": "{œdataTypeœ: œAgentœ, œidœ: œAgent-uaR2oœ, œnameœ: œresponseœ, œoutput_typesœ: [œMessageœ]}", + "target": "Agent-vIPAK", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œAgent-vIPAKœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "Agent", + "id": "Agent-uaR2o", + "name": "response", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "research_agent_output", + "id": "Prompt-rPwbg", + "inputTypes": ["Message", "Text"], + "type": "str" + } + }, + "id": "reactflow__edge-Agent-uaR2o{œdataTypeœ:œAgentœ,œidœ:œAgent-uaR2oœ,œnameœ:œresponseœ,œoutput_typesœ:[œMessageœ]}-Prompt-rPwbg{œfieldNameœ:œresearch_agent_outputœ,œidœ:œPrompt-rPwbgœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", + "source": "Agent-uaR2o", + "sourceHandle": "{œdataTypeœ: œAgentœ, œidœ: œAgent-uaR2oœ, œnameœ: œresponseœ, œoutput_typesœ: [œMessageœ]}", + "target": "Prompt-rPwbg", + "targetHandle": "{œfieldNameœ: œresearch_agent_outputœ, œidœ: œPrompt-rPwbgœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" + } + ], + "nodes": [ + { + "data": { + "description": "Display a chat message in the Playground.", + "display_name": "Chat Output", + "id": "ChatOutput-oAzS1", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Display a chat message in the Playground.", + "display_name": "Chat Output", + "documentation": "", + "edited": false, + "field_order": [ + "input_value", + "should_store_message", + "sender", + "sender_name", + "session_id", + "data_template", + "background_color", + "chat_icon", + "text_color" + ], + "frozen": false, + "icon": "MessagesSquare", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Message", + "method": "message_response", + "name": "message", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "background_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Background Color", + "dynamic": false, + "info": "The background color of the icon.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "background_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "chat_icon": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Icon", + "dynamic": false, + "info": "The icon of the message.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "chat_icon", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": 
false, + "type": "code", + "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageInput, MessageTextInput, Output\nfrom langflow.schema.message import Message\nfrom langflow.schema.properties import Source\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.\",\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(\n display_name=\"Message\",\n name=\"message\",\n method=\"message_response\",\n ),\n ]\n\n def _build_source(self, _id: str | None, display_name: str | None, source: str | None) -> Source:\n source_dict = {}\n if _id:\n source_dict[\"id\"] = _id\n if display_name:\n source_dict[\"display_name\"] = display_name\n if source:\n source_dict[\"source\"] = source\n return Source(**source_dict)\n\n def message_response(self) -> Message:\n _source, _icon, _display_name, _source_id = self.get_properties_from_source_component()\n _background_color = self.background_color\n _text_color = self.text_color\n if self.chat_icon:\n _icon = self.chat_icon\n message = self.input_value if isinstance(self.input_value, Message) else Message(text=self.input_value)\n message.sender = self.sender\n message.sender_name = self.sender_name\n message.session_id = self.session_id\n message.flow_id = self.graph.flow_id if hasattr(self, \"graph\") else None\n message.properties.source = self._build_source(_source_id, _display_name, _source)\n message.properties.icon = _icon\n message.properties.background_color = _background_color\n message.properties.text_color = _text_color\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n" + }, + "data_template": { + 
"_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Data Template", + "dynamic": false, + "info": "Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "data_template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "{text}" + }, + "input_value": { + "_input_type": "MessageInput", + "advanced": false, + "display_name": "Text", + "dynamic": false, + "info": "Message to be passed as output.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "sender": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Sender Type", + "dynamic": false, + "info": "Type of sender.", + "name": "sender", + "options": ["Machine", "User"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "Machine" + }, + "sender_name": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Sender Name", + "dynamic": false, + "info": "Name of the sender.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "sender_name", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "AI" + }, + "session_id": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Session ID", + "dynamic": false, + "info": "The session ID of the chat. 
If empty, the current session ID parameter will be used.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "session_id", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "should_store_message": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Store Messages", + "dynamic": false, + "info": "Store the message in the history.", + "list": false, + "name": "should_store_message", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "text_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Text Color", + "dynamic": false, + "info": "The text color of the name", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "text_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + } + }, + "tool_mode": false + }, + "type": "ChatOutput" + }, + "dragging": false, + "height": 234, + "id": "ChatOutput-oAzS1", + "position": { + "x": 1239.222567317785, + "y": -920.0283175735606 + }, + "positionAbsolute": { + "x": 1239.222567317785, + "y": -920.0283175735606 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "description": "Define the agent's instructions, then enter a task to complete using tools.", + "display_name": "Finance Agent", + "id": "Agent-vIPAK", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Define the agent's instructions, then enter a task to complete using tools.", + "display_name": "Finance Agent", + "documentation": "", + "edited": false, + "field_order": [ + "agent_llm", + "max_tokens", + "model_kwargs", + "json_mode", + "output_schema", + "model_name", + "openai_api_base", + "api_key", + "temperature", + "seed", + "output_parser", + "system_prompt", + "tools", + "input_value", + "handle_parsing_errors", + "verbose", + "max_iterations", + "agent_description", + "memory", + "sender", + "sender_name", + "n_messages", + "session_id", + "order", + "template", + "add_current_date_tool" + ], + "frozen": false, + "icon": "bot", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Response", + "method": "message_response", + "name": "response", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "add_current_date_tool": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Add tool Current Date", + "dynamic": false, + "info": "If true, will add a tool to the agent that returns the current date.", + "list": false, + "name": "add_current_date_tool", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "agent_description": { + "_input_type": "MultilineInput", + "advanced": true, + "display_name": "Agent Description", + "dynamic": false, + "info": "The description of the agent. This is only used when in Tool Mode. 
Defaults to 'A helpful assistant with access to the following tools:' and tools are added dynamically.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "agent_description", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "A helpful assistant with access to the following tools:" + }, + "agent_llm": { + "_input_type": "DropdownInput", + "advanced": false, + "combobox": false, + "display_name": "Model Provider", + "dynamic": false, + "info": "The provider of the language model that the agent will use to generate responses.", + "input_types": [], + "name": "agent_llm", + "options": [ + "Amazon Bedrock", + "Anthropic", + "Azure OpenAI", + "Groq", + "NVIDIA", + "OpenAI", + "Custom" + ], + "placeholder": "", + "real_time_refresh": true, + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "OpenAI" + }, + "api_key": { + "_input_type": "SecretStrInput", + "advanced": false, + "display_name": "OpenAI API Key", + "dynamic": false, + "info": "The OpenAI API Key to use for the OpenAI model.", + "input_types": ["Message"], + "load_from_db": true, + "name": "api_key", + "password": true, + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "OPENAI_API_KEY" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langchain_core.tools import StructuredTool\n\nfrom langflow.base.agents.agent import LCToolsAgentComponent\nfrom langflow.base.models.model_input_constants import ALL_PROVIDER_FIELDS, MODEL_PROVIDERS_DICT\nfrom langflow.base.models.model_utils import get_model_name\nfrom langflow.components.helpers import CurrentDateComponent\nfrom langflow.components.helpers.memory import MemoryComponent\nfrom langflow.components.langchain_utilities.tool_calling import ToolCallingAgentComponent\nfrom langflow.io import BoolInput, DropdownInput, MultilineInput, Output\nfrom langflow.schema.dotdict import dotdict\nfrom langflow.schema.message import Message\n\n\ndef set_advanced_true(component_input):\n component_input.advanced = True\n return component_input\n\n\nclass AgentComponent(ToolCallingAgentComponent):\n display_name: str = \"Agent\"\n description: str = \"Define the agent's instructions, then enter a task to complete using tools.\"\n icon = \"bot\"\n beta = False\n name = \"Agent\"\n\n memory_inputs = [set_advanced_true(component_input) for component_input in MemoryComponent().inputs]\n\n inputs = [\n DropdownInput(\n name=\"agent_llm\",\n display_name=\"Model Provider\",\n info=\"The provider of the language model that the agent will use to generate responses.\",\n options=[*sorted(MODEL_PROVIDERS_DICT.keys()), \"Custom\"],\n value=\"OpenAI\",\n real_time_refresh=True,\n input_types=[],\n ),\n *MODEL_PROVIDERS_DICT[\"OpenAI\"][\"inputs\"],\n MultilineInput(\n name=\"system_prompt\",\n display_name=\"Agent Instructions\",\n info=\"System Prompt: Initial instructions and context provided to guide the agent's behavior.\",\n value=\"You are a helpful assistant that can use tools to answer questions and perform 
tasks.\",\n advanced=False,\n ),\n *LCToolsAgentComponent._base_inputs,\n *memory_inputs,\n BoolInput(\n name=\"add_current_date_tool\",\n display_name=\"Add tool Current Date\",\n advanced=True,\n info=\"If true, will add a tool to the agent that returns the current date.\",\n value=True,\n ),\n ]\n outputs = [Output(name=\"response\", display_name=\"Response\", method=\"message_response\")]\n\n async def message_response(self) -> Message:\n llm_model, display_name = self.get_llm()\n self.model_name = get_model_name(llm_model, display_name=display_name)\n if llm_model is None:\n msg = \"No language model selected\"\n raise ValueError(msg)\n self.chat_history = self.get_memory_data()\n\n if self.add_current_date_tool:\n if not isinstance(self.tools, list): # type: ignore[has-type]\n self.tools = []\n # Convert CurrentDateComponent to a StructuredTool\n current_date_tool = CurrentDateComponent().to_toolkit()[0]\n if isinstance(current_date_tool, StructuredTool):\n self.tools.append(current_date_tool)\n else:\n msg = \"CurrentDateComponent must be converted to a StructuredTool\"\n raise ValueError(msg)\n\n if not self.tools:\n msg = \"Tools are required to run the agent.\"\n raise ValueError(msg)\n self.set(\n llm=llm_model,\n tools=self.tools,\n chat_history=self.chat_history,\n input_value=self.input_value,\n system_prompt=self.system_prompt,\n )\n agent = self.create_agent_runnable()\n return await self.run_agent(agent)\n\n def get_memory_data(self):\n memory_kwargs = {\n component_input.name: getattr(self, f\"{component_input.name}\") for component_input in self.memory_inputs\n }\n\n return MemoryComponent().set(**memory_kwargs).retrieve_messages()\n\n def get_llm(self):\n if isinstance(self.agent_llm, str):\n try:\n provider_info = MODEL_PROVIDERS_DICT.get(self.agent_llm)\n if provider_info:\n component_class = provider_info.get(\"component_class\")\n display_name = component_class.display_name\n inputs = provider_info.get(\"inputs\")\n prefix = provider_info.get(\"prefix\", \"\")\n return self._build_llm_model(component_class, inputs, prefix), display_name\n except Exception as e:\n msg = f\"Error building {self.agent_llm} language model\"\n raise ValueError(msg) from e\n return self.agent_llm, None\n\n def _build_llm_model(self, component, inputs, prefix=\"\"):\n model_kwargs = {input_.name: getattr(self, f\"{prefix}{input_.name}\") for input_ in inputs}\n return component.set(**model_kwargs).build_model()\n\n def delete_fields(self, build_config: dotdict, fields: dict | list[str]) -> None:\n \"\"\"Delete specified fields from build_config.\"\"\"\n for field in fields:\n build_config.pop(field, None)\n\n def update_input_types(self, build_config: dotdict) -> dotdict:\n \"\"\"Update input types for all fields in build_config.\"\"\"\n for key, value in build_config.items():\n if isinstance(value, dict):\n if value.get(\"input_types\") is None:\n build_config[key][\"input_types\"] = []\n elif hasattr(value, \"input_types\") and value.input_types is None:\n value.input_types = []\n return build_config\n\n def update_build_config(self, build_config: dotdict, field_value: str, field_name: str | None = None) -> dotdict:\n # Iterate over all providers in the MODEL_PROVIDERS_DICT\n # Existing logic for updating build_config\n if field_name == \"agent_llm\":\n provider_info = MODEL_PROVIDERS_DICT.get(field_value)\n if provider_info:\n component_class = provider_info.get(\"component_class\")\n if component_class and hasattr(component_class, \"update_build_config\"):\n # Call the component 
class's update_build_config method\n build_config = component_class.update_build_config(build_config, field_value, field_name)\n\n provider_configs: dict[str, tuple[dict, list[dict]]] = {\n provider: (\n MODEL_PROVIDERS_DICT[provider][\"fields\"],\n [\n MODEL_PROVIDERS_DICT[other_provider][\"fields\"]\n for other_provider in MODEL_PROVIDERS_DICT\n if other_provider != provider\n ],\n )\n for provider in MODEL_PROVIDERS_DICT\n }\n if field_value in provider_configs:\n fields_to_add, fields_to_delete = provider_configs[field_value]\n\n # Delete fields from other providers\n for fields in fields_to_delete:\n self.delete_fields(build_config, fields)\n\n # Add provider-specific fields\n if field_value == \"OpenAI\" and not any(field in build_config for field in fields_to_add):\n build_config.update(fields_to_add)\n else:\n build_config.update(fields_to_add)\n # Reset input types for agent_llm\n build_config[\"agent_llm\"][\"input_types\"] = []\n elif field_value == \"Custom\":\n # Delete all provider fields\n self.delete_fields(build_config, ALL_PROVIDER_FIELDS)\n # Update with custom component\n custom_component = DropdownInput(\n name=\"agent_llm\",\n display_name=\"Language Model\",\n options=[*sorted(MODEL_PROVIDERS_DICT.keys()), \"Custom\"],\n value=\"Custom\",\n real_time_refresh=True,\n input_types=[\"LanguageModel\"],\n )\n build_config.update({\"agent_llm\": custom_component.to_dict()})\n # Update input types for all fields\n build_config = self.update_input_types(build_config)\n\n # Validate required keys\n default_keys = [\n \"code\",\n \"_type\",\n \"agent_llm\",\n \"tools\",\n \"input_value\",\n \"add_current_date_tool\",\n \"system_prompt\",\n \"agent_description\",\n \"max_iterations\",\n \"handle_parsing_errors\",\n \"verbose\",\n ]\n missing_keys = [key for key in default_keys if key not in build_config]\n if missing_keys:\n msg = f\"Missing required keys in build_config: {missing_keys}\"\n raise ValueError(msg)\n if isinstance(self.agent_llm, str) and self.agent_llm in MODEL_PROVIDERS_DICT:\n provider_info = MODEL_PROVIDERS_DICT.get(self.agent_llm)\n if provider_info:\n component_class = provider_info.get(\"component_class\")\n prefix = provider_info.get(\"prefix\")\n if component_class and hasattr(component_class, \"update_build_config\"):\n # Call each component class's update_build_config method\n # remove the prefix from the field_name\n if isinstance(field_name, str) and isinstance(prefix, str):\n field_name = field_name.replace(prefix, \"\")\n build_config = component_class.update_build_config(build_config, field_value, field_name)\n\n return build_config\n" + }, + "handle_parsing_errors": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Handle Parse Errors", + "dynamic": false, + "info": "Should the Agent fix errors when reading user input for better processing?", + "list": false, + "name": "handle_parsing_errors", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "input_value": { + "_input_type": "MessageTextInput", + "advanced": false, + "display_name": "Input", + "dynamic": false, + "info": "The input provided by the user for the agent to process.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": true, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + 
"json_mode": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "JSON Mode", + "dynamic": false, + "info": "If True, it will output JSON regardless of passing a schema.", + "list": false, + "name": "json_mode", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + }, + "max_iterations": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Max Iterations", + "dynamic": false, + "info": "The maximum number of attempts the agent can make to complete its task before it stops.", + "list": false, + "name": "max_iterations", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 15 + }, + "max_tokens": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Max Tokens", + "dynamic": false, + "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.", + "list": false, + "name": "max_tokens", + "placeholder": "", + "range_spec": { + "max": 128000, + "min": 0, + "step": 0.1, + "step_type": "float" + }, + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": "" + }, + "memory": { + "_input_type": "HandleInput", + "advanced": true, + "display_name": "External Memory", + "dynamic": false, + "info": "Retrieve messages from an external memory. If empty, it will use the Langflow tables.", + "input_types": ["BaseChatMessageHistory"], + "list": false, + "name": "memory", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "model_kwargs": { + "_input_type": "DictInput", + "advanced": true, + "display_name": "Model Kwargs", + "dynamic": false, + "info": "Additional keyword arguments to pass to the model.", + "list": false, + "name": "model_kwargs", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "model_name": { + "_input_type": "DropdownInput", + "advanced": false, + "combobox": false, + "display_name": "Model Name", + "dynamic": false, + "info": "", + "name": "model_name", + "options": [ + "gpt-4o-mini", + "gpt-4o", + "gpt-4-turbo", + "gpt-4-turbo-preview", + "gpt-4", + "gpt-3.5-turbo", + "gpt-3.5-turbo-0125" + ], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "gpt-4o-mini" + }, + "n_messages": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Number of Messages", + "dynamic": false, + "info": "Number of messages to retrieve.", + "list": false, + "name": "n_messages", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 100 + }, + "openai_api_base": { + "_input_type": "StrInput", + "advanced": true, + "display_name": "OpenAI API Base", + "dynamic": false, + "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. 
You can change this to use other APIs like JinaChat, LocalAI and Prem.", + "list": false, + "load_from_db": false, + "name": "openai_api_base", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "order": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Order", + "dynamic": false, + "info": "Order of the messages.", + "name": "order", + "options": ["Ascending", "Descending"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "Ascending" + }, + "output_parser": { + "_input_type": "HandleInput", + "advanced": true, + "display_name": "Output Parser", + "dynamic": false, + "info": "The parser to use to parse the output of the model", + "input_types": ["OutputParser"], + "list": false, + "name": "output_parser", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "output_schema": { + "_input_type": "DictInput", + "advanced": true, + "display_name": "Schema", + "dynamic": false, + "info": "The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled. [DEPRECATED]", + "list": true, + "name": "output_schema", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "seed": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Seed", + "dynamic": false, + "info": "The seed controls the reproducibility of the job.", + "list": false, + "name": "seed", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 1 + }, + "sender": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Sender Type", + "dynamic": false, + "info": "Filter by sender type.", + "name": "sender", + "options": ["Machine", "User", "Machine and User"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "Machine and User" + }, + "sender_name": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Sender Name", + "dynamic": false, + "info": "Filter by sender name.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "sender_name", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "Finance Agent" + }, + "session_id": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Session ID", + "dynamic": false, + "info": "The session ID of the chat. 
If empty, the current session ID parameter will be used.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "session_id", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "system_prompt": { + "_input_type": "MultilineInput", + "advanced": false, + "display_name": "Agent Instructions", + "dynamic": false, + "info": "System Prompt: Initial instructions and context provided to guide the agent's behavior.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "system_prompt", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "You are the chief editor of a prestigious publication known for transforming complex information into clear, engaging content. Review and refine the researcher's document about {topic}.\n\nYour editing process should:\n- Verify and challenge any questionable claims\n- Restructure content for better flow and readability\n- Remove redundancies and unclear statements\n- Add context where needed\n- Ensure balanced coverage of the topic\n- Transform technical language into accessible explanations\n\nMaintain high editorial standards while making the content engaging for an educated general audience. Present the revised version in a clean, well-structured format." + }, + "temperature": { + "_input_type": "FloatInput", + "advanced": true, + "display_name": "Temperature", + "dynamic": false, + "info": "", + "list": false, + "name": "temperature", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "float", + "value": 0.1 + }, + "template": { + "_input_type": "MultilineInput", + "advanced": true, + "display_name": "Template", + "dynamic": false, + "info": "The template to use for formatting the data. 
It can contain the keys {text}, {sender} or any other key in the message data.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "{sender_name}: {text}" + }, + "tools": { + "_input_type": "HandleInput", + "advanced": false, + "display_name": "Tools", + "dynamic": false, + "info": "These are the tools that the agent can use to help with tasks.", + "input_types": ["Tool", "BaseTool", "StructuredTool"], + "list": true, + "name": "tools", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "verbose": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Verbose", + "dynamic": false, + "info": "", + "list": false, + "name": "verbose", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + } + }, + "tool_mode": false + }, + "type": "Agent" + }, + "dragging": false, + "height": 650, + "id": "Agent-vIPAK", + "position": { + "x": 45.70736046026991, + "y": -1369.035463408626 + }, + "positionAbsolute": { + "x": 45.70736046026991, + "y": -1369.035463408626 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "description": "Define the agent's instructions, then enter a task to complete using tools.", + "display_name": "Analysis & Editor Agent", + "id": "Agent-rH74C", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Define the agent's instructions, then enter a task to complete using tools.", + "display_name": "Analysis & Editor Agent", + "documentation": "", + "edited": false, + "field_order": [ + "agent_llm", + "max_tokens", + "model_kwargs", + "json_mode", + "output_schema", + "model_name", + "openai_api_base", + "api_key", + "temperature", + "seed", + "output_parser", + "system_prompt", + "tools", + "input_value", + "handle_parsing_errors", + "verbose", + "max_iterations", + "agent_description", + "memory", + "sender", + "sender_name", + "n_messages", + "session_id", + "order", + "template", + "add_current_date_tool" + ], + "frozen": false, + "icon": "bot", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Response", + "method": "message_response", + "name": "response", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "add_current_date_tool": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Add tool Current Date", + "dynamic": false, + "info": "If true, will add a tool to the agent that returns the current date.", + "list": false, + "name": "add_current_date_tool", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "agent_description": { + "_input_type": "MultilineInput", + "advanced": true, + "display_name": "Agent Description", + "dynamic": false, + "info": "The description of the agent. This is only used when in Tool Mode. 
Defaults to 'A helpful assistant with access to the following tools:' and tools are added dynamically.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "agent_description", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "A helpful assistant with access to the following tools:" + }, + "agent_llm": { + "_input_type": "DropdownInput", + "advanced": false, + "combobox": false, + "display_name": "Model Provider", + "dynamic": false, + "info": "The provider of the language model that the agent will use to generate responses.", + "input_types": [], + "name": "agent_llm", + "options": [ + "Amazon Bedrock", + "Anthropic", + "Azure OpenAI", + "Groq", + "NVIDIA", + "OpenAI", + "Custom" + ], + "placeholder": "", + "real_time_refresh": true, + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "OpenAI" + }, + "api_key": { + "_input_type": "SecretStrInput", + "advanced": false, + "display_name": "OpenAI API Key", + "dynamic": false, + "info": "The OpenAI API Key to use for the OpenAI model.", + "input_types": ["Message"], + "load_from_db": true, + "name": "api_key", + "password": true, + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "OPENAI_API_KEY" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langchain_core.tools import StructuredTool\n\nfrom langflow.base.agents.agent import LCToolsAgentComponent\nfrom langflow.base.models.model_input_constants import ALL_PROVIDER_FIELDS, MODEL_PROVIDERS_DICT\nfrom langflow.base.models.model_utils import get_model_name\nfrom langflow.components.helpers import CurrentDateComponent\nfrom langflow.components.helpers.memory import MemoryComponent\nfrom langflow.components.langchain_utilities.tool_calling import ToolCallingAgentComponent\nfrom langflow.io import BoolInput, DropdownInput, MultilineInput, Output\nfrom langflow.schema.dotdict import dotdict\nfrom langflow.schema.message import Message\n\n\ndef set_advanced_true(component_input):\n component_input.advanced = True\n return component_input\n\n\nclass AgentComponent(ToolCallingAgentComponent):\n display_name: str = \"Agent\"\n description: str = \"Define the agent's instructions, then enter a task to complete using tools.\"\n icon = \"bot\"\n beta = False\n name = \"Agent\"\n\n memory_inputs = [set_advanced_true(component_input) for component_input in MemoryComponent().inputs]\n\n inputs = [\n DropdownInput(\n name=\"agent_llm\",\n display_name=\"Model Provider\",\n info=\"The provider of the language model that the agent will use to generate responses.\",\n options=[*sorted(MODEL_PROVIDERS_DICT.keys()), \"Custom\"],\n value=\"OpenAI\",\n real_time_refresh=True,\n input_types=[],\n ),\n *MODEL_PROVIDERS_DICT[\"OpenAI\"][\"inputs\"],\n MultilineInput(\n name=\"system_prompt\",\n display_name=\"Agent Instructions\",\n info=\"System Prompt: Initial instructions and context provided to guide the agent's behavior.\",\n value=\"You are a helpful assistant that can use tools to answer questions and perform 
tasks.\",\n advanced=False,\n ),\n *LCToolsAgentComponent._base_inputs,\n *memory_inputs,\n BoolInput(\n name=\"add_current_date_tool\",\n display_name=\"Add tool Current Date\",\n advanced=True,\n info=\"If true, will add a tool to the agent that returns the current date.\",\n value=True,\n ),\n ]\n outputs = [Output(name=\"response\", display_name=\"Response\", method=\"message_response\")]\n\n async def message_response(self) -> Message:\n llm_model, display_name = self.get_llm()\n self.model_name = get_model_name(llm_model, display_name=display_name)\n if llm_model is None:\n msg = \"No language model selected\"\n raise ValueError(msg)\n self.chat_history = self.get_memory_data()\n\n if self.add_current_date_tool:\n if not isinstance(self.tools, list): # type: ignore[has-type]\n self.tools = []\n # Convert CurrentDateComponent to a StructuredTool\n current_date_tool = CurrentDateComponent().to_toolkit()[0]\n if isinstance(current_date_tool, StructuredTool):\n self.tools.append(current_date_tool)\n else:\n msg = \"CurrentDateComponent must be converted to a StructuredTool\"\n raise ValueError(msg)\n\n if not self.tools:\n msg = \"Tools are required to run the agent.\"\n raise ValueError(msg)\n self.set(\n llm=llm_model,\n tools=self.tools,\n chat_history=self.chat_history,\n input_value=self.input_value,\n system_prompt=self.system_prompt,\n )\n agent = self.create_agent_runnable()\n return await self.run_agent(agent)\n\n def get_memory_data(self):\n memory_kwargs = {\n component_input.name: getattr(self, f\"{component_input.name}\") for component_input in self.memory_inputs\n }\n\n return MemoryComponent().set(**memory_kwargs).retrieve_messages()\n\n def get_llm(self):\n if isinstance(self.agent_llm, str):\n try:\n provider_info = MODEL_PROVIDERS_DICT.get(self.agent_llm)\n if provider_info:\n component_class = provider_info.get(\"component_class\")\n display_name = component_class.display_name\n inputs = provider_info.get(\"inputs\")\n prefix = provider_info.get(\"prefix\", \"\")\n return self._build_llm_model(component_class, inputs, prefix), display_name\n except Exception as e:\n msg = f\"Error building {self.agent_llm} language model\"\n raise ValueError(msg) from e\n return self.agent_llm, None\n\n def _build_llm_model(self, component, inputs, prefix=\"\"):\n model_kwargs = {input_.name: getattr(self, f\"{prefix}{input_.name}\") for input_ in inputs}\n return component.set(**model_kwargs).build_model()\n\n def delete_fields(self, build_config: dotdict, fields: dict | list[str]) -> None:\n \"\"\"Delete specified fields from build_config.\"\"\"\n for field in fields:\n build_config.pop(field, None)\n\n def update_input_types(self, build_config: dotdict) -> dotdict:\n \"\"\"Update input types for all fields in build_config.\"\"\"\n for key, value in build_config.items():\n if isinstance(value, dict):\n if value.get(\"input_types\") is None:\n build_config[key][\"input_types\"] = []\n elif hasattr(value, \"input_types\") and value.input_types is None:\n value.input_types = []\n return build_config\n\n def update_build_config(self, build_config: dotdict, field_value: str, field_name: str | None = None) -> dotdict:\n # Iterate over all providers in the MODEL_PROVIDERS_DICT\n # Existing logic for updating build_config\n if field_name == \"agent_llm\":\n provider_info = MODEL_PROVIDERS_DICT.get(field_value)\n if provider_info:\n component_class = provider_info.get(\"component_class\")\n if component_class and hasattr(component_class, \"update_build_config\"):\n # Call the component 
class's update_build_config method\n build_config = component_class.update_build_config(build_config, field_value, field_name)\n\n provider_configs: dict[str, tuple[dict, list[dict]]] = {\n provider: (\n MODEL_PROVIDERS_DICT[provider][\"fields\"],\n [\n MODEL_PROVIDERS_DICT[other_provider][\"fields\"]\n for other_provider in MODEL_PROVIDERS_DICT\n if other_provider != provider\n ],\n )\n for provider in MODEL_PROVIDERS_DICT\n }\n if field_value in provider_configs:\n fields_to_add, fields_to_delete = provider_configs[field_value]\n\n # Delete fields from other providers\n for fields in fields_to_delete:\n self.delete_fields(build_config, fields)\n\n # Add provider-specific fields\n if field_value == \"OpenAI\" and not any(field in build_config for field in fields_to_add):\n build_config.update(fields_to_add)\n else:\n build_config.update(fields_to_add)\n # Reset input types for agent_llm\n build_config[\"agent_llm\"][\"input_types\"] = []\n elif field_value == \"Custom\":\n # Delete all provider fields\n self.delete_fields(build_config, ALL_PROVIDER_FIELDS)\n # Update with custom component\n custom_component = DropdownInput(\n name=\"agent_llm\",\n display_name=\"Language Model\",\n options=[*sorted(MODEL_PROVIDERS_DICT.keys()), \"Custom\"],\n value=\"Custom\",\n real_time_refresh=True,\n input_types=[\"LanguageModel\"],\n )\n build_config.update({\"agent_llm\": custom_component.to_dict()})\n # Update input types for all fields\n build_config = self.update_input_types(build_config)\n\n # Validate required keys\n default_keys = [\n \"code\",\n \"_type\",\n \"agent_llm\",\n \"tools\",\n \"input_value\",\n \"add_current_date_tool\",\n \"system_prompt\",\n \"agent_description\",\n \"max_iterations\",\n \"handle_parsing_errors\",\n \"verbose\",\n ]\n missing_keys = [key for key in default_keys if key not in build_config]\n if missing_keys:\n msg = f\"Missing required keys in build_config: {missing_keys}\"\n raise ValueError(msg)\n if isinstance(self.agent_llm, str) and self.agent_llm in MODEL_PROVIDERS_DICT:\n provider_info = MODEL_PROVIDERS_DICT.get(self.agent_llm)\n if provider_info:\n component_class = provider_info.get(\"component_class\")\n prefix = provider_info.get(\"prefix\")\n if component_class and hasattr(component_class, \"update_build_config\"):\n # Call each component class's update_build_config method\n # remove the prefix from the field_name\n if isinstance(field_name, str) and isinstance(prefix, str):\n field_name = field_name.replace(prefix, \"\")\n build_config = component_class.update_build_config(build_config, field_value, field_name)\n\n return build_config\n" + }, + "handle_parsing_errors": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Handle Parse Errors", + "dynamic": false, + "info": "Should the Agent fix errors when reading user input for better processing?", + "list": false, + "name": "handle_parsing_errors", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "input_value": { + "_input_type": "MessageTextInput", + "advanced": false, + "display_name": "Input", + "dynamic": false, + "info": "The input provided by the user for the agent to process.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": true, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "Start the analysis" 
+ }, + "json_mode": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "JSON Mode", + "dynamic": false, + "info": "If True, it will output JSON regardless of passing a schema.", + "list": false, + "name": "json_mode", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + }, + "max_iterations": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Max Iterations", + "dynamic": false, + "info": "The maximum number of attempts the agent can make to complete its task before it stops.", + "list": false, + "name": "max_iterations", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 15 + }, + "max_tokens": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Max Tokens", + "dynamic": false, + "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.", + "list": false, + "name": "max_tokens", + "placeholder": "", + "range_spec": { + "max": 128000, + "min": 0, + "step": 0.1, + "step_type": "float" + }, + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": "" + }, + "memory": { + "_input_type": "HandleInput", + "advanced": true, + "display_name": "External Memory", + "dynamic": false, + "info": "Retrieve messages from an external memory. If empty, it will use the Langflow tables.", + "input_types": ["BaseChatMessageHistory"], + "list": false, + "name": "memory", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "model_kwargs": { + "_input_type": "DictInput", + "advanced": true, + "display_name": "Model Kwargs", + "dynamic": false, + "info": "Additional keyword arguments to pass to the model.", + "list": false, + "name": "model_kwargs", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "model_name": { + "_input_type": "DropdownInput", + "advanced": false, + "combobox": false, + "display_name": "Model Name", + "dynamic": false, + "info": "", + "name": "model_name", + "options": [ + "gpt-4o-mini", + "gpt-4o", + "gpt-4-turbo", + "gpt-4-turbo-preview", + "gpt-4", + "gpt-3.5-turbo", + "gpt-3.5-turbo-0125" + ], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "gpt-4o-mini" + }, + "n_messages": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Number of Messages", + "dynamic": false, + "info": "Number of messages to retrieve.", + "list": false, + "name": "n_messages", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 100 + }, + "openai_api_base": { + "_input_type": "StrInput", + "advanced": true, + "display_name": "OpenAI API Base", + "dynamic": false, + "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. 
You can change this to use other APIs like JinaChat, LocalAI and Prem.", + "list": false, + "load_from_db": false, + "name": "openai_api_base", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "order": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Order", + "dynamic": false, + "info": "Order of the messages.", + "name": "order", + "options": ["Ascending", "Descending"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "Ascending" + }, + "output_parser": { + "_input_type": "HandleInput", + "advanced": true, + "display_name": "Output Parser", + "dynamic": false, + "info": "The parser to use to parse the output of the model", + "input_types": ["OutputParser"], + "list": false, + "name": "output_parser", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "output_schema": { + "_input_type": "DictInput", + "advanced": true, + "display_name": "Schema", + "dynamic": false, + "info": "The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled. [DEPRECATED]", + "list": true, + "name": "output_schema", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "seed": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Seed", + "dynamic": false, + "info": "The seed controls the reproducibility of the job.", + "list": false, + "name": "seed", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 1 + }, + "sender": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Sender Type", + "dynamic": false, + "info": "Filter by sender type.", + "name": "sender", + "options": ["Machine", "User", "Machine and User"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "Machine and User" + }, + "sender_name": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Sender Name", + "dynamic": false, + "info": "Filter by sender name.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "sender_name", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "Analysis & Editor Agent" + }, + "session_id": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Session ID", + "dynamic": false, + "info": "The session ID of the chat. 
If empty, the current session ID parameter will be used.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "session_id", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "system_prompt": { + "_input_type": "MultilineInput", + "advanced": false, + "display_name": "Agent Instructions", + "dynamic": false, + "info": "System Prompt: Initial instructions and context provided to guide the agent's behavior.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "system_prompt", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "You are a brilliant comedy writer known for making complex topics entertaining and memorable. Using the editor's refined document about {topic}, create an engaging, humorous blog post.\n\nYour approach should:\n- Find unexpected angles and amusing parallels\n- Use clever wordplay and wit (avoid cheap jokes)\n- Maintain accuracy while being entertaining\n- Include relatable examples and analogies\n- Keep a smart, sophisticated tone\n- Make the topic more approachable through humor\n\nCreate a blog post that makes people laugh while actually teaching them about {topic}. The humor should enhance, not overshadow, the educational value." + }, + "temperature": { + "_input_type": "FloatInput", + "advanced": true, + "display_name": "Temperature", + "dynamic": false, + "info": "", + "list": false, + "name": "temperature", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "float", + "value": 0.1 + }, + "template": { + "_input_type": "MultilineInput", + "advanced": true, + "display_name": "Template", + "dynamic": false, + "info": "The template to use for formatting the data. 
It can contain the keys {text}, {sender} or any other key in the message data.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "{sender_name}: {text}" + }, + "tools": { + "_input_type": "HandleInput", + "advanced": false, + "display_name": "Tools", + "dynamic": false, + "info": "These are the tools that the agent can use to help with tasks.", + "input_types": ["Tool", "BaseTool", "StructuredTool"], + "list": true, + "name": "tools", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "verbose": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Verbose", + "dynamic": false, + "info": "", + "list": false, + "name": "verbose", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + } + }, + "tool_mode": false + }, + "type": "Agent" + }, + "dragging": false, + "height": 650, + "id": "Agent-rH74C", + "position": { + "x": 815.1900903820148, + "y": -1365.4053932711827 + }, + "positionAbsolute": { + "x": 815.1900903820148, + "y": -1365.4053932711827 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt", + "id": "Prompt-BS8ii", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": { + "template": [] + }, + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt", + "documentation": "", + "edited": false, + "error": null, + "field_order": ["template"], + "frozen": true, + "full_path": null, + "icon": "prompts", + "is_composition": null, + "is_input": null, + "is_output": null, + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "name": "", + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Prompt Message", + "method": "build_prompt", + "name": "prompt", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.inputs.inputs import DefaultPromptField\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(self) -> Message:\n prompt = 
Message.from_template(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def _update_template(self, frontend_node: dict):\n prompt_template = frontend_node[\"template\"][\"template\"][\"value\"]\n custom_fields = frontend_node[\"custom_fields\"]\n frontend_node_template = frontend_node[\"template\"]\n _ = process_prompt_template(\n template=prompt_template,\n name=\"template\",\n custom_fields=custom_fields,\n frontend_node_template=frontend_node_template,\n )\n return frontend_node\n\n def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"This function is called after the code validation is done.\"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n # Kept it duplicated for backwards compatibility\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n\n def _get_fallback_input(self, **kwargs):\n return DefaultPromptField(**kwargs)\n" + }, + "template": { + "_input_type": "PromptInput", + "advanced": false, + "display_name": "Template", + "dynamic": false, + "info": "", + "list": false, + "load_from_db": false, + "name": "template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "type": "prompt", + "value": "# Expert Research Agent Protocol\n\n[Previous content remains the same, but adding this critical section about image handling:]\n\n## Image and Visual Data Handling\nWhen using Tavily Search with images enabled:\n\n1. Image Collection\n - Always enable include_images in Tavily search\n - Collect relevant stock charts, product images, and news photos\n - Save image URLs from reliable sources\n - Focus on recent, high-quality images\n\n2. Image Categories to Collect\n - Product showcase images\n - Stock performance charts\n - Company facilities\n - Key executive photos\n - Recent event images\n - Market share visualizations\n\n3. Image Documentation\n - Include full image URL\n - Add clear descriptions\n - Note image source and date\n - Explain image relevance\n\n4. 
Image Presentation in Output\n ```markdown\n ![Image Description](image_url)\n - Source: [Source Name]\n - Date: [Image Date]\n - Context: [Brief explanation of image relevance]\n ```\n\n## Output Structure\nPresent your findings in this format:\n\n### Company Overview\n[Comprehensive overview based on search results]\n\n### Recent Developments\n[Latest news and announcements with dates]\n\n### Market Context\n[Industry trends and competitive position]\n\n### Visual Insights\n[Reference relevant images from search]\n\n### Key Risk Factors\n[Identified risks and challenges]\n\n### Sources\n[List of key sources consulted]\n\nRemember to:\n- Use Markdown formatting for clear structure\n- Include dates for all time-sensitive information\n- Quote significant statistics and statements\n- Reference any included images\n- Highlight conflicting information or viewpoints\n- Pass all gathered data to the Finance Agent for detailed financial analysis" + } + }, + "tool_mode": false + }, + "type": "Prompt" + }, + "dragging": false, + "height": 260, + "id": "Prompt-BS8ii", + "position": { + "x": -1142.2312935529987, + "y": -1107.442614776065 + }, + "positionAbsolute": { + "x": -1142.2312935529987, + "y": -1107.442614776065 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt", + "id": "Prompt-DGXf4", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": { + "template": [] + }, + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt", + "documentation": "", + "edited": false, + "error": null, + "field_order": ["template"], + "frozen": false, + "full_path": null, + "icon": "prompts", + "is_composition": null, + "is_input": null, + "is_output": null, + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "name": "", + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Prompt Message", + "method": "build_prompt", + "name": "prompt", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.inputs.inputs import DefaultPromptField\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(self) -> Message:\n prompt = Message.from_template(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def _update_template(self, frontend_node: dict):\n prompt_template = frontend_node[\"template\"][\"template\"][\"value\"]\n 
custom_fields = frontend_node[\"custom_fields\"]\n frontend_node_template = frontend_node[\"template\"]\n _ = process_prompt_template(\n template=prompt_template,\n name=\"template\",\n custom_fields=custom_fields,\n frontend_node_template=frontend_node_template,\n )\n return frontend_node\n\n def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"This function is called after the code validation is done.\"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n # Kept it duplicated for backwards compatibility\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n\n def _get_fallback_input(self, **kwargs):\n return DefaultPromptField(**kwargs)\n" + }, + "template": { + "_input_type": "PromptInput", + "advanced": false, + "display_name": "Template", + "dynamic": false, + "info": "", + "list": false, + "load_from_db": false, + "name": "template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "type": "prompt", + "value": "# Financial Analysis Expert Protocol\n\nYou are an elite financial analyst with access to Yahoo Finance tools. Your role is to perform comprehensive financial analysis based on the research provided and the data available through Yahoo Finance methods.\n\n## CRITICAL: Stock Symbol Usage\n- Always use correct stock ticker symbols in UPPERCASE format\n- Examples of valid symbols:\n * AAPL (Apple Inc.)\n * MSFT (Microsoft)\n * NVDA (NVIDIA)\n * GOOGL (Alphabet/Google)\n * TSLA (Tesla)\n- Invalid formats to avoid:\n * ❌ Apple (company name instead of symbol)\n * ❌ aapl (lowercase)\n * ❌ $AAPL (with dollar sign)\n * ❌ AAPL.US (with extension)\n\n## Data Collection Strategy\n\n1. Initial Symbol Verification\n - Confirm valid stock symbol format before any analysis\n - Use get_info first to verify symbol validity\n - Cross-reference with get_fast_info to ensure data availability\n - If symbol is invalid, immediately report the error\n\n2. Core Company Analysis\n - Get basic info (get_info): Full company details\n - Fast metrics (get_fast_info): Quick market data\n - Earnings data (get_earnings): Performance history\n - Calendar events (get_calendar): Upcoming events\n\n3. Financial Statement Analysis\n - Income statements (get_income_stmt)\n - Balance sheets (get_balance_sheet)\n - Cash flow statements (get_cashflow)\n\n4. Market Intelligence\n - Latest recommendations (get_recommendations)\n - Recommendation trends (get_recommendations_summary)\n - Recent rating changes (get_upgrades_downgrades)\n - Breaking news (get_news, specify number of articles needed)\n\n5. Ownership Structure\n - Institutional holdings (get_institutional_holders)\n - Major stakeholders (get_major_holders)\n - Fund ownership (get_mutualfund_holders)\n - Insider activity:\n * Recent purchases (get_insider_purchases)\n * Transaction history (get_insider_transactions)\n * Insider roster (get_insider_roster_holders)\n\n6. 
Historical Patterns\n - Corporate actions (get_actions)\n - Dividend history (get_dividends)\n - Split history (get_splits)\n - Capital gains (get_capital_gains)\n - Regulatory filings (get_sec_filings)\n - ESG metrics (get_sustainability)\n\n## Analysis Framework\n\n1. Profitability Metrics\n - Revenue trends\n - Margin analysis\n - Efficiency ratios\n - Return metrics\n\n2. Financial Health\n - Liquidity ratios\n - Debt analysis\n - Working capital\n - Cash flow quality\n\n3. Growth Assessment\n - Historical rates\n - Future projections\n - Market opportunity\n - Expansion plans\n\n4. Risk Evaluation\n - Financial risks\n - Market position\n - Operational challenges\n - Competitive threats\n\n## Output Structure\n\n### Symbol Information\n[Confirm stock symbol and basic company information]\n\n### Financial Overview\n[Key metrics summary with actual numbers]\n\n### Profitability Analysis\n[Detailed profit metrics with comparisons]\n\n### Balance Sheet Review\n[Asset and liability analysis]\n\n### Cash Flow Assessment\n[Cash generation and usage patterns]\n\n### Market Sentiment\n[Analyst views and institutional activity]\n\n### Growth Analysis\n[Historical and projected growth]\n\n### Risk Factors\n[Comprehensive risk assessment]\n\nRemember to:\n- ALWAYS verify stock symbol validity first\n- Use exact numbers from the data\n- Compare with industry standards\n- Highlight significant trends\n- Flag data anomalies\n- Identify key risks\n- Provide metric context\n- Focus on material information\n\nPass your comprehensive financial analysis to the Analysis & Editor Agent for final synthesis and recommendations. Include any invalid symbol errors or data availability issues in your report." + } + }, + "tool_mode": false + }, + "type": "Prompt" + }, + "dragging": false, + "height": 260, + "id": "Prompt-DGXf4", + "position": { + "x": -344.9674638932195, + "y": -1280.1782190739505 + }, + "positionAbsolute": { + "x": -344.9674638932195, + "y": -1280.1782190739505 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt", + "id": "Prompt-rPwbg", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": { + "template": ["research_agent_output", "finance_agent_output"] + }, + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt", + "documentation": "", + "edited": false, + "error": null, + "field_order": ["template"], + "frozen": false, + "full_path": null, + "icon": "prompts", + "is_composition": null, + "is_input": null, + "is_output": null, + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "name": "", + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Prompt Message", + "method": "build_prompt", + "name": "prompt", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.inputs.inputs import DefaultPromptField\nfrom langflow.io 
import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(self) -> Message:\n prompt = Message.from_template(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def _update_template(self, frontend_node: dict):\n prompt_template = frontend_node[\"template\"][\"template\"][\"value\"]\n custom_fields = frontend_node[\"custom_fields\"]\n frontend_node_template = frontend_node[\"template\"]\n _ = process_prompt_template(\n template=prompt_template,\n name=\"template\",\n custom_fields=custom_fields,\n frontend_node_template=frontend_node_template,\n )\n return frontend_node\n\n def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"This function is called after the code validation is done.\"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n # Kept it duplicated for backwards compatibility\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n\n def _get_fallback_input(self, **kwargs):\n return DefaultPromptField(**kwargs)\n" + }, + "finance_agent_output": { + "advanced": false, + "display_name": "finance_agent_output", + "dynamic": false, + "field_type": "str", + "fileTypes": [], + "file_path": "", + "info": "", + "input_types": ["Message", "Text"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "finance_agent_output", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "" + }, + "research_agent_output": { + "advanced": false, + "display_name": "research_agent_output", + "dynamic": false, + "field_type": "str", + "fileTypes": [], + "file_path": "", + "info": "", + "input_types": ["Message", "Text"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "research_agent_output", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "" + }, + "template": { + "_input_type": "PromptInput", + "advanced": false, + "display_name": "Template", + "dynamic": false, + "info": "", + "list": false, + "load_from_db": false, + "name": "template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "type": "prompt", + "value": "# Investment Analysis & Editorial Protocol\n\nYou are an elite financial analyst and editorial expert responsible for creating the final investment analysis report. 
Your role is to synthesize research and financial data into a visually appealing, data-rich investment analysis using proper markdown formatting.\n\n## Input Processing\n1. Research Agent Input (Visual + Market Research):\n - Market research and news\n - Industry trends\n - Competitive analysis\n - Images and charts\n - News sentiment\n - {research_agent_output}\n\n2. Finance Agent Input (Quantitative Data):\n - Detailed financial metrics\n - Stock statistics\n - Analyst ratings\n - Growth metrics\n - Risk factors\n - {finance_agent_output}\n\n## Output Format Requirements\n\n1. Header Format\n Use single # for main title, increment for subsections\n \n2. Image Placement\n - Place images immediately after relevant sections\n - Use proper markdown format: ![Alt Text](url)\n - Always include source and context\n - Use *italics* for image captions\n\n3. Table Formatting\n - Use standard markdown tables\n - Align numbers right, text left\n - Include header separators\n - Keep consistent column widths\n\n4. Data Presentation\n - Use bold (**) for key metrics\n - Include percentage changes\n - Show comparisons\n - Include trends (↑/↓)\n\n## Report Structure\n\n# Investment Analysis Report: [Company Name] ($SYMBOL)\n*Generated: [Date] | Type: Comprehensive Evaluation*\n\n[Executive Summary - 3 paragraphs max]\n\n## Quick Take\n- **Recommendation**: [BUY/HOLD/SELL]\n- **Target Price**: $XXX\n- **Risk Level**: [LOW/MEDIUM/HIGH]\n- **Investment Horizon**: [SHORT/MEDIUM/LONG]-term\n\n## Market Analysis\n[Insert most relevant market image here]\n*Source: [Name] - [Context]*\n\n### Industry Position\n- Market share data\n- Competitive analysis\n- Recent developments\n\n## Financial Health\n| Metric | Value | YoY Change | Industry Avg |\n|:-------|------:|-----------:|-------------:|\n| Revenue | $XXX | XX% | $XXX |\n[Additional metrics]\n\n### Key Performance Indicators\n- **Revenue Growth**: XX%\n- **Profit Margin**: XX%\n- **ROE**: XX%\n\n## Growth Drivers\n1. Short-term Catalysts\n2. Long-term Opportunities\n3. Innovation Pipeline\n\n## Risk Assessment\n| Risk Factor | Severity | Probability | Impact |\n|:------------|:---------|:------------|:-------|\n| [Risk 1] | HIGH/MED/LOW | H/M/L | Details |\n\n## Technical Analysis\n[Insert technical chart]\n*Source: [Name] - Analysis of key technical indicators*\n\n## Investment Strategy\n### Long-term (18+ months)\n- Entry points\n- Position sizing\n- Risk management\n\n### Medium-term (6-18 months)\n- Technical levels\n- Catalysts timeline\n\n### Short-term (0-6 months)\n- Support/Resistance\n- Trading parameters\n\n## Price Targets\n- **Bear Case**: $XXX (-XX%)\n- **Base Case**: $XXX\n- **Bull Case**: $XXX (+XX%)\n\n## Monitoring Checklist\n1. [Metric 1]\n2. [Metric 2]\n3. [Metric 3]\n\n## Visual Evidence\n[Insert additional relevant images]\n*Source: [Name] - [Specific context and analysis]*\n\n*Disclaimer: This analysis is for informational purposes only. Always conduct your own research before making investment decisions.*\n\n## Output Requirements\n\n1. Visual Excellence\n - Strategic image placement\n - Clear data visualization\n - Consistent formatting\n - Professional appearance\n\n2. Data Accuracy\n - Cross-reference numbers\n - Verify calculations\n - Include trends\n - Show comparisons\n\n3. Action Focus\n - Clear recommendations\n - Specific entry/exit points\n - Risk management guidelines\n - Monitoring triggers\n\n4. 
Professional Standards\n - No spelling errors\n - Consistent formatting\n - Proper citations\n - Clear attribution\n\nRemember:\n- Never use triple backticks\n- Include all images with proper markdown\n- Maintain consistent formatting\n- Provide specific, actionable insights\n- Use emojis sparingly and professionally\n- Cross-validate all data points" + } + }, + "tool_mode": false + }, + "type": "Prompt" + }, + "dragging": false, + "height": 433, + "id": "Prompt-rPwbg", + "position": { + "x": 416.02309796632085, + "y": -1081.5957453651372 + }, + "positionAbsolute": { + "x": 416.02309796632085, + "y": -1081.5957453651372 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "id": "ChatInput-3mEtf", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Get chat inputs from the Playground.", + "display_name": "Chat Input", + "documentation": "", + "edited": false, + "field_order": [ + "input_value", + "should_store_message", + "sender", + "sender_name", + "session_id", + "files", + "background_color", + "chat_icon", + "text_color" + ], + "frozen": true, + "icon": "MessagesSquare", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Message", + "method": "message_response", + "name": "message", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "background_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Background Color", + "dynamic": false, + "info": "The background color of the icon.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "background_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "chat_icon": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Icon", + "dynamic": false, + "info": "The icon of the message.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "chat_icon", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_USER, MESSAGE_SENDER_USER\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n 
name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n _background_color = self.background_color\n _text_color = self.text_color\n _icon = self.chat_icon\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\"background_color\": _background_color, \"text_color\": _text_color, \"icon\": _icon},\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n" + }, + "files": { + "_input_type": "FileInput", + "advanced": true, + "display_name": "Files", + "dynamic": false, + "fileTypes": [ + "txt", + "md", + "mdx", + "csv", + "json", + "yaml", + "yml", + "xml", + "html", + "htm", + "pdf", + "docx", + "py", + "sh", + "sql", + "js", + "ts", + "tsx", + "jpg", + "jpeg", + "png", + "bmp", + "image" + ], + "file_path": "", + "info": "Files to be sent with the message.", + "list": true, + "name": "files", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "file", + "value": "" + }, + "input_value": { + "_input_type": "MultilineInput", + "advanced": false, + "display_name": "Text", + "dynamic": false, + "info": "Message to be passed as input.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "Should I invest in Tesla (TSLA) stock right now? Please analyze the company's current position, market trends, financial health, and provide a clear investment recommendation." 
+ }, + "sender": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Sender Type", + "dynamic": false, + "info": "Type of sender.", + "name": "sender", + "options": ["Machine", "User"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "User" + }, + "sender_name": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Sender Name", + "dynamic": false, + "info": "Name of the sender.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "sender_name", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "User" + }, + "session_id": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Session ID", + "dynamic": false, + "info": "The session ID of the chat. If empty, the current session ID parameter will be used.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "session_id", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "should_store_message": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Store Messages", + "dynamic": false, + "info": "Store the message in the history.", + "list": false, + "name": "should_store_message", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "text_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Text Color", + "dynamic": false, + "info": "The text color of the name", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "text_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + } + }, + "tool_mode": false + }, + "type": "ChatInput" + }, + "dragging": false, + "height": 234, + "id": "ChatInput-3mEtf", + "position": { + "x": -1510.6054210793818, + "y": -947.702056394023 + }, + "positionAbsolute": { + "x": -1510.6054210793818, + "y": -947.702056394023 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "id": "CalculatorTool-xo5ux", + "node": { + "base_classes": ["Data", "Tool"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Perform basic arithmetic operations on a given expression.", + "display_name": "Calculator", + "documentation": "", + "edited": false, + "field_order": ["expression"], + "frozen": false, + "icon": "calculator", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Data", + "method": "run_model", + "name": "api_run_model", + "required_inputs": [], + "selected": "Data", + "types": ["Data"], + "value": "__UNDEFINED__" + }, + { + "cache": true, + "display_name": "Tool", + "method": "build_tool", + "name": "api_build_tool", + "required_inputs": [], + "selected": "Tool", + "types": ["Tool"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "code": { + "advanced": true, + 
"dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "import ast\nimport operator\n\nfrom langchain.tools import StructuredTool\nfrom langchain_core.tools import ToolException\nfrom loguru import logger\nfrom pydantic import BaseModel, Field\n\nfrom langflow.base.langchain_utilities.model import LCToolComponent\nfrom langflow.field_typing import Tool\nfrom langflow.inputs import MessageTextInput\nfrom langflow.schema import Data\n\n\nclass CalculatorToolComponent(LCToolComponent):\n display_name = \"Calculator\"\n description = \"Perform basic arithmetic operations on a given expression.\"\n icon = \"calculator\"\n name = \"CalculatorTool\"\n\n inputs = [\n MessageTextInput(\n name=\"expression\",\n display_name=\"Expression\",\n info=\"The arithmetic expression to evaluate (e.g., '4*4*(33/22)+12-20').\",\n tool_mode=True,\n ),\n ]\n\n class CalculatorToolSchema(BaseModel):\n expression: str = Field(..., description=\"The arithmetic expression to evaluate.\")\n\n def run_model(self) -> list[Data]:\n return self._evaluate_expression(self.expression)\n\n def build_tool(self) -> Tool:\n return StructuredTool.from_function(\n name=\"calculator\",\n description=\"Evaluate basic arithmetic expressions. Input should be a string containing the expression.\",\n func=self._eval_expr_with_error,\n args_schema=self.CalculatorToolSchema,\n )\n\n def _eval_expr(self, node):\n # Define the allowed operators\n operators = {\n ast.Add: operator.add,\n ast.Sub: operator.sub,\n ast.Mult: operator.mul,\n ast.Div: operator.truediv,\n ast.Pow: operator.pow,\n }\n if isinstance(node, ast.Num):\n return node.n\n if isinstance(node, ast.BinOp):\n return operators[type(node.op)](self._eval_expr(node.left), self._eval_expr(node.right))\n if isinstance(node, ast.UnaryOp):\n return operators[type(node.op)](self._eval_expr(node.operand))\n if isinstance(node, ast.Call):\n msg = (\n \"Function calls like sqrt(), sin(), cos() etc. are not supported. 
\"\n \"Only basic arithmetic operations (+, -, *, /, **) are allowed.\"\n )\n raise TypeError(msg)\n msg = f\"Unsupported operation or expression type: {type(node).__name__}\"\n raise TypeError(msg)\n\n def _eval_expr_with_error(self, expression: str) -> list[Data]:\n try:\n return self._evaluate_expression(expression)\n except Exception as e:\n raise ToolException(str(e)) from e\n\n def _evaluate_expression(self, expression: str) -> list[Data]:\n try:\n # Parse the expression and evaluate it\n tree = ast.parse(expression, mode=\"eval\")\n result = self._eval_expr(tree.body)\n\n # Format the result to a reasonable number of decimal places\n formatted_result = f\"{result:.6f}\".rstrip(\"0\").rstrip(\".\")\n\n self.status = formatted_result\n return [Data(data={\"result\": formatted_result})]\n\n except (SyntaxError, TypeError, KeyError) as e:\n error_message = f\"Invalid expression: {e}\"\n self.status = error_message\n return [Data(data={\"error\": error_message, \"input\": expression})]\n except ZeroDivisionError:\n error_message = \"Error: Division by zero\"\n self.status = error_message\n return [Data(data={\"error\": error_message, \"input\": expression})]\n except Exception as e: # noqa: BLE001\n logger.opt(exception=True).debug(\"Error evaluating expression\")\n error_message = f\"Error: {e}\"\n self.status = error_message\n return [Data(data={\"error\": error_message, \"input\": expression})]\n" + }, + "expression": { + "_input_type": "MessageTextInput", + "advanced": false, + "display_name": "Expression", + "dynamic": false, + "info": "The arithmetic expression to evaluate (e.g., '4*4*(33/22)+12-20').", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "expression", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": true, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + } + }, + "tool_mode": false + }, + "type": "CalculatorTool" + }, + "dragging": false, + "height": 254, + "id": "CalculatorTool-xo5ux", + "position": { + "x": 415.51528601650625, + "y": -603.8178818852236 + }, + "positionAbsolute": { + "x": 415.51528601650625, + "y": -603.8178818852236 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "description": "**Tavily AI** is a search engine optimized for LLMs and RAG, aimed at efficient, quick, and persistent search results. It can be used independently or as an agent tool.\n\nNote: Check 'Advanced' for all options.\n", + "display_name": "Tavily AI Search", + "id": "TavilyAISearch-YfG8u", + "node": { + "base_classes": ["Data", "Tool"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "**Tavily AI** is a search engine optimized for LLMs and RAG, aimed at efficient, quick, and persistent search results. 
It can be used independently or as an agent tool.\n\nNote: Check 'Advanced' for all options.\n", + "display_name": "Tavily AI Search", + "documentation": "https://docs.tavily.com/", + "edited": false, + "field_order": [ + "api_key", + "query", + "search_depth", + "topic", + "max_results", + "include_images", + "include_answer" + ], + "frozen": false, + "icon": "TavilyIcon", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Data", + "method": "run_model", + "name": "api_run_model", + "required_inputs": ["api_key"], + "selected": "Data", + "types": ["Data"], + "value": "__UNDEFINED__" + }, + { + "cache": true, + "display_name": "Tool", + "method": "build_tool", + "name": "api_build_tool", + "required_inputs": ["api_key"], + "selected": "Tool", + "types": ["Tool"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "api_key": { + "_input_type": "SecretStrInput", + "advanced": false, + "display_name": "Tavily API Key", + "dynamic": false, + "info": "Your Tavily API Key.", + "input_types": ["Message"], + "load_from_db": true, + "name": "api_key", + "password": true, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "str", + "value": "" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from enum import Enum\n\nimport httpx\nfrom langchain.tools import StructuredTool\nfrom langchain_core.tools import ToolException\nfrom loguru import logger\nfrom pydantic import BaseModel, Field\n\nfrom langflow.base.langchain_utilities.model import LCToolComponent\nfrom langflow.field_typing import Tool\nfrom langflow.inputs import BoolInput, DropdownInput, IntInput, MessageTextInput, SecretStrInput\nfrom langflow.schema import Data\n\n\nclass TavilySearchDepth(Enum):\n BASIC = \"basic\"\n ADVANCED = \"advanced\"\n\n\nclass TavilySearchTopic(Enum):\n GENERAL = \"general\"\n NEWS = \"news\"\n\n\nclass TavilySearchSchema(BaseModel):\n query: str = Field(..., description=\"The search query you want to execute with Tavily.\")\n search_depth: TavilySearchDepth = Field(TavilySearchDepth.BASIC, description=\"The depth of the search.\")\n topic: TavilySearchTopic = Field(TavilySearchTopic.GENERAL, description=\"The category of the search.\")\n max_results: int = Field(5, description=\"The maximum number of search results to return.\")\n include_images: bool = Field(default=False, description=\"Include a list of query-related images in the response.\")\n include_answer: bool = Field(default=False, description=\"Include a short answer to original query.\")\n\n\nclass TavilySearchToolComponent(LCToolComponent):\n display_name = \"Tavily AI Search\"\n description = \"\"\"**Tavily AI** is a search engine optimized for LLMs and RAG, \\\n aimed at efficient, quick, and persistent search results. 
It can be used independently or as an agent tool.\n\nNote: Check 'Advanced' for all options.\n\"\"\"\n icon = \"TavilyIcon\"\n name = \"TavilyAISearch\"\n documentation = \"https://docs.tavily.com/\"\n\n inputs = [\n SecretStrInput(\n name=\"api_key\",\n display_name=\"Tavily API Key\",\n required=True,\n info=\"Your Tavily API Key.\",\n ),\n MessageTextInput(\n name=\"query\",\n display_name=\"Search Query\",\n info=\"The search query you want to execute with Tavily.\",\n ),\n DropdownInput(\n name=\"search_depth\",\n display_name=\"Search Depth\",\n info=\"The depth of the search.\",\n options=list(TavilySearchDepth),\n value=TavilySearchDepth.ADVANCED,\n advanced=True,\n ),\n DropdownInput(\n name=\"topic\",\n display_name=\"Search Topic\",\n info=\"The category of the search.\",\n options=list(TavilySearchTopic),\n value=TavilySearchTopic.GENERAL,\n advanced=True,\n ),\n IntInput(\n name=\"max_results\",\n display_name=\"Max Results\",\n info=\"The maximum number of search results to return.\",\n value=5,\n advanced=True,\n ),\n BoolInput(\n name=\"include_images\",\n display_name=\"Include Images\",\n info=\"Include a list of query-related images in the response.\",\n value=True,\n advanced=True,\n ),\n BoolInput(\n name=\"include_answer\",\n display_name=\"Include Answer\",\n info=\"Include a short answer to original query.\",\n value=True,\n advanced=True,\n ),\n ]\n\n def run_model(self) -> list[Data]:\n # Convert string values to enum instances with validation\n try:\n search_depth_enum = (\n self.search_depth\n if isinstance(self.search_depth, TavilySearchDepth)\n else TavilySearchDepth(str(self.search_depth).lower())\n )\n except ValueError as e:\n error_message = f\"Invalid search depth value: {e!s}\"\n self.status = error_message\n return [Data(data={\"error\": error_message})]\n\n try:\n topic_enum = (\n self.topic if isinstance(self.topic, TavilySearchTopic) else TavilySearchTopic(str(self.topic).lower())\n )\n except ValueError as e:\n error_message = f\"Invalid topic value: {e!s}\"\n self.status = error_message\n return [Data(data={\"error\": error_message})]\n\n return self._tavily_search(\n self.query,\n search_depth=search_depth_enum,\n topic=topic_enum,\n max_results=self.max_results,\n include_images=self.include_images,\n include_answer=self.include_answer,\n )\n\n def build_tool(self) -> Tool:\n return StructuredTool.from_function(\n name=\"tavily_search\",\n description=\"Perform a web search using the Tavily API.\",\n func=self._tavily_search,\n args_schema=TavilySearchSchema,\n )\n\n def _tavily_search(\n self,\n query: str,\n *,\n search_depth: TavilySearchDepth = TavilySearchDepth.BASIC,\n topic: TavilySearchTopic = TavilySearchTopic.GENERAL,\n max_results: int = 5,\n include_images: bool = False,\n include_answer: bool = False,\n ) -> list[Data]:\n # Validate enum values\n if not isinstance(search_depth, TavilySearchDepth):\n msg = f\"Invalid search_depth value: {search_depth}\"\n raise TypeError(msg)\n if not isinstance(topic, TavilySearchTopic):\n msg = f\"Invalid topic value: {topic}\"\n raise TypeError(msg)\n\n try:\n url = \"https://api.tavily.com/search\"\n headers = {\n \"content-type\": \"application/json\",\n \"accept\": \"application/json\",\n }\n payload = {\n \"api_key\": self.api_key,\n \"query\": query,\n \"search_depth\": search_depth.value,\n \"topic\": topic.value,\n \"max_results\": max_results,\n \"include_images\": include_images,\n \"include_answer\": include_answer,\n }\n\n with httpx.Client() as client:\n response = client.post(url, 
json=payload, headers=headers)\n\n response.raise_for_status()\n search_results = response.json()\n\n data_results = [\n Data(\n data={\n \"title\": result.get(\"title\"),\n \"url\": result.get(\"url\"),\n \"content\": result.get(\"content\"),\n \"score\": result.get(\"score\"),\n }\n )\n for result in search_results.get(\"results\", [])\n ]\n\n if include_answer and search_results.get(\"answer\"):\n data_results.insert(0, Data(data={\"answer\": search_results[\"answer\"]}))\n\n if include_images and search_results.get(\"images\"):\n data_results.append(Data(data={\"images\": search_results[\"images\"]}))\n\n self.status = data_results # type: ignore[assignment]\n\n except httpx.HTTPStatusError as e:\n error_message = f\"HTTP error: {e.response.status_code} - {e.response.text}\"\n logger.debug(error_message)\n self.status = error_message\n raise ToolException(error_message) from e\n except Exception as e:\n error_message = f\"Unexpected error: {e}\"\n logger.opt(exception=True).debug(\"Error running Tavily Search\")\n self.status = error_message\n raise ToolException(error_message) from e\n return data_results\n" + }, + "include_answer": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Include Answer", + "dynamic": false, + "info": "Include a short answer to original query.", + "list": false, + "name": "include_answer", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "include_images": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Include Images", + "dynamic": false, + "info": "Include a list of query-related images in the response.", + "list": false, + "name": "include_images", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "max_results": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Max Results", + "dynamic": false, + "info": "The maximum number of search results to return.", + "list": false, + "name": "max_results", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 5 + }, + "query": { + "_input_type": "MessageTextInput", + "advanced": false, + "display_name": "Search Query", + "dynamic": false, + "info": "The search query you want to execute with Tavily.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "query", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "search_depth": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Search Depth", + "dynamic": false, + "info": "The depth of the search.", + "load_from_db": false, + "name": "search_depth", + "options": ["basic", "advanced"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "advanced" + }, + "topic": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Search Topic", + "dynamic": false, + "info": "The category of the search.", + "load_from_db": false, + "name": "topic", + "options": ["general", "news"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": 
false, + "trace_as_metadata": true, + "type": "str", + "value": "general" + } + }, + "tool_mode": false + }, + "type": "TavilyAISearch" + }, + "dragging": false, + "height": 394, + "id": "TavilyAISearch-YfG8u", + "position": { + "x": -1132.8634419233736, + "y": -770.0391255413992 + }, + "positionAbsolute": { + "x": -1132.8634419233736, + "y": -770.0391255413992 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "id": "note-8beLl", + "node": { + "description": "# Sequential Tasks Agents\n\n## Overview\nThis flow demonstrates how to chain multiple AI agents for comprehensive research and analysis. Each agent specializes in different aspects of the research process, building upon the previous agent's work.\n\n## How to Use the Flow\n\n1. **Input Your Query** 🎯\n - Be specific and clear\n - Include key aspects you want analyzed\n - Examples:\n ```\n Good: \"Should I invest in Tesla (TSLA)? Focus on AI development impact\"\n Bad: \"Tell me about Tesla\"\n ```\n\n2. **Research Agent Process** 🔍\n - Utilizes Tavily Search for comprehensive research\n\n\n3. **Specialized Analysis** 📊\n - Each agent adds unique value:\n ```\n Research Agent → Deep Research & Context\n ↓\n Finance Agent → Data Analysis & Metrics\n ↓\n Editor Agent → Final Synthesis & Report\n ```\n\n4. **Output Format** 📝\n - Structured report\n - Embedded images and charts\n - Data-backed insights\n - Clear recommendations\n\n## Pro Tips\n\n### Query Construction\n- Include specific points of interest\n- Mention required metrics or data points\n- Specify time frames if relevant\n\n### Flow Customization\n- Modify agent prompts for different use cases\n- Add or remove tools as needed\n\n## Common Applications\n- Investment Research\n- Market Analysis\n- Competitive Intelligence\n- Industry Reports\n- Technology Impact Studies\n\n⚡ **Best Practice**: Start with a test query to understand the flow's capabilities before running complex analyses.\n\n---\n*Note: This flow template uses financial analysis as an example but can be adapted for any research-intensive task requiring multiple perspectives and data sources.*", + "display_name": "", + "documentation": "", + "template": {} + }, + "type": "note" + }, + "dragging": false, + "height": 800, + "id": "note-8beLl", + "position": { + "x": -2122.739127560837, + "y": -1302.6582482086806 + }, + "positionAbsolute": { + "x": -2122.739127560837, + "y": -1302.6582482086806 + }, + "resizing": false, + "selected": false, + "style": { + "height": 800, + "width": 600 + }, + "type": "noteNode", + "width": 600 + }, + { + "data": { + "id": "note-tB2J2", + "node": { + "description": "## What Are Sequential Task Agents?\nA system where multiple AI agents work in sequence, each specializing in specific tasks and passing their output to the next agent in the chain. Think of it as an assembly line where each agent adds value to the final result.\n\n## How It Works\n1. **First Agent** → **Second Agent** → **Third Agent** → **Final Output**\n - Each agent receives input from the previous one\n - Processes and enhances the information\n - Passes refined output forward\n\n## Key Benefits\n- **Specialization**: Each agent focuses on specific tasks\n- **Progressive Refinement**: Information gets enhanced at each step\n- **Structured Output**: Final result combines multiple perspectives\n- **Quality Control**: Each agent validates and improves previous work\n\n## Building Your Own Sequence\n1. 
**Plan Your Chain**\n - Identify distinct tasks\n - Determine logical order\n - Define input/output requirements\n\n2. **Configure Agents**\n - Give each agent clear instructions\n - Ensure compatible outputs/inputs\n - Set appropriate tools for each agent\n\n3. **Connect the Flow**\n - Link agents in proper order\n - Test data flow between agents\n - Verify final output format\n\n## Example Applications\n- Research → Analysis → Report Writing\n- Data Collection → Processing → Visualization\n- Content Research → Writing → Editing\n- Market Analysis → Financial Review → Investment Advice\n\n⭐ **Pro Tip**: The strength of sequential agents comes from how well they complement each other's capabilities.\n\nThis template uses financial analysis as an example, but you can adapt it for any multi-step process requiring different expertise at each stage.", + "display_name": "", + "documentation": "", + "template": { + "backgroundColor": "blue" + } + }, + "type": "note" + }, + "dragging": false, + "height": 800, + "id": "note-tB2J2", + "position": { + "x": -1456.0688717707517, + "y": -1916.6876704866322 + }, + "positionAbsolute": { + "x": -1456.0688717707517, + "y": -1916.6876704866322 + }, + "resizing": false, + "selected": false, + "style": { + "height": 800, + "width": 600 + }, + "type": "noteNode", + "width": 600 + }, + { + "data": { + "id": "YahooFinanceTool-YmOKx", + "node": { + "base_classes": ["Data", "Tool"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Access financial data and market information using Yahoo Finance.", + "display_name": "Yahoo Finance", + "documentation": "", + "edited": false, + "field_order": ["symbol", "method", "num_news"], + "frozen": false, + "icon": "trending-up", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Data", + "method": "run_model", + "name": "api_run_model", + "required_inputs": [], + "selected": "Data", + "types": ["Data"], + "value": "__UNDEFINED__" + }, + { + "cache": true, + "display_name": "Tool", + "method": "build_tool", + "name": "api_build_tool", + "required_inputs": [], + "selected": "Tool", + "types": ["Tool"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "import ast\nimport pprint\nfrom enum import Enum\n\nimport yfinance as yf\nfrom langchain.tools import StructuredTool\nfrom langchain_core.tools import ToolException\nfrom loguru import logger\nfrom pydantic import BaseModel, Field\n\nfrom langflow.base.langchain_utilities.model import LCToolComponent\nfrom langflow.field_typing import Tool\nfrom langflow.inputs import DropdownInput, IntInput, MessageTextInput\nfrom langflow.schema import Data\n\n\nclass YahooFinanceMethod(Enum):\n GET_INFO = \"get_info\"\n GET_NEWS = \"get_news\"\n GET_ACTIONS = \"get_actions\"\n GET_ANALYSIS = \"get_analysis\"\n GET_BALANCE_SHEET = \"get_balance_sheet\"\n GET_CALENDAR = \"get_calendar\"\n GET_CASHFLOW = \"get_cashflow\"\n GET_INSTITUTIONAL_HOLDERS = \"get_institutional_holders\"\n GET_RECOMMENDATIONS = \"get_recommendations\"\n GET_SUSTAINABILITY = \"get_sustainability\"\n GET_MAJOR_HOLDERS = 
\"get_major_holders\"\n GET_MUTUALFUND_HOLDERS = \"get_mutualfund_holders\"\n GET_INSIDER_PURCHASES = \"get_insider_purchases\"\n GET_INSIDER_TRANSACTIONS = \"get_insider_transactions\"\n GET_INSIDER_ROSTER_HOLDERS = \"get_insider_roster_holders\"\n GET_DIVIDENDS = \"get_dividends\"\n GET_CAPITAL_GAINS = \"get_capital_gains\"\n GET_SPLITS = \"get_splits\"\n GET_SHARES = \"get_shares\"\n GET_FAST_INFO = \"get_fast_info\"\n GET_SEC_FILINGS = \"get_sec_filings\"\n GET_RECOMMENDATIONS_SUMMARY = \"get_recommendations_summary\"\n GET_UPGRADES_DOWNGRADES = \"get_upgrades_downgrades\"\n GET_EARNINGS = \"get_earnings\"\n GET_INCOME_STMT = \"get_income_stmt\"\n\n\nclass YahooFinanceSchema(BaseModel):\n symbol: str = Field(..., description=\"The stock symbol to retrieve data for.\")\n method: YahooFinanceMethod = Field(YahooFinanceMethod.GET_INFO, description=\"The type of data to retrieve.\")\n num_news: int | None = Field(5, description=\"The number of news articles to retrieve.\")\n\n\nclass YfinanceToolComponent(LCToolComponent):\n display_name = \"Yahoo Finance\"\n description = \"\"\"Uses [yfinance](https://pypi.org/project/yfinance/) (unofficial package) \\\nto access financial data and market information from Yahoo Finance.\"\"\"\n icon = \"trending-up\"\n name = \"YahooFinanceTool\"\n\n inputs = [\n MessageTextInput(\n name=\"symbol\",\n display_name=\"Stock Symbol\",\n info=\"The stock symbol to retrieve data for (e.g., AAPL, GOOG).\",\n ),\n DropdownInput(\n name=\"method\",\n display_name=\"Data Method\",\n info=\"The type of data to retrieve.\",\n options=list(YahooFinanceMethod),\n value=\"get_news\",\n ),\n IntInput(\n name=\"num_news\",\n display_name=\"Number of News\",\n info=\"The number of news articles to retrieve (only applicable for get_news).\",\n value=5,\n ),\n ]\n\n def run_model(self) -> list[Data]:\n return self._yahoo_finance_tool(\n self.symbol,\n self.method,\n self.num_news,\n )\n\n def build_tool(self) -> Tool:\n return StructuredTool.from_function(\n name=\"yahoo_finance\",\n description=\"Access financial data and market information from Yahoo Finance.\",\n func=self._yahoo_finance_tool,\n args_schema=YahooFinanceSchema,\n )\n\n def _yahoo_finance_tool(\n self,\n symbol: str,\n method: YahooFinanceMethod,\n num_news: int | None = 5,\n ) -> list[Data]:\n ticker = yf.Ticker(symbol)\n\n try:\n if method == YahooFinanceMethod.GET_INFO:\n result = ticker.info\n elif method == YahooFinanceMethod.GET_NEWS:\n result = ticker.news[:num_news]\n else:\n result = getattr(ticker, method.value)()\n\n result = pprint.pformat(result)\n\n if method == YahooFinanceMethod.GET_NEWS:\n data_list = [Data(data=article) for article in ast.literal_eval(result)]\n else:\n data_list = [Data(data={\"result\": result})]\n\n except Exception as e:\n error_message = f\"Error retrieving data: {e}\"\n logger.debug(error_message)\n self.status = error_message\n raise ToolException(error_message) from e\n\n return data_list\n" + }, + "method": { + "_input_type": "DropdownInput", + "advanced": false, + "combobox": false, + "display_name": "Data Method", + "dynamic": false, + "info": "The type of data to retrieve.", + "name": "method", + "options": [ + "get_info", + "get_news", + "get_actions", + "get_analysis", + "get_balance_sheet", + "get_calendar", + "get_cashflow", + "get_institutional_holders", + "get_recommendations", + "get_sustainability", + "get_major_holders", + "get_mutualfund_holders", + "get_insider_purchases", + "get_insider_transactions", + "get_insider_roster_holders", + 
"get_dividends", + "get_capital_gains", + "get_splits", + "get_shares", + "get_fast_info", + "get_sec_filings", + "get_recommendations_summary", + "get_upgrades_downgrades", + "get_earnings", + "get_income_stmt" + ], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "get_news" + }, + "num_news": { + "_input_type": "IntInput", + "advanced": false, + "display_name": "Number of News", + "dynamic": false, + "info": "The number of news articles to retrieve (only applicable for get_news).", + "list": false, + "name": "num_news", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 5 + }, + "symbol": { + "_input_type": "MessageTextInput", + "advanced": false, + "display_name": "Stock Symbol", + "dynamic": false, + "info": "The stock symbol to retrieve data for (e.g., AAPL, GOOG).", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "symbol", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "AAPL" + } + }, + "tool_mode": false + }, + "type": "YahooFinanceTool" + }, + "dragging": false, + "height": 475, + "id": "YahooFinanceTool-YmOKx", + "position": { + "x": -338.2658218008318, + "y": -945.7435123503128 + }, + "positionAbsolute": { + "x": -338.2658218008318, + "y": -945.7435123503128 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "description": "Define the agent's instructions, then enter a task to complete using tools.", + "display_name": "Researcher Agent", + "id": "Agent-uaR2o", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Define the agent's instructions, then enter a task to complete using tools.", + "display_name": "Researcher Agent", + "documentation": "", + "edited": false, + "field_order": [ + "agent_llm", + "max_tokens", + "model_kwargs", + "json_mode", + "output_schema", + "model_name", + "openai_api_base", + "api_key", + "temperature", + "seed", + "output_parser", + "system_prompt", + "tools", + "input_value", + "handle_parsing_errors", + "verbose", + "max_iterations", + "agent_description", + "memory", + "sender", + "sender_name", + "n_messages", + "session_id", + "order", + "template", + "add_current_date_tool" + ], + "frozen": true, + "icon": "bot", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Response", + "method": "message_response", + "name": "response", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "add_current_date_tool": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Add tool Current Date", + "dynamic": false, + "info": "If true, will add a tool to the agent that returns the current date.", + "list": false, + "name": "add_current_date_tool", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "agent_description": { + "_input_type": "MultilineInput", + "advanced": true, + "display_name": "Agent Description", + "dynamic": false, + "info": "The description of the agent. 
This is only used when in Tool Mode. Defaults to 'A helpful assistant with access to the following tools:' and tools are added dynamically.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "agent_description", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "A helpful assistant with access to the following tools:" + }, + "agent_llm": { + "_input_type": "DropdownInput", + "advanced": false, + "combobox": false, + "display_name": "Model Provider", + "dynamic": false, + "info": "The provider of the language model that the agent will use to generate responses.", + "input_types": [], + "name": "agent_llm", + "options": [ + "Amazon Bedrock", + "Anthropic", + "Azure OpenAI", + "Groq", + "NVIDIA", + "OpenAI", + "Custom" + ], + "placeholder": "", + "real_time_refresh": true, + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "OpenAI" + }, + "api_key": { + "_input_type": "SecretStrInput", + "advanced": false, + "display_name": "OpenAI API Key", + "dynamic": false, + "info": "The OpenAI API Key to use for the OpenAI model.", + "input_types": ["Message"], + "load_from_db": true, + "name": "api_key", + "password": true, + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "OPENAI_API_KEY" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langchain_core.tools import StructuredTool\n\nfrom langflow.base.agents.agent import LCToolsAgentComponent\nfrom langflow.base.models.model_input_constants import ALL_PROVIDER_FIELDS, MODEL_PROVIDERS_DICT\nfrom langflow.base.models.model_utils import get_model_name\nfrom langflow.components.helpers import CurrentDateComponent\nfrom langflow.components.helpers.memory import MemoryComponent\nfrom langflow.components.langchain_utilities.tool_calling import ToolCallingAgentComponent\nfrom langflow.io import BoolInput, DropdownInput, MultilineInput, Output\nfrom langflow.schema.dotdict import dotdict\nfrom langflow.schema.message import Message\n\n\ndef set_advanced_true(component_input):\n component_input.advanced = True\n return component_input\n\n\nclass AgentComponent(ToolCallingAgentComponent):\n display_name: str = \"Agent\"\n description: str = \"Define the agent's instructions, then enter a task to complete using tools.\"\n icon = \"bot\"\n beta = False\n name = \"Agent\"\n\n memory_inputs = [set_advanced_true(component_input) for component_input in MemoryComponent().inputs]\n\n inputs = [\n DropdownInput(\n name=\"agent_llm\",\n display_name=\"Model Provider\",\n info=\"The provider of the language model that the agent will use to generate responses.\",\n options=[*sorted(MODEL_PROVIDERS_DICT.keys()), \"Custom\"],\n value=\"OpenAI\",\n real_time_refresh=True,\n input_types=[],\n ),\n *MODEL_PROVIDERS_DICT[\"OpenAI\"][\"inputs\"],\n MultilineInput(\n name=\"system_prompt\",\n display_name=\"Agent Instructions\",\n info=\"System Prompt: Initial instructions and context provided to guide the agent's behavior.\",\n value=\"You are a helpful assistant that can use 
tools to answer questions and perform tasks.\",\n advanced=False,\n ),\n *LCToolsAgentComponent._base_inputs,\n *memory_inputs,\n BoolInput(\n name=\"add_current_date_tool\",\n display_name=\"Add tool Current Date\",\n advanced=True,\n info=\"If true, will add a tool to the agent that returns the current date.\",\n value=True,\n ),\n ]\n outputs = [Output(name=\"response\", display_name=\"Response\", method=\"message_response\")]\n\n async def message_response(self) -> Message:\n llm_model, display_name = self.get_llm()\n self.model_name = get_model_name(llm_model, display_name=display_name)\n if llm_model is None:\n msg = \"No language model selected\"\n raise ValueError(msg)\n self.chat_history = self.get_memory_data()\n\n if self.add_current_date_tool:\n if not isinstance(self.tools, list): # type: ignore[has-type]\n self.tools = []\n # Convert CurrentDateComponent to a StructuredTool\n current_date_tool = CurrentDateComponent().to_toolkit()[0]\n if isinstance(current_date_tool, StructuredTool):\n self.tools.append(current_date_tool)\n else:\n msg = \"CurrentDateComponent must be converted to a StructuredTool\"\n raise ValueError(msg)\n\n if not self.tools:\n msg = \"Tools are required to run the agent.\"\n raise ValueError(msg)\n self.set(\n llm=llm_model,\n tools=self.tools,\n chat_history=self.chat_history,\n input_value=self.input_value,\n system_prompt=self.system_prompt,\n )\n agent = self.create_agent_runnable()\n return await self.run_agent(agent)\n\n def get_memory_data(self):\n memory_kwargs = {\n component_input.name: getattr(self, f\"{component_input.name}\") for component_input in self.memory_inputs\n }\n\n return MemoryComponent().set(**memory_kwargs).retrieve_messages()\n\n def get_llm(self):\n if isinstance(self.agent_llm, str):\n try:\n provider_info = MODEL_PROVIDERS_DICT.get(self.agent_llm)\n if provider_info:\n component_class = provider_info.get(\"component_class\")\n display_name = component_class.display_name\n inputs = provider_info.get(\"inputs\")\n prefix = provider_info.get(\"prefix\", \"\")\n return self._build_llm_model(component_class, inputs, prefix), display_name\n except Exception as e:\n msg = f\"Error building {self.agent_llm} language model\"\n raise ValueError(msg) from e\n return self.agent_llm, None\n\n def _build_llm_model(self, component, inputs, prefix=\"\"):\n model_kwargs = {input_.name: getattr(self, f\"{prefix}{input_.name}\") for input_ in inputs}\n return component.set(**model_kwargs).build_model()\n\n def delete_fields(self, build_config: dotdict, fields: dict | list[str]) -> None:\n \"\"\"Delete specified fields from build_config.\"\"\"\n for field in fields:\n build_config.pop(field, None)\n\n def update_input_types(self, build_config: dotdict) -> dotdict:\n \"\"\"Update input types for all fields in build_config.\"\"\"\n for key, value in build_config.items():\n if isinstance(value, dict):\n if value.get(\"input_types\") is None:\n build_config[key][\"input_types\"] = []\n elif hasattr(value, \"input_types\") and value.input_types is None:\n value.input_types = []\n return build_config\n\n def update_build_config(self, build_config: dotdict, field_value: str, field_name: str | None = None) -> dotdict:\n # Iterate over all providers in the MODEL_PROVIDERS_DICT\n # Existing logic for updating build_config\n if field_name == \"agent_llm\":\n provider_info = MODEL_PROVIDERS_DICT.get(field_value)\n if provider_info:\n component_class = provider_info.get(\"component_class\")\n if component_class and hasattr(component_class, 
\"update_build_config\"):\n # Call the component class's update_build_config method\n build_config = component_class.update_build_config(build_config, field_value, field_name)\n\n provider_configs: dict[str, tuple[dict, list[dict]]] = {\n provider: (\n MODEL_PROVIDERS_DICT[provider][\"fields\"],\n [\n MODEL_PROVIDERS_DICT[other_provider][\"fields\"]\n for other_provider in MODEL_PROVIDERS_DICT\n if other_provider != provider\n ],\n )\n for provider in MODEL_PROVIDERS_DICT\n }\n if field_value in provider_configs:\n fields_to_add, fields_to_delete = provider_configs[field_value]\n\n # Delete fields from other providers\n for fields in fields_to_delete:\n self.delete_fields(build_config, fields)\n\n # Add provider-specific fields\n if field_value == \"OpenAI\" and not any(field in build_config for field in fields_to_add):\n build_config.update(fields_to_add)\n else:\n build_config.update(fields_to_add)\n # Reset input types for agent_llm\n build_config[\"agent_llm\"][\"input_types\"] = []\n elif field_value == \"Custom\":\n # Delete all provider fields\n self.delete_fields(build_config, ALL_PROVIDER_FIELDS)\n # Update with custom component\n custom_component = DropdownInput(\n name=\"agent_llm\",\n display_name=\"Language Model\",\n options=[*sorted(MODEL_PROVIDERS_DICT.keys()), \"Custom\"],\n value=\"Custom\",\n real_time_refresh=True,\n input_types=[\"LanguageModel\"],\n )\n build_config.update({\"agent_llm\": custom_component.to_dict()})\n # Update input types for all fields\n build_config = self.update_input_types(build_config)\n\n # Validate required keys\n default_keys = [\n \"code\",\n \"_type\",\n \"agent_llm\",\n \"tools\",\n \"input_value\",\n \"add_current_date_tool\",\n \"system_prompt\",\n \"agent_description\",\n \"max_iterations\",\n \"handle_parsing_errors\",\n \"verbose\",\n ]\n missing_keys = [key for key in default_keys if key not in build_config]\n if missing_keys:\n msg = f\"Missing required keys in build_config: {missing_keys}\"\n raise ValueError(msg)\n if isinstance(self.agent_llm, str) and self.agent_llm in MODEL_PROVIDERS_DICT:\n provider_info = MODEL_PROVIDERS_DICT.get(self.agent_llm)\n if provider_info:\n component_class = provider_info.get(\"component_class\")\n prefix = provider_info.get(\"prefix\")\n if component_class and hasattr(component_class, \"update_build_config\"):\n # Call each component class's update_build_config method\n # remove the prefix from the field_name\n if isinstance(field_name, str) and isinstance(prefix, str):\n field_name = field_name.replace(prefix, \"\")\n build_config = component_class.update_build_config(build_config, field_value, field_name)\n\n return build_config\n" + }, + "handle_parsing_errors": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Handle Parse Errors", + "dynamic": false, + "info": "Should the Agent fix errors when reading user input for better processing?", + "list": false, + "name": "handle_parsing_errors", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "input_value": { + "_input_type": "MessageTextInput", + "advanced": false, + "display_name": "Input", + "dynamic": false, + "info": "The input provided by the user for the agent to process.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": true, + "trace_as_input": true, + "trace_as_metadata": true, 
+ "type": "str", + "value": "" + }, + "json_mode": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "JSON Mode", + "dynamic": false, + "info": "If True, it will output JSON regardless of passing a schema.", + "list": false, + "name": "json_mode", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + }, + "max_iterations": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Max Iterations", + "dynamic": false, + "info": "The maximum number of attempts the agent can make to complete its task before it stops.", + "list": false, + "name": "max_iterations", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 15 + }, + "max_tokens": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Max Tokens", + "dynamic": false, + "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.", + "list": false, + "name": "max_tokens", + "placeholder": "", + "range_spec": { + "max": 128000, + "min": 0, + "step": 0.1, + "step_type": "float" + }, + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": "" + }, + "memory": { + "_input_type": "HandleInput", + "advanced": true, + "display_name": "External Memory", + "dynamic": false, + "info": "Retrieve messages from an external memory. If empty, it will use the Langflow tables.", + "input_types": ["BaseChatMessageHistory"], + "list": false, + "name": "memory", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "model_kwargs": { + "_input_type": "DictInput", + "advanced": true, + "display_name": "Model Kwargs", + "dynamic": false, + "info": "Additional keyword arguments to pass to the model.", + "list": false, + "name": "model_kwargs", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "model_name": { + "_input_type": "DropdownInput", + "advanced": false, + "combobox": false, + "display_name": "Model Name", + "dynamic": false, + "info": "", + "name": "model_name", + "options": [ + "gpt-4o-mini", + "gpt-4o", + "gpt-4-turbo", + "gpt-4-turbo-preview", + "gpt-4", + "gpt-3.5-turbo", + "gpt-3.5-turbo-0125" + ], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "gpt-4o-mini" + }, + "n_messages": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Number of Messages", + "dynamic": false, + "info": "Number of messages to retrieve.", + "list": false, + "name": "n_messages", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 100 + }, + "openai_api_base": { + "_input_type": "StrInput", + "advanced": true, + "display_name": "OpenAI API Base", + "dynamic": false, + "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. 
You can change this to use other APIs like JinaChat, LocalAI and Prem.", + "list": false, + "load_from_db": false, + "name": "openai_api_base", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "order": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Order", + "dynamic": false, + "info": "Order of the messages.", + "name": "order", + "options": ["Ascending", "Descending"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "Ascending" + }, + "output_parser": { + "_input_type": "HandleInput", + "advanced": true, + "display_name": "Output Parser", + "dynamic": false, + "info": "The parser to use to parse the output of the model", + "input_types": ["OutputParser"], + "list": false, + "name": "output_parser", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "output_schema": { + "_input_type": "DictInput", + "advanced": true, + "display_name": "Schema", + "dynamic": false, + "info": "The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled. [DEPRECATED]", + "list": true, + "name": "output_schema", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "seed": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Seed", + "dynamic": false, + "info": "The seed controls the reproducibility of the job.", + "list": false, + "name": "seed", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 1 + }, + "sender": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Sender Type", + "dynamic": false, + "info": "Filter by sender type.", + "name": "sender", + "options": ["Machine", "User", "Machine and User"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "Machine and User" + }, + "sender_name": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Sender Name", + "dynamic": false, + "info": "Filter by sender name.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "sender_name", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "session_id": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Session ID", + "dynamic": false, + "info": "The session ID of the chat. 
If empty, the current session ID parameter will be used.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "session_id", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "system_prompt": { + "_input_type": "MultilineInput", + "advanced": false, + "display_name": "Agent Instructions", + "dynamic": false, + "info": "System Prompt: Initial instructions and context provided to guide the agent's behavior.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "system_prompt", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "You are a helpful assistant that can use tools to answer questions and perform tasks." + }, + "temperature": { + "_input_type": "FloatInput", + "advanced": true, + "display_name": "Temperature", + "dynamic": false, + "info": "", + "list": false, + "name": "temperature", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "float", + "value": 0.1 + }, + "template": { + "_input_type": "MultilineInput", + "advanced": true, + "display_name": "Template", + "dynamic": false, + "info": "The template to use for formatting the data. It can contain the keys {text}, {sender} or any other key in the message data.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "{sender_name}: {text}" + }, + "tools": { + "_input_type": "HandleInput", + "advanced": false, + "display_name": "Tools", + "dynamic": false, + "info": "These are the tools that the agent can use to help with tasks.", + "input_types": ["Tool", "BaseTool", "StructuredTool"], + "list": true, + "name": "tools", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "verbose": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Verbose", + "dynamic": false, + "info": "", + "list": false, + "name": "verbose", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + } + }, + "tool_mode": false + }, + "type": "Agent" + }, + "dragging": false, + "height": 650, + "id": "Agent-uaR2o", + "position": { + "x": -715.1798010873374, + "y": -1342.256094001045 + }, + "positionAbsolute": { + "x": -715.1798010873374, + "y": -1342.256094001045 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "id": "note-9ZCze", + "node": { + "description": "## Get your API key at [https://tavily.ai](https://tavily.ai)\n", + "display_name": "", + "documentation": "", + "template": { + "backgroundColor": "rose" + } + }, + "type": "note" + }, + "dragging": false, + "height": 324, + "id": "note-9ZCze", + "position": { + "x": -1144.3898055225054, + "y": -844.3506743985376 + }, + "positionAbsolute": { + "x": -1144.3898055225054, + "y": -844.3506743985376 + }, + "resizing": false, + "selected": false, + "style": { + "height": 324, + "width": 347 + }, + 
"type": "noteNode", + "width": 347 + }, + { + "data": { + "id": "note-ynS9s", + "node": { + "description": "## Configure the agent by obtaining your OpenAI API key from [platform.openai.com](https://platform.openai.com). Under \"Model Provider\", choose:\n- OpenAI: Default, requires only API key\n- Anthropic/Azure/Groq/NVIDIA: Each requires their own API keys\n- Custom: Use your own model endpoint + authentication\n\nSelect model and input API key before running the flow.", + "display_name": "", + "documentation": "", + "template": { + "backgroundColor": "rose" + } + }, + "type": "note" + }, + "dragging": false, + "height": 324, + "id": "note-ynS9s", + "position": { + "x": -739.4383746675942, + "y": -1672.0874594411662 + }, + "positionAbsolute": { + "x": -739.4383746675942, + "y": -1672.0874594411662 + }, + "resizing": false, + "selected": false, + "style": { + "height": 324, + "width": 370 + }, + "type": "noteNode", + "width": 370 + } + ], + "viewport": { + "x": 988.287937756906, + "y": 1011.1045224025538, + "zoom": 0.5418943314819052 + } + }, + "description": "This Agent is designed to systematically execute a series of tasks following a meticulously predefined sequence. By adhering to this structured order, the Agent ensures that each task is completed efficiently and effectively, optimizing overall performance and maintaining a high level of accuracy.", + "endpoint_name": null, + "icon": "ListChecks", + "id": "673f26a7-66f4-410a-8ccb-3e635c022023", + "gradient": "1", + "is_component": false, + "last_tested_version": "1.0.19.post2", + "name": "Sequential Tasks Agents", + "tags": ["assistants", "agents", "web-scraping"] +} diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Simple Agent .json b/src/backend/base/langflow/initial_setup/starter_projects/Simple Agent .json new file mode 100644 index 000000000000..6fd8cdf5d01b --- /dev/null +++ b/src/backend/base/langflow/initial_setup/starter_projects/Simple Agent .json @@ -0,0 +1,1398 @@ +{ + "data": { + "edges": [ + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "YahooFinanceTool", + "id": "YahooFinanceTool-PzHUy", + "name": "api_build_tool", + "output_types": ["Tool"] + }, + "targetHandle": { + "fieldName": "tools", + "id": "Agent-KhAae", + "inputTypes": ["Tool", "BaseTool", "StructuredTool"], + "type": "other" + } + }, + "id": "reactflow__edge-YahooFinanceTool-PzHUy{œdataTypeœ:œYahooFinanceToolœ,œidœ:œYahooFinanceTool-PzHUyœ,œnameœ:œapi_build_toolœ,œoutput_typesœ:[œToolœ]}-Agent-KhAae{œfieldNameœ:œtoolsœ,œidœ:œAgent-KhAaeœ,œinputTypesœ:[œToolœ,œBaseToolœ,œStructuredToolœ],œtypeœ:œotherœ}", + "source": "YahooFinanceTool-PzHUy", + "sourceHandle": "{œdataTypeœ: œYahooFinanceToolœ, œidœ: œYahooFinanceTool-PzHUyœ, œnameœ: œapi_build_toolœ, œoutput_typesœ: [œToolœ]}", + "target": "Agent-KhAae", + "targetHandle": "{œfieldNameœ: œtoolsœ, œidœ: œAgent-KhAaeœ, œinputTypesœ: [œToolœ, œBaseToolœ, œStructuredToolœ], œtypeœ: œotherœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "ChatInput", + "id": "ChatInput-dBek4", + "name": "message", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "input_value", + "id": "Agent-KhAae", + "inputTypes": ["Message"], + "type": "str" + } + }, + "id": "reactflow__edge-ChatInput-dBek4{œdataTypeœ:œChatInputœ,œidœ:œChatInput-dBek4œ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Agent-KhAae{œfieldNameœ:œinput_valueœ,œidœ:œAgent-KhAaeœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": 
"ChatInput-dBek4", + "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-dBek4œ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}", + "target": "Agent-KhAae", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œAgent-KhAaeœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "Agent", + "id": "Agent-KhAae", + "name": "response", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "input_value", + "id": "ChatOutput-ULcvr", + "inputTypes": ["Message"], + "type": "str" + } + }, + "id": "reactflow__edge-Agent-KhAae{œdataTypeœ:œAgentœ,œidœ:œAgent-KhAaeœ,œnameœ:œresponseœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-ULcvr{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-ULcvrœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "Agent-KhAae", + "sourceHandle": "{œdataTypeœ: œAgentœ, œidœ: œAgent-KhAaeœ, œnameœ: œresponseœ, œoutput_typesœ: [œMessageœ]}", + "target": "ChatOutput-ULcvr", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-ULcvrœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + } + ], + "nodes": [ + { + "data": { + "description": "Define the agent's instructions, then enter a task to complete using tools.", + "display_name": "Agent", + "id": "Agent-KhAae", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Define the agent's instructions, then enter a task to complete using tools.", + "display_name": "Agent", + "documentation": "", + "edited": false, + "field_order": [ + "agent_llm", + "max_tokens", + "model_kwargs", + "json_mode", + "output_schema", + "model_name", + "openai_api_base", + "api_key", + "temperature", + "seed", + "output_parser", + "system_prompt", + "tools", + "input_value", + "handle_parsing_errors", + "verbose", + "max_iterations", + "agent_description", + "memory", + "sender", + "sender_name", + "n_messages", + "session_id", + "order", + "template", + "add_current_date_tool" + ], + "frozen": false, + "icon": "bot", + "legacy": false, + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Response", + "method": "message_response", + "name": "response", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "add_current_date_tool": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Add tool Current Date", + "dynamic": false, + "info": "If true, will add a tool to the agent that returns the current date.", + "list": false, + "name": "add_current_date_tool", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "agent_description": { + "_input_type": "MultilineInput", + "advanced": true, + "display_name": "Agent Description", + "dynamic": false, + "info": "The description of the agent. This is only used when in Tool Mode. 
Defaults to 'A helpful assistant with access to the following tools:' and tools are added dynamically.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "agent_description", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "A helpful assistant with access to the following tools:" + }, + "agent_llm": { + "_input_type": "DropdownInput", + "advanced": false, + "combobox": false, + "display_name": "Model Provider", + "dynamic": false, + "info": "The provider of the language model that the agent will use to generate responses.", + "input_types": [], + "name": "agent_llm", + "options": [ + "Amazon Bedrock", + "Anthropic", + "Azure OpenAI", + "Groq", + "NVIDIA", + "OpenAI", + "Custom" + ], + "placeholder": "", + "real_time_refresh": true, + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "OpenAI" + }, + "api_key": { + "_input_type": "SecretStrInput", + "advanced": false, + "display_name": "OpenAI API Key", + "dynamic": false, + "info": "The OpenAI API Key to use for the OpenAI model.", + "input_types": ["Message"], + "load_from_db": true, + "name": "api_key", + "password": true, + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "OPENAI_API_KEY" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langchain_core.tools import StructuredTool\n\nfrom langflow.base.agents.agent import LCToolsAgentComponent\nfrom langflow.base.models.model_input_constants import ALL_PROVIDER_FIELDS, MODEL_PROVIDERS_DICT\nfrom langflow.base.models.model_utils import get_model_name\nfrom langflow.components.helpers import CurrentDateComponent\nfrom langflow.components.helpers.memory import MemoryComponent\nfrom langflow.components.langchain_utilities.tool_calling import ToolCallingAgentComponent\nfrom langflow.io import BoolInput, DropdownInput, MultilineInput, Output\nfrom langflow.schema.dotdict import dotdict\nfrom langflow.schema.message import Message\n\n\ndef set_advanced_true(component_input):\n component_input.advanced = True\n return component_input\n\n\nclass AgentComponent(ToolCallingAgentComponent):\n display_name: str = \"Agent\"\n description: str = \"Define the agent's instructions, then enter a task to complete using tools.\"\n icon = \"bot\"\n beta = False\n name = \"Agent\"\n\n memory_inputs = [set_advanced_true(component_input) for component_input in MemoryComponent().inputs]\n\n inputs = [\n DropdownInput(\n name=\"agent_llm\",\n display_name=\"Model Provider\",\n info=\"The provider of the language model that the agent will use to generate responses.\",\n options=[*sorted(MODEL_PROVIDERS_DICT.keys()), \"Custom\"],\n value=\"OpenAI\",\n real_time_refresh=True,\n input_types=[],\n ),\n *MODEL_PROVIDERS_DICT[\"OpenAI\"][\"inputs\"],\n MultilineInput(\n name=\"system_prompt\",\n display_name=\"Agent Instructions\",\n info=\"System Prompt: Initial instructions and context provided to guide the agent's behavior.\",\n value=\"You are a helpful assistant that can use tools to answer questions and perform 
tasks.\",\n advanced=False,\n ),\n *LCToolsAgentComponent._base_inputs,\n *memory_inputs,\n BoolInput(\n name=\"add_current_date_tool\",\n display_name=\"Add tool Current Date\",\n advanced=True,\n info=\"If true, will add a tool to the agent that returns the current date.\",\n value=True,\n ),\n ]\n outputs = [Output(name=\"response\", display_name=\"Response\", method=\"message_response\")]\n\n async def message_response(self) -> Message:\n llm_model, display_name = self.get_llm()\n self.model_name = get_model_name(llm_model, display_name=display_name)\n if llm_model is None:\n msg = \"No language model selected\"\n raise ValueError(msg)\n self.chat_history = self.get_memory_data()\n\n if self.add_current_date_tool:\n if not isinstance(self.tools, list): # type: ignore[has-type]\n self.tools = []\n # Convert CurrentDateComponent to a StructuredTool\n current_date_tool = CurrentDateComponent().to_toolkit()[0]\n if isinstance(current_date_tool, StructuredTool):\n self.tools.append(current_date_tool)\n else:\n msg = \"CurrentDateComponent must be converted to a StructuredTool\"\n raise ValueError(msg)\n\n if not self.tools:\n msg = \"Tools are required to run the agent.\"\n raise ValueError(msg)\n self.set(\n llm=llm_model,\n tools=self.tools,\n chat_history=self.chat_history,\n input_value=self.input_value,\n system_prompt=self.system_prompt,\n )\n agent = self.create_agent_runnable()\n return await self.run_agent(agent)\n\n def get_memory_data(self):\n memory_kwargs = {\n component_input.name: getattr(self, f\"{component_input.name}\") for component_input in self.memory_inputs\n }\n\n return MemoryComponent().set(**memory_kwargs).retrieve_messages()\n\n def get_llm(self):\n if isinstance(self.agent_llm, str):\n try:\n provider_info = MODEL_PROVIDERS_DICT.get(self.agent_llm)\n if provider_info:\n component_class = provider_info.get(\"component_class\")\n display_name = component_class.display_name\n inputs = provider_info.get(\"inputs\")\n prefix = provider_info.get(\"prefix\", \"\")\n return self._build_llm_model(component_class, inputs, prefix), display_name\n except Exception as e:\n msg = f\"Error building {self.agent_llm} language model\"\n raise ValueError(msg) from e\n return self.agent_llm, None\n\n def _build_llm_model(self, component, inputs, prefix=\"\"):\n model_kwargs = {input_.name: getattr(self, f\"{prefix}{input_.name}\") for input_ in inputs}\n return component.set(**model_kwargs).build_model()\n\n def delete_fields(self, build_config: dotdict, fields: dict | list[str]) -> None:\n \"\"\"Delete specified fields from build_config.\"\"\"\n for field in fields:\n build_config.pop(field, None)\n\n def update_input_types(self, build_config: dotdict) -> dotdict:\n \"\"\"Update input types for all fields in build_config.\"\"\"\n for key, value in build_config.items():\n if isinstance(value, dict):\n if value.get(\"input_types\") is None:\n build_config[key][\"input_types\"] = []\n elif hasattr(value, \"input_types\") and value.input_types is None:\n value.input_types = []\n return build_config\n\n def update_build_config(self, build_config: dotdict, field_value: str, field_name: str | None = None) -> dotdict:\n # Iterate over all providers in the MODEL_PROVIDERS_DICT\n # Existing logic for updating build_config\n if field_name == \"agent_llm\":\n provider_info = MODEL_PROVIDERS_DICT.get(field_value)\n if provider_info:\n component_class = provider_info.get(\"component_class\")\n if component_class and hasattr(component_class, \"update_build_config\"):\n # Call the component 
class's update_build_config method\n build_config = component_class.update_build_config(build_config, field_value, field_name)\n\n provider_configs: dict[str, tuple[dict, list[dict]]] = {\n provider: (\n MODEL_PROVIDERS_DICT[provider][\"fields\"],\n [\n MODEL_PROVIDERS_DICT[other_provider][\"fields\"]\n for other_provider in MODEL_PROVIDERS_DICT\n if other_provider != provider\n ],\n )\n for provider in MODEL_PROVIDERS_DICT\n }\n if field_value in provider_configs:\n fields_to_add, fields_to_delete = provider_configs[field_value]\n\n # Delete fields from other providers\n for fields in fields_to_delete:\n self.delete_fields(build_config, fields)\n\n # Add provider-specific fields\n if field_value == \"OpenAI\" and not any(field in build_config for field in fields_to_add):\n build_config.update(fields_to_add)\n else:\n build_config.update(fields_to_add)\n # Reset input types for agent_llm\n build_config[\"agent_llm\"][\"input_types\"] = []\n elif field_value == \"Custom\":\n # Delete all provider fields\n self.delete_fields(build_config, ALL_PROVIDER_FIELDS)\n # Update with custom component\n custom_component = DropdownInput(\n name=\"agent_llm\",\n display_name=\"Language Model\",\n options=[*sorted(MODEL_PROVIDERS_DICT.keys()), \"Custom\"],\n value=\"Custom\",\n real_time_refresh=True,\n input_types=[\"LanguageModel\"],\n )\n build_config.update({\"agent_llm\": custom_component.to_dict()})\n # Update input types for all fields\n build_config = self.update_input_types(build_config)\n\n # Validate required keys\n default_keys = [\n \"code\",\n \"_type\",\n \"agent_llm\",\n \"tools\",\n \"input_value\",\n \"add_current_date_tool\",\n \"system_prompt\",\n \"agent_description\",\n \"max_iterations\",\n \"handle_parsing_errors\",\n \"verbose\",\n ]\n missing_keys = [key for key in default_keys if key not in build_config]\n if missing_keys:\n msg = f\"Missing required keys in build_config: {missing_keys}\"\n raise ValueError(msg)\n if isinstance(self.agent_llm, str) and self.agent_llm in MODEL_PROVIDERS_DICT:\n provider_info = MODEL_PROVIDERS_DICT.get(self.agent_llm)\n if provider_info:\n component_class = provider_info.get(\"component_class\")\n prefix = provider_info.get(\"prefix\")\n if component_class and hasattr(component_class, \"update_build_config\"):\n # Call each component class's update_build_config method\n # remove the prefix from the field_name\n if isinstance(field_name, str) and isinstance(prefix, str):\n field_name = field_name.replace(prefix, \"\")\n build_config = component_class.update_build_config(build_config, field_value, field_name)\n\n return build_config\n" + }, + "handle_parsing_errors": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Handle Parse Errors", + "dynamic": false, + "info": "Should the Agent fix errors when reading user input for better processing?", + "list": false, + "name": "handle_parsing_errors", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "input_value": { + "_input_type": "MessageTextInput", + "advanced": false, + "display_name": "Input", + "dynamic": false, + "info": "The input provided by the user for the agent to process.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": true, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + 
"json_mode": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "JSON Mode", + "dynamic": false, + "info": "If True, it will output JSON regardless of passing a schema.", + "list": false, + "name": "json_mode", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + }, + "max_iterations": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Max Iterations", + "dynamic": false, + "info": "The maximum number of attempts the agent can make to complete its task before it stops.", + "list": false, + "name": "max_iterations", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 15 + }, + "max_tokens": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Max Tokens", + "dynamic": false, + "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.", + "list": false, + "name": "max_tokens", + "placeholder": "", + "range_spec": { + "max": 128000, + "min": 0, + "step": 0.1, + "step_type": "float" + }, + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": "" + }, + "memory": { + "_input_type": "HandleInput", + "advanced": true, + "display_name": "External Memory", + "dynamic": false, + "info": "Retrieve messages from an external memory. If empty, it will use the Langflow tables.", + "input_types": ["BaseChatMessageHistory"], + "list": false, + "name": "memory", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "model_kwargs": { + "_input_type": "DictInput", + "advanced": true, + "display_name": "Model Kwargs", + "dynamic": false, + "info": "Additional keyword arguments to pass to the model.", + "list": false, + "name": "model_kwargs", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "model_name": { + "_input_type": "DropdownInput", + "advanced": false, + "combobox": false, + "display_name": "Model Name", + "dynamic": false, + "info": "", + "name": "model_name", + "options": [ + "gpt-4o-mini", + "gpt-4o", + "gpt-4-turbo", + "gpt-4-turbo-preview", + "gpt-4", + "gpt-3.5-turbo", + "gpt-3.5-turbo-0125" + ], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "gpt-4o-mini" + }, + "n_messages": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Number of Messages", + "dynamic": false, + "info": "Number of messages to retrieve.", + "list": false, + "name": "n_messages", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 100 + }, + "openai_api_base": { + "_input_type": "StrInput", + "advanced": true, + "display_name": "OpenAI API Base", + "dynamic": false, + "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. 
You can change this to use other APIs like JinaChat, LocalAI and Prem.", + "list": false, + "load_from_db": false, + "name": "openai_api_base", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "order": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Order", + "dynamic": false, + "info": "Order of the messages.", + "name": "order", + "options": ["Ascending", "Descending"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "Ascending" + }, + "output_parser": { + "_input_type": "HandleInput", + "advanced": true, + "display_name": "Output Parser", + "dynamic": false, + "info": "The parser to use to parse the output of the model", + "input_types": ["OutputParser"], + "list": false, + "name": "output_parser", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "output_schema": { + "_input_type": "DictInput", + "advanced": true, + "display_name": "Schema", + "dynamic": false, + "info": "The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled. [DEPRECATED]", + "list": true, + "name": "output_schema", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "seed": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Seed", + "dynamic": false, + "info": "The seed controls the reproducibility of the job.", + "list": false, + "name": "seed", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 1 + }, + "sender": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Sender Type", + "dynamic": false, + "info": "Filter by sender type.", + "name": "sender", + "options": ["Machine", "User", "Machine and User"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "Machine and User" + }, + "sender_name": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Sender Name", + "dynamic": false, + "info": "Filter by sender name.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "sender_name", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "session_id": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Session ID", + "dynamic": false, + "info": "The session ID of the chat. 
If empty, the current session ID parameter will be used.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "session_id", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "system_prompt": { + "_input_type": "MultilineInput", + "advanced": false, + "display_name": "Agent Instructions", + "dynamic": false, + "info": "System Prompt: Initial instructions and context provided to guide the agent's behavior.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "system_prompt", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "You are a helpful assistant that can use tools to answer questions and perform tasks.\nUse markdown to format your answer, properly embedding images and urls." + }, + "temperature": { + "_input_type": "FloatInput", + "advanced": true, + "display_name": "Temperature", + "dynamic": false, + "info": "", + "list": false, + "name": "temperature", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "float", + "value": 0.1 + }, + "template": { + "_input_type": "MultilineInput", + "advanced": true, + "display_name": "Template", + "dynamic": false, + "info": "The template to use for formatting the data. It can contain the keys {text}, {sender} or any other key in the message data.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "{sender_name}: {text}" + }, + "tools": { + "_input_type": "HandleInput", + "advanced": false, + "display_name": "Tools", + "dynamic": false, + "info": "These are the tools that the agent can use to help with tasks.", + "input_types": ["Tool", "BaseTool", "StructuredTool"], + "list": true, + "name": "tools", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "verbose": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Verbose", + "dynamic": false, + "info": "", + "list": false, + "name": "verbose", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + } + }, + "tool_mode": false + }, + "type": "Agent" + }, + "dragging": false, + "height": 650, + "id": "Agent-KhAae", + "position": { + "x": 2306.5155821255557, + "y": 335.1151630488809 + }, + "positionAbsolute": { + "x": 2306.5155821255557, + "y": 335.1151630488809 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "description": "Access financial data and market information using Yahoo Finance.", + "display_name": "Yahoo Finance Tool", + "id": "YahooFinanceTool-PzHUy", + "node": { + "base_classes": ["Data", "Tool"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Access financial data and market information using Yahoo Finance.", + "display_name": "Yahoo Finance Tool", + "documentation": "", + "edited": false, + "field_order": ["symbol", "method", 
"num_news"], + "frozen": false, + "icon": "trending-up", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Data", + "method": "run_model", + "name": "api_run_model", + "required_inputs": [], + "selected": "Data", + "types": ["Data"], + "value": "__UNDEFINED__" + }, + { + "cache": true, + "display_name": "Tool", + "method": "build_tool", + "name": "api_build_tool", + "required_inputs": [], + "selected": "Tool", + "types": ["Tool"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "import ast\nimport pprint\nfrom enum import Enum\n\nimport yfinance as yf\nfrom langchain.tools import StructuredTool\nfrom langchain_core.tools import ToolException\nfrom loguru import logger\nfrom pydantic import BaseModel, Field\n\nfrom langflow.base.langchain_utilities.model import LCToolComponent\nfrom langflow.field_typing import Tool\nfrom langflow.inputs import DropdownInput, IntInput, MessageTextInput\nfrom langflow.schema import Data\n\n\nclass YahooFinanceMethod(Enum):\n GET_INFO = \"get_info\"\n GET_NEWS = \"get_news\"\n GET_ACTIONS = \"get_actions\"\n GET_ANALYSIS = \"get_analysis\"\n GET_BALANCE_SHEET = \"get_balance_sheet\"\n GET_CALENDAR = \"get_calendar\"\n GET_CASHFLOW = \"get_cashflow\"\n GET_INSTITUTIONAL_HOLDERS = \"get_institutional_holders\"\n GET_RECOMMENDATIONS = \"get_recommendations\"\n GET_SUSTAINABILITY = \"get_sustainability\"\n GET_MAJOR_HOLDERS = \"get_major_holders\"\n GET_MUTUALFUND_HOLDERS = \"get_mutualfund_holders\"\n GET_INSIDER_PURCHASES = \"get_insider_purchases\"\n GET_INSIDER_TRANSACTIONS = \"get_insider_transactions\"\n GET_INSIDER_ROSTER_HOLDERS = \"get_insider_roster_holders\"\n GET_DIVIDENDS = \"get_dividends\"\n GET_CAPITAL_GAINS = \"get_capital_gains\"\n GET_SPLITS = \"get_splits\"\n GET_SHARES = \"get_shares\"\n GET_FAST_INFO = \"get_fast_info\"\n GET_SEC_FILINGS = \"get_sec_filings\"\n GET_RECOMMENDATIONS_SUMMARY = \"get_recommendations_summary\"\n GET_UPGRADES_DOWNGRADES = \"get_upgrades_downgrades\"\n GET_EARNINGS = \"get_earnings\"\n GET_INCOME_STMT = \"get_income_stmt\"\n\n\nclass YahooFinanceSchema(BaseModel):\n symbol: str = Field(..., description=\"The stock symbol to retrieve data for.\")\n method: YahooFinanceMethod = Field(YahooFinanceMethod.GET_INFO, description=\"The type of data to retrieve.\")\n num_news: int | None = Field(5, description=\"The number of news articles to retrieve.\")\n\n\nclass YfinanceToolComponent(LCToolComponent):\n display_name = \"Yahoo Finance\"\n description = \"\"\"Uses [yfinance](https://pypi.org/project/yfinance/) (unofficial package) \\\nto access financial data and market information from Yahoo Finance.\"\"\"\n icon = \"trending-up\"\n name = \"YahooFinanceTool\"\n\n inputs = [\n MessageTextInput(\n name=\"symbol\",\n display_name=\"Stock Symbol\",\n info=\"The stock symbol to retrieve data for (e.g., AAPL, GOOG).\",\n ),\n DropdownInput(\n name=\"method\",\n display_name=\"Data Method\",\n info=\"The type of data to retrieve.\",\n options=list(YahooFinanceMethod),\n value=\"get_news\",\n ),\n IntInput(\n name=\"num_news\",\n display_name=\"Number of News\",\n info=\"The 
number of news articles to retrieve (only applicable for get_news).\",\n value=5,\n ),\n ]\n\n def run_model(self) -> list[Data]:\n return self._yahoo_finance_tool(\n self.symbol,\n self.method,\n self.num_news,\n )\n\n def build_tool(self) -> Tool:\n return StructuredTool.from_function(\n name=\"yahoo_finance\",\n description=\"Access financial data and market information from Yahoo Finance.\",\n func=self._yahoo_finance_tool,\n args_schema=YahooFinanceSchema,\n )\n\n def _yahoo_finance_tool(\n self,\n symbol: str,\n method: YahooFinanceMethod,\n num_news: int | None = 5,\n ) -> list[Data]:\n ticker = yf.Ticker(symbol)\n\n try:\n if method == YahooFinanceMethod.GET_INFO:\n result = ticker.info\n elif method == YahooFinanceMethod.GET_NEWS:\n result = ticker.news[:num_news]\n else:\n result = getattr(ticker, method.value)()\n\n result = pprint.pformat(result)\n\n if method == YahooFinanceMethod.GET_NEWS:\n data_list = [Data(data=article) for article in ast.literal_eval(result)]\n else:\n data_list = [Data(data={\"result\": result})]\n\n except Exception as e:\n error_message = f\"Error retrieving data: {e}\"\n logger.debug(error_message)\n self.status = error_message\n raise ToolException(error_message) from e\n\n return data_list\n" + }, + "method": { + "_input_type": "DropdownInput", + "advanced": false, + "combobox": false, + "display_name": "Data Method", + "dynamic": false, + "info": "The type of data to retrieve.", + "name": "method", + "options": [ + "get_info", + "get_news", + "get_actions", + "get_analysis", + "get_balance_sheet", + "get_calendar", + "get_cashflow", + "get_institutional_holders", + "get_recommendations", + "get_sustainability", + "get_major_holders", + "get_mutualfund_holders", + "get_insider_purchases", + "get_insider_transactions", + "get_insider_roster_holders", + "get_dividends", + "get_capital_gains", + "get_splits", + "get_shares", + "get_fast_info", + "get_sec_filings", + "get_recommendations_summary", + "get_upgrades_downgrades", + "get_earnings", + "get_income_stmt" + ], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "get_news" + }, + "num_news": { + "_input_type": "IntInput", + "advanced": false, + "display_name": "Number of News", + "dynamic": false, + "info": "The number of news articles to retrieve (only applicable for get_news).", + "list": false, + "name": "num_news", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 5 + }, + "symbol": { + "_input_type": "MessageTextInput", + "advanced": false, + "display_name": "Stock Symbol", + "dynamic": false, + "info": "The stock symbol to retrieve data for (e.g., AAPL, GOOG).", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "symbol", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "NVDA" + } + }, + "tool_mode": false + }, + "type": "YahooFinanceTool" + }, + "dragging": false, + "height": 475, + "id": "YahooFinanceTool-PzHUy", + "position": { + "x": 1905.5096784216487, + "y": 313.6052678310467 + }, + "positionAbsolute": { + "x": 1905.5096784216487, + "y": 313.6052678310467 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "id": "ChatInput-dBek4", + "node": { + "base_classes": ["Message"], + "beta": false, + 
"conditional_paths": [], + "custom_fields": {}, + "description": "Get chat inputs from the Playground.", + "display_name": "Chat Input", + "documentation": "", + "edited": false, + "field_order": [ + "input_value", + "should_store_message", + "sender", + "sender_name", + "session_id", + "files", + "background_color", + "chat_icon", + "text_color" + ], + "frozen": false, + "icon": "MessagesSquare", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Message", + "method": "message_response", + "name": "message", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "background_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Background Color", + "dynamic": false, + "info": "The background color of the icon.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "background_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "chat_icon": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Icon", + "dynamic": false, + "info": "The icon of the message.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "chat_icon", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_USER, MESSAGE_SENDER_USER\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n _background_color = self.background_color\n _text_color = self.text_color\n _icon = self.chat_icon\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\"background_color\": _background_color, \"text_color\": _text_color, \"icon\": _icon},\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n" + }, + "files": { + "_input_type": "FileInput", + "advanced": true, + "display_name": "Files", + "dynamic": false, + "fileTypes": [ + "txt", + "md", + "mdx", + "csv", + "json", + "yaml", + "yml", + "xml", + "html", + "htm", + "pdf", + "docx", + "py", + "sh", + "sql", + "js", + "ts", + "tsx", + "jpg", + "jpeg", + "png", + "bmp", + "image" + ], + "file_path": "", + "info": "Files to be sent with the message.", + "list": true, + "name": "files", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "file", + "value": "" + }, + "input_value": { + "_input_type": "MultilineInput", + "advanced": false, + "display_name": "Text", + "dynamic": false, + "info": "Message to be passed as input.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "search news about AAPL" + }, + "sender": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Sender Type", + "dynamic": false, + "info": "Type of sender.", + "name": "sender", + "options": ["Machine", "User"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "User" + }, + "sender_name": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Sender Name", + "dynamic": false, + "info": "Name of the sender.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "sender_name", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "User" + }, + "session_id": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Session ID", + "dynamic": false, + "info": "The session ID of the chat. 
If empty, the current session ID parameter will be used.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "session_id", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "should_store_message": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Store Messages", + "dynamic": false, + "info": "Store the message in the history.", + "list": false, + "name": "should_store_message", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "text_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Text Color", + "dynamic": false, + "info": "The text color of the name", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "text_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + } + }, + "tool_mode": false + }, + "type": "ChatInput" + }, + "dragging": false, + "height": 234, + "id": "ChatInput-dBek4", + "position": { + "x": 1907.4497817799925, + "y": 817.955066634514 + }, + "positionAbsolute": { + "x": 1907.4497817799925, + "y": 817.955066634514 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "description": "Display a chat message in the Playground.", + "display_name": "Chat Output", + "id": "ChatOutput-ULcvr", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Display a chat message in the Playground.", + "display_name": "Chat Output", + "documentation": "", + "edited": false, + "field_order": [ + "input_value", + "should_store_message", + "sender", + "sender_name", + "session_id", + "data_template", + "background_color", + "chat_icon", + "text_color" + ], + "frozen": false, + "icon": "MessagesSquare", + "legacy": false, + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Message", + "method": "message_response", + "name": "message", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "background_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Background Color", + "dynamic": false, + "info": "The background color of the icon.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "background_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "chat_icon": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Icon", + "dynamic": false, + "info": "The icon of the message.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "chat_icon", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": 
true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageInput, MessageTextInput, Output\nfrom langflow.schema.message import Message\nfrom langflow.schema.properties import Source\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.\",\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(\n display_name=\"Message\",\n name=\"message\",\n method=\"message_response\",\n ),\n ]\n\n def _build_source(self, _id: str | None, display_name: str | None, source: str | None) -> Source:\n source_dict = {}\n if _id:\n source_dict[\"id\"] = _id\n if display_name:\n source_dict[\"display_name\"] = display_name\n if source:\n source_dict[\"source\"] = source\n return Source(**source_dict)\n\n def message_response(self) -> Message:\n _source, _icon, _display_name, _source_id = self.get_properties_from_source_component()\n _background_color = self.background_color\n _text_color = self.text_color\n if self.chat_icon:\n _icon = self.chat_icon\n message = self.input_value if isinstance(self.input_value, Message) else Message(text=self.input_value)\n message.sender = self.sender\n message.sender_name = self.sender_name\n message.session_id = self.session_id\n message.flow_id = self.graph.flow_id if hasattr(self, \"graph\") else None\n message.properties.source = self._build_source(_source_id, _display_name, _source)\n message.properties.icon = _icon\n message.properties.background_color = _background_color\n message.properties.text_color = _text_color\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = self.send_message(\n message,\n )\n self.message.value = 
stored_message\n message = stored_message\n\n self.status = message\n return message\n" + }, + "data_template": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Data Template", + "dynamic": false, + "info": "Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "data_template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "{text}" + }, + "input_value": { + "_input_type": "MessageInput", + "advanced": false, + "display_name": "Text", + "dynamic": false, + "info": "Message to be passed as output.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "sender": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Sender Type", + "dynamic": false, + "info": "Type of sender.", + "name": "sender", + "options": ["Machine", "User"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "Machine" + }, + "sender_name": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Sender Name", + "dynamic": false, + "info": "Name of the sender.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "sender_name", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "AI" + }, + "session_id": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Session ID", + "dynamic": false, + "info": "The session ID of the chat. 
If empty, the current session ID parameter will be used.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "session_id", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "should_store_message": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Store Messages", + "dynamic": false, + "info": "Store the message in the history.", + "list": false, + "name": "should_store_message", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "text_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Text Color", + "dynamic": false, + "info": "The text color of the name", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "text_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + } + }, + "tool_mode": false + }, + "type": "ChatOutput" + }, + "dragging": false, + "height": 234, + "id": "ChatOutput-ULcvr", + "position": { + "x": 2683.9938458383212, + "y": 556.5828467235146 + }, + "positionAbsolute": { + "x": 2683.9938458383212, + "y": 556.5828467235146 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "id": "note-FSVUJ", + "node": { + "description": "# Simple Agent\nA straightforward implementation of a chatbot focusing on processing inputs and generating responses employing conversation memory capabilities.\n## Core Components\n1. **Chat Input**\n - Collects user messages for processing.\n2. **Agent**\n - Analyzes user input.\n - Generates contextually relevant responses.\n - Utilizes tools to refine and enhance replies as needed.\n3. **Chat Output**\n - Presents formatted responses to the user.\n - Ensures consistent conversational flow.\n## Features\n- Processes each message independently.\n- Focuses on generating relevant, immediate responses.\n- Handles each chat interaction as a standalone session.\n## Quick Start\n1. Initiate a chat by sending a message in Chat Input.\n2. The Agent processes the message, considering any immediate context.\n3. 
Receive a response in Chat Output.\n\nThis simple agent chatbot provides a streamlined conversational flow without the complexity of managing conversation memory.", + "display_name": "", + "documentation": "", + "template": {} + }, + "type": "note" + }, + "dragging": false, + "height": 736, + "id": "note-FSVUJ", + "position": { + "x": 1512.8976594415833, + "y": 312.9558305744385 + }, + "positionAbsolute": { + "x": 1512.8976594415833, + "y": 312.9558305744385 + }, + "resizing": false, + "selected": false, + "style": { + "height": 736, + "width": 382 + }, + "type": "noteNode", + "width": 382 + } + ], + "viewport": { + "x": -1275.792144730309, + "y": -144.09980323772618, + "zoom": 0.8828160439097184 + } + }, + "description": "Get started with an agent that calls the Yahoo Finance tool for quick access to stock prices, market trends, and financial data.", + "endpoint_name": null, + "icon": "Bot", + "id": "a774332d-6fb5-43b6-96a4-d3eb8e62ddc0", + "gradient": "5", + "is_component": false, + "last_tested_version": "1.0.19.post2", + "name": "Simple Agent", + "tags": ["assistants", "agents"] +} diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Travel Planning Agents.json b/src/backend/base/langflow/initial_setup/starter_projects/Travel Planning Agents.json new file mode 100644 index 000000000000..efb9a9859d01 --- /dev/null +++ b/src/backend/base/langflow/initial_setup/starter_projects/Travel Planning Agents.json @@ -0,0 +1,3009 @@ +{ + "data": { + "edges": [ + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "Agent", + "id": "Agent-ImgzA", + "name": "response", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "input_value", + "id": "ChatOutput-ZNoa2", + "inputTypes": ["Message"], + "type": "str" + } + }, + "id": "reactflow__edge-Agent-ImgzA{œdataTypeœ:œAgentœ,œidœ:œAgent-ImgzAœ,œnameœ:œresponseœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-ZNoa2{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-ZNoa2œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "Agent-ImgzA", + "sourceHandle": "{œdataTypeœ: œAgentœ, œidœ: œAgent-ImgzAœ, œnameœ: œresponseœ, œoutput_typesœ: [œMessageœ]}", + "target": "ChatOutput-ZNoa2", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-ZNoa2œ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "Agent", + "id": "Agent-cj2PH", + "name": "response", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "input_value", + "id": "Agent-ImgzA", + "inputTypes": ["Message"], + "type": "str" + } + }, + "id": "reactflow__edge-Agent-cj2PH{œdataTypeœ:œAgentœ,œidœ:œAgent-cj2PHœ,œnameœ:œresponseœ,œoutput_typesœ:[œMessageœ]}-Agent-ImgzA{œfieldNameœ:œinput_valueœ,œidœ:œAgent-ImgzAœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "Agent-cj2PH", + "sourceHandle": "{œdataTypeœ: œAgentœ, œidœ: œAgent-cj2PHœ, œnameœ: œresponseœ, œoutput_typesœ: [œMessageœ]}", + "target": "Agent-ImgzA", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œAgent-ImgzAœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "Agent", + "id": "Agent-rPh1n", + "name": "response", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "input_value", + "id": "Agent-cj2PH", + "inputTypes": ["Message"], + "type": "str" + } + }, + "id": 
"reactflow__edge-Agent-rPh1n{œdataTypeœ:œAgentœ,œidœ:œAgent-rPh1nœ,œnameœ:œresponseœ,œoutput_typesœ:[œMessageœ]}-Agent-cj2PH{œfieldNameœ:œinput_valueœ,œidœ:œAgent-cj2PHœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "Agent-rPh1n", + "sourceHandle": "{œdataTypeœ: œAgentœ, œidœ: œAgent-rPh1nœ, œnameœ: œresponseœ, œoutput_typesœ: [œMessageœ]}", + "target": "Agent-cj2PH", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œAgent-cj2PHœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "SearchAPI", + "id": "SearchAPI-Aez0t", + "name": "api_build_tool", + "output_types": ["Tool"] + }, + "targetHandle": { + "fieldName": "tools", + "id": "Agent-rPh1n", + "inputTypes": ["Tool", "BaseTool", "StructuredTool"], + "type": "other" + } + }, + "id": "reactflow__edge-SearchAPI-Aez0t{œdataTypeœ:œSearchAPIœ,œidœ:œSearchAPI-Aez0tœ,œnameœ:œapi_build_toolœ,œoutput_typesœ:[œToolœ]}-Agent-rPh1n{œfieldNameœ:œtoolsœ,œidœ:œAgent-rPh1nœ,œinputTypesœ:[œToolœ,œBaseToolœ,œStructuredToolœ],œtypeœ:œotherœ}", + "source": "SearchAPI-Aez0t", + "sourceHandle": "{œdataTypeœ: œSearchAPIœ, œidœ: œSearchAPI-Aez0tœ, œnameœ: œapi_build_toolœ, œoutput_typesœ: [œToolœ]}", + "target": "Agent-rPh1n", + "targetHandle": "{œfieldNameœ: œtoolsœ, œidœ: œAgent-rPh1nœ, œinputTypesœ: [œToolœ, œBaseToolœ, œStructuredToolœ], œtypeœ: œotherœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "url_content_fetcher", + "id": "url_content_fetcher-AyGpn", + "name": "api_build_tool", + "output_types": ["Tool"] + }, + "targetHandle": { + "fieldName": "tools", + "id": "Agent-cj2PH", + "inputTypes": ["Tool", "BaseTool", "StructuredTool"], + "type": "other" + } + }, + "id": "reactflow__edge-url_content_fetcher-AyGpn{œdataTypeœ:œurl_content_fetcherœ,œidœ:œurl_content_fetcher-AyGpnœ,œnameœ:œapi_build_toolœ,œoutput_typesœ:[œToolœ]}-Agent-cj2PH{œfieldNameœ:œtoolsœ,œidœ:œAgent-cj2PHœ,œinputTypesœ:[œToolœ,œBaseToolœ,œStructuredToolœ],œtypeœ:œotherœ}", + "source": "url_content_fetcher-AyGpn", + "sourceHandle": "{œdataTypeœ: œurl_content_fetcherœ, œidœ: œurl_content_fetcher-AyGpnœ, œnameœ: œapi_build_toolœ, œoutput_typesœ: [œToolœ]}", + "target": "Agent-cj2PH", + "targetHandle": "{œfieldNameœ: œtoolsœ, œidœ: œAgent-cj2PHœ, œinputTypesœ: [œToolœ, œBaseToolœ, œStructuredToolœ], œtypeœ: œotherœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "CalculatorTool", + "id": "CalculatorTool-dGfrj", + "name": "api_build_tool", + "output_types": ["Tool"] + }, + "targetHandle": { + "fieldName": "tools", + "id": "Agent-ImgzA", + "inputTypes": ["Tool", "BaseTool", "StructuredTool"], + "type": "other" + } + }, + "id": "reactflow__edge-CalculatorTool-dGfrj{œdataTypeœ:œCalculatorToolœ,œidœ:œCalculatorTool-dGfrjœ,œnameœ:œapi_build_toolœ,œoutput_typesœ:[œToolœ]}-Agent-ImgzA{œfieldNameœ:œtoolsœ,œidœ:œAgent-ImgzAœ,œinputTypesœ:[œToolœ,œBaseToolœ,œStructuredToolœ],œtypeœ:œotherœ}", + "source": "CalculatorTool-dGfrj", + "sourceHandle": "{œdataTypeœ: œCalculatorToolœ, œidœ: œCalculatorTool-dGfrjœ, œnameœ: œapi_build_toolœ, œoutput_typesœ: [œToolœ]}", + "target": "Agent-ImgzA", + "targetHandle": "{œfieldNameœ: œtoolsœ, œidœ: œAgent-ImgzAœ, œinputTypesœ: [œToolœ, œBaseToolœ, œStructuredToolœ], œtypeœ: œotherœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "ChatInput", + "id": "ChatInput-CIU0F", + "name": "message", + "output_types": ["Message"] + }, + 
"targetHandle": { + "fieldName": "input_value", + "id": "Agent-rPh1n", + "inputTypes": ["Message"], + "type": "str" + } + }, + "id": "reactflow__edge-ChatInput-CIU0F{œdataTypeœ:œChatInputœ,œidœ:œChatInput-CIU0Fœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Agent-rPh1n{œfieldNameœ:œinput_valueœ,œidœ:œAgent-rPh1nœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "ChatInput-CIU0F", + "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-CIU0Fœ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}", + "target": "Agent-rPh1n", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œAgent-rPh1nœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + } + ], + "nodes": [ + { + "data": { + "id": "ChatInput-CIU0F", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Get chat inputs from the Playground.", + "display_name": "Chat Input", + "documentation": "", + "edited": false, + "field_order": [ + "input_value", + "should_store_message", + "sender", + "sender_name", + "session_id", + "files" + ], + "frozen": false, + "icon": "MessagesSquare", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Message", + "method": "message_response", + "name": "message", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "background_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Background Color", + "dynamic": false, + "info": "The background color of the icon.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "background_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "chat_icon": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Icon", + "dynamic": false, + "info": "The icon of the message.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "chat_icon", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_USER, MESSAGE_SENDER_USER\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n 
),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n _background_color = self.background_color\n _text_color = self.text_color\n _icon = self.chat_icon\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\"background_color\": _background_color, \"text_color\": _text_color, \"icon\": _icon},\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n" + }, + "files": { + "_input_type": "FileInput", + "advanced": true, + "display_name": "Files", + "dynamic": false, + "fileTypes": [ + "txt", + "md", + "mdx", + "csv", + "json", + "yaml", + "yml", + "xml", + "html", + "htm", + "pdf", + "docx", + "py", + "sh", + "sql", + "js", + "ts", + "tsx", + "jpg", + "jpeg", + "png", + "bmp", + "image" + ], + "file_path": "", + "info": "Files to be sent with the message.", + "list": true, + "name": "files", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "file", + "value": "" + }, + "input_value": { + "_input_type": "MultilineInput", + "advanced": false, + "display_name": "Text", + "dynamic": false, + "info": "Message to be passed as input.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "Create a travel itinerary for a trip from São Paulo to Uberlândia, MG on August 23, 2024. The traveler enjoys drinking beer, eating pão de queijo, and drinking special coffee." 
+ }, + "sender": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Sender Type", + "dynamic": false, + "info": "Type of sender.", + "name": "sender", + "options": ["Machine", "User"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "str", + "value": "User" + }, + "sender_name": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Sender Name", + "dynamic": false, + "info": "Name of the sender.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "sender_name", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "User" + }, + "session_id": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Session ID", + "dynamic": false, + "info": "The session ID of the chat. If empty, the current session ID parameter will be used.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "session_id", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "should_store_message": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Store Messages", + "dynamic": false, + "info": "Store the message in the history.", + "list": false, + "name": "should_store_message", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "text_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Text Color", + "dynamic": false, + "info": "The text color of the name", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "text_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + } + } + }, + "type": "ChatInput" + }, + "dragging": false, + "height": 234, + "id": "ChatInput-CIU0F", + "position": { + "x": 1756.77096149088, + "y": 305.19157712497963 + }, + "positionAbsolute": { + "x": 1756.77096149088, + "y": 305.19157712497963 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "description": "Display a chat message in the Playground.", + "display_name": "Chat Output", + "id": "ChatOutput-ZNoa2", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Display a chat message in the Playground.", + "display_name": "Chat Output", + "documentation": "", + "edited": false, + "field_order": [ + "input_value", + "should_store_message", + "sender", + "sender_name", + "session_id", + "data_template", + "background_color", + "chat_icon", + "text_color" + ], + "frozen": false, + "icon": "MessagesSquare", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Message", + "method": "message_response", + "name": "message", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "background_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Background Color", + "dynamic": false, + 
"info": "The background color of the icon.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "background_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "chat_icon": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Icon", + "dynamic": false, + "info": "The icon of the message.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "chat_icon", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageInput, MessageTextInput, Output\nfrom langflow.schema.message import Message\nfrom langflow.schema.properties import Source\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.\",\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(\n display_name=\"Message\",\n name=\"message\",\n method=\"message_response\",\n ),\n ]\n\n def _build_source(self, _id: str | None, display_name: str | None, source: str | None) -> Source:\n source_dict = {}\n if _id:\n source_dict[\"id\"] = _id\n if display_name:\n source_dict[\"display_name\"] = display_name\n if source:\n source_dict[\"source\"] = source\n return Source(**source_dict)\n\n def message_response(self) -> Message:\n _source, _icon, _display_name, _source_id = self.get_properties_from_source_component()\n _background_color = self.background_color\n _text_color = self.text_color\n if self.chat_icon:\n _icon = self.chat_icon\n message = self.input_value if isinstance(self.input_value, Message) else Message(text=self.input_value)\n message.sender = self.sender\n message.sender_name = self.sender_name\n message.session_id = self.session_id\n message.flow_id = self.graph.flow_id if hasattr(self, \"graph\") else None\n message.properties.source = self._build_source(_source_id, _display_name, _source)\n message.properties.icon = _icon\n message.properties.background_color = _background_color\n message.properties.text_color = _text_color\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n" + }, + "data_template": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Data Template", + "dynamic": false, + "info": "Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "data_template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "{text}" + }, + "input_value": { + "_input_type": "MessageInput", + "advanced": false, + "display_name": "Text", + "dynamic": false, + "info": "Message to be passed as output.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "sender": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Sender Type", + "dynamic": false, + "info": "Type of sender.", + "name": "sender", + "options": ["Machine", "User"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "Machine" + }, + "sender_name": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Sender Name", + "dynamic": false, + "info": "Name of the sender.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "sender_name", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "AI" + }, + "session_id": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Session ID", + "dynamic": false, + "info": "The session ID of the chat. 
If empty, the current session ID parameter will be used.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "session_id", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "should_store_message": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Store Messages", + "dynamic": false, + "info": "Store the message in the history.", + "list": false, + "name": "should_store_message", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "text_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Text Color", + "dynamic": false, + "info": "The text color of the name", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "text_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + } + }, + "tool_mode": false + }, + "type": "ChatOutput" + }, + "dragging": false, + "height": 234, + "id": "ChatOutput-ZNoa2", + "position": { + "x": 4349.229697347143, + "y": 620.5490494265098 + }, + "positionAbsolute": { + "x": 4349.229697347143, + "y": 620.5490494265098 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "id": "SearchAPI-Aez0t", + "node": { + "base_classes": ["Data", "list", "Tool"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Call the searchapi.io API with result limiting", + "display_name": "Search API", + "documentation": "https://www.searchapi.io/docs/google", + "edited": false, + "field_order": [ + "engine", + "api_key", + "input_value", + "search_params" + ], + "frozen": false, + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "official": false, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Data", + "method": "run_model", + "name": "api_run_model", + "required_inputs": ["api_key"], + "selected": "Data", + "types": ["Data"], + "value": "__UNDEFINED__" + }, + { + "cache": true, + "display_name": "Tool", + "method": "build_tool", + "name": "api_build_tool", + "required_inputs": ["api_key"], + "selected": "Tool", + "types": ["Tool"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "api_key": { + "_input_type": "SecretStrInput", + "advanced": false, + "display_name": "SearchAPI API Key", + "dynamic": false, + "info": "", + "input_types": ["Message"], + "load_from_db": true, + "name": "api_key", + "password": true, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "str", + "value": "" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from typing import Any\n\nfrom langchain.tools import StructuredTool\nfrom langchain_community.utilities.searchapi import SearchApiAPIWrapper\nfrom pydantic import BaseModel, Field\n\nfrom langflow.base.langchain_utilities.model import LCToolComponent\nfrom
langflow.field_typing import Tool\nfrom langflow.inputs import DictInput, IntInput, MessageTextInput, MultilineInput, SecretStrInput\nfrom langflow.schema import Data\n\n\nclass SearchAPIComponent(LCToolComponent):\n display_name: str = \"Search API\"\n description: str = \"Call the searchapi.io API with result limiting\"\n name = \"SearchAPI\"\n documentation: str = \"https://www.searchapi.io/docs/google\"\n\n inputs = [\n MessageTextInput(name=\"engine\", display_name=\"Engine\", value=\"google\"),\n SecretStrInput(name=\"api_key\", display_name=\"SearchAPI API Key\", required=True),\n MultilineInput(\n name=\"input_value\",\n display_name=\"Input\",\n ),\n DictInput(name=\"search_params\", display_name=\"Search parameters\", advanced=True, is_list=True),\n IntInput(name=\"max_results\", display_name=\"Max Results\", value=5, advanced=True),\n IntInput(name=\"max_snippet_length\", display_name=\"Max Snippet Length\", value=100, advanced=True),\n ]\n\n class SearchAPISchema(BaseModel):\n query: str = Field(..., description=\"The search query\")\n params: dict[str, Any] = Field(default_factory=dict, description=\"Additional search parameters\")\n max_results: int = Field(5, description=\"Maximum number of results to return\")\n max_snippet_length: int = Field(100, description=\"Maximum length of each result snippet\")\n\n def _build_wrapper(self):\n return SearchApiAPIWrapper(engine=self.engine, searchapi_api_key=self.api_key)\n\n def build_tool(self) -> Tool:\n wrapper = self._build_wrapper()\n\n def search_func(\n query: str, params: dict[str, Any] | None = None, max_results: int = 5, max_snippet_length: int = 100\n ) -> list[dict[str, Any]]:\n params = params or {}\n full_results = wrapper.results(query=query, **params)\n organic_results = full_results.get(\"organic_results\", [])[:max_results]\n\n limited_results = []\n for result in organic_results:\n limited_result = {\n \"title\": result.get(\"title\", \"\")[:max_snippet_length],\n \"link\": result.get(\"link\", \"\"),\n \"snippet\": result.get(\"snippet\", \"\")[:max_snippet_length],\n }\n limited_results.append(limited_result)\n\n return limited_results\n\n tool = StructuredTool.from_function(\n name=\"search_api\",\n description=\"Search for recent results using searchapi.io with result limiting\",\n func=search_func,\n args_schema=self.SearchAPISchema,\n )\n\n self.status = f\"Search API Tool created with engine: {self.engine}\"\n return tool\n\n def run_model(self) -> list[Data]:\n tool = self.build_tool()\n results = tool.run(\n {\n \"query\": self.input_value,\n \"params\": self.search_params or {},\n \"max_results\": self.max_results,\n \"max_snippet_length\": self.max_snippet_length,\n }\n )\n\n data_list = [Data(data=result, text=result.get(\"snippet\", \"\")) for result in results]\n\n self.status = data_list\n return data_list\n" + }, + "engine": { + "_input_type": "MessageTextInput", + "advanced": false, + "display_name": "Engine", + "dynamic": false, + "info": "", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "engine", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "google" + }, + "input_value": { + "_input_type": "MultilineInput", + "advanced": false, + "display_name": "Input", + "dynamic": false, + "info": "", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "input_value", + "placeholder": "", + "required": false, 
+ "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "langflow docs" + }, + "max_results": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Max Results", + "dynamic": false, + "info": "", + "list": false, + "name": "max_results", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 5 + }, + "max_snippet_length": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Max Snippet Length", + "dynamic": false, + "info": "", + "list": false, + "name": "max_snippet_length", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 100 + }, + "search_params": { + "_input_type": "DictInput", + "advanced": true, + "display_name": "Search parameters", + "dynamic": false, + "info": "", + "list": true, + "name": "search_params", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + } + } + }, + "type": "SearchAPI" + }, + "dragging": false, + "height": 407, + "id": "SearchAPI-Aez0t", + "position": { + "x": 2101.519951743063, + "y": 949.7032293566349 + }, + "positionAbsolute": { + "x": 2101.519951743063, + "y": 949.7032293566349 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "id": "url_content_fetcher-AyGpn", + "node": { + "base_classes": ["Data", "list", "Tool"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Fetch content from a single URL.", + "display_name": "URL Content Fetcher", + "documentation": "https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/web_base", + "edited": true, + "field_order": ["url", "fetch_params"], + "frozen": false, + "icon": "globe", + "lf_version": "1.0.19.post2", + "official": false, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Data", + "hidden": true, + "method": "run_model", + "name": "api_run_model", + "selected": "Data", + "types": ["Data", "list"], + "value": "__UNDEFINED__" + }, + { + "cache": true, + "display_name": "Tool", + "method": "build_tool", + "name": "api_build_tool", + "selected": "Tool", + "types": ["Tool"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from typing import Union, Optional\r\nfrom langchain_community.document_loaders.web_base import WebBaseLoader\r\nfrom langflow.base.langchain_utilities.model import LCToolComponent\r\nfrom langflow.inputs import MessageTextInput, DictInput\r\nfrom langflow.schema import Data\r\nfrom langflow.field_typing import Tool\r\nfrom langchain.tools import StructuredTool\r\nfrom pydantic import BaseModel, Field\r\n\r\nclass URLToolComponent(LCToolComponent):\r\n display_name: str = \"URL Content Fetcher\"\r\n description: str = \"Fetch content from a single URL.\"\r\n name = \"url_content_fetcher\"\r\n documentation: str = \"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/web_base\"\r\n icon=\"globe\"\r\n \r\n inputs = [\r\n 
MessageTextInput(\r\n name=\"url\",\r\n display_name=\"URL\",\r\n info=\"Enter a single URL to fetch content from.\",\r\n ),\r\n DictInput(name=\"fetch_params\", display_name=\"Fetch parameters\", advanced=True, is_list=True),\r\n ]\r\n\r\n class URLContentFetcherSchema(BaseModel):\r\n url: str = Field(..., description=\"The URL to fetch content from\")\r\n fetch_params: Optional[dict] = Field(default=None, description=\"Additional parameters for fetching\")\r\n\r\n def run_model(self) -> Union[Data, list[Data]]:\r\n wrapper = self._build_wrapper()\r\n content = wrapper.load()[0]\r\n data = Data(data={\"content\": content.page_content, \"metadata\": content.metadata}, \r\n text=content.page_content[:500])\r\n self.status = data\r\n return data\r\n\r\n def build_tool(self) -> Tool:\r\n return StructuredTool.from_function(\r\n name=\"url_content_fetcher\",\r\n description=\"Fetch content from a single URL. Input should be a URL string only.\",\r\n func=self._fetch_url_content,\r\n args_schema=self.URLContentFetcherSchema,\r\n )\r\n\r\n def _build_wrapper(self):\r\n return WebBaseLoader(web_paths=[self.url], encoding=\"utf-8\", **self.fetch_params or {})\r\n\r\n def _fetch_url_content(self, url: str, fetch_params: Optional[dict] = None) -> dict:\r\n loader = WebBaseLoader(web_paths=[url], encoding=\"utf-8\", **(fetch_params or {}))\r\n content = loader.load()[0]\r\n return {\r\n \"content\": content.page_content,\r\n \"metadata\": content.metadata\r\n }" + }, + "fetch_params": { + "_input_type": "DictInput", + "advanced": true, + "display_name": "Fetch parameters", + "dynamic": false, + "info": "", + "list": true, + "name": "fetch_params", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "url": { + "_input_type": "MessageTextInput", + "advanced": false, + "display_name": "URL", + "dynamic": false, + "info": "Enter a single URL to fetch content from.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "url", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + } + } + }, + "type": "url_content_fetcher" + }, + "dragging": false, + "height": 234, + "id": "url_content_fetcher-AyGpn", + "position": { + "x": 2834.525991812012, + "y": 939.6518333549263 + }, + "positionAbsolute": { + "x": 2834.525991812012, + "y": 939.6518333549263 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "id": "CalculatorTool-dGfrj", + "node": { + "base_classes": ["Data", "list", "Sequence", "Tool"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Perform basic arithmetic operations on a given expression.", + "display_name": "Calculator", + "documentation": "", + "edited": false, + "field_order": ["expression"], + "frozen": false, + "icon": "calculator", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "official": false, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Data", + "method": "run_model", + "name": "api_run_model", + "required_inputs": [], + "selected": "Data", + "types": ["Data"], + "value": "__UNDEFINED__" + }, + { + "cache": true, + "display_name": "Tool", + "method": "build_tool", + "name": "api_build_tool", + "required_inputs": [], + "selected": "Tool", + "types": ["Tool"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { 
+ "_type": "Component", + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "import ast\nimport operator\n\nfrom langchain.tools import StructuredTool\nfrom langchain_core.tools import ToolException\nfrom loguru import logger\nfrom pydantic import BaseModel, Field\n\nfrom langflow.base.langchain_utilities.model import LCToolComponent\nfrom langflow.field_typing import Tool\nfrom langflow.inputs import MessageTextInput\nfrom langflow.schema import Data\n\n\nclass CalculatorToolComponent(LCToolComponent):\n display_name = \"Calculator\"\n description = \"Perform basic arithmetic operations on a given expression.\"\n icon = \"calculator\"\n name = \"CalculatorTool\"\n\n inputs = [\n MessageTextInput(\n name=\"expression\",\n display_name=\"Expression\",\n info=\"The arithmetic expression to evaluate (e.g., '4*4*(33/22)+12-20').\",\n tool_mode=True,\n ),\n ]\n\n class CalculatorToolSchema(BaseModel):\n expression: str = Field(..., description=\"The arithmetic expression to evaluate.\")\n\n def run_model(self) -> list[Data]:\n return self._evaluate_expression(self.expression)\n\n def build_tool(self) -> Tool:\n return StructuredTool.from_function(\n name=\"calculator\",\n description=\"Evaluate basic arithmetic expressions. Input should be a string containing the expression.\",\n func=self._eval_expr_with_error,\n args_schema=self.CalculatorToolSchema,\n )\n\n def _eval_expr(self, node):\n # Define the allowed operators\n operators = {\n ast.Add: operator.add,\n ast.Sub: operator.sub,\n ast.Mult: operator.mul,\n ast.Div: operator.truediv,\n ast.Pow: operator.pow,\n }\n if isinstance(node, ast.Num):\n return node.n\n if isinstance(node, ast.BinOp):\n return operators[type(node.op)](self._eval_expr(node.left), self._eval_expr(node.right))\n if isinstance(node, ast.UnaryOp):\n return operators[type(node.op)](self._eval_expr(node.operand))\n if isinstance(node, ast.Call):\n msg = (\n \"Function calls like sqrt(), sin(), cos() etc. are not supported. 
\"\n \"Only basic arithmetic operations (+, -, *, /, **) are allowed.\"\n )\n raise TypeError(msg)\n msg = f\"Unsupported operation or expression type: {type(node).__name__}\"\n raise TypeError(msg)\n\n def _eval_expr_with_error(self, expression: str) -> list[Data]:\n try:\n return self._evaluate_expression(expression)\n except Exception as e:\n raise ToolException(str(e)) from e\n\n def _evaluate_expression(self, expression: str) -> list[Data]:\n try:\n # Parse the expression and evaluate it\n tree = ast.parse(expression, mode=\"eval\")\n result = self._eval_expr(tree.body)\n\n # Format the result to a reasonable number of decimal places\n formatted_result = f\"{result:.6f}\".rstrip(\"0\").rstrip(\".\")\n\n self.status = formatted_result\n return [Data(data={\"result\": formatted_result})]\n\n except (SyntaxError, TypeError, KeyError) as e:\n error_message = f\"Invalid expression: {e}\"\n self.status = error_message\n return [Data(data={\"error\": error_message, \"input\": expression})]\n except ZeroDivisionError:\n error_message = \"Error: Division by zero\"\n self.status = error_message\n return [Data(data={\"error\": error_message, \"input\": expression})]\n except Exception as e: # noqa: BLE001\n logger.opt(exception=True).debug(\"Error evaluating expression\")\n error_message = f\"Error: {e}\"\n self.status = error_message\n return [Data(data={\"error\": error_message, \"input\": expression})]\n" + }, + "expression": { + "_input_type": "MessageTextInput", + "advanced": false, + "display_name": "Expression", + "dynamic": false, + "info": "The arithmetic expression to evaluate (e.g., '4*4*(33/22)+12-20').", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "expression", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "2+2" + } + } + }, + "type": "CalculatorTool" + }, + "dragging": false, + "height": 254, + "id": "CalculatorTool-dGfrj", + "position": { + "x": 3546.599894399727, + "y": 972.1522299506486 + }, + "positionAbsolute": { + "x": 3546.599894399727, + "y": 972.1522299506486 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "description": "Define the agent's instructions, then enter a task to complete using tools.", + "display_name": "City Selection Agent", + "id": "Agent-rPh1n", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Define the agent's instructions, then enter a task to complete using tools.", + "display_name": "City Selection Agent", + "documentation": "", + "edited": false, + "field_order": [ + "agent_llm", + "max_tokens", + "model_kwargs", + "json_mode", + "output_schema", + "model_name", + "openai_api_base", + "api_key", + "temperature", + "seed", + "output_parser", + "system_prompt", + "tools", + "input_value", + "handle_parsing_errors", + "verbose", + "max_iterations", + "agent_description", + "memory", + "sender", + "sender_name", + "n_messages", + "session_id", + "order", + "template", + "add_current_date_tool" + ], + "frozen": false, + "icon": "bot", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Response", + "method": "message_response", + "name": "response", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + 
"add_current_date_tool": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Add tool Current Date", + "dynamic": false, + "info": "If true, will add a tool to the agent that returns the current date.", + "list": false, + "name": "add_current_date_tool", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "agent_description": { + "_input_type": "MultilineInput", + "advanced": true, + "display_name": "Agent Description", + "dynamic": false, + "info": "The description of the agent. This is only used when in Tool Mode. Defaults to 'A helpful assistant with access to the following tools:' and tools are added dynamically.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "agent_description", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "A helpful assistant with access to the following tools:" + }, + "agent_llm": { + "_input_type": "DropdownInput", + "advanced": false, + "combobox": false, + "display_name": "Model Provider", + "dynamic": false, + "info": "The provider of the language model that the agent will use to generate responses.", + "input_types": [], + "name": "agent_llm", + "options": [ + "Amazon Bedrock", + "Anthropic", + "Azure OpenAI", + "Groq", + "NVIDIA", + "OpenAI", + "Custom" + ], + "placeholder": "", + "real_time_refresh": true, + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "OpenAI" + }, + "api_key": { + "_input_type": "SecretStrInput", + "advanced": false, + "display_name": "OpenAI API Key", + "dynamic": false, + "info": "The OpenAI API Key to use for the OpenAI model.", + "input_types": ["Message"], + "load_from_db": true, + "name": "api_key", + "password": true, + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "OPENAI_API_KEY" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langchain_core.tools import StructuredTool\n\nfrom langflow.base.agents.agent import LCToolsAgentComponent\nfrom langflow.base.models.model_input_constants import ALL_PROVIDER_FIELDS, MODEL_PROVIDERS_DICT\nfrom langflow.base.models.model_utils import get_model_name\nfrom langflow.components.helpers import CurrentDateComponent\nfrom langflow.components.helpers.memory import MemoryComponent\nfrom langflow.components.langchain_utilities.tool_calling import ToolCallingAgentComponent\nfrom langflow.io import BoolInput, DropdownInput, MultilineInput, Output\nfrom langflow.schema.dotdict import dotdict\nfrom langflow.schema.message import Message\n\n\ndef set_advanced_true(component_input):\n component_input.advanced = True\n return component_input\n\n\nclass AgentComponent(ToolCallingAgentComponent):\n display_name: str = \"Agent\"\n description: str = \"Define the agent's instructions, then enter a task to complete using tools.\"\n icon = \"bot\"\n beta = False\n name = \"Agent\"\n\n memory_inputs = [set_advanced_true(component_input) for component_input in 
MemoryComponent().inputs]\n\n inputs = [\n DropdownInput(\n name=\"agent_llm\",\n display_name=\"Model Provider\",\n info=\"The provider of the language model that the agent will use to generate responses.\",\n options=[*sorted(MODEL_PROVIDERS_DICT.keys()), \"Custom\"],\n value=\"OpenAI\",\n real_time_refresh=True,\n input_types=[],\n ),\n *MODEL_PROVIDERS_DICT[\"OpenAI\"][\"inputs\"],\n MultilineInput(\n name=\"system_prompt\",\n display_name=\"Agent Instructions\",\n info=\"System Prompt: Initial instructions and context provided to guide the agent's behavior.\",\n value=\"You are a helpful assistant that can use tools to answer questions and perform tasks.\",\n advanced=False,\n ),\n *LCToolsAgentComponent._base_inputs,\n *memory_inputs,\n BoolInput(\n name=\"add_current_date_tool\",\n display_name=\"Add tool Current Date\",\n advanced=True,\n info=\"If true, will add a tool to the agent that returns the current date.\",\n value=True,\n ),\n ]\n outputs = [Output(name=\"response\", display_name=\"Response\", method=\"message_response\")]\n\n async def message_response(self) -> Message:\n llm_model, display_name = self.get_llm()\n self.model_name = get_model_name(llm_model, display_name=display_name)\n if llm_model is None:\n msg = \"No language model selected\"\n raise ValueError(msg)\n self.chat_history = self.get_memory_data()\n\n if self.add_current_date_tool:\n if not isinstance(self.tools, list): # type: ignore[has-type]\n self.tools = []\n # Convert CurrentDateComponent to a StructuredTool\n current_date_tool = CurrentDateComponent().to_toolkit()[0]\n if isinstance(current_date_tool, StructuredTool):\n self.tools.append(current_date_tool)\n else:\n msg = \"CurrentDateComponent must be converted to a StructuredTool\"\n raise ValueError(msg)\n\n if not self.tools:\n msg = \"Tools are required to run the agent.\"\n raise ValueError(msg)\n self.set(\n llm=llm_model,\n tools=self.tools,\n chat_history=self.chat_history,\n input_value=self.input_value,\n system_prompt=self.system_prompt,\n )\n agent = self.create_agent_runnable()\n return await self.run_agent(agent)\n\n def get_memory_data(self):\n memory_kwargs = {\n component_input.name: getattr(self, f\"{component_input.name}\") for component_input in self.memory_inputs\n }\n\n return MemoryComponent().set(**memory_kwargs).retrieve_messages()\n\n def get_llm(self):\n if isinstance(self.agent_llm, str):\n try:\n provider_info = MODEL_PROVIDERS_DICT.get(self.agent_llm)\n if provider_info:\n component_class = provider_info.get(\"component_class\")\n display_name = component_class.display_name\n inputs = provider_info.get(\"inputs\")\n prefix = provider_info.get(\"prefix\", \"\")\n return self._build_llm_model(component_class, inputs, prefix), display_name\n except Exception as e:\n msg = f\"Error building {self.agent_llm} language model\"\n raise ValueError(msg) from e\n return self.agent_llm, None\n\n def _build_llm_model(self, component, inputs, prefix=\"\"):\n model_kwargs = {input_.name: getattr(self, f\"{prefix}{input_.name}\") for input_ in inputs}\n return component.set(**model_kwargs).build_model()\n\n def delete_fields(self, build_config: dotdict, fields: dict | list[str]) -> None:\n \"\"\"Delete specified fields from build_config.\"\"\"\n for field in fields:\n build_config.pop(field, None)\n\n def update_input_types(self, build_config: dotdict) -> dotdict:\n \"\"\"Update input types for all fields in build_config.\"\"\"\n for key, value in build_config.items():\n if isinstance(value, dict):\n if value.get(\"input_types\") is 
None:\n build_config[key][\"input_types\"] = []\n elif hasattr(value, \"input_types\") and value.input_types is None:\n value.input_types = []\n return build_config\n\n def update_build_config(self, build_config: dotdict, field_value: str, field_name: str | None = None) -> dotdict:\n # Iterate over all providers in the MODEL_PROVIDERS_DICT\n # Existing logic for updating build_config\n if field_name == \"agent_llm\":\n provider_info = MODEL_PROVIDERS_DICT.get(field_value)\n if provider_info:\n component_class = provider_info.get(\"component_class\")\n if component_class and hasattr(component_class, \"update_build_config\"):\n # Call the component class's update_build_config method\n build_config = component_class.update_build_config(build_config, field_value, field_name)\n\n provider_configs: dict[str, tuple[dict, list[dict]]] = {\n provider: (\n MODEL_PROVIDERS_DICT[provider][\"fields\"],\n [\n MODEL_PROVIDERS_DICT[other_provider][\"fields\"]\n for other_provider in MODEL_PROVIDERS_DICT\n if other_provider != provider\n ],\n )\n for provider in MODEL_PROVIDERS_DICT\n }\n if field_value in provider_configs:\n fields_to_add, fields_to_delete = provider_configs[field_value]\n\n # Delete fields from other providers\n for fields in fields_to_delete:\n self.delete_fields(build_config, fields)\n\n # Add provider-specific fields\n if field_value == \"OpenAI\" and not any(field in build_config for field in fields_to_add):\n build_config.update(fields_to_add)\n else:\n build_config.update(fields_to_add)\n # Reset input types for agent_llm\n build_config[\"agent_llm\"][\"input_types\"] = []\n elif field_value == \"Custom\":\n # Delete all provider fields\n self.delete_fields(build_config, ALL_PROVIDER_FIELDS)\n # Update with custom component\n custom_component = DropdownInput(\n name=\"agent_llm\",\n display_name=\"Language Model\",\n options=[*sorted(MODEL_PROVIDERS_DICT.keys()), \"Custom\"],\n value=\"Custom\",\n real_time_refresh=True,\n input_types=[\"LanguageModel\"],\n )\n build_config.update({\"agent_llm\": custom_component.to_dict()})\n # Update input types for all fields\n build_config = self.update_input_types(build_config)\n\n # Validate required keys\n default_keys = [\n \"code\",\n \"_type\",\n \"agent_llm\",\n \"tools\",\n \"input_value\",\n \"add_current_date_tool\",\n \"system_prompt\",\n \"agent_description\",\n \"max_iterations\",\n \"handle_parsing_errors\",\n \"verbose\",\n ]\n missing_keys = [key for key in default_keys if key not in build_config]\n if missing_keys:\n msg = f\"Missing required keys in build_config: {missing_keys}\"\n raise ValueError(msg)\n if isinstance(self.agent_llm, str) and self.agent_llm in MODEL_PROVIDERS_DICT:\n provider_info = MODEL_PROVIDERS_DICT.get(self.agent_llm)\n if provider_info:\n component_class = provider_info.get(\"component_class\")\n prefix = provider_info.get(\"prefix\")\n if component_class and hasattr(component_class, \"update_build_config\"):\n # Call each component class's update_build_config method\n # remove the prefix from the field_name\n if isinstance(field_name, str) and isinstance(prefix, str):\n field_name = field_name.replace(prefix, \"\")\n build_config = component_class.update_build_config(build_config, field_value, field_name)\n\n return build_config\n" + }, + "handle_parsing_errors": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Handle Parse Errors", + "dynamic": false, + "info": "Should the Agent fix errors when reading user input for better processing?", + "list": false, + "name": 
"handle_parsing_errors", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "input_value": { + "_input_type": "MessageTextInput", + "advanced": false, + "display_name": "Input", + "dynamic": false, + "info": "The input provided by the user for the agent to process.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": true, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "json_mode": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "JSON Mode", + "dynamic": false, + "info": "If True, it will output JSON regardless of passing a schema.", + "list": false, + "name": "json_mode", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + }, + "max_iterations": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Max Iterations", + "dynamic": false, + "info": "The maximum number of attempts the agent can make to complete its task before it stops.", + "list": false, + "name": "max_iterations", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 15 + }, + "max_tokens": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Max Tokens", + "dynamic": false, + "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.", + "list": false, + "name": "max_tokens", + "placeholder": "", + "range_spec": { + "max": 128000, + "min": 0, + "step": 0.1, + "step_type": "float" + }, + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": "" + }, + "memory": { + "_input_type": "HandleInput", + "advanced": true, + "display_name": "External Memory", + "dynamic": false, + "info": "Retrieve messages from an external memory. 
If empty, it will use the Langflow tables.", + "input_types": ["BaseChatMessageHistory"], + "list": false, + "name": "memory", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "model_kwargs": { + "_input_type": "DictInput", + "advanced": true, + "display_name": "Model Kwargs", + "dynamic": false, + "info": "Additional keyword arguments to pass to the model.", + "list": false, + "name": "model_kwargs", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "model_name": { + "_input_type": "DropdownInput", + "advanced": false, + "combobox": false, + "display_name": "Model Name", + "dynamic": false, + "info": "", + "name": "model_name", + "options": [ + "gpt-4o-mini", + "gpt-4o", + "gpt-4-turbo", + "gpt-4-turbo-preview", + "gpt-4", + "gpt-3.5-turbo", + "gpt-3.5-turbo-0125" + ], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "gpt-4o-mini" + }, + "n_messages": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Number of Messages", + "dynamic": false, + "info": "Number of messages to retrieve.", + "list": false, + "name": "n_messages", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 100 + }, + "openai_api_base": { + "_input_type": "StrInput", + "advanced": true, + "display_name": "OpenAI API Base", + "dynamic": false, + "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.", + "list": false, + "load_from_db": false, + "name": "openai_api_base", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "order": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Order", + "dynamic": false, + "info": "Order of the messages.", + "name": "order", + "options": ["Ascending", "Descending"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "Ascending" + }, + "output_parser": { + "_input_type": "HandleInput", + "advanced": true, + "display_name": "Output Parser", + "dynamic": false, + "info": "The parser to use to parse the output of the model", + "input_types": ["OutputParser"], + "list": false, + "name": "output_parser", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "output_schema": { + "_input_type": "DictInput", + "advanced": true, + "display_name": "Schema", + "dynamic": false, + "info": "The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled. 
[DEPRECATED]", + "list": true, + "name": "output_schema", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "seed": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Seed", + "dynamic": false, + "info": "The seed controls the reproducibility of the job.", + "list": false, + "name": "seed", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 1 + }, + "sender": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Sender Type", + "dynamic": false, + "info": "Filter by sender type.", + "name": "sender", + "options": ["Machine", "User", "Machine and User"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "Machine and User" + }, + "sender_name": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Sender Name", + "dynamic": false, + "info": "Filter by sender name.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "sender_name", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "session_id": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Session ID", + "dynamic": false, + "info": "The session ID of the chat. If empty, the current session ID parameter will be used.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "session_id", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "system_prompt": { + "_input_type": "MultilineInput", + "advanced": false, + "display_name": "Agent Instructions", + "dynamic": false, + "info": "System Prompt: Initial instructions and context provided to guide the agent's behavior.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "system_prompt", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "You are a helpful assistant that can use tools to answer questions and perform tasks." + }, + "temperature": { + "_input_type": "FloatInput", + "advanced": true, + "display_name": "Temperature", + "dynamic": false, + "info": "", + "list": false, + "name": "temperature", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "float", + "value": 0.1 + }, + "template": { + "_input_type": "MultilineInput", + "advanced": true, + "display_name": "Template", + "dynamic": false, + "info": "The template to use for formatting the data. 
It can contain the keys {text}, {sender} or any other key in the message data.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "{sender_name}: {text}" + }, + "tools": { + "_input_type": "HandleInput", + "advanced": false, + "display_name": "Tools", + "dynamic": false, + "info": "These are the tools that the agent can use to help with tasks.", + "input_types": ["Tool", "BaseTool", "StructuredTool"], + "list": true, + "name": "tools", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "verbose": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Verbose", + "dynamic": false, + "info": "", + "list": false, + "name": "verbose", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + } + }, + "tool_mode": false + }, + "type": "Agent" + }, + "dragging": true, + "height": 650, + "id": "Agent-rPh1n", + "position": { + "x": 2472.7748760933105, + "y": 335.66187210240537 + }, + "positionAbsolute": { + "x": 2472.7748760933105, + "y": 335.66187210240537 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "description": "Define the agent's instructions, then enter a task to complete using tools.", + "display_name": "Local Expert Agent", + "id": "Agent-cj2PH", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Define the agent's instructions, then enter a task to complete using tools.", + "display_name": "Local Expert Agent", + "documentation": "", + "edited": false, + "field_order": [ + "agent_llm", + "max_tokens", + "model_kwargs", + "json_mode", + "output_schema", + "model_name", + "openai_api_base", + "api_key", + "temperature", + "seed", + "output_parser", + "system_prompt", + "tools", + "input_value", + "handle_parsing_errors", + "verbose", + "max_iterations", + "agent_description", + "memory", + "sender", + "sender_name", + "n_messages", + "session_id", + "order", + "template", + "add_current_date_tool" + ], + "frozen": false, + "icon": "bot", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Response", + "method": "message_response", + "name": "response", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "add_current_date_tool": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Add tool Current Date", + "dynamic": false, + "info": "If true, will add a tool to the agent that returns the current date.", + "list": false, + "name": "add_current_date_tool", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "agent_description": { + "_input_type": "MultilineInput", + "advanced": true, + "display_name": "Agent Description", + "dynamic": false, + "info": "The description of the agent. This is only used when in Tool Mode. 
Defaults to 'A helpful assistant with access to the following tools:' and tools are added dynamically.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "agent_description", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "A helpful assistant with access to the following tools:" + }, + "agent_llm": { + "_input_type": "DropdownInput", + "advanced": false, + "combobox": false, + "display_name": "Model Provider", + "dynamic": false, + "info": "The provider of the language model that the agent will use to generate responses.", + "input_types": [], + "name": "agent_llm", + "options": [ + "Amazon Bedrock", + "Anthropic", + "Azure OpenAI", + "Groq", + "NVIDIA", + "OpenAI", + "Custom" + ], + "placeholder": "", + "real_time_refresh": true, + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "OpenAI" + }, + "api_key": { + "_input_type": "SecretStrInput", + "advanced": false, + "display_name": "OpenAI API Key", + "dynamic": false, + "info": "The OpenAI API Key to use for the OpenAI model.", + "input_types": ["Message"], + "load_from_db": true, + "name": "api_key", + "password": true, + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "OPENAI_API_KEY" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langchain_core.tools import StructuredTool\n\nfrom langflow.base.agents.agent import LCToolsAgentComponent\nfrom langflow.base.models.model_input_constants import ALL_PROVIDER_FIELDS, MODEL_PROVIDERS_DICT\nfrom langflow.base.models.model_utils import get_model_name\nfrom langflow.components.helpers import CurrentDateComponent\nfrom langflow.components.helpers.memory import MemoryComponent\nfrom langflow.components.langchain_utilities.tool_calling import ToolCallingAgentComponent\nfrom langflow.io import BoolInput, DropdownInput, MultilineInput, Output\nfrom langflow.schema.dotdict import dotdict\nfrom langflow.schema.message import Message\n\n\ndef set_advanced_true(component_input):\n component_input.advanced = True\n return component_input\n\n\nclass AgentComponent(ToolCallingAgentComponent):\n display_name: str = \"Agent\"\n description: str = \"Define the agent's instructions, then enter a task to complete using tools.\"\n icon = \"bot\"\n beta = False\n name = \"Agent\"\n\n memory_inputs = [set_advanced_true(component_input) for component_input in MemoryComponent().inputs]\n\n inputs = [\n DropdownInput(\n name=\"agent_llm\",\n display_name=\"Model Provider\",\n info=\"The provider of the language model that the agent will use to generate responses.\",\n options=[*sorted(MODEL_PROVIDERS_DICT.keys()), \"Custom\"],\n value=\"OpenAI\",\n real_time_refresh=True,\n input_types=[],\n ),\n *MODEL_PROVIDERS_DICT[\"OpenAI\"][\"inputs\"],\n MultilineInput(\n name=\"system_prompt\",\n display_name=\"Agent Instructions\",\n info=\"System Prompt: Initial instructions and context provided to guide the agent's behavior.\",\n value=\"You are a helpful assistant that can use tools to answer questions and perform 
tasks.\",\n advanced=False,\n ),\n *LCToolsAgentComponent._base_inputs,\n *memory_inputs,\n BoolInput(\n name=\"add_current_date_tool\",\n display_name=\"Add tool Current Date\",\n advanced=True,\n info=\"If true, will add a tool to the agent that returns the current date.\",\n value=True,\n ),\n ]\n outputs = [Output(name=\"response\", display_name=\"Response\", method=\"message_response\")]\n\n async def message_response(self) -> Message:\n llm_model, display_name = self.get_llm()\n self.model_name = get_model_name(llm_model, display_name=display_name)\n if llm_model is None:\n msg = \"No language model selected\"\n raise ValueError(msg)\n self.chat_history = self.get_memory_data()\n\n if self.add_current_date_tool:\n if not isinstance(self.tools, list): # type: ignore[has-type]\n self.tools = []\n # Convert CurrentDateComponent to a StructuredTool\n current_date_tool = CurrentDateComponent().to_toolkit()[0]\n if isinstance(current_date_tool, StructuredTool):\n self.tools.append(current_date_tool)\n else:\n msg = \"CurrentDateComponent must be converted to a StructuredTool\"\n raise ValueError(msg)\n\n if not self.tools:\n msg = \"Tools are required to run the agent.\"\n raise ValueError(msg)\n self.set(\n llm=llm_model,\n tools=self.tools,\n chat_history=self.chat_history,\n input_value=self.input_value,\n system_prompt=self.system_prompt,\n )\n agent = self.create_agent_runnable()\n return await self.run_agent(agent)\n\n def get_memory_data(self):\n memory_kwargs = {\n component_input.name: getattr(self, f\"{component_input.name}\") for component_input in self.memory_inputs\n }\n\n return MemoryComponent().set(**memory_kwargs).retrieve_messages()\n\n def get_llm(self):\n if isinstance(self.agent_llm, str):\n try:\n provider_info = MODEL_PROVIDERS_DICT.get(self.agent_llm)\n if provider_info:\n component_class = provider_info.get(\"component_class\")\n display_name = component_class.display_name\n inputs = provider_info.get(\"inputs\")\n prefix = provider_info.get(\"prefix\", \"\")\n return self._build_llm_model(component_class, inputs, prefix), display_name\n except Exception as e:\n msg = f\"Error building {self.agent_llm} language model\"\n raise ValueError(msg) from e\n return self.agent_llm, None\n\n def _build_llm_model(self, component, inputs, prefix=\"\"):\n model_kwargs = {input_.name: getattr(self, f\"{prefix}{input_.name}\") for input_ in inputs}\n return component.set(**model_kwargs).build_model()\n\n def delete_fields(self, build_config: dotdict, fields: dict | list[str]) -> None:\n \"\"\"Delete specified fields from build_config.\"\"\"\n for field in fields:\n build_config.pop(field, None)\n\n def update_input_types(self, build_config: dotdict) -> dotdict:\n \"\"\"Update input types for all fields in build_config.\"\"\"\n for key, value in build_config.items():\n if isinstance(value, dict):\n if value.get(\"input_types\") is None:\n build_config[key][\"input_types\"] = []\n elif hasattr(value, \"input_types\") and value.input_types is None:\n value.input_types = []\n return build_config\n\n def update_build_config(self, build_config: dotdict, field_value: str, field_name: str | None = None) -> dotdict:\n # Iterate over all providers in the MODEL_PROVIDERS_DICT\n # Existing logic for updating build_config\n if field_name == \"agent_llm\":\n provider_info = MODEL_PROVIDERS_DICT.get(field_value)\n if provider_info:\n component_class = provider_info.get(\"component_class\")\n if component_class and hasattr(component_class, \"update_build_config\"):\n # Call the component 
class's update_build_config method\n build_config = component_class.update_build_config(build_config, field_value, field_name)\n\n provider_configs: dict[str, tuple[dict, list[dict]]] = {\n provider: (\n MODEL_PROVIDERS_DICT[provider][\"fields\"],\n [\n MODEL_PROVIDERS_DICT[other_provider][\"fields\"]\n for other_provider in MODEL_PROVIDERS_DICT\n if other_provider != provider\n ],\n )\n for provider in MODEL_PROVIDERS_DICT\n }\n if field_value in provider_configs:\n fields_to_add, fields_to_delete = provider_configs[field_value]\n\n # Delete fields from other providers\n for fields in fields_to_delete:\n self.delete_fields(build_config, fields)\n\n # Add provider-specific fields\n if field_value == \"OpenAI\" and not any(field in build_config for field in fields_to_add):\n build_config.update(fields_to_add)\n else:\n build_config.update(fields_to_add)\n # Reset input types for agent_llm\n build_config[\"agent_llm\"][\"input_types\"] = []\n elif field_value == \"Custom\":\n # Delete all provider fields\n self.delete_fields(build_config, ALL_PROVIDER_FIELDS)\n # Update with custom component\n custom_component = DropdownInput(\n name=\"agent_llm\",\n display_name=\"Language Model\",\n options=[*sorted(MODEL_PROVIDERS_DICT.keys()), \"Custom\"],\n value=\"Custom\",\n real_time_refresh=True,\n input_types=[\"LanguageModel\"],\n )\n build_config.update({\"agent_llm\": custom_component.to_dict()})\n # Update input types for all fields\n build_config = self.update_input_types(build_config)\n\n # Validate required keys\n default_keys = [\n \"code\",\n \"_type\",\n \"agent_llm\",\n \"tools\",\n \"input_value\",\n \"add_current_date_tool\",\n \"system_prompt\",\n \"agent_description\",\n \"max_iterations\",\n \"handle_parsing_errors\",\n \"verbose\",\n ]\n missing_keys = [key for key in default_keys if key not in build_config]\n if missing_keys:\n msg = f\"Missing required keys in build_config: {missing_keys}\"\n raise ValueError(msg)\n if isinstance(self.agent_llm, str) and self.agent_llm in MODEL_PROVIDERS_DICT:\n provider_info = MODEL_PROVIDERS_DICT.get(self.agent_llm)\n if provider_info:\n component_class = provider_info.get(\"component_class\")\n prefix = provider_info.get(\"prefix\")\n if component_class and hasattr(component_class, \"update_build_config\"):\n # Call each component class's update_build_config method\n # remove the prefix from the field_name\n if isinstance(field_name, str) and isinstance(prefix, str):\n field_name = field_name.replace(prefix, \"\")\n build_config = component_class.update_build_config(build_config, field_value, field_name)\n\n return build_config\n" + }, + "handle_parsing_errors": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Handle Parse Errors", + "dynamic": false, + "info": "Should the Agent fix errors when reading user input for better processing?", + "list": false, + "name": "handle_parsing_errors", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "input_value": { + "_input_type": "MessageTextInput", + "advanced": false, + "display_name": "Input", + "dynamic": false, + "info": "The input provided by the user for the agent to process.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": true, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + 
"json_mode": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "JSON Mode", + "dynamic": false, + "info": "If True, it will output JSON regardless of passing a schema.", + "list": false, + "name": "json_mode", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + }, + "max_iterations": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Max Iterations", + "dynamic": false, + "info": "The maximum number of attempts the agent can make to complete its task before it stops.", + "list": false, + "name": "max_iterations", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 15 + }, + "max_tokens": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Max Tokens", + "dynamic": false, + "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.", + "list": false, + "name": "max_tokens", + "placeholder": "", + "range_spec": { + "max": 128000, + "min": 0, + "step": 0.1, + "step_type": "float" + }, + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": "" + }, + "memory": { + "_input_type": "HandleInput", + "advanced": true, + "display_name": "External Memory", + "dynamic": false, + "info": "Retrieve messages from an external memory. If empty, it will use the Langflow tables.", + "input_types": ["BaseChatMessageHistory"], + "list": false, + "name": "memory", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "model_kwargs": { + "_input_type": "DictInput", + "advanced": true, + "display_name": "Model Kwargs", + "dynamic": false, + "info": "Additional keyword arguments to pass to the model.", + "list": false, + "name": "model_kwargs", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "model_name": { + "_input_type": "DropdownInput", + "advanced": false, + "combobox": false, + "display_name": "Model Name", + "dynamic": false, + "info": "", + "name": "model_name", + "options": [ + "gpt-4o-mini", + "gpt-4o", + "gpt-4-turbo", + "gpt-4-turbo-preview", + "gpt-4", + "gpt-3.5-turbo", + "gpt-3.5-turbo-0125" + ], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "gpt-4o-mini" + }, + "n_messages": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Number of Messages", + "dynamic": false, + "info": "Number of messages to retrieve.", + "list": false, + "name": "n_messages", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 100 + }, + "openai_api_base": { + "_input_type": "StrInput", + "advanced": true, + "display_name": "OpenAI API Base", + "dynamic": false, + "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. 
You can change this to use other APIs like JinaChat, LocalAI and Prem.", + "list": false, + "load_from_db": false, + "name": "openai_api_base", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "order": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Order", + "dynamic": false, + "info": "Order of the messages.", + "name": "order", + "options": ["Ascending", "Descending"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "Ascending" + }, + "output_parser": { + "_input_type": "HandleInput", + "advanced": true, + "display_name": "Output Parser", + "dynamic": false, + "info": "The parser to use to parse the output of the model", + "input_types": ["OutputParser"], + "list": false, + "name": "output_parser", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "output_schema": { + "_input_type": "DictInput", + "advanced": true, + "display_name": "Schema", + "dynamic": false, + "info": "The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled. [DEPRECATED]", + "list": true, + "name": "output_schema", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "seed": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Seed", + "dynamic": false, + "info": "The seed controls the reproducibility of the job.", + "list": false, + "name": "seed", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 1 + }, + "sender": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Sender Type", + "dynamic": false, + "info": "Filter by sender type.", + "name": "sender", + "options": ["Machine", "User", "Machine and User"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "Machine and User" + }, + "sender_name": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Sender Name", + "dynamic": false, + "info": "Filter by sender name.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "sender_name", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "session_id": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Session ID", + "dynamic": false, + "info": "The session ID of the chat. 
If empty, the current session ID parameter will be used.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "session_id", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "system_prompt": { + "_input_type": "MultilineInput", + "advanced": false, + "display_name": "Agent Instructions", + "dynamic": false, + "info": "System Prompt: Initial instructions and context provided to guide the agent's behavior.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "system_prompt", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "You are a knowledgeable Local Expert with extensive information about the selected city, its attractions, and customs. Your goal is to provide the BEST insights about the city. Compile an in-depth guide for travelers, including key attractions, local customs, special events, and daily activity recommendations. Focus on hidden gems and local hotspots. Your final output should be a comprehensive city guide, rich in cultural insights and practical tips." + }, + "temperature": { + "_input_type": "FloatInput", + "advanced": true, + "display_name": "Temperature", + "dynamic": false, + "info": "", + "list": false, + "name": "temperature", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "float", + "value": 0.1 + }, + "template": { + "_input_type": "MultilineInput", + "advanced": true, + "display_name": "Template", + "dynamic": false, + "info": "The template to use for formatting the data. 
It can contain the keys {text}, {sender} or any other key in the message data.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "{sender_name}: {text}" + }, + "tools": { + "_input_type": "HandleInput", + "advanced": false, + "display_name": "Tools", + "dynamic": false, + "info": "These are the tools that the agent can use to help with tasks.", + "input_types": ["Tool", "BaseTool", "StructuredTool"], + "list": true, + "name": "tools", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "verbose": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Verbose", + "dynamic": false, + "info": "", + "list": false, + "name": "verbose", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + } + }, + "tool_mode": false + }, + "type": "Agent" + }, + "dragging": false, + "height": 650, + "id": "Agent-cj2PH", + "position": { + "x": 3185.66991544494, + "y": 355.95841004876377 + }, + "positionAbsolute": { + "x": 3185.66991544494, + "y": 355.95841004876377 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "description": "Define the agent's instructions, then enter a task to complete using tools.", + "display_name": "Travel Concierge Agent", + "id": "Agent-ImgzA", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Define the agent's instructions, then enter a task to complete using tools.", + "display_name": "Travel Concierge Agent", + "documentation": "", + "edited": false, + "field_order": [ + "agent_llm", + "max_tokens", + "model_kwargs", + "json_mode", + "output_schema", + "model_name", + "openai_api_base", + "api_key", + "temperature", + "seed", + "output_parser", + "system_prompt", + "tools", + "input_value", + "handle_parsing_errors", + "verbose", + "max_iterations", + "agent_description", + "memory", + "sender", + "sender_name", + "n_messages", + "session_id", + "order", + "template", + "add_current_date_tool" + ], + "frozen": false, + "icon": "bot", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Response", + "method": "message_response", + "name": "response", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "add_current_date_tool": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Add tool Current Date", + "dynamic": false, + "info": "If true, will add a tool to the agent that returns the current date.", + "list": false, + "name": "add_current_date_tool", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "agent_description": { + "_input_type": "MultilineInput", + "advanced": true, + "display_name": "Agent Description", + "dynamic": false, + "info": "The description of the agent. This is only used when in Tool Mode. 
Defaults to 'A helpful assistant with access to the following tools:' and tools are added dynamically.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "agent_description", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "A helpful assistant with access to the following tools:" + }, + "agent_llm": { + "_input_type": "DropdownInput", + "advanced": false, + "combobox": false, + "display_name": "Model Provider", + "dynamic": false, + "info": "The provider of the language model that the agent will use to generate responses.", + "input_types": [], + "name": "agent_llm", + "options": [ + "Amazon Bedrock", + "Anthropic", + "Azure OpenAI", + "Groq", + "NVIDIA", + "OpenAI", + "Custom" + ], + "placeholder": "", + "real_time_refresh": true, + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "OpenAI" + }, + "api_key": { + "_input_type": "SecretStrInput", + "advanced": false, + "display_name": "OpenAI API Key", + "dynamic": false, + "info": "The OpenAI API Key to use for the OpenAI model.", + "input_types": ["Message"], + "load_from_db": true, + "name": "api_key", + "password": true, + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "OPENAI_API_KEY" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langchain_core.tools import StructuredTool\n\nfrom langflow.base.agents.agent import LCToolsAgentComponent\nfrom langflow.base.models.model_input_constants import ALL_PROVIDER_FIELDS, MODEL_PROVIDERS_DICT\nfrom langflow.base.models.model_utils import get_model_name\nfrom langflow.components.helpers import CurrentDateComponent\nfrom langflow.components.helpers.memory import MemoryComponent\nfrom langflow.components.langchain_utilities.tool_calling import ToolCallingAgentComponent\nfrom langflow.io import BoolInput, DropdownInput, MultilineInput, Output\nfrom langflow.schema.dotdict import dotdict\nfrom langflow.schema.message import Message\n\n\ndef set_advanced_true(component_input):\n component_input.advanced = True\n return component_input\n\n\nclass AgentComponent(ToolCallingAgentComponent):\n display_name: str = \"Agent\"\n description: str = \"Define the agent's instructions, then enter a task to complete using tools.\"\n icon = \"bot\"\n beta = False\n name = \"Agent\"\n\n memory_inputs = [set_advanced_true(component_input) for component_input in MemoryComponent().inputs]\n\n inputs = [\n DropdownInput(\n name=\"agent_llm\",\n display_name=\"Model Provider\",\n info=\"The provider of the language model that the agent will use to generate responses.\",\n options=[*sorted(MODEL_PROVIDERS_DICT.keys()), \"Custom\"],\n value=\"OpenAI\",\n real_time_refresh=True,\n input_types=[],\n ),\n *MODEL_PROVIDERS_DICT[\"OpenAI\"][\"inputs\"],\n MultilineInput(\n name=\"system_prompt\",\n display_name=\"Agent Instructions\",\n info=\"System Prompt: Initial instructions and context provided to guide the agent's behavior.\",\n value=\"You are a helpful assistant that can use tools to answer questions and perform 
tasks.\",\n advanced=False,\n ),\n *LCToolsAgentComponent._base_inputs,\n *memory_inputs,\n BoolInput(\n name=\"add_current_date_tool\",\n display_name=\"Add tool Current Date\",\n advanced=True,\n info=\"If true, will add a tool to the agent that returns the current date.\",\n value=True,\n ),\n ]\n outputs = [Output(name=\"response\", display_name=\"Response\", method=\"message_response\")]\n\n async def message_response(self) -> Message:\n llm_model, display_name = self.get_llm()\n self.model_name = get_model_name(llm_model, display_name=display_name)\n if llm_model is None:\n msg = \"No language model selected\"\n raise ValueError(msg)\n self.chat_history = self.get_memory_data()\n\n if self.add_current_date_tool:\n if not isinstance(self.tools, list): # type: ignore[has-type]\n self.tools = []\n # Convert CurrentDateComponent to a StructuredTool\n current_date_tool = CurrentDateComponent().to_toolkit()[0]\n if isinstance(current_date_tool, StructuredTool):\n self.tools.append(current_date_tool)\n else:\n msg = \"CurrentDateComponent must be converted to a StructuredTool\"\n raise ValueError(msg)\n\n if not self.tools:\n msg = \"Tools are required to run the agent.\"\n raise ValueError(msg)\n self.set(\n llm=llm_model,\n tools=self.tools,\n chat_history=self.chat_history,\n input_value=self.input_value,\n system_prompt=self.system_prompt,\n )\n agent = self.create_agent_runnable()\n return await self.run_agent(agent)\n\n def get_memory_data(self):\n memory_kwargs = {\n component_input.name: getattr(self, f\"{component_input.name}\") for component_input in self.memory_inputs\n }\n\n return MemoryComponent().set(**memory_kwargs).retrieve_messages()\n\n def get_llm(self):\n if isinstance(self.agent_llm, str):\n try:\n provider_info = MODEL_PROVIDERS_DICT.get(self.agent_llm)\n if provider_info:\n component_class = provider_info.get(\"component_class\")\n display_name = component_class.display_name\n inputs = provider_info.get(\"inputs\")\n prefix = provider_info.get(\"prefix\", \"\")\n return self._build_llm_model(component_class, inputs, prefix), display_name\n except Exception as e:\n msg = f\"Error building {self.agent_llm} language model\"\n raise ValueError(msg) from e\n return self.agent_llm, None\n\n def _build_llm_model(self, component, inputs, prefix=\"\"):\n model_kwargs = {input_.name: getattr(self, f\"{prefix}{input_.name}\") for input_ in inputs}\n return component.set(**model_kwargs).build_model()\n\n def delete_fields(self, build_config: dotdict, fields: dict | list[str]) -> None:\n \"\"\"Delete specified fields from build_config.\"\"\"\n for field in fields:\n build_config.pop(field, None)\n\n def update_input_types(self, build_config: dotdict) -> dotdict:\n \"\"\"Update input types for all fields in build_config.\"\"\"\n for key, value in build_config.items():\n if isinstance(value, dict):\n if value.get(\"input_types\") is None:\n build_config[key][\"input_types\"] = []\n elif hasattr(value, \"input_types\") and value.input_types is None:\n value.input_types = []\n return build_config\n\n def update_build_config(self, build_config: dotdict, field_value: str, field_name: str | None = None) -> dotdict:\n # Iterate over all providers in the MODEL_PROVIDERS_DICT\n # Existing logic for updating build_config\n if field_name == \"agent_llm\":\n provider_info = MODEL_PROVIDERS_DICT.get(field_value)\n if provider_info:\n component_class = provider_info.get(\"component_class\")\n if component_class and hasattr(component_class, \"update_build_config\"):\n # Call the component 
class's update_build_config method\n build_config = component_class.update_build_config(build_config, field_value, field_name)\n\n provider_configs: dict[str, tuple[dict, list[dict]]] = {\n provider: (\n MODEL_PROVIDERS_DICT[provider][\"fields\"],\n [\n MODEL_PROVIDERS_DICT[other_provider][\"fields\"]\n for other_provider in MODEL_PROVIDERS_DICT\n if other_provider != provider\n ],\n )\n for provider in MODEL_PROVIDERS_DICT\n }\n if field_value in provider_configs:\n fields_to_add, fields_to_delete = provider_configs[field_value]\n\n # Delete fields from other providers\n for fields in fields_to_delete:\n self.delete_fields(build_config, fields)\n\n # Add provider-specific fields\n if field_value == \"OpenAI\" and not any(field in build_config for field in fields_to_add):\n build_config.update(fields_to_add)\n else:\n build_config.update(fields_to_add)\n # Reset input types for agent_llm\n build_config[\"agent_llm\"][\"input_types\"] = []\n elif field_value == \"Custom\":\n # Delete all provider fields\n self.delete_fields(build_config, ALL_PROVIDER_FIELDS)\n # Update with custom component\n custom_component = DropdownInput(\n name=\"agent_llm\",\n display_name=\"Language Model\",\n options=[*sorted(MODEL_PROVIDERS_DICT.keys()), \"Custom\"],\n value=\"Custom\",\n real_time_refresh=True,\n input_types=[\"LanguageModel\"],\n )\n build_config.update({\"agent_llm\": custom_component.to_dict()})\n # Update input types for all fields\n build_config = self.update_input_types(build_config)\n\n # Validate required keys\n default_keys = [\n \"code\",\n \"_type\",\n \"agent_llm\",\n \"tools\",\n \"input_value\",\n \"add_current_date_tool\",\n \"system_prompt\",\n \"agent_description\",\n \"max_iterations\",\n \"handle_parsing_errors\",\n \"verbose\",\n ]\n missing_keys = [key for key in default_keys if key not in build_config]\n if missing_keys:\n msg = f\"Missing required keys in build_config: {missing_keys}\"\n raise ValueError(msg)\n if isinstance(self.agent_llm, str) and self.agent_llm in MODEL_PROVIDERS_DICT:\n provider_info = MODEL_PROVIDERS_DICT.get(self.agent_llm)\n if provider_info:\n component_class = provider_info.get(\"component_class\")\n prefix = provider_info.get(\"prefix\")\n if component_class and hasattr(component_class, \"update_build_config\"):\n # Call each component class's update_build_config method\n # remove the prefix from the field_name\n if isinstance(field_name, str) and isinstance(prefix, str):\n field_name = field_name.replace(prefix, \"\")\n build_config = component_class.update_build_config(build_config, field_value, field_name)\n\n return build_config\n" + }, + "handle_parsing_errors": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Handle Parse Errors", + "dynamic": false, + "info": "Should the Agent fix errors when reading user input for better processing?", + "list": false, + "name": "handle_parsing_errors", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "input_value": { + "_input_type": "MessageTextInput", + "advanced": false, + "display_name": "Input", + "dynamic": false, + "info": "The input provided by the user for the agent to process.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": true, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + 
"json_mode": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "JSON Mode", + "dynamic": false, + "info": "If True, it will output JSON regardless of passing a schema.", + "list": false, + "name": "json_mode", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + }, + "max_iterations": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Max Iterations", + "dynamic": false, + "info": "The maximum number of attempts the agent can make to complete its task before it stops.", + "list": false, + "name": "max_iterations", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 15 + }, + "max_tokens": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Max Tokens", + "dynamic": false, + "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.", + "list": false, + "name": "max_tokens", + "placeholder": "", + "range_spec": { + "max": 128000, + "min": 0, + "step": 0.1, + "step_type": "float" + }, + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": "" + }, + "memory": { + "_input_type": "HandleInput", + "advanced": true, + "display_name": "External Memory", + "dynamic": false, + "info": "Retrieve messages from an external memory. If empty, it will use the Langflow tables.", + "input_types": ["BaseChatMessageHistory"], + "list": false, + "name": "memory", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "model_kwargs": { + "_input_type": "DictInput", + "advanced": true, + "display_name": "Model Kwargs", + "dynamic": false, + "info": "Additional keyword arguments to pass to the model.", + "list": false, + "name": "model_kwargs", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "model_name": { + "_input_type": "DropdownInput", + "advanced": false, + "combobox": false, + "display_name": "Model Name", + "dynamic": false, + "info": "", + "name": "model_name", + "options": [ + "gpt-4o-mini", + "gpt-4o", + "gpt-4-turbo", + "gpt-4-turbo-preview", + "gpt-4", + "gpt-3.5-turbo", + "gpt-3.5-turbo-0125" + ], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "gpt-4o-mini" + }, + "n_messages": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Number of Messages", + "dynamic": false, + "info": "Number of messages to retrieve.", + "list": false, + "name": "n_messages", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 100 + }, + "openai_api_base": { + "_input_type": "StrInput", + "advanced": true, + "display_name": "OpenAI API Base", + "dynamic": false, + "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. 
You can change this to use other APIs like JinaChat, LocalAI and Prem.", + "list": false, + "load_from_db": false, + "name": "openai_api_base", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "order": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Order", + "dynamic": false, + "info": "Order of the messages.", + "name": "order", + "options": ["Ascending", "Descending"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "Ascending" + }, + "output_parser": { + "_input_type": "HandleInput", + "advanced": true, + "display_name": "Output Parser", + "dynamic": false, + "info": "The parser to use to parse the output of the model", + "input_types": ["OutputParser"], + "list": false, + "name": "output_parser", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "output_schema": { + "_input_type": "DictInput", + "advanced": true, + "display_name": "Schema", + "dynamic": false, + "info": "The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled. [DEPRECATED]", + "list": true, + "name": "output_schema", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "seed": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Seed", + "dynamic": false, + "info": "The seed controls the reproducibility of the job.", + "list": false, + "name": "seed", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 1 + }, + "sender": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Sender Type", + "dynamic": false, + "info": "Filter by sender type.", + "name": "sender", + "options": ["Machine", "User", "Machine and User"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "Machine and User" + }, + "sender_name": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Sender Name", + "dynamic": false, + "info": "Filter by sender name.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "sender_name", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "session_id": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Session ID", + "dynamic": false, + "info": "The session ID of the chat. 
If empty, the current session ID parameter will be used.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "session_id", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "system_prompt": { + "_input_type": "MultilineInput", + "advanced": false, + "display_name": "Agent Instructions", + "dynamic": false, + "info": "System Prompt: Initial instructions and context provided to guide the agent's behavior.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "system_prompt", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "You are an Amazing Travel Concierge, a specialist in travel planning and logistics with decades of experience. Your goal is to create the most amazing travel itineraries with budget and packing suggestions for the city. Expand the city guide into a full 7-day travel itinerary with detailed per-day plans. Include weather forecasts, places to eat, packing suggestions, and a budget breakdown. Suggest actual places to visit, hotels to stay, and restaurants to go to. Your final output should be a complete expanded travel plan, formatted as markdown, encompassing a daily schedule, anticipated weather conditions, recommended clothing and items to pack, and a detailed budget." + }, + "temperature": { + "_input_type": "FloatInput", + "advanced": true, + "display_name": "Temperature", + "dynamic": false, + "info": "", + "list": false, + "name": "temperature", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "float", + "value": 0.1 + }, + "template": { + "_input_type": "MultilineInput", + "advanced": true, + "display_name": "Template", + "dynamic": false, + "info": "The template to use for formatting the data. 
It can contain the keys {text}, {sender} or any other key in the message data.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "{sender_name}: {text}" + }, + "tools": { + "_input_type": "HandleInput", + "advanced": false, + "display_name": "Tools", + "dynamic": false, + "info": "These are the tools that the agent can use to help with tasks.", + "input_types": ["Tool", "BaseTool", "StructuredTool"], + "list": true, + "name": "tools", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "verbose": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Verbose", + "dynamic": false, + "info": "", + "list": false, + "name": "verbose", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + } + }, + "tool_mode": false + }, + "type": "Agent" + }, + "dragging": false, + "height": 650, + "id": "Agent-ImgzA", + "position": { + "x": 3889.695953842898, + "y": 370.3161168611889 + }, + "positionAbsolute": { + "x": 3889.695953842898, + "y": 370.3161168611889 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "id": "note-jCp1N", + "node": { + "description": "# Travel Planning Agents \n\nThe travel planning system is a smart setup that uses several specialized \"agents\" to help plan incredible trips. Imagine each agent as a travel expert focusing on a part of your journey. Here's how it works:\n\n- **User-Friendly Start:** You start by telling the system about your travel needs—where you want to go and what you love to do.\n\n- **Data Collection:** The agents use their tools to gather current info about various destinations, like the best travel times, weather, and costs.\n\n- **Three Key Agents:**\n - **City Selection Agent:** Picks the best places to visit based on your likes and current data.\n - **Local Expert Agent:** Gathers interesting details about what to do and see in the chosen city.\n - **Travel Concierge Agent:** Builds a day-by-day plan that includes where to stay, eat, and explore!\n\n- **Tools and Data:** Each agent uses tools to find and organize the latest information so you get recommendations that are both accurate and exciting.\n\n- **Final Plan:** Once everything is put together, you receive a complete, easy-to-follow travel itinerary, perfect for your adventure!\n", + "display_name": "", + "documentation": "", + "template": {} + }, + "type": "note" + }, + "dragging": false, + "height": 636, + "id": "note-jCp1N", + "position": { + "x": 1076.3710803600266, + "y": 92.06058855045646 + }, + "positionAbsolute": { + "x": 1076.3710803600266, + "y": 92.06058855045646 + }, + "resizing": false, + "selected": false, + "style": { + "height": 636, + "width": 600 + }, + "type": "noteNode", + "width": 600 + }, + { + "data": { + "id": "note-jgIF0", + "node": { + "description": "# **City Selection Agent**\n - **Purpose:** This agent evaluates potential travel destinations based on user input and external data sources.\n - **Core Functions:** Analyzes factors such as weather, local events, and travel costs to recommend optimal cities.\n - **Tools Utilized:** Employs APIs and data-fetching tools to gather real-time
information for decision-making.\n", + "display_name": "", + "documentation": "", + "template": { + "backgroundColor": "blue" + } + }, + "type": "note" + }, + "dragging": false, + "height": 362, + "id": "note-jgIF0", + "position": { + "x": 2122.4146132377227, + "y": 485.2212661145467 + }, + "positionAbsolute": { + "x": 2122.4146132377227, + "y": 485.2212661145467 + }, + "resizing": false, + "selected": false, + "style": { + "height": 362, + "width": 331 + }, + "type": "noteNode", + "width": 331 + }, + { + "data": { + "id": "note-NTTln", + "node": { + "description": "# **Local Expert Agent**\n - **Purpose:** Focused on gathering and providing an in-depth guide to the selected city.\n - **Core Functions:** Compiles insights into cultural attractions, local customs, and unique experiences.\n - **Tools Utilized:** Uses web content fetchers and data APIs to collect detailed local insights and enhance the user's understanding with hidden gems.\n", + "display_name": "", + "documentation": "", + "template": { + "backgroundColor": "blue" + } + }, + "type": "note" + }, + "dragging": false, + "height": 366, + "id": "note-NTTln", + "position": { + "x": 2827.660803823376, + "y": 488.6092281195304 + }, + "positionAbsolute": { + "x": 2827.660803823376, + "y": 488.6092281195304 + }, + "resizing": false, + "selected": false, + "style": { + "height": 366, + "width": 351 + }, + "type": "noteNode", + "width": 351 + }, + { + "data": { + "id": "note-45aOQ", + "node": { + "description": "# **Travel Concierge Agent**\n - **Purpose:** Crafts detailed travel itineraries that are customized to the traveler's interests and needs.\n - **Core Functions:** Offers a comprehensive daily schedule, including accommodations, dining spots, and activities.\n - **Tools Utilized:** Integrates calculators and data tools for accurate budget planning and itinerary logistics.", + "display_name": "", + "documentation": "", + "template": { + "backgroundColor": "blue" + } + }, + "type": "note" + }, + "dragging": false, + "height": 344, + "id": "note-45aOQ", + "position": { + "x": 3536.084279543714, + "y": 496.3155992003396 + }, + "positionAbsolute": { + "x": 3536.084279543714, + "y": 496.3155992003396 + }, + "resizing": false, + "selected": false, + "style": { + "height": 344, + "width": 344 + }, + "type": "noteNode", + "width": 344 + }, + { + "data": { + "id": "note-elTLU", + "node": { + "description": "## Configure the agent by obtaining your OpenAI API key from [platform.openai.com](https://platform.openai.com).
Under \"Model Provider\", choose:\n- OpenAI: Default, requires only API key\n- Anthropic/Azure/Groq/NVIDIA: Each requires their own API keys\n- Custom: Use your own model endpoint + authentication\n\nSelect model and input API key before running the flow.", + "display_name": "", + "documentation": "", + "template": { + "backgroundColor": "rose" + } + }, + "type": "note" + }, + "dragging": false, + "height": 325, + "id": "note-elTLU", + "position": { + "x": 2463.3881993480218, + "y": 42.83594355441298 + }, + "positionAbsolute": { + "x": 2463.3881993480218, + "y": 42.83594355441298 + }, + "selected": false, + "type": "noteNode", + "width": 325 + } + ], + "viewport": { + "x": -1078.5758749396496, + "y": -166.63499501100648, + "zoom": 0.6513143480813044 + } + }, + "description": "Create a travel planning chatbot that uses specialized agents to craft personalized trip itineraries.", + "endpoint_name": null, + "icon": "Plane", + "id": "d6d33090-44c4-4a4b-8d06-c93fcf426446", + "gradient": "0", + "is_component": false, + "last_tested_version": "1.0.19.post2", + "name": "Travel Planning Agents", + "tags": ["agents", "openai"] +} diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Twitter Thread Generator.json b/src/backend/base/langflow/initial_setup/starter_projects/Twitter Thread Generator.json new file mode 100644 index 000000000000..74b5f19c42c4 --- /dev/null +++ b/src/backend/base/langflow/initial_setup/starter_projects/Twitter Thread Generator.json @@ -0,0 +1,1881 @@ +{ + "data": { + "edges": [ + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "OpenAIModel", + "id": "OpenAIModel-t1to9", + "name": "text_output", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "input_value", + "id": "ChatOutput-n8Wmg", + "inputTypes": ["Message"], + "type": "str" + } + }, + "id": "reactflow__edge-OpenAIModel-t1to9{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-t1to9œ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-n8Wmg{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-n8Wmgœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "selected": false, + "source": "OpenAIModel-t1to9", + "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-t1to9œ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}", + "target": "ChatOutput-n8Wmg", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-n8Wmgœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "TextInput", + "id": "TextInput-0yHbX", + "name": "text", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "CONTENT_GUIDELINES", + "id": "Prompt-nDs5I", + "inputTypes": ["Message", "Text"], + "type": "str" + } + }, + "id": "reactflow__edge-TextInput-0yHbX{œdataTypeœ:œTextInputœ,œidœ:œTextInput-0yHbXœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-Prompt-nDs5I{œfieldNameœ:œCONTENT_GUIDELINESœ,œidœ:œPrompt-nDs5Iœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", + "source": "TextInput-0yHbX", + "sourceHandle": "{œdataTypeœ: œTextInputœ, œidœ: œTextInput-0yHbXœ, œnameœ: œtextœ, œoutput_typesœ: [œMessageœ]}", + "target": "Prompt-nDs5I", + "targetHandle": "{œfieldNameœ: œCONTENT_GUIDELINESœ, œidœ: œPrompt-nDs5Iœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "TextInput", + "id": "TextInput-HVGJ1", + "name": "text", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": 
"OUTPUT_FORMAT", + "id": "Prompt-nDs5I", + "inputTypes": ["Message", "Text"], + "type": "str" + } + }, + "id": "reactflow__edge-TextInput-HVGJ1{œdataTypeœ:œTextInputœ,œidœ:œTextInput-HVGJ1œ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-Prompt-nDs5I{œfieldNameœ:œOUTPUT_FORMATœ,œidœ:œPrompt-nDs5Iœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", + "source": "TextInput-HVGJ1", + "sourceHandle": "{œdataTypeœ: œTextInputœ, œidœ: œTextInput-HVGJ1œ, œnameœ: œtextœ, œoutput_typesœ: [œMessageœ]}", + "target": "Prompt-nDs5I", + "targetHandle": "{œfieldNameœ: œOUTPUT_FORMATœ, œidœ: œPrompt-nDs5Iœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "TextInput", + "id": "TextInput-0s9hL", + "name": "text", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "OUTPUT_LANGUAGE", + "id": "Prompt-nDs5I", + "inputTypes": ["Message", "Text"], + "type": "str" + } + }, + "id": "reactflow__edge-TextInput-0s9hL{œdataTypeœ:œTextInputœ,œidœ:œTextInput-0s9hLœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-Prompt-nDs5I{œfieldNameœ:œOUTPUT_LANGUAGEœ,œidœ:œPrompt-nDs5Iœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", + "source": "TextInput-0s9hL", + "sourceHandle": "{œdataTypeœ: œTextInputœ, œidœ: œTextInput-0s9hLœ, œnameœ: œtextœ, œoutput_typesœ: [œMessageœ]}", + "target": "Prompt-nDs5I", + "targetHandle": "{œfieldNameœ: œOUTPUT_LANGUAGEœ, œidœ: œPrompt-nDs5Iœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "TextInput", + "id": "TextInput-ZUCJB", + "name": "text", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "PROFILE_DETAILS", + "id": "Prompt-nDs5I", + "inputTypes": ["Message", "Text"], + "type": "str" + } + }, + "id": "reactflow__edge-TextInput-ZUCJB{œdataTypeœ:œTextInputœ,œidœ:œTextInput-ZUCJBœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-Prompt-nDs5I{œfieldNameœ:œPROFILE_DETAILSœ,œidœ:œPrompt-nDs5Iœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", + "source": "TextInput-ZUCJB", + "sourceHandle": "{œdataTypeœ: œTextInputœ, œidœ: œTextInput-ZUCJBœ, œnameœ: œtextœ, œoutput_typesœ: [œMessageœ]}", + "target": "Prompt-nDs5I", + "targetHandle": "{œfieldNameœ: œPROFILE_DETAILSœ, œidœ: œPrompt-nDs5Iœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "TextInput", + "id": "TextInput-b4m4C", + "name": "text", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "PROFILE_TYPE", + "id": "Prompt-nDs5I", + "inputTypes": ["Message", "Text"], + "type": "str" + } + }, + "id": "reactflow__edge-TextInput-b4m4C{œdataTypeœ:œTextInputœ,œidœ:œTextInput-b4m4Cœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-Prompt-nDs5I{œfieldNameœ:œPROFILE_TYPEœ,œidœ:œPrompt-nDs5Iœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", + "source": "TextInput-b4m4C", + "sourceHandle": "{œdataTypeœ: œTextInputœ, œidœ: œTextInput-b4m4Cœ, œnameœ: œtextœ, œoutput_typesœ: [œMessageœ]}", + "target": "Prompt-nDs5I", + "targetHandle": "{œfieldNameœ: œPROFILE_TYPEœ, œidœ: œPrompt-nDs5Iœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "TextInput", + "id": "TextInput-hSMuv", + "name": "text", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "TONE_AND_STYLE", + "id": "Prompt-nDs5I", + "inputTypes": ["Message", "Text"], + "type": 
"str" + } + }, + "id": "reactflow__edge-TextInput-hSMuv{œdataTypeœ:œTextInputœ,œidœ:œTextInput-hSMuvœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-Prompt-nDs5I{œfieldNameœ:œTONE_AND_STYLEœ,œidœ:œPrompt-nDs5Iœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", + "source": "TextInput-hSMuv", + "sourceHandle": "{œdataTypeœ: œTextInputœ, œidœ: œTextInput-hSMuvœ, œnameœ: œtextœ, œoutput_typesœ: [œMessageœ]}", + "target": "Prompt-nDs5I", + "targetHandle": "{œfieldNameœ: œTONE_AND_STYLEœ, œidœ: œPrompt-nDs5Iœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "Prompt", + "id": "Prompt-nDs5I", + "name": "prompt", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "system_message", + "id": "OpenAIModel-t1to9", + "inputTypes": ["Message"], + "type": "str" + } + }, + "id": "reactflow__edge-Prompt-nDs5I{œdataTypeœ:œPromptœ,œidœ:œPrompt-nDs5Iœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-t1to9{œfieldNameœ:œsystem_messageœ,œidœ:œOpenAIModel-t1to9œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "Prompt-nDs5I", + "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-nDs5Iœ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", + "target": "OpenAIModel-t1to9", + "targetHandle": "{œfieldNameœ: œsystem_messageœ, œidœ: œOpenAIModel-t1to9œ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "ChatInput", + "id": "ChatInput-tE8u3", + "name": "message", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "input_value", + "id": "OpenAIModel-t1to9", + "inputTypes": ["Message"], + "type": "str" + } + }, + "id": "reactflow__edge-ChatInput-tE8u3{œdataTypeœ:œChatInputœ,œidœ:œChatInput-tE8u3œ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-t1to9{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-t1to9œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "ChatInput-tE8u3", + "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-tE8u3œ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}", + "target": "OpenAIModel-t1to9", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œOpenAIModel-t1to9œ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + } + ], + "nodes": [ + { + "data": { + "id": "ChatInput-tE8u3", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Get chat inputs from the Playground.", + "display_name": "Chat Input", + "documentation": "", + "edited": false, + "field_order": [ + "input_value", + "should_store_message", + "sender", + "sender_name", + "session_id", + "files", + "background_color", + "chat_icon", + "text_color" + ], + "frozen": false, + "icon": "MessagesSquare", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Message", + "method": "message_response", + "name": "message", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "background_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Background Color", + "dynamic": false, + "info": "The background color of the icon.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "background_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + 
"trace_as_metadata": true, + "type": "str", + "value": "" + }, + "chat_icon": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Icon", + "dynamic": false, + "info": "The icon of the message.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "chat_icon", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_USER, MESSAGE_SENDER_USER\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n _background_color = self.background_color\n _text_color = self.text_color\n _icon = self.chat_icon\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\"background_color\": _background_color, \"text_color\": _text_color, \"icon\": _icon},\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n" + }, + "files": { + "_input_type": "FileInput", + "advanced": true, + "display_name": "Files", + "dynamic": false, + "fileTypes": [ + "txt", + "md", + "mdx", + "csv", + "json", + "yaml", + "yml", + "xml", + "html", + "htm", + "pdf", + "docx", + "py", + "sh", + "sql", + "js", + "ts", + "tsx", + "jpg", + "jpeg", + "png", + "bmp", + "image" + ], + "file_path": "", + "info": "Files to be sent with the message.", + "list": true, + "name": "files", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "file", + "value": "" + }, + "input_value": { + "_input_type": "MultilineInput", + "advanced": false, + "display_name": "Text", + "dynamic": false, + "info": "Message to be passed as input.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "*Objective:* Create an engaging Twitter thread that narrates the innovative journey of our LangFlow project, highlighting how we created a specialized flow for generating dynamic prompts for other flows, culminating in a model specialized in writing tweets/threads. *Project Stages:* 1. *Development in LangFlow:* - Created a flow focused on generating dynamic prompts - System serves as foundation for optimizing prompt generation in other flows 2. *Template Creation:* - Developed specific templates for tweets/threads - Focus on engagement and message clarity 3. 
*Results:* - 60% reduction in content creation time - Greater message consistency - Better social media engagement - Fully automated process *Thread Objectives:* - Educate about LangFlow's capabilities in content creation - Demonstrate the development process step by step - Inspire other developers to explore LangFlow - Strengthen the developer community" + }, + "sender": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Sender Type", + "dynamic": false, + "info": "Type of sender.", + "name": "sender", + "options": ["Machine", "User"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "str", + "value": "User" + }, + "sender_name": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Sender Name", + "dynamic": false, + "info": "Name of the sender.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "sender_name", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "User" + }, + "session_id": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Session ID", + "dynamic": false, + "info": "The session ID of the chat. If empty, the current session ID parameter will be used.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "session_id", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "should_store_message": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Store Messages", + "dynamic": false, + "info": "Store the message in the history.", + "list": false, + "name": "should_store_message", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "text_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Text Color", + "dynamic": false, + "info": "The text color of the name", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "text_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + } + } + }, + "type": "ChatInput" + }, + "dragging": false, + "height": 234, + "id": "ChatInput-tE8u3", + "position": { + "x": 863.3241377184722, + "y": 1053.9324095084933 + }, + "positionAbsolute": { + "x": 863.3241377184722, + "y": 1053.9324095084933 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "id": "TextInput-0yHbX", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Get text inputs from the Playground.", + "display_name": "Content Guidelines", + "documentation": "", + "edited": false, + "field_order": ["input_value"], + "frozen": false, + "icon": "type", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Text", + "method": "text_response", + "name": "text", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "code": { + "advanced": true, + "dynamic": 
true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.io.text import TextComponent\nfrom langflow.io import MultilineInput, Output\nfrom langflow.schema.message import Message\n\n\nclass TextInputComponent(TextComponent):\n display_name = \"Text Input\"\n description = \"Get text inputs from the Playground.\"\n icon = \"type\"\n name = \"TextInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Text to be passed as input.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Text\", name=\"text\", method=\"text_response\"),\n ]\n\n def text_response(self) -> Message:\n return Message(\n text=self.input_value,\n )\n" + }, + "input_value": { + "_input_type": "MultilineInput", + "advanced": false, + "display_name": "Text", + "dynamic": false, + "info": "Text to be passed as input.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "- Thread must be 5-7 tweets long - Each tweet should be self-contained but flow naturally to the next - Include relevant technical details while keeping language accessible - Use emojis sparingly but effectively - Include a clear call-to-action in the final tweet - Highlight key benefits and innovative aspects - Maintain professional but engaging tone" + } + } + }, + "type": "TextInput" + }, + "dragging": false, + "height": 234, + "id": "TextInput-0yHbX", + "position": { + "x": 1300.291760633212, + "y": 417.7819626108867 + }, + "positionAbsolute": { + "x": 1300.291760633212, + "y": 417.7819626108867 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "description": "Generates text using OpenAI LLMs.", + "display_name": "OpenAI", + "id": "OpenAIModel-t1to9", + "node": { + "base_classes": ["LanguageModel", "Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Generates text using OpenAI LLMs.", + "display_name": "OpenAI", + "documentation": "", + "edited": false, + "field_order": [ + "input_value", + "system_message", + "stream", + "max_tokens", + "model_kwargs", + "json_mode", + "output_schema", + "model_name", + "openai_api_base", + "api_key", + "temperature", + "seed", + "output_parser" + ], + "frozen": false, + "icon": "OpenAI", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Text", + "method": "text_response", + "name": "text_output", + "required_inputs": [], + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + }, + { + "cache": true, + "display_name": "Language Model", + "method": "build_model", + "name": "model_output", + "required_inputs": [], + "selected": "LanguageModel", + "types": ["LanguageModel"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "api_key": { + "_input_type": "SecretStrInput", + "advanced": false, + "display_name": "OpenAI API Key", + "dynamic": false, + "info": "The OpenAI API Key to use for the OpenAI model.", + "input_types": ["Message"], + "load_from_db": true, + "name": "api_key", + 
"password": true, + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "OPENAI_API_KEY" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "import operator\nfrom functools import reduce\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langflow.inputs import BoolInput, DictInput, DropdownInput, FloatInput, IntInput, SecretStrInput, StrInput\nfrom langflow.inputs.inputs import HandleInput\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = [\n *LCModelComponent._base_inputs,\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(\n name=\"model_kwargs\",\n display_name=\"Model Kwargs\",\n advanced=True,\n info=\"Additional keyword arguments to pass to the model.\",\n ),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. \"\n \"You must pass the word JSON in the prompt. \"\n \"If left blank, JSON mode will be disabled. [DEPRECATED]\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. \"\n \"Defaults to https://api.openai.com/v1. 
\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n HandleInput(\n name=\"output_parser\",\n display_name=\"Output Parser\",\n info=\"The parser to use to parse the output of the model\",\n advanced=True,\n input_types=[\"OutputParser\"],\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n api_key = SecretStr(openai_api_key).get_secret_value() if openai_api_key else None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature if temperature is not None else 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\")\n else:\n output = output.bind(response_format={\"type\": \"json_object\"})\n\n return output\n\n def _get_exception_message(self, e: Exception):\n \"\"\"Get a message from an OpenAI exception.\n\n Args:\n e (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n try:\n from openai import BadRequestError\n except ImportError:\n return None\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\")\n if message:\n return message\n return None\n" + }, + "input_value": { + "_input_type": "MessageInput", + "advanced": false, + "display_name": "Input", + "dynamic": false, + "info": "", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "json_mode": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "JSON Mode", + "dynamic": false, + "info": "If True, it will output JSON regardless of passing a schema.", + "list": false, + "name": "json_mode", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + }, + "max_tokens": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Max Tokens", + "dynamic": false, + "info": "The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.", + "list": false, + "name": "max_tokens", + "placeholder": "", + "range_spec": { + "max": 128000, + "min": 0, + "step": 0.1, + "step_type": "float" + }, + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": "" + }, + "model_kwargs": { + "_input_type": "DictInput", + "advanced": true, + "display_name": "Model Kwargs", + "dynamic": false, + "info": "Additional keyword arguments to pass to the model.", + "list": false, + "name": "model_kwargs", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "model_name": { + "_input_type": "DropdownInput", + "advanced": false, + "combobox": false, + "display_name": "Model Name", + "dynamic": false, + "info": "", + "name": "model_name", + "options": [ + "gpt-4o-mini", + "gpt-4o", + "gpt-4-turbo", + "gpt-4-turbo-preview", + "gpt-4", + "gpt-3.5-turbo", + "gpt-3.5-turbo-0125" + ], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "gpt-4o-mini" + }, + "openai_api_base": { + "_input_type": "StrInput", + "advanced": true, + "display_name": "OpenAI API Base", + "dynamic": false, + "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.", + "list": false, + "load_from_db": false, + "name": "openai_api_base", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "output_parser": { + "_input_type": "HandleInput", + "advanced": true, + "display_name": "Output Parser", + "dynamic": false, + "info": "The parser to use to parse the output of the model", + "input_types": ["OutputParser"], + "list": false, + "name": "output_parser", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "output_schema": { + "_input_type": "DictInput", + "advanced": true, + "display_name": "Schema", + "dynamic": false, + "info": "The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled. [DEPRECATED]", + "list": true, + "name": "output_schema", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "seed": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Seed", + "dynamic": false, + "info": "The seed controls the reproducibility of the job.", + "list": false, + "name": "seed", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 1 + }, + "stream": { + "_input_type": "BoolInput", + "advanced": false, + "display_name": "Stream", + "dynamic": false, + "info": "Stream the response from the model. 
Streaming works only in Chat.", + "list": false, + "name": "stream", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + }, + "system_message": { + "_input_type": "MessageTextInput", + "advanced": false, + "display_name": "System Message", + "dynamic": false, + "info": "System message to pass to the model.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "system_message", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "temperature": { + "_input_type": "FloatInput", + "advanced": false, + "display_name": "Temperature", + "dynamic": false, + "info": "", + "list": false, + "name": "temperature", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "float", + "value": 0.1 + } + }, + "tool_mode": false + }, + "type": "OpenAIModel" + }, + "dragging": false, + "height": 630, + "id": "OpenAIModel-t1to9", + "position": { + "x": 2072.0401998064262, + "y": 828.7738120746212 + }, + "positionAbsolute": { + "x": 2072.0401998064262, + "y": 828.7738120746212 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "description": "Display a chat message in the Playground.", + "display_name": "Chat Output", + "id": "ChatOutput-n8Wmg", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Display a chat message in the Playground.", + "display_name": "Chat Output", + "documentation": "", + "edited": false, + "field_order": [ + "input_value", + "should_store_message", + "sender", + "sender_name", + "session_id", + "data_template", + "background_color", + "chat_icon", + "text_color" + ], + "frozen": false, + "icon": "MessagesSquare", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Message", + "method": "message_response", + "name": "message", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "background_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Background Color", + "dynamic": false, + "info": "The background color of the icon.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "background_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "chat_icon": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Icon", + "dynamic": false, + "info": "The icon of the message.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "chat_icon", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": 
false, + "type": "code", + "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageInput, MessageTextInput, Output\nfrom langflow.schema.message import Message\nfrom langflow.schema.properties import Source\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.\",\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(\n display_name=\"Message\",\n name=\"message\",\n method=\"message_response\",\n ),\n ]\n\n def _build_source(self, _id: str | None, display_name: str | None, source: str | None) -> Source:\n source_dict = {}\n if _id:\n source_dict[\"id\"] = _id\n if display_name:\n source_dict[\"display_name\"] = display_name\n if source:\n source_dict[\"source\"] = source\n return Source(**source_dict)\n\n def message_response(self) -> Message:\n _source, _icon, _display_name, _source_id = self.get_properties_from_source_component()\n _background_color = self.background_color\n _text_color = self.text_color\n if self.chat_icon:\n _icon = self.chat_icon\n message = self.input_value if isinstance(self.input_value, Message) else Message(text=self.input_value)\n message.sender = self.sender\n message.sender_name = self.sender_name\n message.session_id = self.session_id\n message.flow_id = self.graph.flow_id if hasattr(self, \"graph\") else None\n message.properties.source = self._build_source(_source_id, _display_name, _source)\n message.properties.icon = _icon\n message.properties.background_color = _background_color\n message.properties.text_color = _text_color\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n" + }, + "data_template": { + 
"_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Data Template", + "dynamic": false, + "info": "Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "data_template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "{text}" + }, + "input_value": { + "_input_type": "MessageInput", + "advanced": false, + "display_name": "Text", + "dynamic": false, + "info": "Message to be passed as output.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "sender": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Sender Type", + "dynamic": false, + "info": "Type of sender.", + "name": "sender", + "options": ["Machine", "User"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "Machine" + }, + "sender_name": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Sender Name", + "dynamic": false, + "info": "Name of the sender.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "sender_name", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "AI" + }, + "session_id": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Session ID", + "dynamic": false, + "info": "The session ID of the chat. 
If empty, the current session ID parameter will be used.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "session_id", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "should_store_message": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Store Messages", + "dynamic": false, + "info": "Store the message in the history.", + "list": false, + "name": "should_store_message", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "text_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Text Color", + "dynamic": false, + "info": "The text color of the name", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "text_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + } + }, + "tool_mode": false + }, + "type": "ChatOutput" + }, + "dragging": false, + "height": 234, + "id": "ChatOutput-n8Wmg", + "position": { + "x": 2470.223353127597, + "y": 1055.4039338762416 + }, + "positionAbsolute": { + "x": 2470.223353127597, + "y": 1055.4039338762416 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "id": "TextInput-HVGJ1", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Get text inputs from the Playground.", + "display_name": "Output Format", + "documentation": "", + "edited": false, + "field_order": ["input_value"], + "frozen": false, + "icon": "type", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Text", + "method": "text_response", + "name": "text", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.io.text import TextComponent\nfrom langflow.io import MultilineInput, Output\nfrom langflow.schema.message import Message\n\n\nclass TextInputComponent(TextComponent):\n display_name = \"Text Input\"\n description = \"Get text inputs from the Playground.\"\n icon = \"type\"\n name = \"TextInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Text to be passed as input.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Text\", name=\"text\", method=\"text_response\"),\n ]\n\n def text_response(self) -> Message:\n return Message(\n text=self.input_value,\n )\n" + }, + "input_value": { + "_input_type": "MultilineInput", + "advanced": false, + "display_name": "Text", + "dynamic": false, + "info": "Text to be passed as input.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + 
"trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "thread" + } + } + }, + "type": "TextInput" + }, + "dragging": false, + "height": 234, + "id": "TextInput-HVGJ1", + "position": { + "x": 1300.639277084099, + "y": 665.0274048594538 + }, + "positionAbsolute": { + "x": 1300.639277084099, + "y": 665.0274048594538 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "id": "TextInput-0s9hL", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Get text inputs from the Playground.", + "display_name": "Output Language", + "documentation": "", + "edited": false, + "field_order": ["input_value"], + "frozen": false, + "icon": "type", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Text", + "method": "text_response", + "name": "text", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.io.text import TextComponent\nfrom langflow.io import MultilineInput, Output\nfrom langflow.schema.message import Message\n\n\nclass TextInputComponent(TextComponent):\n display_name = \"Text Input\"\n description = \"Get text inputs from the Playground.\"\n icon = \"type\"\n name = \"TextInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Text to be passed as input.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Text\", name=\"text\", method=\"text_response\"),\n ]\n\n def text_response(self) -> Message:\n return Message(\n text=self.input_value,\n )\n" + }, + "input_value": { + "_input_type": "MultilineInput", + "advanced": false, + "display_name": "Text", + "dynamic": false, + "info": "Text to be passed as input.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "English" + } + } + }, + "type": "TextInput" + }, + "dragging": false, + "height": 234, + "id": "TextInput-0s9hL", + "position": { + "x": 1302.1321888373375, + "y": 910.3592488005739 + }, + "positionAbsolute": { + "x": 1302.1321888373375, + "y": 910.3592488005739 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "id": "TextInput-ZUCJB", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Get text inputs from the Playground.", + "display_name": "Profile Details", + "documentation": "", + "edited": false, + "field_order": ["input_value"], + "frozen": false, + "icon": "type", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Text", + "method": "text_response", + "name": "text", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + 
"code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.io.text import TextComponent\nfrom langflow.io import MultilineInput, Output\nfrom langflow.schema.message import Message\n\n\nclass TextInputComponent(TextComponent):\n display_name = \"Text Input\"\n description = \"Get text inputs from the Playground.\"\n icon = \"type\"\n name = \"TextInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Text to be passed as input.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Text\", name=\"text\", method=\"text_response\"),\n ]\n\n def text_response(self) -> Message:\n return Message(\n text=self.input_value,\n )\n" + }, + "input_value": { + "_input_type": "MultilineInput", + "advanced": false, + "display_name": "Text", + "dynamic": false, + "info": "Text to be passed as input.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "- Tech startup focused on AI/ML innovation - Active in open-source community - Experienced in building developer tools - Known for clear technical communication - Engaged audience of developers and AI enthusiasts" + } + } + }, + "type": "TextInput" + }, + "dragging": false, + "height": 234, + "id": "TextInput-ZUCJB", + "position": { + "x": 1302.0774628387737, + "y": 1167.3244357663511 + }, + "positionAbsolute": { + "x": 1302.0774628387737, + "y": 1167.3244357663511 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "id": "TextInput-hSMuv", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Get text inputs from the Playground.", + "display_name": "Tone And Style", + "documentation": "", + "edited": false, + "field_order": ["input_value"], + "frozen": false, + "icon": "type", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Text", + "method": "text_response", + "name": "text", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.io.text import TextComponent\nfrom langflow.io import MultilineInput, Output\nfrom langflow.schema.message import Message\n\n\nclass TextInputComponent(TextComponent):\n display_name = \"Text Input\"\n description = \"Get text inputs from the Playground.\"\n icon = \"type\"\n name = \"TextInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Text to be passed as input.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Text\", name=\"text\", method=\"text_response\"),\n ]\n\n def text_response(self) -> Message:\n return Message(\n 
text=self.input_value,\n )\n" + }, + "input_value": { + "_input_type": "MultilineInput", + "advanced": false, + "display_name": "Text", + "dynamic": false, + "info": "Text to be passed as input.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "- Professional yet approachable - Technical but accessible - Enthusiastic about innovation - Educational and informative - Collaborative and community-focused - Clear and concise - Solution-oriented" + } + } + }, + "type": "TextInput" + }, + "dragging": false, + "height": 234, + "id": "TextInput-hSMuv", + "position": { + "x": 1301.68182643676, + "y": 1699.978793221378 + }, + "positionAbsolute": { + "x": 1301.68182643676, + "y": 1699.978793221378 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "id": "TextInput-b4m4C", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Get text inputs from the Playground.", + "display_name": "Profile Type", + "documentation": "", + "edited": false, + "field_order": ["input_value"], + "frozen": false, + "icon": "type", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Text", + "method": "text_response", + "name": "text", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.io.text import TextComponent\nfrom langflow.io import MultilineInput, Output\nfrom langflow.schema.message import Message\n\n\nclass TextInputComponent(TextComponent):\n display_name = \"Text Input\"\n description = \"Get text inputs from the Playground.\"\n icon = \"type\"\n name = \"TextInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Text to be passed as input.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Text\", name=\"text\", method=\"text_response\"),\n ]\n\n def text_response(self) -> Message:\n return Message(\n text=self.input_value,\n )\n" + }, + "input_value": { + "_input_type": "MultilineInput", + "advanced": false, + "display_name": "Text", + "dynamic": false, + "info": "Text to be passed as input.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "Tech Company / AI Developer Platform" + } + } + }, + "type": "TextInput" + }, + "dragging": false, + "height": 234, + "id": "TextInput-b4m4C", + "position": { + "x": 1301.4778537945892, + "y": 1428.1749742780207 + }, + "positionAbsolute": { + "x": 1301.4778537945892, + "y": 1428.1749742780207 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "id": "note-oQQCJ", + "node": { + "description": "# Twitter Thread 
Generator\n\nWelcome to the Twitter Thread Generator! This flow helps you create compelling Twitter threads by transforming your structured inputs into engaging content.\n\n## Instructions\n\n1. Prepare Your Inputs\n - Fill in the \"Context\" with your main message or story\n - Define \"Content Guidelines\" for thread structure and style\n - Specify \"Profile Type\" and \"Profile Details\" to reflect your brand identity\n - Set \"Tone and Style\" to guide the communication approach\n - Choose \"Output Format\" (thread) and desired language\n\n2. Configure the Prompt\n - The flow uses a specialized prompt template to generate content\n - Ensure all input fields are connected to the prompt node\n\n3. Run the Generation\n - Execute the flow to process your inputs\n - The OpenAI model will create the thread based on your specifications\n\n4. Review and Refine\n - Examine the output in the Chat Output node\n - If needed, adjust your inputs and re-run for better results\n\n5. Finalize and Post\n - Once satisfied, copy the generated thread\n - Post to Twitter, maintaining the structure and flow\n\nRemember: Be specific in your context and guidelines for the best results! 🚀\n", + "display_name": "", + "documentation": "", + "template": { + "backgroundColor": "amber" + } + }, + "type": "note" + }, + "dragging": false, + "height": 800, + "id": "note-oQQCJ", + "position": { + "x": 675.0099418843004, + "y": 233.23451233469402 + }, + "positionAbsolute": { + "x": 675.0099418843004, + "y": 233.23451233469402 + }, + "resizing": false, + "selected": false, + "style": { + "height": 800, + "width": 600 + }, + "type": "noteNode", + "width": 600 + }, + { + "data": { + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt", + "id": "Prompt-nDs5I", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": { + "template": [ + "PROFILE_TYPE", + "PROFILE_DETAILS", + "CONTENT_GUIDELINES", + "TONE_AND_STYLE", + "OUTPUT_FORMAT", + "OUTPUT_LANGUAGE" + ] + }, + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt", + "documentation": "", + "edited": false, + "field_order": ["template"], + "frozen": false, + "icon": "prompts", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Prompt Message", + "method": "build_prompt", + "name": "prompt", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "CONTENT_GUIDELINES": { + "advanced": false, + "display_name": "CONTENT_GUIDELINES", + "dynamic": false, + "field_type": "str", + "fileTypes": [], + "file_path": "", + "info": "", + "input_types": ["Message", "Text"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "CONTENT_GUIDELINES", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "" + }, + "OUTPUT_FORMAT": { + "advanced": false, + "display_name": "OUTPUT_FORMAT", + "dynamic": false, + "field_type": "str", + "fileTypes": [], + "file_path": "", + "info": "", + "input_types": ["Message", "Text"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "OUTPUT_FORMAT", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "" + }, + "OUTPUT_LANGUAGE": { + "advanced": false, + "display_name": "OUTPUT_LANGUAGE", + "dynamic": false, + 
"field_type": "str", + "fileTypes": [], + "file_path": "", + "info": "", + "input_types": ["Message", "Text"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "OUTPUT_LANGUAGE", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "" + }, + "PROFILE_DETAILS": { + "advanced": false, + "display_name": "PROFILE_DETAILS", + "dynamic": false, + "field_type": "str", + "fileTypes": [], + "file_path": "", + "info": "", + "input_types": ["Message", "Text"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "PROFILE_DETAILS", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "" + }, + "PROFILE_TYPE": { + "advanced": false, + "display_name": "PROFILE_TYPE", + "dynamic": false, + "field_type": "str", + "fileTypes": [], + "file_path": "", + "info": "", + "input_types": ["Message", "Text"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "PROFILE_TYPE", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "" + }, + "TONE_AND_STYLE": { + "advanced": false, + "display_name": "TONE_AND_STYLE", + "dynamic": false, + "field_type": "str", + "fileTypes": [], + "file_path": "", + "info": "", + "input_types": ["Message", "Text"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "TONE_AND_STYLE", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "" + }, + "_type": "Component", + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.inputs.inputs import DefaultPromptField\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(self) -> Message:\n prompt = Message.from_template(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def _update_template(self, frontend_node: dict):\n prompt_template = frontend_node[\"template\"][\"template\"][\"value\"]\n custom_fields = frontend_node[\"custom_fields\"]\n frontend_node_template = frontend_node[\"template\"]\n _ = process_prompt_template(\n template=prompt_template,\n name=\"template\",\n custom_fields=custom_fields,\n frontend_node_template=frontend_node_template,\n )\n return frontend_node\n\n def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"This function is called after the code validation is done.\"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n # Kept it duplicated for backwards 
compatibility\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n\n def _get_fallback_input(self, **kwargs):\n return DefaultPromptField(**kwargs)\n" + }, + "template": { + "_input_type": "PromptInput", + "advanced": false, + "display_name": "Template", + "dynamic": false, + "info": "", + "list": false, + "load_from_db": false, + "name": "template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "type": "prompt", + "value": "\nIntroduce the task of generating tweets or tweet threads based on the provided inputs\n\nExplain each input variable:\n\n{{PROFILE_TYPE}}\n\n{{PROFILE_DETAILS}}\n\n{{CONTENT_GUIDELINES}}\n\n{{TONE_AND_STYLE}}\n\n{{CONTEXT}}\n\n{{OUTPUT_FORMAT}}\n\n{{OUTPUT_LANGUAGE}}\n\nProvide step-by-step instructions on how to analyze the inputs to determine if a single tweet or thread is appropriate\n\nGive guidance on generating tweet content that aligns with the profile, guidelines, tone, style, and context\n\nExplain how to format the output based on the {{OUTPUT_FORMAT}} value\n\nProvide tips for creating engaging, coherent tweet content\n\n\n\n\nYou are an AI tweet generator that can create standalone tweets or multi-tweet threads based on a variety of inputs about the desired content. Here are the key inputs you will use to generate the tweet(s):\n\n\n\n{PROFILE_TYPE}\n\n\n\n\n\n{PROFILE_DETAILS}\n\n\n\n\n\n{CONTENT_GUIDELINES}\n\n\n\n\n\n{TONE_AND_STYLE}\n\n\n\n\n\n{OUTPUT_FORMAT}\n\n\n\n\n\n\n{OUTPUT_LANGUAGE}\n\n\n\nTo generate the appropriate tweet(s), follow these steps:\n\n\n\nCarefully analyze the {{PROFILE_TYPE}}, {{PROFILE_DETAILS}}, {{CONTENT_GUIDELINES}}, {{TONE_AND_STYLE}}, and {{CONTEXT}} to determine the depth and breadth of content needed.\n\nIf the {{OUTPUT_FORMAT}} is \"single_tweet\", plan to convey the key information in a concise, standalone tweet.\n\nIf the {{OUTPUT_FORMAT}} is \"thread\" or if the content seems too complex for a single tweet, outline a series of connected tweets that flow together to cover the topic.\n\n\n\n\n\nBrainstorm tweet content that aligns with the {{PROFILE_TYPE}} and {{PROFILE_DETAILS}}, adheres to the {{CONTENT_GUIDELINES}}, matches the {{TONE_AND_STYLE}}, and incorporates the {{CONTEXT}}.\n\nFor a single tweet, craft the most engaging, informative message possible within the 280 character limit.\n\nFor a thread, break down the content into distinct yet connected tweet-sized chunks. Ensure each tweet flows logically into the next to maintain reader engagement. Use transitional phrases as needed to link tweets.\n\n\n\n\nFormat the output based on the {{OUTPUT_FORMAT}}:\n\nFor a single tweet, provide the content.\n\nFor a thread, include each tweet inside numbered markdown list.\n\n \nFocus on creating original, engaging content that provides value to the intended audience.\n\nOptimize the tweet(s) for the 280 character limit. 
Be concise yet impactful.\n\nMaintain a consistent voice that matches the {{TONE_AND_STYLE}} throughout the tweet(s).\n\nInclude calls-to-action or questions to drive engagement when appropriate.\n\nDouble check that the final output aligns with the {{PROFILE_DETAILS}} and {{CONTENT_GUIDELINES}}.\n\n\n\nNow create a Tweet or Twitter Thread for this context:\n\n" + } + }, + "tool_mode": false + }, + "type": "Prompt" + }, + "dragging": false, + "height": 779, + "id": "Prompt-nDs5I", + "position": { + "x": 1697.1682096049744, + "y": 675.4022940880462 + }, + "positionAbsolute": { + "x": 1697.1682096049744, + "y": 675.4022940880462 + }, + "selected": false, + "type": "genericNode", + "width": 320 + } + ], + "viewport": { + "x": -137.29857182316698, + "y": -73.67014598946389, + "zoom": 0.49710856297206957 + } + }, + "description": "Transform structured inputs into engaging Twitter threads with this prompt-based flow, maintaining brand voice and technical accuracy.", + "endpoint_name": null, + "icon": "TwitterLogoIcon", + "id": "e73336f0-7ac5-42a5-827c-4b060a0556c6", + "gradient": "4", + "is_component": false, + "last_tested_version": "1.0.19.post2", + "name": "Twitter Thread Generator", + "tags": ["chatbots", "content-generation"] +} diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Vector Store RAG.json b/src/backend/base/langflow/initial_setup/starter_projects/Vector Store RAG.json index c62b2492c7cb..95efad77bc6f 100644 --- a/src/backend/base/langflow/initial_setup/starter_projects/Vector Store RAG.json +++ b/src/backend/base/langflow/initial_setup/starter_projects/Vector Store RAG.json @@ -2,268 +2,233 @@ "data": { "edges": [ { + "animated": false, "className": "", "data": { "sourceHandle": { - "dataType": "ChatInput", - "id": "ChatInput-1QVCE", - "name": "message", - "output_types": [ - "Message" - ] + "dataType": "ParseData", + "id": "ParseData-pqaC7", + "name": "text", + "output_types": ["Message"] }, "targetHandle": { - "fieldName": "search_input", - "id": "AstraVectorStoreComponent-vXWPf", - "inputTypes": [ - "Message" - ], + "fieldName": "context", + "id": "Prompt-uqDcR", + "inputTypes": ["Message", "Text"], "type": "str" } }, - "id": "reactflow__edge-ChatInput-1QVCE{œdataTypeœ:œChatInputœ,œidœ:œChatInput-1QVCEœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-AstraVectorStoreComponent-vXWPf{œfieldNameœ:œsearch_inputœ,œidœ:œAstraVectorStoreComponent-vXWPfœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", - "source": "ChatInput-1QVCE", - "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-1QVCEœ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}", - "target": "AstraVectorStoreComponent-vXWPf", - "targetHandle": "{œfieldNameœ: œsearch_inputœ, œidœ: œAstraVectorStoreComponent-vXWPfœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-ParseData-pqaC7{œdataTypeœ:œParseDataœ,œidœ:œParseData-pqaC7œ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-Prompt-uqDcR{œfieldNameœ:œcontextœ,œidœ:œPrompt-uqDcRœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", + "source": "ParseData-pqaC7", + "sourceHandle": "{œdataTypeœ: œParseDataœ, œidœ: œParseData-pqaC7œ, œnameœ: œtextœ, œoutput_typesœ: [œMessageœ]}", + "target": "Prompt-uqDcR", + "targetHandle": "{œfieldNameœ: œcontextœ, œidœ: œPrompt-uqDcRœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" }, { + "animated": false, "className": "", "data": { "sourceHandle": { - "dataType": "ParseData", - "id": "ParseData-QVaZr", - "name": "text", - "output_types": [ - "Message" - ] + "dataType": "Prompt", + "id": "Prompt-uqDcR", + "name": 
"prompt", + "output_types": ["Message"] }, "targetHandle": { - "fieldName": "context", - "id": "Prompt-eV1SH", - "inputTypes": [ - "Message", - "Text" - ], + "fieldName": "input_value", + "id": "OpenAIModel-H0ANr", + "inputTypes": ["Message"], "type": "str" } }, - "id": "reactflow__edge-ParseData-QVaZr{œdataTypeœ:œParseDataœ,œidœ:œParseData-QVaZrœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-Prompt-eV1SH{œfieldNameœ:œcontextœ,œidœ:œPrompt-eV1SHœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", - "source": "ParseData-QVaZr", - "sourceHandle": "{œdataTypeœ: œParseDataœ, œidœ: œParseData-QVaZrœ, œnameœ: œtextœ, œoutput_typesœ: [œMessageœ]}", - "target": "Prompt-eV1SH", - "targetHandle": "{œfieldNameœ: œcontextœ, œidœ: œPrompt-eV1SHœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-Prompt-uqDcR{œdataTypeœ:œPromptœ,œidœ:œPrompt-uqDcRœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-H0ANr{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-H0ANrœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "Prompt-uqDcR", + "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-uqDcRœ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", + "target": "OpenAIModel-H0ANr", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œOpenAIModel-H0ANrœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" }, { + "animated": false, "className": "", "data": { "sourceHandle": { - "dataType": "ChatInput", - "id": "ChatInput-1QVCE", - "name": "message", - "output_types": [ - "Message" - ] + "dataType": "OpenAIModel", + "id": "OpenAIModel-H0ANr", + "name": "text_output", + "output_types": ["Message"] }, "targetHandle": { - "fieldName": "question", - "id": "Prompt-eV1SH", - "inputTypes": [ - "Message", - "Text" - ], + "fieldName": "input_value", + "id": "ChatOutput-a4EPO", + "inputTypes": ["Message"], "type": "str" } }, - "id": "reactflow__edge-ChatInput-1QVCE{œdataTypeœ:œChatInputœ,œidœ:œChatInput-1QVCEœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Prompt-eV1SH{œfieldNameœ:œquestionœ,œidœ:œPrompt-eV1SHœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", - "source": "ChatInput-1QVCE", - "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-1QVCEœ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}", - "target": "Prompt-eV1SH", - "targetHandle": "{œfieldNameœ: œquestionœ, œidœ: œPrompt-eV1SHœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-OpenAIModel-H0ANr{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-H0ANrœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-a4EPO{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-a4EPOœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "OpenAIModel-H0ANr", + "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-H0ANrœ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}", + "target": "ChatOutput-a4EPO", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-a4EPOœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" }, { + "animated": false, "className": "", "data": { "sourceHandle": { - "dataType": "File", - "id": "File-RKdDQ", - "name": "data", - "output_types": [ - "Data" - ] + "dataType": "AstraDB", + "id": "AstraDB-3buPx", + "name": "search_results", + "output_types": ["Data"] }, "targetHandle": { - "fieldName": "data_inputs", - "id": "SplitText-74sLS", - "inputTypes": [ - "Data" - ], + "fieldName": "data", + "id": "ParseData-pqaC7", + "inputTypes": ["Data"], "type": "other" } }, - "id": 
"reactflow__edge-File-RKdDQ{œdataTypeœ:œFileœ,œidœ:œFile-RKdDQœ,œnameœ:œdataœ,œoutput_typesœ:[œDataœ]}-SplitText-74sLS{œfieldNameœ:œdata_inputsœ,œidœ:œSplitText-74sLSœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}", - "source": "File-RKdDQ", - "sourceHandle": "{œdataTypeœ: œFileœ, œidœ: œFile-RKdDQœ, œnameœ: œdataœ, œoutput_typesœ: [œDataœ]}", - "target": "SplitText-74sLS", - "targetHandle": "{œfieldNameœ: œdata_inputsœ, œidœ: œSplitText-74sLSœ, œinputTypesœ: [œDataœ], œtypeœ: œotherœ}" + "id": "reactflow__edge-AstraDB-3buPx{œdataTypeœ:œAstraDBœ,œidœ:œAstraDB-3buPxœ,œnameœ:œsearch_resultsœ,œoutput_typesœ:[œDataœ]}-ParseData-pqaC7{œfieldNameœ:œdataœ,œidœ:œParseData-pqaC7œ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}", + "source": "AstraDB-3buPx", + "sourceHandle": "{œdataTypeœ: œAstraDBœ, œidœ: œAstraDB-3buPxœ, œnameœ: œsearch_resultsœ, œoutput_typesœ: [œDataœ]}", + "target": "ParseData-pqaC7", + "targetHandle": "{œfieldNameœ: œdataœ, œidœ: œParseData-pqaC7œ, œinputTypesœ: [œDataœ], œtypeœ: œotherœ}" }, { + "animated": false, "className": "", "data": { "sourceHandle": { - "dataType": "SplitText", - "id": "SplitText-74sLS", - "name": "chunks", - "output_types": [ - "Data" - ] + "dataType": "OpenAIEmbeddings", + "id": "OpenAIEmbeddings-CeoV9", + "name": "embeddings", + "output_types": ["Embeddings"] }, "targetHandle": { - "fieldName": "ingest_data", - "id": "AstraVectorStoreComponent-wvuVK", - "inputTypes": [ - "Data" - ], + "fieldName": "embedding", + "id": "AstraDB-3buPx", + "inputTypes": ["Embeddings"], "type": "other" } }, - "id": "reactflow__edge-SplitText-74sLS{œdataTypeœ:œSplitTextœ,œidœ:œSplitText-74sLSœ,œnameœ:œchunksœ,œoutput_typesœ:[œDataœ]}-AstraVectorStoreComponent-wvuVK{œfieldNameœ:œingest_dataœ,œidœ:œAstraVectorStoreComponent-wvuVKœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}", - "source": "SplitText-74sLS", - "sourceHandle": "{œdataTypeœ: œSplitTextœ, œidœ: œSplitText-74sLSœ, œnameœ: œchunksœ, œoutput_typesœ: [œDataœ]}", - "target": "AstraVectorStoreComponent-wvuVK", - "targetHandle": "{œfieldNameœ: œingest_dataœ, œidœ: œAstraVectorStoreComponent-wvuVKœ, œinputTypesœ: [œDataœ], œtypeœ: œotherœ}" + "id": "reactflow__edge-OpenAIEmbeddings-CeoV9{œdataTypeœ:œOpenAIEmbeddingsœ,œidœ:œOpenAIEmbeddings-CeoV9œ,œnameœ:œembeddingsœ,œoutput_typesœ:[œEmbeddingsœ]}-AstraDB-3buPx{œfieldNameœ:œembeddingœ,œidœ:œAstraDB-3buPxœ,œinputTypesœ:[œEmbeddingsœ],œtypeœ:œotherœ}", + "source": "OpenAIEmbeddings-CeoV9", + "sourceHandle": "{œdataTypeœ: œOpenAIEmbeddingsœ, œidœ: œOpenAIEmbeddings-CeoV9œ, œnameœ: œembeddingsœ, œoutput_typesœ: [œEmbeddingsœ]}", + "target": "AstraDB-3buPx", + "targetHandle": "{œfieldNameœ: œembeddingœ, œidœ: œAstraDB-3buPxœ, œinputTypesœ: [œEmbeddingsœ], œtypeœ: œotherœ}" }, { + "animated": false, "className": "", "data": { "sourceHandle": { - "dataType": "OpenAIEmbeddings", - "id": "OpenAIEmbeddings-rQV2h", - "name": "embeddings", - "output_types": [ - "Embeddings" - ] + "dataType": "ChatInput", + "id": "ChatInput-nd3Fq", + "name": "message", + "output_types": ["Message"] }, "targetHandle": { - "fieldName": "embedding", - "id": "AstraVectorStoreComponent-wvuVK", - "inputTypes": [ - "Embeddings", - "dict" - ], - "type": "other" + "fieldName": "search_input", + "id": "AstraDB-3buPx", + "inputTypes": ["Message"], + "type": "str" } }, - "id": 
"reactflow__edge-OpenAIEmbeddings-rQV2h{œdataTypeœ:œOpenAIEmbeddingsœ,œidœ:œOpenAIEmbeddings-rQV2hœ,œnameœ:œembeddingsœ,œoutput_typesœ:[œEmbeddingsœ]}-AstraVectorStoreComponent-wvuVK{œfieldNameœ:œembeddingœ,œidœ:œAstraVectorStoreComponent-wvuVKœ,œinputTypesœ:[œEmbeddingsœ,œdictœ],œtypeœ:œotherœ}", - "source": "OpenAIEmbeddings-rQV2h", - "sourceHandle": "{œdataTypeœ: œOpenAIEmbeddingsœ, œidœ: œOpenAIEmbeddings-rQV2hœ, œnameœ: œembeddingsœ, œoutput_typesœ: [œEmbeddingsœ]}", - "target": "AstraVectorStoreComponent-wvuVK", - "targetHandle": "{œfieldNameœ: œembeddingœ, œidœ: œAstraVectorStoreComponent-wvuVKœ, œinputTypesœ: [œEmbeddingsœ, œdictœ], œtypeœ: œotherœ}" + "id": "reactflow__edge-ChatInput-nd3Fq{œdataTypeœ:œChatInputœ,œidœ:œChatInput-nd3Fqœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-AstraDB-3buPx{œfieldNameœ:œsearch_inputœ,œidœ:œAstraDB-3buPxœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "ChatInput-nd3Fq", + "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-nd3Fqœ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}", + "target": "AstraDB-3buPx", + "targetHandle": "{œfieldNameœ: œsearch_inputœ, œidœ: œAstraDB-3buPxœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" }, { + "animated": false, "className": "", "data": { "sourceHandle": { - "dataType": "OpenAIEmbeddings", - "id": "OpenAIEmbeddings-EJT2O", - "name": "embeddings", - "output_types": [ - "Embeddings" - ] + "dataType": "ChatInput", + "id": "ChatInput-nd3Fq", + "name": "message", + "output_types": ["Message"] }, "targetHandle": { - "fieldName": "embedding", - "id": "AstraVectorStoreComponent-vXWPf", - "inputTypes": [ - "Embeddings", - "dict" - ], - "type": "other" + "fieldName": "question", + "id": "Prompt-uqDcR", + "inputTypes": ["Message", "Text"], + "type": "str" } }, - "id": "reactflow__edge-OpenAIEmbeddings-EJT2O{œdataTypeœ:œOpenAIEmbeddingsœ,œidœ:œOpenAIEmbeddings-EJT2Oœ,œnameœ:œembeddingsœ,œoutput_typesœ:[œEmbeddingsœ]}-AstraVectorStoreComponent-vXWPf{œfieldNameœ:œembeddingœ,œidœ:œAstraVectorStoreComponent-vXWPfœ,œinputTypesœ:[œEmbeddingsœ,œdictœ],œtypeœ:œotherœ}", - "source": "OpenAIEmbeddings-EJT2O", - "sourceHandle": "{œdataTypeœ: œOpenAIEmbeddingsœ, œidœ: œOpenAIEmbeddings-EJT2Oœ, œnameœ: œembeddingsœ, œoutput_typesœ: [œEmbeddingsœ]}", - "target": "AstraVectorStoreComponent-vXWPf", - "targetHandle": "{œfieldNameœ: œembeddingœ, œidœ: œAstraVectorStoreComponent-vXWPfœ, œinputTypesœ: [œEmbeddingsœ, œdictœ], œtypeœ: œotherœ}" + "id": "reactflow__edge-ChatInput-nd3Fq{œdataTypeœ:œChatInputœ,œidœ:œChatInput-nd3Fqœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Prompt-uqDcR{œfieldNameœ:œquestionœ,œidœ:œPrompt-uqDcRœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", + "source": "ChatInput-nd3Fq", + "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-nd3Fqœ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}", + "target": "Prompt-uqDcR", + "targetHandle": "{œfieldNameœ: œquestionœ, œidœ: œPrompt-uqDcRœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" }, { + "animated": false, "className": "", "data": { "sourceHandle": { - "dataType": "Prompt", - "id": "Prompt-eV1SH", - "name": "prompt", - "output_types": [ - "Message" - ] + "dataType": "SplitText", + "id": "SplitText-QakmY", + "name": "chunks", + "output_types": ["Data"] }, "targetHandle": { - "fieldName": "input_value", - "id": "OpenAIModel-DUuku", - "inputTypes": [ - "Message" - ], - "type": "str" + "fieldName": "ingest_data", + "id": "AstraDB-laybz", + "inputTypes": ["Data"], + "type": "other" } }, - "id": 
"reactflow__edge-Prompt-eV1SH{œdataTypeœ:œPromptœ,œidœ:œPrompt-eV1SHœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-DUuku{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-DUukuœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", - "source": "Prompt-eV1SH", - "sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-eV1SHœ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", - "target": "OpenAIModel-DUuku", - "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œOpenAIModel-DUukuœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-SplitText-QakmY{œdataTypeœ:œSplitTextœ,œidœ:œSplitText-QakmYœ,œnameœ:œchunksœ,œoutput_typesœ:[œDataœ]}-AstraDB-laybz{œfieldNameœ:œingest_dataœ,œidœ:œAstraDB-laybzœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}", + "source": "SplitText-QakmY", + "sourceHandle": "{œdataTypeœ: œSplitTextœ, œidœ: œSplitText-QakmYœ, œnameœ: œchunksœ, œoutput_typesœ: [œDataœ]}", + "target": "AstraDB-laybz", + "targetHandle": "{œfieldNameœ: œingest_dataœ, œidœ: œAstraDB-laybzœ, œinputTypesœ: [œDataœ], œtypeœ: œotherœ}" }, { + "animated": false, "className": "", "data": { "sourceHandle": { - "dataType": "OpenAIModel", - "id": "OpenAIModel-DUuku", - "name": "text_output", - "output_types": [ - "Message" - ] + "dataType": "OpenAIEmbeddings", + "id": "OpenAIEmbeddings-ANgku", + "name": "embeddings", + "output_types": ["Embeddings"] }, "targetHandle": { - "fieldName": "input_value", - "id": "ChatOutput-OrmMa", - "inputTypes": [ - "Message" - ], - "type": "str" + "fieldName": "embedding", + "id": "AstraDB-laybz", + "inputTypes": ["Embeddings"], + "type": "other" } }, - "id": "reactflow__edge-OpenAIModel-DUuku{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-DUukuœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-OrmMa{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-OrmMaœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", - "source": "OpenAIModel-DUuku", - "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-DUukuœ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}", - "target": "ChatOutput-OrmMa", - "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-OrmMaœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + "id": "reactflow__edge-OpenAIEmbeddings-ANgku{œdataTypeœ:œOpenAIEmbeddingsœ,œidœ:œOpenAIEmbeddings-ANgkuœ,œnameœ:œembeddingsœ,œoutput_typesœ:[œEmbeddingsœ]}-AstraDB-laybz{œfieldNameœ:œembeddingœ,œidœ:œAstraDB-laybzœ,œinputTypesœ:[œEmbeddingsœ],œtypeœ:œotherœ}", + "source": "OpenAIEmbeddings-ANgku", + "sourceHandle": "{œdataTypeœ: œOpenAIEmbeddingsœ, œidœ: œOpenAIEmbeddings-ANgkuœ, œnameœ: œembeddingsœ, œoutput_typesœ: [œEmbeddingsœ]}", + "target": "AstraDB-laybz", + "targetHandle": "{œfieldNameœ: œembeddingœ, œidœ: œAstraDB-laybzœ, œinputTypesœ: [œEmbeddingsœ], œtypeœ: œotherœ}" }, { "className": "", "data": { "sourceHandle": { - "dataType": "AstraVectorStoreComponent", - "id": "AstraVectorStoreComponent-vXWPf", - "name": "search_results", - "output_types": [ - "Data" - ] + "dataType": "File", + "id": "File-FJIuH", + "name": "data", + "output_types": ["Data"] }, "targetHandle": { - "fieldName": "data", - "id": "ParseData-QVaZr", - "inputTypes": [ - "Data" - ], + "fieldName": "data_inputs", + "id": "SplitText-QakmY", + "inputTypes": ["Data"], "type": "other" } }, - "id": "reactflow__edge-AstraVectorStoreComponent-vXWPf{œdataTypeœ:œAstraVectorStoreComponentœ,œidœ:œAstraVectorStoreComponent-vXWPfœ,œnameœ:œsearch_resultsœ,œoutput_typesœ:[œDataœ]}-ParseData-QVaZr{œfieldNameœ:œdataœ,œidœ:œParseData-QVaZrœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}", - "source": 
"AstraVectorStoreComponent-vXWPf", - "sourceHandle": "{œdataTypeœ: œAstraVectorStoreComponentœ, œidœ: œAstraVectorStoreComponent-vXWPfœ, œnameœ: œsearch_resultsœ, œoutput_typesœ: [œDataœ]}", - "target": "ParseData-QVaZr", - "targetHandle": "{œfieldNameœ: œdataœ, œidœ: œParseData-QVaZrœ, œinputTypesœ: [œDataœ], œtypeœ: œotherœ}" + "id": "reactflow__edge-File-FJIuH{œdataTypeœ:œFileœ,œidœ:œFile-FJIuHœ,œnameœ:œdataœ,œoutput_typesœ:[œDataœ]}-SplitText-QakmY{œfieldNameœ:œdata_inputsœ,œidœ:œSplitText-QakmYœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}", + "source": "File-FJIuH", + "sourceHandle": "{œdataTypeœ: œFileœ, œidœ: œFile-FJIuHœ, œnameœ: œdataœ, œoutput_typesœ: [œDataœ]}", + "target": "SplitText-QakmY", + "targetHandle": "{œfieldNameœ: œdata_inputsœ, œidœ: œSplitText-QakmYœ, œinputTypesœ: [œDataœ], œtypeœ: œotherœ}" } ], "nodes": [ @@ -271,11 +236,9 @@ "data": { "description": "Get chat inputs from the Playground.", "display_name": "Chat Input", - "id": "ChatInput-1QVCE", + "id": "ChatInput-nd3Fq", "node": { - "base_classes": [ - "Message" - ], + "base_classes": ["Message"], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -292,7 +255,10 @@ "files" ], "frozen": false, - "icon": "ChatInput", + "icon": "MessagesSquare", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, "output_types": [], "outputs": [ { @@ -301,15 +267,51 @@ "method": "message_response", "name": "message", "selected": "Message", - "types": [ - "Message" - ], + "types": ["Message"], "value": "__UNDEFINED__" } ], "pinned": false, "template": { "_type": "Component", + "background_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Background Color", + "dynamic": false, + "info": "The background color of the icon.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "background_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "chat_icon": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Icon", + "dynamic": false, + "info": "The icon of the message.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "chat_icon", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, "code": { "advanced": true, "dynamic": true, @@ -326,7 +328,7 @@ "show": true, "title_case": false, "type": "code", - "value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_USER, MESSAGE_SENDER_NAME_USER\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n 
display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n )\n\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n" + "value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_USER, MESSAGE_SENDER_USER\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n _background_color = self.background_color\n _text_color = self.text_color\n _icon = self.chat_icon\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\"background_color\": _background_color, \"text_color\": _text_color, \"icon\": _icon},\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n" }, "files": { "advanced": true, @@ -374,9 +376,7 @@ "display_name": "Text", "dynamic": false, "info": "Message to be passed as input.", - "input_types": [ - "Message" - ], + "input_types": ["Message"], "list": false, "load_from_db": false, "multiline": true, @@ -388,7 +388,7 @@ "trace_as_input": true, "trace_as_metadata": true, "type": "str", - "value": "" + "value": "What is the document about?" }, "sender": { "advanced": true, @@ -396,10 +396,7 @@ "dynamic": false, "info": "Type of sender.", "name": "sender", - "options": [ - "Machine", - "User" - ], + "options": ["Machine", "User"], "placeholder": "", "required": false, "show": true, @@ -413,9 +410,7 @@ "display_name": "Sender Name", "dynamic": false, "info": "Name of the sender.", - "input_types": [ - "Message" - ], + "input_types": ["Message"], "list": false, "load_from_db": false, "name": "sender_name", @@ -433,9 +428,7 @@ "display_name": "Session ID", "dynamic": false, "info": "The session ID of the chat. 
If empty, the current session ID parameter will be used.", - "input_types": [ - "Message" - ], + "input_types": ["Message"], "list": false, "load_from_db": false, "name": "session_id", @@ -462,186 +455,212 @@ "trace_as_metadata": true, "type": "bool", "value": true + }, + "text_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Text Color", + "dynamic": false, + "info": "The text color of the name", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "text_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" } } }, "type": "ChatInput" }, "dragging": false, - "height": 308, - "id": "ChatInput-1QVCE", + "height": 234, + "id": "ChatInput-nd3Fq", "position": { - "x": 642.3545710150049, - "y": 220.22556606238678 + "x": 743.9745420290319, + "y": 463.6977510207854 }, "positionAbsolute": { - "x": 642.3545710150049, - "y": 220.22556606238678 + "x": 743.9745420290319, + "y": 463.6977510207854 }, "selected": false, "type": "genericNode", - "width": 384 + "width": 320 }, { "data": { - "description": "Implementation of Vector Store using Astra DB with search capabilities", - "display_name": "Astra DB", - "edited": false, - "id": "AstraVectorStoreComponent-vXWPf", + "description": "Convert Data into plain text following a specified template.", + "display_name": "Parse Data", + "id": "ParseData-pqaC7", "node": { - "base_classes": [ - "Data", - "Retriever" - ], + "base_classes": ["Message"], "beta": false, "conditional_paths": [], "custom_fields": {}, - "description": "Implementation of Vector Store using Astra DB with search capabilities", - "display_name": "Astra DB", - "documentation": "https://python.langchain.com/docs/integrations/vectorstores/astradb", + "description": "Convert Data into plain text following a specified template.", + "display_name": "Parse Data", + "documentation": "", "edited": false, - "field_order": [ - "collection_name", - "token", - "api_endpoint", - "search_input", - "ingest_data", - "namespace", - "metric", - "batch_size", - "bulk_insert_batch_concurrency", - "bulk_insert_overwrite_concurrency", - "bulk_delete_concurrency", - "setup_mode", - "pre_delete_collection", - "metadata_indexing_include", - "embedding", - "metadata_indexing_exclude", - "collection_indexing_policy", - "number_of_results", - "search_type", - "search_score_threshold", - "search_filter" - ], + "field_order": ["data", "template", "sep"], "frozen": false, - "icon": "AstraDB", + "icon": "braces", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, "output_types": [], "outputs": [ { "cache": true, - "display_name": "Retriever", - "method": "build_base_retriever", - "name": "base_retriever", - "selected": "Retriever", - "types": [ - "Retriever" - ], - "value": "__UNDEFINED__" - }, - { - "cache": true, - "display_name": "Search Results", - "method": "search_documents", - "name": "search_results", - "selected": "Data", - "types": [ - "Data" - ], - "value": "__UNDEFINED__" - }, - { - "cache": true, - "display_name": "Vector Store", - "method": "cast_vector_store", - "name": "vector_store", - "selected": "VectorStore", - "types": [ - "VectorStore" - ], + "display_name": "Text", + "method": "parse_data", + "name": "text", + "selected": "Message", + "types": ["Message"], "value": "__UNDEFINED__" } ], "pinned": false, "template": { "_type": "Component", - "api_endpoint": { - "advanced": false, - "display_name": "API 
Endpoint", - "dynamic": false, - "info": "API endpoint URL for the Astra DB service.", - "input_types": [ - "Message" - ], - "load_from_db": true, - "name": "api_endpoint", - "password": true, - "placeholder": "", - "required": true, - "show": true, - "title_case": false, - "type": "str", - "value": "" - }, - "batch_size": { + "code": { "advanced": true, - "display_name": "Batch Size", - "dynamic": false, - "info": "Optional number of data to process in a single batch.", + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", "list": false, - "name": "batch_size", + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, "placeholder": "", - "required": false, + "required": true, "show": true, "title_case": false, - "trace_as_metadata": true, - "type": "int", - "value": "" + "type": "code", + "value": "from langflow.custom import Component\nfrom langflow.helpers.data import data_to_text\nfrom langflow.io import DataInput, MultilineInput, Output, StrInput\nfrom langflow.schema.message import Message\n\n\nclass ParseDataComponent(Component):\n display_name = \"Parse Data\"\n description = \"Convert Data into plain text following a specified template.\"\n icon = \"braces\"\n name = \"ParseData\"\n\n inputs = [\n DataInput(name=\"data\", display_name=\"Data\", info=\"The data to convert to text.\"),\n MultilineInput(\n name=\"template\",\n display_name=\"Template\",\n info=\"The template to use for formatting the data. \"\n \"It can contain the keys {text}, {data} or any other key in the Data.\",\n value=\"{text}\",\n ),\n StrInput(name=\"sep\", display_name=\"Separator\", advanced=True, value=\"\\n\"),\n ]\n\n outputs = [\n Output(display_name=\"Text\", name=\"text\", method=\"parse_data\"),\n ]\n\n def parse_data(self) -> Message:\n data = self.data if isinstance(self.data, list) else [self.data]\n template = self.template\n\n result_string = data_to_text(template, data, sep=self.sep)\n self.status = result_string\n return Message(text=result_string)\n" }, - "bulk_delete_concurrency": { - "advanced": true, - "display_name": "Bulk Delete Concurrency", + "data": { + "advanced": false, + "display_name": "Data", "dynamic": false, - "info": "Optional concurrency level for bulk delete operations.", + "info": "The data to convert to text.", + "input_types": ["Data"], "list": false, - "name": "bulk_delete_concurrency", + "name": "data", "placeholder": "", "required": false, "show": true, "title_case": false, + "trace_as_input": true, "trace_as_metadata": true, - "type": "int", + "type": "other", "value": "" }, - "bulk_insert_batch_concurrency": { + "sep": { "advanced": true, - "display_name": "Bulk Insert Batch Concurrency", + "display_name": "Separator", "dynamic": false, - "info": "Optional concurrency level for bulk insert operations.", + "info": "", "list": false, - "name": "bulk_insert_batch_concurrency", + "load_from_db": false, + "name": "sep", "placeholder": "", "required": false, "show": true, "title_case": false, "trace_as_metadata": true, - "type": "int", - "value": "" + "type": "str", + "value": "\n" }, - "bulk_insert_overwrite_concurrency": { - "advanced": true, - "display_name": "Bulk Insert Overwrite Concurrency", + "template": { + "advanced": false, + "display_name": "Template", "dynamic": false, - "info": "Optional concurrency level for bulk insert operations that overwrite existing data.", + "info": "The template to use for formatting the data. 
It can contain the keys {text}, {data} or any other key in the Data.", + "input_types": ["Message"], "list": false, - "name": "bulk_insert_overwrite_concurrency", + "load_from_db": false, + "multiline": true, + "name": "template", "placeholder": "", "required": false, "show": true, "title_case": false, + "trace_as_input": true, "trace_as_metadata": true, - "type": "int", - "value": "" - }, + "type": "str", + "value": "{text}" + } + } + }, + "type": "ParseData" + }, + "dragging": false, + "height": 302, + "id": "ParseData-pqaC7", + "position": { + "x": 1606.0595305373527, + "y": 751.4473696960695 + }, + "positionAbsolute": { + "x": 1606.0595305373527, + "y": 751.4473696960695 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt", + "id": "Prompt-uqDcR", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": { + "template": ["context", "question"] + }, + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt", + "documentation": "", + "edited": false, + "error": null, + "field_order": ["template"], + "frozen": false, + "full_path": null, + "icon": "prompts", + "is_composition": null, + "is_input": null, + "is_output": null, + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "name": "", + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Prompt Message", + "method": "build_prompt", + "name": "prompt", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", "code": { "advanced": true, "dynamic": true, @@ -658,202 +677,427 @@ "show": true, "title_case": false, "type": "code", - "value": "from loguru import logger\n\nfrom langflow.base.vectorstores.model import LCVectorStoreComponent, check_cached_vector_store\nfrom langflow.helpers import docs_to_data\nfrom langflow.inputs import DictInput, FloatInput\nfrom langflow.io import (\n BoolInput,\n DataInput,\n DropdownInput,\n HandleInput,\n IntInput,\n MultilineInput,\n SecretStrInput,\n StrInput,\n)\nfrom langflow.schema import Data\n\n\nclass AstraVectorStoreComponent(LCVectorStoreComponent):\n display_name: str = \"Astra DB\"\n description: str = \"Implementation of Vector Store using Astra DB with search capabilities\"\n documentation: str = \"https://python.langchain.com/docs/integrations/vectorstores/astradb\"\n name = \"AstraDB\"\n icon: str = \"AstraDB\"\n\n inputs = [\n StrInput(\n name=\"collection_name\",\n display_name=\"Collection Name\",\n info=\"The name of the collection within Astra DB where the vectors will be stored.\",\n required=True,\n ),\n SecretStrInput(\n name=\"token\",\n display_name=\"Astra DB Application Token\",\n info=\"Authentication token for accessing Astra DB.\",\n value=\"ASTRA_DB_APPLICATION_TOKEN\",\n required=True,\n ),\n SecretStrInput(\n name=\"api_endpoint\",\n display_name=\"API Endpoint\",\n info=\"API endpoint URL for the Astra DB service.\",\n value=\"ASTRA_DB_API_ENDPOINT\",\n required=True,\n ),\n MultilineInput(\n name=\"search_input\",\n display_name=\"Search Input\",\n ),\n DataInput(\n name=\"ingest_data\",\n display_name=\"Ingest Data\",\n is_list=True,\n ),\n StrInput(\n name=\"namespace\",\n display_name=\"Namespace\",\n info=\"Optional namespace within Astra DB to use for the collection.\",\n advanced=True,\n ),\n DropdownInput(\n name=\"metric\",\n 
display_name=\"Metric\",\n info=\"Optional distance metric for vector comparisons in the vector store.\",\n options=[\"cosine\", \"dot_product\", \"euclidean\"],\n advanced=True,\n ),\n IntInput(\n name=\"batch_size\",\n display_name=\"Batch Size\",\n info=\"Optional number of data to process in a single batch.\",\n advanced=True,\n ),\n IntInput(\n name=\"bulk_insert_batch_concurrency\",\n display_name=\"Bulk Insert Batch Concurrency\",\n info=\"Optional concurrency level for bulk insert operations.\",\n advanced=True,\n ),\n IntInput(\n name=\"bulk_insert_overwrite_concurrency\",\n display_name=\"Bulk Insert Overwrite Concurrency\",\n info=\"Optional concurrency level for bulk insert operations that overwrite existing data.\",\n advanced=True,\n ),\n IntInput(\n name=\"bulk_delete_concurrency\",\n display_name=\"Bulk Delete Concurrency\",\n info=\"Optional concurrency level for bulk delete operations.\",\n advanced=True,\n ),\n DropdownInput(\n name=\"setup_mode\",\n display_name=\"Setup Mode\",\n info=\"Configuration mode for setting up the vector store, with options like 'Sync', 'Async', or 'Off'.\",\n options=[\"Sync\", \"Async\", \"Off\"],\n advanced=True,\n value=\"Sync\",\n ),\n BoolInput(\n name=\"pre_delete_collection\",\n display_name=\"Pre Delete Collection\",\n info=\"Boolean flag to determine whether to delete the collection before creating a new one.\",\n advanced=True,\n ),\n StrInput(\n name=\"metadata_indexing_include\",\n display_name=\"Metadata Indexing Include\",\n info=\"Optional list of metadata fields to include in the indexing.\",\n advanced=True,\n ),\n HandleInput(\n name=\"embedding\",\n display_name=\"Embedding or Astra Vectorize\",\n input_types=[\"Embeddings\", \"dict\"],\n info=\"Allows either an embedding model or an Astra Vectorize configuration.\", # TODO: This should be optional, but need to refactor langchain-astradb first.\n ),\n StrInput(\n name=\"metadata_indexing_exclude\",\n display_name=\"Metadata Indexing Exclude\",\n info=\"Optional list of metadata fields to exclude from the indexing.\",\n advanced=True,\n ),\n StrInput(\n name=\"collection_indexing_policy\",\n display_name=\"Collection Indexing Policy\",\n info=\"Optional dictionary defining the indexing policy for the collection.\",\n advanced=True,\n ),\n IntInput(\n name=\"number_of_results\",\n display_name=\"Number of Results\",\n info=\"Number of results to return.\",\n advanced=True,\n value=4,\n ),\n DropdownInput(\n name=\"search_type\",\n display_name=\"Search Type\",\n info=\"Search type to use\",\n options=[\"Similarity\", \"Similarity with score threshold\", \"MMR (Max Marginal Relevance)\"],\n value=\"Similarity\",\n advanced=True,\n ),\n FloatInput(\n name=\"search_score_threshold\",\n display_name=\"Search Score Threshold\",\n info=\"Minimum similarity score threshold for search results. (when using 'Similarity with score threshold')\",\n value=0,\n advanced=True,\n ),\n DictInput(\n name=\"search_filter\",\n display_name=\"Search Metadata Filter\",\n info=\"Optional dictionary of filters to apply to the search query.\",\n advanced=True,\n is_list=True,\n ),\n ]\n\n @check_cached_vector_store\n def build_vector_store(self):\n try:\n from langchain_astradb import AstraDBVectorStore\n from langchain_astradb.utils.astradb import SetupMode\n except ImportError:\n raise ImportError(\n \"Could not import langchain Astra DB integration package. 
\"\n \"Please install it with `pip install langchain-astradb`.\"\n )\n\n try:\n if not self.setup_mode:\n self.setup_mode = self._inputs[\"setup_mode\"].options[0]\n\n setup_mode_value = SetupMode[self.setup_mode.upper()]\n except KeyError:\n raise ValueError(f\"Invalid setup mode: {self.setup_mode}\")\n\n if not isinstance(self.embedding, dict):\n embedding_dict = {\"embedding\": self.embedding}\n else:\n from astrapy.info import CollectionVectorServiceOptions\n\n dict_options = self.embedding.get(\"collection_vector_service_options\", {})\n dict_options[\"authentication\"] = {\n k: v for k, v in dict_options.get(\"authentication\", {}).items() if k and v\n }\n dict_options[\"parameters\"] = {k: v for k, v in dict_options.get(\"parameters\", {}).items() if k and v}\n embedding_dict = {\n \"collection_vector_service_options\": CollectionVectorServiceOptions.from_dict(dict_options)\n }\n collection_embedding_api_key = self.embedding.get(\"collection_embedding_api_key\")\n if collection_embedding_api_key:\n embedding_dict[\"collection_embedding_api_key\"] = collection_embedding_api_key\n\n vector_store_kwargs = {\n **embedding_dict,\n \"collection_name\": self.collection_name,\n \"token\": self.token,\n \"api_endpoint\": self.api_endpoint,\n \"namespace\": self.namespace or None,\n \"metric\": self.metric or None,\n \"batch_size\": self.batch_size or None,\n \"bulk_insert_batch_concurrency\": self.bulk_insert_batch_concurrency or None,\n \"bulk_insert_overwrite_concurrency\": self.bulk_insert_overwrite_concurrency or None,\n \"bulk_delete_concurrency\": self.bulk_delete_concurrency or None,\n \"setup_mode\": setup_mode_value,\n \"pre_delete_collection\": self.pre_delete_collection or False,\n }\n\n if self.metadata_indexing_include:\n vector_store_kwargs[\"metadata_indexing_include\"] = self.metadata_indexing_include\n elif self.metadata_indexing_exclude:\n vector_store_kwargs[\"metadata_indexing_exclude\"] = self.metadata_indexing_exclude\n elif self.collection_indexing_policy:\n vector_store_kwargs[\"collection_indexing_policy\"] = self.collection_indexing_policy\n\n try:\n vector_store = AstraDBVectorStore(**vector_store_kwargs)\n except Exception as e:\n raise ValueError(f\"Error initializing AstraDBVectorStore: {str(e)}\") from e\n\n self._add_documents_to_vector_store(vector_store)\n return vector_store\n\n def _add_documents_to_vector_store(self, vector_store):\n documents = []\n for _input in self.ingest_data or []:\n if isinstance(_input, Data):\n documents.append(_input.to_lc_document())\n else:\n raise ValueError(\"Vector Store Inputs must be Data objects.\")\n\n if documents:\n logger.debug(f\"Adding {len(documents)} documents to the Vector Store.\")\n try:\n vector_store.add_documents(documents)\n except Exception as e:\n raise ValueError(f\"Error adding documents to AstraDBVectorStore: {str(e)}\") from e\n else:\n logger.debug(\"No documents to add to the Vector Store.\")\n\n def _map_search_type(self):\n if self.search_type == \"Similarity with score threshold\":\n return \"similarity_score_threshold\"\n elif self.search_type == \"MMR (Max Marginal Relevance)\":\n return \"mmr\"\n else:\n return \"similarity\"\n\n def _build_search_args(self):\n args = {\n \"k\": self.number_of_results,\n \"score_threshold\": self.search_score_threshold,\n }\n\n if self.search_filter:\n clean_filter = {k: v for k, v in self.search_filter.items() if k and v}\n if len(clean_filter) > 0:\n args[\"filter\"] = clean_filter\n return args\n\n def search_documents(self) -> list[Data]:\n vector_store = 
self.build_vector_store()\n\n logger.debug(f\"Search input: {self.search_input}\")\n logger.debug(f\"Search type: {self.search_type}\")\n logger.debug(f\"Number of results: {self.number_of_results}\")\n\n if self.search_input and isinstance(self.search_input, str) and self.search_input.strip():\n try:\n search_type = self._map_search_type()\n search_args = self._build_search_args()\n\n docs = vector_store.search(query=self.search_input, search_type=search_type, **search_args)\n except Exception as e:\n raise ValueError(f\"Error performing search in AstraDBVectorStore: {str(e)}\") from e\n\n logger.debug(f\"Retrieved documents: {len(docs)}\")\n\n data = docs_to_data(docs)\n logger.debug(f\"Converted documents to data: {len(data)}\")\n self.status = data\n return data\n else:\n logger.debug(\"No search input provided. Skipping search.\")\n return []\n\n def get_retriever_kwargs(self):\n search_args = self._build_search_args()\n return {\n \"search_type\": self._map_search_type(),\n \"search_kwargs\": search_args,\n }\n" + "value": "from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.inputs.inputs import DefaultPromptField\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(self) -> Message:\n prompt = Message.from_template(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def _update_template(self, frontend_node: dict):\n prompt_template = frontend_node[\"template\"][\"template\"][\"value\"]\n custom_fields = frontend_node[\"custom_fields\"]\n frontend_node_template = frontend_node[\"template\"]\n _ = process_prompt_template(\n template=prompt_template,\n name=\"template\",\n custom_fields=custom_fields,\n frontend_node_template=frontend_node_template,\n )\n return frontend_node\n\n def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"This function is called after the code validation is done.\"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n # Kept it duplicated for backwards compatibility\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n\n def _get_fallback_input(self, **kwargs):\n return DefaultPromptField(**kwargs)\n" }, - "collection_indexing_policy": { - "advanced": true, - "display_name": "Collection Indexing Policy", + "context": { + "advanced": false, + "display_name": "context", "dynamic": false, - "info": "Optional dictionary defining the indexing policy for the collection.", + "field_type": "str", + "fileTypes": [], + 
"file_path": "", + "info": "", + "input_types": ["Message", "Text"], "list": false, "load_from_db": false, - "name": "collection_indexing_policy", + "multiline": true, + "name": "context", "placeholder": "", "required": false, "show": true, "title_case": false, - "trace_as_metadata": true, "type": "str", "value": "" }, - "collection_name": { + "question": { "advanced": false, - "display_name": "Collection Name", + "display_name": "question", "dynamic": false, - "info": "The name of the collection within Astra DB where the vectors will be stored.", + "field_type": "str", + "fileTypes": [], + "file_path": "", + "info": "", + "input_types": ["Message", "Text"], "list": false, "load_from_db": false, - "name": "collection_name", + "multiline": true, + "name": "question", "placeholder": "", - "required": true, + "required": false, "show": true, "title_case": false, - "trace_as_metadata": true, "type": "str", - "value": "langflow" + "value": "" }, - "embedding": { + "template": { "advanced": false, - "display_name": "Embedding or Astra Vectorize", + "display_name": "Template", "dynamic": false, - "info": "Allows either an embedding model or an Astra Vectorize configuration.", - "input_types": [ - "Embeddings", - "dict" - ], + "info": "", "list": false, - "name": "embedding", + "load_from_db": false, + "name": "template", "placeholder": "", "required": false, "show": true, "title_case": false, - "trace_as_metadata": true, - "type": "other", - "value": "" - }, - "ingest_data": { + "trace_as_input": true, + "type": "prompt", + "value": "{context}\n\n---\n\nGiven the context above, answer the question as best as possible.\n\nQuestion: {question}\n\nAnswer: " + } + }, + "tool_mode": false + }, + "type": "Prompt" + }, + "dragging": false, + "height": 433, + "id": "Prompt-uqDcR", + "position": { + "x": 1977.9097981422992, + "y": 640.5656416923846 + }, + "positionAbsolute": { + "x": 1977.9097981422992, + "y": 640.5656416923846 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "description": "Split text into chunks based on specified criteria.", + "display_name": "Split Text", + "id": "SplitText-QakmY", + "node": { + "base_classes": ["Data"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Split text into chunks based on specified criteria.", + "display_name": "Split Text", + "documentation": "", + "edited": false, + "field_order": [ + "data_inputs", + "chunk_overlap", + "chunk_size", + "separator" + ], + "frozen": false, + "icon": "scissors-line-dashed", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Chunks", + "method": "split_text", + "name": "chunks", + "selected": "Data", + "types": ["Data"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "chunk_overlap": { "advanced": false, - "display_name": "Ingest Data", + "display_name": "Chunk Overlap", "dynamic": false, - "info": "", - "input_types": [ - "Data" - ], - "list": true, - "name": "ingest_data", + "info": "Number of characters to overlap between chunks.", + "list": false, + "name": "chunk_overlap", "placeholder": "", "required": false, "show": true, "title_case": false, - "trace_as_input": true, "trace_as_metadata": true, - "type": "other", - "value": "" + "type": "int", + "value": 200 }, - "metadata_indexing_exclude": { - "advanced": true, - "display_name": "Metadata Indexing Exclude", + "chunk_size": { + "advanced": false, + 
"display_name": "Chunk Size", "dynamic": false, - "info": "Optional list of metadata fields to exclude from the indexing.", + "info": "The maximum number of characters in each chunk.", "list": false, - "load_from_db": false, - "name": "metadata_indexing_exclude", + "name": "chunk_size", "placeholder": "", "required": false, "show": true, "title_case": false, "trace_as_metadata": true, - "type": "str", - "value": "" + "type": "int", + "value": 1000 }, - "metadata_indexing_include": { + "code": { "advanced": true, - "display_name": "Metadata Indexing Include", - "dynamic": false, - "info": "Optional list of metadata fields to include in the indexing.", + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", "list": false, "load_from_db": false, - "name": "metadata_indexing_include", + "multiline": true, + "name": "code", + "password": false, "placeholder": "", - "required": false, + "required": true, "show": true, "title_case": false, - "trace_as_metadata": true, - "type": "str", - "value": "" + "type": "code", + "value": "from langchain_text_splitters import CharacterTextSplitter\n\nfrom langflow.custom import Component\nfrom langflow.io import HandleInput, IntInput, MessageTextInput, Output\nfrom langflow.schema import Data\nfrom langflow.utils.util import unescape_string\n\n\nclass SplitTextComponent(Component):\n display_name: str = \"Split Text\"\n description: str = \"Split text into chunks based on specified criteria.\"\n icon = \"scissors-line-dashed\"\n name = \"SplitText\"\n\n inputs = [\n HandleInput(\n name=\"data_inputs\",\n display_name=\"Data Inputs\",\n info=\"The data to split.\",\n input_types=[\"Data\"],\n is_list=True,\n ),\n IntInput(\n name=\"chunk_overlap\",\n display_name=\"Chunk Overlap\",\n info=\"Number of characters to overlap between chunks.\",\n value=200,\n ),\n IntInput(\n name=\"chunk_size\",\n display_name=\"Chunk Size\",\n info=\"The maximum number of characters in each chunk.\",\n value=1000,\n ),\n MessageTextInput(\n name=\"separator\",\n display_name=\"Separator\",\n info=\"The character to split on. 
Defaults to newline.\",\n value=\"\\n\",\n ),\n ]\n\n outputs = [\n Output(display_name=\"Chunks\", name=\"chunks\", method=\"split_text\"),\n ]\n\n def _docs_to_data(self, docs):\n return [Data(text=doc.page_content, data=doc.metadata) for doc in docs]\n\n def split_text(self) -> list[Data]:\n separator = unescape_string(self.separator)\n\n documents = [_input.to_lc_document() for _input in self.data_inputs if isinstance(_input, Data)]\n\n splitter = CharacterTextSplitter(\n chunk_overlap=self.chunk_overlap,\n chunk_size=self.chunk_size,\n separator=separator,\n )\n docs = splitter.split_documents(documents)\n data = self._docs_to_data(docs)\n self.status = data\n return data\n" }, - "metric": { - "advanced": true, - "display_name": "Metric", + "data_inputs": { + "advanced": false, + "display_name": "Data Inputs", "dynamic": false, - "info": "Optional distance metric for vector comparisons in the vector store.", - "name": "metric", - "options": [ - "cosine", - "dot_product", - "euclidean" - ], + "info": "The data to split.", + "input_types": ["Data"], + "list": true, + "name": "data_inputs", "placeholder": "", "required": false, "show": true, "title_case": false, "trace_as_metadata": true, - "type": "str", + "type": "other", "value": "" }, - "namespace": { - "advanced": true, - "display_name": "Namespace", + "separator": { + "advanced": false, + "display_name": "Separator", "dynamic": false, - "info": "Optional namespace within Astra DB to use for the collection.", + "info": "The character to split on. Defaults to newline.", + "input_types": ["Message"], "list": false, "load_from_db": false, - "name": "namespace", + "name": "separator", "placeholder": "", "required": false, "show": true, "title_case": false, + "trace_as_input": true, "trace_as_metadata": true, "type": "str", - "value": "" - }, - "number_of_results": { - "advanced": true, - "display_name": "Number of Results", - "dynamic": false, - "info": "Number of results to return.", - "list": false, - "name": "number_of_results", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "int", - "value": 4 - }, - "pre_delete_collection": { - "advanced": true, - "display_name": "Pre Delete Collection", - "dynamic": false, - "info": "Boolean flag to determine whether to delete the collection before creating a new one.", - "list": false, - "name": "pre_delete_collection", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": false - }, - "search_filter": { - "advanced": true, - "display_name": "Search Metadata Filter", - "dynamic": false, - "info": "Optional dictionary of filters to apply to the search query.", - "list": true, - "name": "search_filter", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "type": "dict", - "value": {} + "value": "\n" + } + } + }, + "type": "SplitText" + }, + "dragging": false, + "height": 475, + "id": "SplitText-QakmY", + "position": { + "x": 1683.4543896546102, + "y": 1350.7871623588553 + }, + "positionAbsolute": { + "x": 1683.4543896546102, + "y": 1350.7871623588553 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "id": "note-i0pUr", + "node": { + "description": "## 📄 Load Data Flow\n\n**Purpose**: This flow is designed to ingest local files and store their contents as vectors in the database.\n\n**Steps to Use:**\n\n1. 
**Upload File**: Use the \"File\" component to upload your data file. Ensure the file format is supported (e.g., `txt`, `pdf`).\n2. **Text Splitting**: The \"Split Text\" component will automatically segment your document into smaller, manageable chunks for efficient processing.\n3. **Generate Embeddings**: OpenAI models will convert text chunks into vector representations that can be indexed in Astra DB.\n4. **Store Vectors**: The \"Astra DB\" component completes the process by saving these vectors, making them ready for retrieval and search operations.\n\n**Quick Tips**:\n- Always ensure your files are correctly formatted and free of errors before uploading.\n- Monitor the component indicators to confirm data processing and storage status.\n", + "display_name": "", + "documentation": "", + "template": { + "backgroundColor": "blue" + } + }, + "type": "note" + }, + "dragging": false, + "height": 445, + "id": "note-i0pUr", + "position": { + "x": 806.5400894704665, + "y": 1410.461680629912 + }, + "positionAbsolute": { + "x": 806.5400894704665, + "y": 1410.461680629912 + }, + "resizing": false, + "selected": false, + "style": { + "height": 445, + "width": 493 + }, + "type": "noteNode", + "width": 493 + }, + { + "data": { + "id": "note-9DvF2", + "node": { + "description": "# Vector Store RAG Overview\n\n\nThis Vector Store RAG workflow combines data ingestion and retrieval into a unified process, allowing you to manage and query your data efficiently.\n\n**Components**:\n- **📄 Load Data**: Prepares data for vector database storage.\n - File ingestion\n - Text chunking\n - Embedding generation\n - Storage in Astra DB\n\n- **✨ Retriever**: Provides intelligent search and retrieval from the vector database.\n - User query input\n - Database search\n - Enhanced response generation using AI models\n\n**Workflow Instructions**:\n1. Initiate the **Load Data** flow to input your document into the vector database.\n2. Use the **Retriever** flow to conduct queries and obtain comprehensive responses based on your stored data.\n3. Adjust settings like API keys and collection names as needed for your specific use case.\n\n**Benefits**:\n- Streamlines data management with a single interface.\n- Fast, scalable vector-based search capabilities.\n- Integrates cutting-edge AI technology for rich, context-aware outputs.\n", + "display_name": "Read Me", + "documentation": "", + "template": { + "backgroundColor": "indigo" + } + }, + "type": "note" + }, + "dragging": false, + "height": 798, + "id": "note-9DvF2", + "position": { + "x": 134.64227176140844, + "y": 307.10406179806375 + }, + "positionAbsolute": { + "x": 134.64227176140844, + "y": 307.10406179806375 + }, + "resizing": false, + "selected": false, + "style": { + "height": 798, + "width": 600 + }, + "type": "noteNode", + "width": 600 + }, + { + "data": { + "id": "note-RC6pg", + "node": { + "description": "## Astra DB Setup\n\n**Important Setup Information:**\n\nTo use the Astra DB component in this workflow, you'll need to obtain an Astra DB key. Follow these steps to set up your access:\n\n1. **Create an Account**: Visit [DataStax Accounts](https://accounts.datastax.com) and create an account if you don't already have one.\n2. **Generate a Key**: Once logged in, navigate to the \"Astra DB\" section to create a new application token. This token will serve as your authentication key for API access.\n3. **Configure Endpoint**: Note the API endpoint URL provided by Astra DB. This is essential for connecting your vector database to the workflow.\n4. 
**Input Token**: Enter the generated token and API endpoint URL in the appropriate fields in the Astra DB component within the workflow.\n\n", + "display_name": "", + "documentation": "", + "template": { + "backgroundColor": "blue" + } + }, + "type": "note" + }, + "dragging": false, + "height": 358, + "id": "note-RC6pg", + "position": { + "x": 1180.1117634200216, + "y": -2.4324300661269547 + }, + "positionAbsolute": { + "x": 1180.1117634200216, + "y": -2.4324300661269547 + }, + "resizing": false, + "selected": false, + "style": { + "height": 358, + "width": 412 + }, + "type": "noteNode", + "width": 412 + }, + { + "data": { + "id": "OpenAIModel-H0ANr", + "node": { + "base_classes": ["LanguageModel", "Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Generates text using OpenAI LLMs.", + "display_name": "OpenAI", + "documentation": "", + "edited": false, + "field_order": [ + "input_value", + "system_message", + "stream", + "max_tokens", + "model_kwargs", + "json_mode", + "output_schema", + "model_name", + "openai_api_base", + "api_key", + "temperature", + "seed", + "output_parser" + ], + "frozen": false, + "icon": "OpenAI", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Text", + "method": "text_response", + "name": "text_output", + "required_inputs": [], + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" }, - "search_input": { + { + "cache": true, + "display_name": "Language Model", + "method": "build_model", + "name": "model_output", + "required_inputs": [], + "selected": "LanguageModel", + "types": ["LanguageModel"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "api_key": { + "_input_type": "SecretStrInput", "advanced": false, - "display_name": "Search Input", + "display_name": "OpenAI API Key", "dynamic": false, + "info": "The OpenAI API Key to use for the OpenAI model.", + "input_types": ["Message"], + "load_from_db": true, + "name": "api_key", + "password": true, + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "OPENAI_API_KEY" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", "info": "", - "input_types": [ - "Message" - ], "list": false, "load_from_db": false, "multiline": true, - "name": "search_input", + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "import operator\nfrom functools import reduce\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langflow.inputs import BoolInput, DictInput, DropdownInput, FloatInput, IntInput, SecretStrInput, StrInput\nfrom langflow.inputs.inputs import HandleInput\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = [\n *LCModelComponent._base_inputs,\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(\n name=\"model_kwargs\",\n display_name=\"Model Kwargs\",\n advanced=True,\n info=\"Additional keyword arguments to pass to the model.\",\n ),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. \"\n \"You must pass the word JSON in the prompt. \"\n \"If left blank, JSON mode will be disabled. [DEPRECATED]\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. \"\n \"Defaults to https://api.openai.com/v1. \"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n HandleInput(\n name=\"output_parser\",\n display_name=\"Output Parser\",\n info=\"The parser to use to parse the output of the model\",\n advanced=True,\n input_types=[\"OutputParser\"],\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n api_key = SecretStr(openai_api_key).get_secret_value() if openai_api_key else None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature if temperature is not None else 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\")\n else:\n output = output.bind(response_format={\"type\": \"json_object\"})\n\n return output\n\n def _get_exception_message(self, e: Exception):\n \"\"\"Get a message from an OpenAI exception.\n\n Args:\n e (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n try:\n from openai import BadRequestError\n except ImportError:\n return None\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\")\n if message:\n return message\n return None\n" + }, + "input_value": { + "_input_type": "MessageInput", + "advanced": false, + "display_name": "Input", + "dynamic": false, + "info": "", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "input_value", "placeholder": "", "required": false, "show": true, @@ -863,136 +1107,309 @@ 
"type": "str", "value": "" }, - "search_score_threshold": { + "json_mode": { + "_input_type": "BoolInput", "advanced": true, - "display_name": "Search Score Threshold", + "display_name": "JSON Mode", "dynamic": false, - "info": "Minimum similarity score threshold for search results. (when using 'Similarity with score threshold')", + "info": "If True, it will output JSON regardless of passing a schema.", "list": false, - "name": "search_score_threshold", + "name": "json_mode", "placeholder": "", "required": false, "show": true, "title_case": false, "trace_as_metadata": true, - "type": "float", - "value": 0 + "type": "bool", + "value": false }, - "search_type": { + "max_tokens": { + "_input_type": "IntInput", "advanced": true, - "display_name": "Search Type", + "display_name": "Max Tokens", "dynamic": false, - "info": "Search type to use", - "name": "search_type", + "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.", + "list": false, + "name": "max_tokens", + "placeholder": "", + "range_spec": { + "max": 128000, + "min": 0, + "step": 0.1, + "step_type": "float" + }, + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": "" + }, + "model_kwargs": { + "_input_type": "DictInput", + "advanced": true, + "display_name": "Model Kwargs", + "dynamic": false, + "info": "Additional keyword arguments to pass to the model.", + "list": false, + "name": "model_kwargs", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "model_name": { + "_input_type": "DropdownInput", + "advanced": false, + "combobox": false, + "display_name": "Model Name", + "dynamic": false, + "info": "", + "name": "model_name", "options": [ - "Similarity", - "Similarity with score threshold", - "MMR (Max Marginal Relevance)" + "gpt-4o-mini", + "gpt-4o", + "gpt-4-turbo", + "gpt-4-turbo-preview", + "gpt-4", + "gpt-3.5-turbo", + "gpt-3.5-turbo-0125" ], "placeholder": "", "required": false, "show": true, "title_case": false, + "tool_mode": false, "trace_as_metadata": true, "type": "str", - "value": "Similarity" + "value": "gpt-4o-mini" }, - "setup_mode": { + "openai_api_base": { + "_input_type": "StrInput", "advanced": true, - "display_name": "Setup Mode", + "display_name": "OpenAI API Base", "dynamic": false, - "info": "Configuration mode for setting up the vector store, with options like 'Sync', 'Async', or 'Off'.", - "name": "setup_mode", - "options": [ - "Sync", - "Async", - "Off" - ], + "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.", + "list": false, + "load_from_db": false, + "name": "openai_api_base", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "output_parser": { + "_input_type": "HandleInput", + "advanced": true, + "display_name": "Output Parser", + "dynamic": false, + "info": "The parser to use to parse the output of the model", + "input_types": ["OutputParser"], + "list": false, + "name": "output_parser", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "output_schema": { + "_input_type": "DictInput", + "advanced": true, + "display_name": "Schema", + "dynamic": false, + "info": "The schema for the Output of the model. 
You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled. [DEPRECATED]", + "list": true, + "name": "output_schema", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "seed": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Seed", + "dynamic": false, + "info": "The seed controls the reproducibility of the job.", + "list": false, + "name": "seed", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 1 + }, + "stream": { + "_input_type": "BoolInput", + "advanced": false, + "display_name": "Stream", + "dynamic": false, + "info": "Stream the response from the model. Streaming works only in Chat.", + "list": false, + "name": "stream", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + }, + "system_message": { + "_input_type": "MessageTextInput", + "advanced": false, + "display_name": "System Message", + "dynamic": false, + "info": "System message to pass to the model.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "system_message", "placeholder": "", "required": false, "show": true, "title_case": false, + "tool_mode": false, + "trace_as_input": true, "trace_as_metadata": true, "type": "str", - "value": "Sync" + "value": "" }, - "token": { + "temperature": { + "_input_type": "FloatInput", "advanced": false, - "display_name": "Astra DB Application Token", + "display_name": "Temperature", "dynamic": false, - "info": "Authentication token for accessing Astra DB.", - "input_types": [ - "Message" - ], - "load_from_db": true, - "name": "token", - "password": true, + "info": "", + "list": false, + "name": "temperature", "placeholder": "", - "required": true, + "required": false, "show": true, "title_case": false, - "type": "str", - "value": "" + "trace_as_metadata": true, + "type": "float", + "value": 0.1 } - } + }, + "tool_mode": false }, - "type": "AstraVectorStoreComponent" + "type": "OpenAIModel" }, "dragging": false, - "height": 753, - "id": "AstraVectorStoreComponent-vXWPf", + "height": 543, + "id": "OpenAIModel-H0ANr", "position": { - "x": 1246.0381406498648, - "y": 333.25157075413966 + "x": 2360.1432368563187, + "y": 571.6712358167248 }, "positionAbsolute": { - "x": 1246.0381406498648, - "y": 333.25157075413966 + "x": 2360.1432368563187, + "y": 571.6712358167248 }, "selected": false, "type": "genericNode", - "width": 384 + "width": 320 }, { "data": { - "description": "Convert Data into plain text following a specified template.", - "display_name": "Parse Data", - "id": "ParseData-QVaZr", + "description": "Display a chat message in the Playground.", + "display_name": "Chat Output", + "id": "ChatOutput-a4EPO", "node": { - "base_classes": [ - "Message" - ], + "base_classes": ["Message"], "beta": false, "conditional_paths": [], "custom_fields": {}, - "description": "Convert Data into plain text following a specified template.", - "display_name": "Parse Data", + "description": "Display a chat message in the Playground.", + "display_name": "Chat Output", "documentation": "", "edited": false, "field_order": [ - "data", - "template", - "sep" + "input_value", + "should_store_message", + "sender", + "sender_name", + "session_id", + "data_template", + "background_color", + "chat_icon", + "text_color" ], "frozen": false, - "icon": "braces", + "icon": 
"MessagesSquare", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, "output_types": [], "outputs": [ { "cache": true, - "display_name": "Text", - "method": "parse_data", - "name": "text", + "display_name": "Message", + "method": "message_response", + "name": "message", "selected": "Message", - "types": [ - "Message" - ], + "types": ["Message"], "value": "__UNDEFINED__" } ], "pinned": false, "template": { "_type": "Component", + "background_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Background Color", + "dynamic": false, + "info": "The background color of the icon.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "background_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "chat_icon": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Icon", + "dynamic": false, + "info": "The icon of the message.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "chat_icon", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, "code": { "advanced": true, "dynamic": true, @@ -1009,270 +1426,309 @@ "show": true, "title_case": false, "type": "code", - "value": "from langflow.custom import Component\nfrom langflow.helpers.data import data_to_text\nfrom langflow.io import DataInput, MultilineInput, Output, StrInput\nfrom langflow.schema.message import Message\n\n\nclass ParseDataComponent(Component):\n display_name = \"Parse Data\"\n description = \"Convert Data into plain text following a specified template.\"\n icon = \"braces\"\n name = \"ParseData\"\n\n inputs = [\n DataInput(name=\"data\", display_name=\"Data\", info=\"The data to convert to text.\"),\n MultilineInput(\n name=\"template\",\n display_name=\"Template\",\n info=\"The template to use for formatting the data. 
It can contain the keys {text}, {data} or any other key in the Data.\",\n value=\"{text}\",\n ),\n StrInput(name=\"sep\", display_name=\"Separator\", advanced=True, value=\"\\n\"),\n ]\n\n outputs = [\n Output(display_name=\"Text\", name=\"text\", method=\"parse_data\"),\n ]\n\n def parse_data(self) -> Message:\n data = self.data if isinstance(self.data, list) else [self.data]\n template = self.template\n\n result_string = data_to_text(template, data, sep=self.sep)\n self.status = result_string\n return Message(text=result_string)\n" + "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageInput, MessageTextInput, Output\nfrom langflow.schema.message import Message\nfrom langflow.schema.properties import Source\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.\",\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(\n display_name=\"Message\",\n name=\"message\",\n method=\"message_response\",\n ),\n ]\n\n def _build_source(self, _id: str | None, display_name: str | None, source: str | None) -> Source:\n source_dict = {}\n if _id:\n source_dict[\"id\"] = _id\n if display_name:\n source_dict[\"display_name\"] = display_name\n if source:\n source_dict[\"source\"] = source\n return Source(**source_dict)\n\n def message_response(self) -> Message:\n _source, _icon, _display_name, _source_id = self.get_properties_from_source_component()\n _background_color = self.background_color\n _text_color = self.text_color\n if self.chat_icon:\n _icon = self.chat_icon\n message = self.input_value if isinstance(self.input_value, Message) else Message(text=self.input_value)\n message.sender = self.sender\n message.sender_name = self.sender_name\n message.session_id = self.session_id\n message.flow_id = self.graph.flow_id if hasattr(self, \"graph\") else None\n message.properties.source = self._build_source(_source_id, _display_name, _source)\n message.properties.icon = _icon\n message.properties.background_color = _background_color\n message.properties.text_color = _text_color\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n" }, - "data": { + "data_template": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Data Template", + "dynamic": false, + "info": "Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "data_template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "{text}" + }, + "input_value": { + "_input_type": "MessageInput", "advanced": false, - "display_name": "Data", + "display_name": "Text", "dynamic": false, - "info": "The data to convert to text.", - "input_types": [ - "Data" - ], + "info": "Message to be passed as output.", + "input_types": ["Message"], "list": false, - "name": "data", + "load_from_db": false, + "name": "input_value", "placeholder": "", "required": false, "show": true, "title_case": false, "trace_as_input": true, "trace_as_metadata": true, - "type": "other", + "type": "str", "value": "" }, - "sep": { + "sender": { + "_input_type": "DropdownInput", "advanced": true, - "display_name": "Separator", + "combobox": false, + "display_name": "Sender Type", "dynamic": false, - "info": "", + "info": "Type of sender.", + "name": "sender", + "options": ["Machine", "User"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "Machine" + }, + "sender_name": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Sender Name", + "dynamic": false, + "info": "Name of the sender.", + "input_types": ["Message"], "list": false, "load_from_db": false, - "name": "sep", + "name": "sender_name", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "AI" + }, + "session_id": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Session ID", + "dynamic": false, + "info": "The session ID of the chat. If empty, the current session ID parameter will be used.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "session_id", "placeholder": "", "required": false, "show": true, "title_case": false, + "tool_mode": false, + "trace_as_input": true, "trace_as_metadata": true, "type": "str", - "value": "\n" + "value": "" }, - "template": { - "advanced": false, - "display_name": "Template", + "should_store_message": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Store Messages", "dynamic": false, - "info": "The template to use for formatting the data. 
It can contain the keys {text}, {data} or any other key in the Data.", - "input_types": [ - "Message" - ], + "info": "Store the message in the history.", + "list": false, + "name": "should_store_message", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "text_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Text Color", + "dynamic": false, + "info": "The text color of the name", + "input_types": ["Message"], "list": false, "load_from_db": false, - "multiline": true, - "name": "template", + "name": "text_color", "placeholder": "", "required": false, "show": true, "title_case": false, + "tool_mode": false, "trace_as_input": true, "trace_as_metadata": true, "type": "str", - "value": "{text}" + "value": "" } - } + }, + "tool_mode": false }, - "type": "ParseData" + "type": "ChatOutput" }, "dragging": false, - "height": 384, - "id": "ParseData-QVaZr", + "height": 234, + "id": "ChatOutput-a4EPO", "position": { - "x": 1854.1518317915907, - "y": 459.3386924128532 + "x": 2734.385670401691, + "y": 808.2967893015561 }, "positionAbsolute": { - "x": 1854.1518317915907, - "y": 459.3386924128532 + "x": 2734.385670401691, + "y": 808.2967893015561 }, "selected": false, "type": "genericNode", - "width": 384 + "width": 320 }, { "data": { - "description": "Create a prompt template with dynamic variables.", - "display_name": "Prompt", - "id": "Prompt-eV1SH", + "id": "AstraDB-3buPx", "node": { - "base_classes": [ - "Message" - ], + "base_classes": ["Data", "Retriever"], "beta": false, "conditional_paths": [], - "custom_fields": { - "template": [ - "context", - "question" - ] - }, - "description": "Create a prompt template with dynamic variables.", - "display_name": "Prompt", - "documentation": "", + "custom_fields": {}, + "description": "Implementation of Vector Store using Astra DB with search capabilities", + "display_name": "Astra DB", + "documentation": "https://docs.langflow.org/starter-projects-vector-store-rag", "edited": false, "field_order": [ - "template" + "token", + "api_endpoint", + "collection_name", + "search_input", + "ingest_data", + "namespace", + "embedding_service", + "embedding", + "metric", + "batch_size", + "bulk_insert_batch_concurrency", + "bulk_insert_overwrite_concurrency", + "bulk_delete_concurrency", + "setup_mode", + "pre_delete_collection", + "metadata_indexing_include", + "metadata_indexing_exclude", + "collection_indexing_policy", + "number_of_results", + "search_type", + "search_score_threshold", + "search_filter" ], "frozen": false, - "icon": "prompts", + "icon": "AstraDB", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, "output_types": [], "outputs": [ { "cache": true, - "display_name": "Prompt Message", - "method": "build_prompt", - "name": "prompt", - "selected": "Message", - "types": [ - "Message" - ], + "display_name": "Retriever", + "method": "build_base_retriever", + "name": "base_retriever", + "required_inputs": [], + "selected": "Retriever", + "types": ["Retriever"], + "value": "__UNDEFINED__" + }, + { + "cache": true, + "display_name": "Search Results", + "method": "search_documents", + "name": "search_results", + "required_inputs": ["api_endpoint", "collection_name", "token"], + "selected": "Data", + "types": ["Data"], "value": "__UNDEFINED__" } ], "pinned": false, "template": { "_type": "Component", - "code": { + "api_endpoint": { + "_input_type": "SecretStrInput", + "advanced": false, + "display_name": "API 
Endpoint", + "dynamic": false, + "info": "API endpoint URL for the Astra DB service.", + "input_types": ["Message"], + "load_from_db": true, + "name": "api_endpoint", + "password": true, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "str", + "value": "ASTRA_DB_API_ENDPOINT" + }, + "batch_size": { + "_input_type": "IntInput", "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", - "info": "", + "display_name": "Batch Size", + "dynamic": false, + "info": "Optional number of data to process in a single batch.", "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, + "name": "batch_size", "placeholder": "", - "required": true, + "required": false, "show": true, "title_case": false, - "type": "code", - "value": "from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.inputs.inputs import DefaultPromptField\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(\n self,\n ) -> Message:\n prompt = await Message.from_template_and_variables(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def _update_template(self, frontend_node: dict):\n prompt_template = frontend_node[\"template\"][\"template\"][\"value\"]\n custom_fields = frontend_node[\"custom_fields\"]\n frontend_node_template = frontend_node[\"template\"]\n _ = process_prompt_template(\n template=prompt_template,\n name=\"template\",\n custom_fields=custom_fields,\n frontend_node_template=frontend_node_template,\n )\n return frontend_node\n\n def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"\n This function is called after the code validation is done.\n \"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n # Kept it duplicated for backwards compatibility\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n\n def _get_fallback_input(self, **kwargs):\n return DefaultPromptField(**kwargs)\n" + "trace_as_metadata": true, + "type": "int", + "value": "" }, - "context": { - "advanced": false, - "display_name": "context", + "bulk_delete_concurrency": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Bulk Delete Concurrency", "dynamic": false, - "field_type": "str", - "fileTypes": [], - "file_path": "", - "info": "", - "input_types": [ - "Message", - "Text" - ], + "info": "Optional concurrency level for bulk delete operations.", "list": false, - "load_from_db": false, 
- "multiline": true, - "name": "context", - "password": false, + "name": "bulk_delete_concurrency", "placeholder": "", "required": false, "show": true, "title_case": false, - "type": "str", + "trace_as_metadata": true, + "type": "int", "value": "" }, - "question": { - "advanced": false, - "display_name": "question", + "bulk_insert_batch_concurrency": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Bulk Insert Batch Concurrency", "dynamic": false, - "field_type": "str", - "fileTypes": [], - "file_path": "", - "info": "", - "input_types": [ - "Message", - "Text" - ], + "info": "Optional concurrency level for bulk insert operations.", "list": false, - "load_from_db": false, - "multiline": true, - "name": "question", - "password": false, + "name": "bulk_insert_batch_concurrency", "placeholder": "", "required": false, "show": true, "title_case": false, - "type": "str", + "trace_as_metadata": true, + "type": "int", "value": "" }, - "template": { - "advanced": false, - "display_name": "Template", + "bulk_insert_overwrite_concurrency": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Bulk Insert Overwrite Concurrency", "dynamic": false, - "info": "", + "info": "Optional concurrency level for bulk insert operations that overwrite existing data.", "list": false, - "load_from_db": false, - "name": "template", + "name": "bulk_insert_overwrite_concurrency", "placeholder": "", "required": false, "show": true, "title_case": false, - "trace_as_input": true, - "type": "prompt", - "value": "{context}\n\n---\n\nGiven the context above, answer the question as best as possible.\n\nQuestion: {question}\n\nAnswer: " - } - } - }, - "type": "Prompt" - }, - "dragging": false, - "height": 515, - "id": "Prompt-eV1SH", - "position": { - "x": 2486.0988668404975, - "y": 496.5120474157301 - }, - "positionAbsolute": { - "x": 2486.0988668404975, - "y": 496.5120474157301 - }, - "selected": false, - "type": "genericNode", - "width": 384 - }, - { - "data": { - "description": "Display a chat message in the Playground.", - "display_name": "Chat Output", - "id": "ChatOutput-OrmMa", - "node": { - "base_classes": [ - "Message" - ], - "beta": false, - "conditional_paths": [], - "custom_fields": {}, - "description": "Display a chat message in the Playground.", - "display_name": "Chat Output", - "documentation": "", - "edited": false, - "field_order": [ - "input_value", - "should_store_message", - "sender", - "sender_name", - "session_id", - "data_template" - ], - "frozen": false, - "icon": "ChatOutput", - "output_types": [], - "outputs": [ - { - "cache": true, - "display_name": "Message", - "method": "message_response", - "name": "message", - "selected": "Message", - "types": [ - "Message" - ], - "value": "__UNDEFINED__" - } - ], - "pinned": false, - "template": { - "_type": "Component", + "trace_as_metadata": true, + "type": "int", + "value": "" + }, "code": { "advanced": true, "dynamic": true, @@ -1289,1052 +1745,937 @@ "show": true, "title_case": false, "type": "code", - "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER, MESSAGE_SENDER_AI\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n name = 
\"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n" + "value": "import os\n\nimport orjson\nfrom astrapy.admin import parse_api_endpoint\n\nfrom langflow.base.vectorstores.model import LCVectorStoreComponent, check_cached_vector_store\nfrom langflow.helpers import docs_to_data\nfrom langflow.inputs import DictInput, FloatInput, MessageTextInput\nfrom langflow.io import (\n BoolInput,\n DataInput,\n DropdownInput,\n HandleInput,\n IntInput,\n MultilineInput,\n SecretStrInput,\n StrInput,\n)\nfrom langflow.schema import Data\n\n\nclass AstraVectorStoreComponent(LCVectorStoreComponent):\n display_name: str = \"Astra DB\"\n description: str = \"Implementation of Vector Store using Astra DB with search capabilities\"\n documentation: str = \"https://docs.langflow.org/starter-projects-vector-store-rag\"\n name = \"AstraDB\"\n icon: str = \"AstraDB\"\n\n VECTORIZE_PROVIDERS_MAPPING = {\n \"Azure OpenAI\": [\"azureOpenAI\", [\"text-embedding-3-small\", \"text-embedding-3-large\", \"text-embedding-ada-002\"]],\n \"Hugging Face - Dedicated\": [\"huggingfaceDedicated\", [\"endpoint-defined-model\"]],\n \"Hugging Face - Serverless\": [\n \"huggingface\",\n [\n \"sentence-transformers/all-MiniLM-L6-v2\",\n \"intfloat/multilingual-e5-large\",\n \"intfloat/multilingual-e5-large-instruct\",\n \"BAAI/bge-small-en-v1.5\",\n \"BAAI/bge-base-en-v1.5\",\n \"BAAI/bge-large-en-v1.5\",\n ],\n ],\n \"Jina AI\": [\n \"jinaAI\",\n [\n \"jina-embeddings-v2-base-en\",\n \"jina-embeddings-v2-base-de\",\n \"jina-embeddings-v2-base-es\",\n \"jina-embeddings-v2-base-code\",\n \"jina-embeddings-v2-base-zh\",\n ],\n ],\n \"Mistral AI\": [\"mistral\", [\"mistral-embed\"]],\n \"NVIDIA\": [\"nvidia\", [\"NV-Embed-QA\"]],\n \"OpenAI\": [\"openai\", [\"text-embedding-3-small\", \"text-embedding-3-large\", \"text-embedding-ada-002\"]],\n \"Upstage\": [\"upstageAI\", [\"solar-embedding-1-large\"]],\n \"Voyage AI\": [\n \"voyageAI\",\n 
[\"voyage-large-2-instruct\", \"voyage-law-2\", \"voyage-code-2\", \"voyage-large-2\", \"voyage-2\"],\n ],\n }\n\n inputs = [\n SecretStrInput(\n name=\"token\",\n display_name=\"Astra DB Application Token\",\n info=\"Authentication token for accessing Astra DB.\",\n value=\"ASTRA_DB_APPLICATION_TOKEN\",\n required=True,\n advanced=os.getenv(\"ASTRA_ENHANCED\", \"false\").lower() == \"true\",\n ),\n SecretStrInput(\n name=\"api_endpoint\",\n display_name=\"Database\" if os.getenv(\"ASTRA_ENHANCED\", \"false\").lower() == \"true\" else \"API Endpoint\",\n info=\"API endpoint URL for the Astra DB service.\",\n value=\"ASTRA_DB_API_ENDPOINT\",\n required=True,\n ),\n StrInput(\n name=\"collection_name\",\n display_name=\"Collection Name\",\n info=\"The name of the collection within Astra DB where the vectors will be stored.\",\n required=True,\n ),\n MultilineInput(\n name=\"search_input\",\n display_name=\"Search Input\",\n ),\n DataInput(\n name=\"ingest_data\",\n display_name=\"Ingest Data\",\n is_list=True,\n ),\n StrInput(\n name=\"namespace\",\n display_name=\"Namespace\",\n info=\"Optional namespace within Astra DB to use for the collection.\",\n advanced=True,\n ),\n DropdownInput(\n name=\"embedding_choice\",\n display_name=\"Embedding Model or Astra Vectorize\",\n info=\"Determines whether to use Astra Vectorize for the collection.\",\n options=[\"Embedding Model\", \"Astra Vectorize\"],\n real_time_refresh=True,\n value=\"Embedding Model\",\n ),\n HandleInput(\n name=\"embedding\",\n display_name=\"Embedding Model\",\n input_types=[\"Embeddings\"],\n info=\"Allows an embedding model configuration.\",\n ),\n DropdownInput(\n name=\"metric\",\n display_name=\"Metric\",\n info=\"Optional distance metric for vector comparisons in the vector store.\",\n options=[\"cosine\", \"dot_product\", \"euclidean\"],\n value=\"cosine\",\n advanced=True,\n ),\n IntInput(\n name=\"batch_size\",\n display_name=\"Batch Size\",\n info=\"Optional number of data to process in a single batch.\",\n advanced=True,\n ),\n IntInput(\n name=\"bulk_insert_batch_concurrency\",\n display_name=\"Bulk Insert Batch Concurrency\",\n info=\"Optional concurrency level for bulk insert operations.\",\n advanced=True,\n ),\n IntInput(\n name=\"bulk_insert_overwrite_concurrency\",\n display_name=\"Bulk Insert Overwrite Concurrency\",\n info=\"Optional concurrency level for bulk insert operations that overwrite existing data.\",\n advanced=True,\n ),\n IntInput(\n name=\"bulk_delete_concurrency\",\n display_name=\"Bulk Delete Concurrency\",\n info=\"Optional concurrency level for bulk delete operations.\",\n advanced=True,\n ),\n DropdownInput(\n name=\"setup_mode\",\n display_name=\"Setup Mode\",\n info=\"Configuration mode for setting up the vector store, with options like 'Sync' or 'Off'.\",\n options=[\"Sync\", \"Off\"],\n advanced=True,\n value=\"Sync\",\n ),\n BoolInput(\n name=\"pre_delete_collection\",\n display_name=\"Pre Delete Collection\",\n info=\"Boolean flag to determine whether to delete the collection before creating a new one.\",\n advanced=True,\n ),\n StrInput(\n name=\"metadata_indexing_include\",\n display_name=\"Metadata Indexing Include\",\n info=\"Optional list of metadata fields to include in the indexing.\",\n is_list=True,\n advanced=True,\n ),\n StrInput(\n name=\"metadata_indexing_exclude\",\n display_name=\"Metadata Indexing Exclude\",\n info=\"Optional list of metadata fields to exclude from the indexing.\",\n is_list=True,\n advanced=True,\n ),\n StrInput(\n 
name=\"collection_indexing_policy\",\n display_name=\"Collection Indexing Policy\",\n info='Optional JSON string for the \"indexing\" field of the collection. '\n \"See https://docs.datastax.com/en/astra-db-serverless/api-reference/collections.html#the-indexing-option\",\n advanced=True,\n ),\n IntInput(\n name=\"number_of_results\",\n display_name=\"Number of Results\",\n info=\"Number of results to return.\",\n advanced=True,\n value=4,\n ),\n DropdownInput(\n name=\"search_type\",\n display_name=\"Search Type\",\n info=\"Search type to use\",\n options=[\"Similarity\", \"Similarity with score threshold\", \"MMR (Max Marginal Relevance)\"],\n value=\"Similarity\",\n advanced=True,\n ),\n FloatInput(\n name=\"search_score_threshold\",\n display_name=\"Search Score Threshold\",\n info=\"Minimum similarity score threshold for search results. \"\n \"(when using 'Similarity with score threshold')\",\n value=0,\n advanced=True,\n ),\n DictInput(\n name=\"search_filter\",\n display_name=\"Search Metadata Filter\",\n info=\"Optional dictionary of filters to apply to the search query.\",\n advanced=True,\n is_list=True,\n ),\n ]\n\n def del_fields(self, build_config, field_list):\n for field in field_list:\n if field in build_config:\n del build_config[field]\n\n return build_config\n\n def insert_in_dict(self, build_config, field_name, new_parameters):\n # Insert the new key-value pair after the found key\n for new_field_name, new_parameter in new_parameters.items():\n # Get all the items as a list of tuples (key, value)\n items = list(build_config.items())\n\n # Find the index of the key to insert after\n idx = len(items)\n for i, (key, _) in enumerate(items):\n if key == field_name:\n idx = i + 1\n break\n\n items.insert(idx, (new_field_name, new_parameter))\n\n # Clear the original dictionary and update with the modified items\n build_config.clear()\n build_config.update(items)\n\n return build_config\n\n def update_build_config(self, build_config: dict, field_value: str, field_name: str | None = None):\n if field_name == \"embedding_choice\":\n if field_value == \"Astra Vectorize\":\n self.del_fields(build_config, [\"embedding\"])\n\n new_parameter = DropdownInput(\n name=\"embedding_provider\",\n display_name=\"Embedding Provider\",\n options=self.VECTORIZE_PROVIDERS_MAPPING.keys(),\n value=\"\",\n required=True,\n real_time_refresh=True,\n ).to_dict()\n\n self.insert_in_dict(build_config, \"embedding_choice\", {\"embedding_provider\": new_parameter})\n else:\n self.del_fields(\n build_config,\n [\n \"embedding_provider\",\n \"model\",\n \"z_01_model_parameters\",\n \"z_02_api_key_name\",\n \"z_03_provider_api_key\",\n \"z_04_authentication\",\n ],\n )\n\n new_parameter = HandleInput(\n name=\"embedding\",\n display_name=\"Embedding Model\",\n input_types=[\"Embeddings\"],\n info=\"Allows an embedding model configuration.\",\n ).to_dict()\n\n self.insert_in_dict(build_config, \"embedding_choice\", {\"embedding\": new_parameter})\n\n elif field_name == \"embedding_provider\":\n self.del_fields(\n build_config,\n [\"model\", \"z_01_model_parameters\", \"z_02_api_key_name\", \"z_03_provider_api_key\", \"z_04_authentication\"],\n )\n\n model_options = self.VECTORIZE_PROVIDERS_MAPPING[field_value][1]\n\n new_parameter = DropdownInput(\n name=\"model\",\n display_name=\"Model\",\n info=\"The embedding model to use for the selected provider. 
Each provider has a different set of \"\n \"models available (full list at \"\n \"https://docs.datastax.com/en/astra-db-serverless/databases/embedding-generation.html):\\n\\n\"\n f\"{', '.join(model_options)}\",\n options=model_options,\n value=None,\n required=True,\n real_time_refresh=True,\n ).to_dict()\n\n self.insert_in_dict(build_config, \"embedding_provider\", {\"model\": new_parameter})\n\n elif field_name == \"model\":\n self.del_fields(\n build_config,\n [\"z_01_model_parameters\", \"z_02_api_key_name\", \"z_03_provider_api_key\", \"z_04_authentication\"],\n )\n\n new_parameter_1 = DictInput(\n name=\"z_01_model_parameters\",\n display_name=\"Model Parameters\",\n is_list=True,\n ).to_dict()\n\n new_parameter_2 = MessageTextInput(\n name=\"z_02_api_key_name\",\n display_name=\"API Key Name\",\n info=\"The name of the embeddings provider API key stored on Astra. \"\n \"If set, it will override the 'ProviderKey' in the authentication parameters.\",\n ).to_dict()\n\n new_parameter_3 = SecretStrInput(\n load_from_db=False,\n name=\"z_03_provider_api_key\",\n display_name=\"Provider API Key\",\n info=\"An alternative to the Astra Authentication that passes an API key for the provider \"\n \"with each request to Astra DB. \"\n \"This may be used when Vectorize is configured for the collection, \"\n \"but no corresponding provider secret is stored within Astra's key management system.\",\n ).to_dict()\n\n new_parameter_4 = DictInput(\n name=\"z_04_authentication\",\n display_name=\"Authentication Parameters\",\n is_list=True,\n ).to_dict()\n\n self.insert_in_dict(\n build_config,\n \"model\",\n {\n \"z_01_model_parameters\": new_parameter_1,\n \"z_02_api_key_name\": new_parameter_2,\n \"z_03_provider_api_key\": new_parameter_3,\n \"z_04_authentication\": new_parameter_4,\n },\n )\n\n return build_config\n\n def build_vectorize_options(self, **kwargs):\n for attribute in [\n \"embedding_provider\",\n \"model\",\n \"z_01_model_parameters\",\n \"z_02_api_key_name\",\n \"z_03_provider_api_key\",\n \"z_04_authentication\",\n ]:\n if not hasattr(self, attribute):\n setattr(self, attribute, None)\n\n # Fetch values from kwargs if any self.* attributes are None\n provider_value = self.VECTORIZE_PROVIDERS_MAPPING.get(self.embedding_provider, [None])[0] or kwargs.get(\n \"embedding_provider\"\n )\n model_name = self.model or kwargs.get(\"model\")\n authentication = {**(self.z_04_authentication or kwargs.get(\"z_04_authentication\", {}))}\n parameters = self.z_01_model_parameters or kwargs.get(\"z_01_model_parameters\", {})\n\n # Set the API key name if provided\n api_key_name = self.z_02_api_key_name or kwargs.get(\"z_02_api_key_name\")\n provider_key = self.z_03_provider_api_key or kwargs.get(\"z_03_provider_api_key\")\n if api_key_name:\n authentication[\"providerKey\"] = api_key_name\n\n # Set authentication and parameters to None if no values are provided\n if not authentication:\n authentication = None\n if not parameters:\n parameters = None\n\n return {\n # must match astrapy.info.CollectionVectorServiceOptions\n \"collection_vector_service_options\": {\n \"provider\": provider_value,\n \"modelName\": model_name,\n \"authentication\": authentication,\n \"parameters\": parameters,\n },\n \"collection_embedding_api_key\": provider_key,\n }\n\n @check_cached_vector_store\n def build_vector_store(self, vectorize_options=None):\n try:\n from langchain_astradb import AstraDBVectorStore\n from langchain_astradb.utils.astradb import SetupMode\n except ImportError as e:\n msg = (\n \"Could not 
import langchain Astra DB integration package. \"\n \"Please install it with `pip install langchain-astradb`.\"\n )\n raise ImportError(msg) from e\n\n try:\n if not self.setup_mode:\n self.setup_mode = self._inputs[\"setup_mode\"].options[0]\n\n setup_mode_value = SetupMode[self.setup_mode.upper()]\n except KeyError as e:\n msg = f\"Invalid setup mode: {self.setup_mode}\"\n raise ValueError(msg) from e\n\n if self.embedding_choice == \"Embedding Model\":\n embedding_dict = {\"embedding\": self.embedding}\n else:\n from astrapy.info import CollectionVectorServiceOptions\n\n # Fetch values from kwargs if any self.* attributes are None\n dict_options = vectorize_options or self.build_vectorize_options()\n\n # Set the embedding dictionary\n embedding_dict = {\n \"collection_vector_service_options\": CollectionVectorServiceOptions.from_dict(\n dict_options.get(\"collection_vector_service_options\")\n ),\n \"collection_embedding_api_key\": dict_options.get(\"collection_embedding_api_key\"),\n }\n\n try:\n vector_store = AstraDBVectorStore(\n collection_name=self.collection_name,\n token=self.token,\n api_endpoint=self.api_endpoint,\n namespace=self.namespace or None,\n environment=parse_api_endpoint(self.api_endpoint).environment if self.api_endpoint else None,\n metric=self.metric or None,\n batch_size=self.batch_size or None,\n bulk_insert_batch_concurrency=self.bulk_insert_batch_concurrency or None,\n bulk_insert_overwrite_concurrency=self.bulk_insert_overwrite_concurrency or None,\n bulk_delete_concurrency=self.bulk_delete_concurrency or None,\n setup_mode=setup_mode_value,\n pre_delete_collection=self.pre_delete_collection,\n metadata_indexing_include=[s for s in self.metadata_indexing_include if s] or None,\n metadata_indexing_exclude=[s for s in self.metadata_indexing_exclude if s] or None,\n collection_indexing_policy=orjson.dumps(self.collection_indexing_policy)\n if self.collection_indexing_policy\n else None,\n **embedding_dict,\n )\n except Exception as e:\n msg = f\"Error initializing AstraDBVectorStore: {e}\"\n raise ValueError(msg) from e\n\n self._add_documents_to_vector_store(vector_store)\n\n return vector_store\n\n def _add_documents_to_vector_store(self, vector_store) -> None:\n documents = []\n for _input in self.ingest_data or []:\n if isinstance(_input, Data):\n documents.append(_input.to_lc_document())\n else:\n msg = \"Vector Store Inputs must be Data objects.\"\n raise TypeError(msg)\n\n if documents:\n self.log(f\"Adding {len(documents)} documents to the Vector Store.\")\n try:\n vector_store.add_documents(documents)\n except Exception as e:\n msg = f\"Error adding documents to AstraDBVectorStore: {e}\"\n raise ValueError(msg) from e\n else:\n self.log(\"No documents to add to the Vector Store.\")\n\n def _map_search_type(self) -> str:\n if self.search_type == \"Similarity with score threshold\":\n return \"similarity_score_threshold\"\n if self.search_type == \"MMR (Max Marginal Relevance)\":\n return \"mmr\"\n return \"similarity\"\n\n def _build_search_args(self):\n args = {\n \"k\": self.number_of_results,\n \"score_threshold\": self.search_score_threshold,\n }\n\n if self.search_filter:\n clean_filter = {k: v for k, v in self.search_filter.items() if k and v}\n if len(clean_filter) > 0:\n args[\"filter\"] = clean_filter\n return args\n\n def search_documents(self, vector_store=None) -> list[Data]:\n if not vector_store:\n vector_store = self.build_vector_store()\n\n self.log(f\"Search input: {self.search_input}\")\n self.log(f\"Search type: 
{self.search_type}\")\n self.log(f\"Number of results: {self.number_of_results}\")\n\n if self.search_input and isinstance(self.search_input, str) and self.search_input.strip():\n try:\n search_type = self._map_search_type()\n search_args = self._build_search_args()\n\n docs = vector_store.search(query=self.search_input, search_type=search_type, **search_args)\n except Exception as e:\n msg = f\"Error performing search in AstraDBVectorStore: {e}\"\n raise ValueError(msg) from e\n\n self.log(f\"Retrieved documents: {len(docs)}\")\n\n data = docs_to_data(docs)\n self.log(f\"Converted documents to data: {len(data)}\")\n self.status = data\n return data\n self.log(\"No search input provided. Skipping search.\")\n return []\n\n def get_retriever_kwargs(self):\n search_args = self._build_search_args()\n return {\n \"search_type\": self._map_search_type(),\n \"search_kwargs\": search_args,\n }\n" }, - "data_template": { + "collection_indexing_policy": { + "_input_type": "StrInput", "advanced": true, - "display_name": "Data Template", + "display_name": "Collection Indexing Policy", "dynamic": false, - "info": "Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.", - "input_types": [ - "Message" - ], + "info": "Optional JSON string for the \"indexing\" field of the collection. See https://docs.datastax.com/en/astra-db-serverless/api-reference/collections.html#the-indexing-option", "list": false, "load_from_db": false, - "name": "data_template", + "name": "collection_indexing_policy", "placeholder": "", "required": false, "show": true, "title_case": false, - "trace_as_input": true, "trace_as_metadata": true, "type": "str", - "value": "{text}" + "value": "" }, - "input_value": { + "collection_name": { + "_input_type": "StrInput", "advanced": false, - "display_name": "Text", + "display_name": "Collection Name", "dynamic": false, - "info": "Message to be passed as output.", - "input_types": [ - "Message" - ], + "info": "The name of the collection within Astra DB where the vectors will be stored.", "list": false, "load_from_db": false, - "name": "input_value", + "name": "collection_name", "placeholder": "", - "required": false, + "required": true, "show": true, "title_case": false, - "trace_as_input": true, "trace_as_metadata": true, "type": "str", "value": "" }, - "sender": { - "advanced": true, - "display_name": "Sender Type", + "embedding": { + "_input_type": "HandleInput", + "advanced": false, + "display_name": "Embedding Model", "dynamic": false, - "info": "Type of sender.", - "name": "sender", - "options": [ - "Machine", - "User" - ], + "info": "Allows an embedding model configuration.", + "input_types": ["Embeddings"], + "list": false, + "name": "embedding", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "embedding_choice": { + "_input_type": "DropdownInput", + "advanced": false, + "combobox": false, + "display_name": "Embedding Model or Astra Vectorize", + "dynamic": false, + "info": "Determines whether to use Astra Vectorize for the collection.", + "name": "embedding_choice", + "options": ["Embedding Model", "Astra Vectorize"], "placeholder": "", + "real_time_refresh": true, "required": false, "show": true, "title_case": false, + "tool_mode": false, "trace_as_metadata": true, "type": "str", - "value": "Machine" + "value": "Embedding Model" }, - "sender_name": { - "advanced": true, - "display_name": "Sender Name", + "ingest_data": { + 
"_input_type": "DataInput", + "advanced": false, + "display_name": "Ingest Data", "dynamic": false, - "info": "Name of the sender.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "name": "sender_name", + "info": "", + "input_types": ["Data"], + "list": true, + "name": "ingest_data", "placeholder": "", "required": false, "show": true, "title_case": false, + "tool_mode": false, "trace_as_input": true, "trace_as_metadata": true, - "type": "str", - "value": "AI" + "type": "other", + "value": "" }, - "session_id": { + "metadata_indexing_exclude": { + "_input_type": "StrInput", "advanced": true, - "display_name": "Session ID", + "display_name": "Metadata Indexing Exclude", "dynamic": false, - "info": "The session ID of the chat. If empty, the current session ID parameter will be used.", - "input_types": [ - "Message" - ], - "list": false, + "info": "Optional list of metadata fields to exclude from the indexing.", + "list": true, "load_from_db": false, - "name": "session_id", + "name": "metadata_indexing_exclude", "placeholder": "", "required": false, "show": true, "title_case": false, - "trace_as_input": true, "trace_as_metadata": true, "type": "str", "value": "" }, - "should_store_message": { + "metadata_indexing_include": { + "_input_type": "StrInput", "advanced": true, - "display_name": "Store Messages", + "display_name": "Metadata Indexing Include", "dynamic": false, - "info": "Store the message in the history.", - "list": false, - "name": "should_store_message", + "info": "Optional list of metadata fields to include in the indexing.", + "list": true, + "load_from_db": false, + "name": "metadata_indexing_include", "placeholder": "", "required": false, "show": true, "title_case": false, "trace_as_metadata": true, - "type": "bool", - "value": true - } - } - }, - "type": "ChatOutput" - }, - "dragging": false, - "height": 308, - "id": "ChatOutput-OrmMa", - "position": { - "x": 3769.242086248817, - "y": 585.3403837062634 - }, - "positionAbsolute": { - "x": 3769.242086248817, - "y": 585.3403837062634 - }, - "selected": false, - "type": "genericNode", - "width": 384 - }, - { - "data": { - "description": "Split text into chunks based on specified criteria.", - "display_name": "Split Text", - "id": "SplitText-74sLS", - "node": { - "base_classes": [ - "Data" - ], - "beta": false, - "conditional_paths": [], - "custom_fields": {}, - "description": "Split text into chunks based on specified criteria.", - "display_name": "Split Text", - "documentation": "", - "edited": false, - "field_order": [ - "data_inputs", - "chunk_overlap", - "chunk_size", - "separator" - ], - "frozen": false, - "icon": "scissors-line-dashed", - "output_types": [], - "outputs": [ - { - "cache": true, - "display_name": "Chunks", - "method": "split_text", - "name": "chunks", - "selected": "Data", - "types": [ - "Data" - ], - "value": "__UNDEFINED__" - } - ], - "pinned": false, - "template": { - "_type": "Component", - "chunk_overlap": { - "advanced": false, - "display_name": "Chunk Overlap", + "type": "str", + "value": "" + }, + "metric": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Metric", "dynamic": false, - "info": "Number of characters to overlap between chunks.", + "info": "Optional distance metric for vector comparisons in the vector store.", + "name": "metric", + "options": ["cosine", "dot_product", "euclidean"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, 
+ "type": "str", + "value": "cosine" + }, + "namespace": { + "_input_type": "StrInput", + "advanced": true, + "display_name": "Namespace", + "dynamic": false, + "info": "Optional namespace within Astra DB to use for the collection.", "list": false, - "name": "chunk_overlap", + "load_from_db": false, + "name": "namespace", "placeholder": "", "required": false, "show": true, "title_case": false, "trace_as_metadata": true, - "type": "int", - "value": 200 + "type": "str", + "value": "" }, - "chunk_size": { - "advanced": false, - "display_name": "Chunk Size", + "number_of_results": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Number of Results", "dynamic": false, - "info": "The maximum number of characters in each chunk.", + "info": "Number of results to return.", "list": false, - "name": "chunk_size", + "name": "number_of_results", "placeholder": "", "required": false, "show": true, "title_case": false, "trace_as_metadata": true, "type": "int", - "value": 1000 + "value": 4 }, - "code": { + "pre_delete_collection": { + "_input_type": "BoolInput", "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", - "info": "", + "display_name": "Pre Delete Collection", + "dynamic": false, + "info": "Boolean flag to determine whether to delete the collection before creating a new one.", "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, + "name": "pre_delete_collection", "placeholder": "", - "required": true, + "required": false, "show": true, "title_case": false, - "type": "code", - "value": "from typing import List\n\nfrom langchain_text_splitters import CharacterTextSplitter\n\nfrom langflow.custom import Component\nfrom langflow.io import HandleInput, IntInput, MessageTextInput, Output\nfrom langflow.schema import Data\nfrom langflow.utils.util import unescape_string\n\n\nclass SplitTextComponent(Component):\n display_name: str = \"Split Text\"\n description: str = \"Split text into chunks based on specified criteria.\"\n icon = \"scissors-line-dashed\"\n name = \"SplitText\"\n\n inputs = [\n HandleInput(\n name=\"data_inputs\",\n display_name=\"Data Inputs\",\n info=\"The data to split.\",\n input_types=[\"Data\"],\n is_list=True,\n ),\n IntInput(\n name=\"chunk_overlap\",\n display_name=\"Chunk Overlap\",\n info=\"Number of characters to overlap between chunks.\",\n value=200,\n ),\n IntInput(\n name=\"chunk_size\",\n display_name=\"Chunk Size\",\n info=\"The maximum number of characters in each chunk.\",\n value=1000,\n ),\n MessageTextInput(\n name=\"separator\",\n display_name=\"Separator\",\n info=\"The character to split on. 
Defaults to newline.\",\n value=\"\\n\",\n ),\n ]\n\n outputs = [\n Output(display_name=\"Chunks\", name=\"chunks\", method=\"split_text\"),\n ]\n\n def _docs_to_data(self, docs):\n data = []\n for doc in docs:\n data.append(Data(text=doc.page_content, data=doc.metadata))\n return data\n\n def split_text(self) -> List[Data]:\n separator = unescape_string(self.separator)\n\n documents = []\n for _input in self.data_inputs:\n if isinstance(_input, Data):\n documents.append(_input.to_lc_document())\n\n splitter = CharacterTextSplitter(\n chunk_overlap=self.chunk_overlap,\n chunk_size=self.chunk_size,\n separator=separator,\n )\n docs = splitter.split_documents(documents)\n data = self._docs_to_data(docs)\n self.status = data\n return data\n" + "trace_as_metadata": true, + "type": "bool", + "value": false }, - "data_inputs": { - "advanced": false, - "display_name": "Data Inputs", + "search_filter": { + "_input_type": "DictInput", + "advanced": true, + "display_name": "Search Metadata Filter", "dynamic": false, - "info": "The data to split.", - "input_types": [ - "Data" - ], + "info": "Optional dictionary of filters to apply to the search query.", "list": true, - "name": "data_inputs", + "name": "search_filter", "placeholder": "", "required": false, "show": true, "title_case": false, - "trace_as_metadata": true, - "type": "other", - "value": "" + "trace_as_input": true, + "type": "dict", + "value": {} }, - "separator": { + "search_input": { + "_input_type": "MultilineInput", "advanced": false, - "display_name": "Separator", + "display_name": "Search Input", "dynamic": false, - "info": "The character to split on. Defaults to newline.", - "input_types": [ - "Message" - ], + "info": "", + "input_types": ["Message"], "list": false, "load_from_db": false, - "name": "separator", + "multiline": true, + "name": "search_input", "placeholder": "", "required": false, "show": true, "title_case": false, + "tool_mode": false, "trace_as_input": true, "trace_as_metadata": true, "type": "str", - "value": "\n" - } - } - }, - "type": "SplitText" - }, - "dragging": false, - "height": 527, - "id": "SplitText-74sLS", - "position": { - "x": 2044.2799160989089, - "y": 1185.3130355818519 - }, - "positionAbsolute": { - "x": 2044.2799160989089, - "y": 1185.3130355818519 - }, - "selected": false, - "type": "genericNode", - "width": 384 - }, - { - "data": { - "description": "A generic file loader.", - "display_name": "File", - "id": "File-RKdDQ", - "node": { - "base_classes": [ - "Data" - ], - "beta": false, - "conditional_paths": [], - "custom_fields": {}, - "description": "A generic file loader.", - "display_name": "File", - "documentation": "", - "edited": false, - "field_order": [ - "path", - "silent_errors" - ], - "frozen": false, - "icon": "file-text", - "output_types": [], - "outputs": [ - { - "cache": true, - "display_name": "Data", - "method": "load_file", - "name": "data", - "selected": "Data", - "types": [ - "Data" - ], - "value": "__UNDEFINED__" - } - ], - "pinned": false, - "template": { - "_type": "Component", - "code": { + "value": "" + }, + "search_score_threshold": { + "_input_type": "FloatInput", "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", - "info": "", + "display_name": "Search Score Threshold", + "dynamic": false, + "info": "Minimum similarity score threshold for search results. 
(when using 'Similarity with score threshold')", "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, + "name": "search_score_threshold", "placeholder": "", - "required": true, + "required": false, "show": true, "title_case": false, - "type": "code", - "value": "from pathlib import Path\n\nfrom langflow.base.data.utils import TEXT_FILE_TYPES, parse_text_file_to_data\nfrom langflow.custom import Component\nfrom langflow.io import BoolInput, FileInput, Output\nfrom langflow.schema import Data\n\n\nclass FileComponent(Component):\n display_name = \"File\"\n description = \"A generic file loader.\"\n icon = \"file-text\"\n name = \"File\"\n\n inputs = [\n FileInput(\n name=\"path\",\n display_name=\"Path\",\n file_types=TEXT_FILE_TYPES,\n info=f\"Supported file types: {', '.join(TEXT_FILE_TYPES)}\",\n ),\n BoolInput(\n name=\"silent_errors\",\n display_name=\"Silent Errors\",\n advanced=True,\n info=\"If true, errors will not raise an exception.\",\n ),\n ]\n\n outputs = [\n Output(display_name=\"Data\", name=\"data\", method=\"load_file\"),\n ]\n\n def load_file(self) -> Data:\n if not self.path:\n raise ValueError(\"Please, upload a file to use this component.\")\n resolved_path = self.resolve_path(self.path)\n silent_errors = self.silent_errors\n\n extension = Path(resolved_path).suffix[1:].lower()\n\n if extension == \"doc\":\n raise ValueError(\"doc files are not supported. Please save as .docx\")\n if extension not in TEXT_FILE_TYPES:\n raise ValueError(f\"Unsupported file type: {extension}\")\n\n data = parse_text_file_to_data(resolved_path, silent_errors)\n self.status = data if data else \"No data\"\n return data or Data()\n" + "trace_as_metadata": true, + "type": "float", + "value": 0 }, - "path": { - "advanced": false, - "display_name": "Path", + "search_type": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Search Type", "dynamic": false, - "fileTypes": [ - "txt", - "md", - "mdx", - "csv", - "json", - "yaml", - "yml", - "xml", - "html", - "htm", - "pdf", - "docx", - "py", - "sh", - "sql", - "js", - "ts", - "tsx" + "info": "Search type to use", + "name": "search_type", + "options": [ + "Similarity", + "Similarity with score threshold", + "MMR (Max Marginal Relevance)" ], - "file_path": "", - "info": "Supported file types: txt, md, mdx, csv, json, yaml, yml, xml, html, htm, pdf, docx, py, sh, sql, js, ts, tsx", - "list": false, - "name": "path", "placeholder": "", "required": false, "show": true, "title_case": false, + "tool_mode": false, "trace_as_metadata": true, - "type": "file", - "value": "" + "type": "str", + "value": "Similarity" }, - "silent_errors": { + "setup_mode": { + "_input_type": "DropdownInput", "advanced": true, - "display_name": "Silent Errors", + "combobox": false, + "display_name": "Setup Mode", "dynamic": false, - "info": "If true, errors will not raise an exception.", - "list": false, - "name": "silent_errors", + "info": "Configuration mode for setting up the vector store, with options like 'Sync' or 'Off'.", + "name": "setup_mode", + "options": ["Sync", "Off"], "placeholder": "", "required": false, "show": true, "title_case": false, + "tool_mode": false, "trace_as_metadata": true, - "type": "bool", - "value": false + "type": "str", + "value": "Sync" + }, + "token": { + "_input_type": "SecretStrInput", + "advanced": false, + "display_name": "Astra DB Application Token", + "dynamic": false, + "info": "Authentication token for accessing Astra DB.", + "input_types": 
["Message"], + "load_from_db": true, + "name": "token", + "password": true, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "str", + "value": "ASTRA_DB_APPLICATION_TOKEN" } - } + }, + "tool_mode": false }, - "type": "File" + "type": "AstraDB" }, "dragging": false, - "height": 300, - "id": "File-RKdDQ", + "height": 749, + "id": "AstraDB-3buPx", "position": { - "x": 1418.981990122179, - "y": 1539.3825691184466 + "x": 1225.8151138573664, + "y": 369.2727294042354 }, "positionAbsolute": { - "x": 1418.981990122179, - "y": 1539.3825691184466 + "x": 1225.8151138573664, + "y": 369.2727294042354 }, "selected": false, "type": "genericNode", - "width": 384 + "width": 320 }, { "data": { - "description": "Implementation of Vector Store using Astra DB with search capabilities", - "display_name": "Astra DB", - "edited": false, - "id": "AstraVectorStoreComponent-wvuVK", + "id": "OpenAIEmbeddings-CeoV9", "node": { - "base_classes": [ - "Data", - "Retriever" - ], + "base_classes": ["Embeddings"], "beta": false, "conditional_paths": [], "custom_fields": {}, - "description": "Implementation of Vector Store using Astra DB with search capabilities", - "display_name": "Astra DB", - "documentation": "https://python.langchain.com/docs/integrations/vectorstores/astradb", + "description": "Generate embeddings using OpenAI models.", + "display_name": "OpenAI Embeddings", + "documentation": "", "edited": false, "field_order": [ - "collection_name", - "token", - "api_endpoint", - "search_input", - "ingest_data", - "namespace", - "metric", - "batch_size", - "bulk_insert_batch_concurrency", - "bulk_insert_overwrite_concurrency", - "bulk_delete_concurrency", - "setup_mode", - "pre_delete_collection", - "metadata_indexing_include", - "embedding", - "metadata_indexing_exclude", - "collection_indexing_policy", - "number_of_results", - "search_type", - "search_score_threshold", - "search_filter" + "default_headers", + "default_query", + "chunk_size", + "client", + "deployment", + "embedding_ctx_length", + "max_retries", + "model", + "model_kwargs", + "openai_api_key", + "openai_api_base", + "openai_api_type", + "openai_api_version", + "openai_organization", + "openai_proxy", + "request_timeout", + "show_progress_bar", + "skip_empty", + "tiktoken_model_name", + "tiktoken_enable", + "dimensions" ], "frozen": false, - "icon": "AstraDB", + "icon": "OpenAI", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, "output_types": [], "outputs": [ { "cache": true, - "display_name": "Retriever", - "method": "build_base_retriever", - "name": "base_retriever", - "selected": "Retriever", - "types": [ - "Retriever" - ], - "value": "__UNDEFINED__" - }, - { - "cache": true, - "display_name": "Search Results", - "method": "search_documents", - "name": "search_results", - "selected": "Data", - "types": [ - "Data" - ], - "value": "__UNDEFINED__" - }, - { - "cache": true, - "display_name": "Vector Store", - "method": "cast_vector_store", - "name": "vector_store", - "selected": "VectorStore", - "types": [ - "VectorStore" - ], + "display_name": "Embeddings", + "method": "build_embeddings", + "name": "embeddings", + "required_inputs": [], + "selected": "Embeddings", + "types": ["Embeddings"], "value": "__UNDEFINED__" } ], "pinned": false, "template": { "_type": "Component", - "api_endpoint": { - "advanced": false, - "display_name": "API Endpoint", + "chunk_size": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Chunk Size", "dynamic": false, - "info": "API endpoint URL for 
the Astra DB service.", - "input_types": [ - "Message" - ], - "load_from_db": true, - "name": "api_endpoint", - "password": true, + "info": "", + "list": false, + "name": "chunk_size", "placeholder": "", - "required": true, + "required": false, "show": true, "title_case": false, - "type": "str", - "value": "" + "trace_as_metadata": true, + "type": "int", + "value": 1000 }, - "batch_size": { + "client": { + "_input_type": "MessageTextInput", "advanced": true, - "display_name": "Batch Size", + "display_name": "Client", "dynamic": false, - "info": "Optional number of data to process in a single batch.", + "info": "", + "input_types": ["Message"], "list": false, - "name": "batch_size", + "load_from_db": false, + "name": "client", "placeholder": "", "required": false, "show": true, "title_case": false, + "tool_mode": false, + "trace_as_input": true, "trace_as_metadata": true, - "type": "int", + "type": "str", "value": "" }, - "bulk_delete_concurrency": { + "code": { "advanced": true, - "display_name": "Bulk Delete Concurrency", - "dynamic": false, - "info": "Optional concurrency level for bulk delete operations.", + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", "list": false, - "name": "bulk_delete_concurrency", + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, "placeholder": "", - "required": false, + "required": true, "show": true, "title_case": false, - "trace_as_metadata": true, - "type": "int", - "value": "" + "type": "code", + "value": "from langchain_openai import OpenAIEmbeddings\n\nfrom langflow.base.embeddings.model import LCEmbeddingsModel\nfrom langflow.base.models.openai_constants import OPENAI_EMBEDDING_MODEL_NAMES\nfrom langflow.field_typing import Embeddings\nfrom langflow.io import BoolInput, DictInput, DropdownInput, FloatInput, IntInput, MessageTextInput, SecretStrInput\n\n\nclass OpenAIEmbeddingsComponent(LCEmbeddingsModel):\n display_name = \"OpenAI Embeddings\"\n description = \"Generate embeddings using OpenAI models.\"\n icon = \"OpenAI\"\n name = \"OpenAIEmbeddings\"\n\n inputs = [\n DictInput(\n name=\"default_headers\",\n display_name=\"Default Headers\",\n advanced=True,\n info=\"Default headers to use for the API request.\",\n ),\n DictInput(\n name=\"default_query\",\n display_name=\"Default Query\",\n advanced=True,\n info=\"Default query parameters to use for the API request.\",\n ),\n IntInput(name=\"chunk_size\", display_name=\"Chunk Size\", advanced=True, value=1000),\n MessageTextInput(name=\"client\", display_name=\"Client\", advanced=True),\n MessageTextInput(name=\"deployment\", display_name=\"Deployment\", advanced=True),\n IntInput(name=\"embedding_ctx_length\", display_name=\"Embedding Context Length\", advanced=True, value=1536),\n IntInput(name=\"max_retries\", display_name=\"Max Retries\", value=3, advanced=True),\n DropdownInput(\n name=\"model\",\n display_name=\"Model\",\n advanced=False,\n options=OPENAI_EMBEDDING_MODEL_NAMES,\n value=\"text-embedding-3-small\",\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n SecretStrInput(name=\"openai_api_key\", display_name=\"OpenAI API Key\", value=\"OPENAI_API_KEY\"),\n MessageTextInput(name=\"openai_api_base\", display_name=\"OpenAI API Base\", advanced=True),\n MessageTextInput(name=\"openai_api_type\", display_name=\"OpenAI API Type\", advanced=True),\n MessageTextInput(name=\"openai_api_version\", display_name=\"OpenAI API Version\", advanced=True),\n MessageTextInput(\n name=\"openai_organization\",\n 
display_name=\"OpenAI Organization\",\n advanced=True,\n ),\n MessageTextInput(name=\"openai_proxy\", display_name=\"OpenAI Proxy\", advanced=True),\n FloatInput(name=\"request_timeout\", display_name=\"Request Timeout\", advanced=True),\n BoolInput(name=\"show_progress_bar\", display_name=\"Show Progress Bar\", advanced=True),\n BoolInput(name=\"skip_empty\", display_name=\"Skip Empty\", advanced=True),\n MessageTextInput(\n name=\"tiktoken_model_name\",\n display_name=\"TikToken Model Name\",\n advanced=True,\n ),\n BoolInput(\n name=\"tiktoken_enable\",\n display_name=\"TikToken Enable\",\n advanced=True,\n value=True,\n info=\"If False, you must have transformers installed.\",\n ),\n IntInput(\n name=\"dimensions\",\n display_name=\"Dimensions\",\n info=\"The number of dimensions the resulting output embeddings should have. \"\n \"Only supported by certain models.\",\n advanced=True,\n ),\n ]\n\n def build_embeddings(self) -> Embeddings:\n return OpenAIEmbeddings(\n client=self.client or None,\n model=self.model,\n dimensions=self.dimensions or None,\n deployment=self.deployment or None,\n api_version=self.openai_api_version or None,\n base_url=self.openai_api_base or None,\n openai_api_type=self.openai_api_type or None,\n openai_proxy=self.openai_proxy or None,\n embedding_ctx_length=self.embedding_ctx_length,\n api_key=self.openai_api_key or None,\n organization=self.openai_organization or None,\n allowed_special=\"all\",\n disallowed_special=\"all\",\n chunk_size=self.chunk_size,\n max_retries=self.max_retries,\n timeout=self.request_timeout or None,\n tiktoken_enabled=self.tiktoken_enable,\n tiktoken_model_name=self.tiktoken_model_name or None,\n show_progress_bar=self.show_progress_bar,\n model_kwargs=self.model_kwargs,\n skip_empty=self.skip_empty,\n default_headers=self.default_headers or None,\n default_query=self.default_query or None,\n )\n" }, - "bulk_insert_batch_concurrency": { + "default_headers": { + "_input_type": "DictInput", "advanced": true, - "display_name": "Bulk Insert Batch Concurrency", + "display_name": "Default Headers", "dynamic": false, - "info": "Optional concurrency level for bulk insert operations.", + "info": "Default headers to use for the API request.", "list": false, - "name": "bulk_insert_batch_concurrency", + "name": "default_headers", "placeholder": "", "required": false, "show": true, "title_case": false, - "trace_as_metadata": true, - "type": "int", - "value": "" + "trace_as_input": true, + "type": "dict", + "value": {} }, - "bulk_insert_overwrite_concurrency": { + "default_query": { + "_input_type": "DictInput", "advanced": true, - "display_name": "Bulk Insert Overwrite Concurrency", + "display_name": "Default Query", "dynamic": false, - "info": "Optional concurrency level for bulk insert operations that overwrite existing data.", + "info": "Default query parameters to use for the API request.", "list": false, - "name": "bulk_insert_overwrite_concurrency", + "name": "default_query", "placeholder": "", "required": false, "show": true, "title_case": false, - "trace_as_metadata": true, - "type": "int", - "value": "" + "trace_as_input": true, + "type": "dict", + "value": {} }, - "code": { + "deployment": { + "_input_type": "MessageTextInput", "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", + "display_name": "Deployment", + "dynamic": false, "info": "", + "input_types": ["Message"], "list": false, "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, + "name": "deployment", "placeholder": "", - 
"required": true, + "required": false, "show": true, "title_case": false, - "type": "code", - "value": "from loguru import logger\n\nfrom langflow.base.vectorstores.model import LCVectorStoreComponent, check_cached_vector_store\nfrom langflow.helpers import docs_to_data\nfrom langflow.inputs import DictInput, FloatInput\nfrom langflow.io import (\n BoolInput,\n DataInput,\n DropdownInput,\n HandleInput,\n IntInput,\n MultilineInput,\n SecretStrInput,\n StrInput,\n)\nfrom langflow.schema import Data\n\n\nclass AstraVectorStoreComponent(LCVectorStoreComponent):\n display_name: str = \"Astra DB\"\n description: str = \"Implementation of Vector Store using Astra DB with search capabilities\"\n documentation: str = \"https://python.langchain.com/docs/integrations/vectorstores/astradb\"\n name = \"AstraDB\"\n icon: str = \"AstraDB\"\n\n inputs = [\n StrInput(\n name=\"collection_name\",\n display_name=\"Collection Name\",\n info=\"The name of the collection within Astra DB where the vectors will be stored.\",\n required=True,\n ),\n SecretStrInput(\n name=\"token\",\n display_name=\"Astra DB Application Token\",\n info=\"Authentication token for accessing Astra DB.\",\n value=\"ASTRA_DB_APPLICATION_TOKEN\",\n required=True,\n ),\n SecretStrInput(\n name=\"api_endpoint\",\n display_name=\"API Endpoint\",\n info=\"API endpoint URL for the Astra DB service.\",\n value=\"ASTRA_DB_API_ENDPOINT\",\n required=True,\n ),\n MultilineInput(\n name=\"search_input\",\n display_name=\"Search Input\",\n ),\n DataInput(\n name=\"ingest_data\",\n display_name=\"Ingest Data\",\n is_list=True,\n ),\n StrInput(\n name=\"namespace\",\n display_name=\"Namespace\",\n info=\"Optional namespace within Astra DB to use for the collection.\",\n advanced=True,\n ),\n DropdownInput(\n name=\"metric\",\n display_name=\"Metric\",\n info=\"Optional distance metric for vector comparisons in the vector store.\",\n options=[\"cosine\", \"dot_product\", \"euclidean\"],\n advanced=True,\n ),\n IntInput(\n name=\"batch_size\",\n display_name=\"Batch Size\",\n info=\"Optional number of data to process in a single batch.\",\n advanced=True,\n ),\n IntInput(\n name=\"bulk_insert_batch_concurrency\",\n display_name=\"Bulk Insert Batch Concurrency\",\n info=\"Optional concurrency level for bulk insert operations.\",\n advanced=True,\n ),\n IntInput(\n name=\"bulk_insert_overwrite_concurrency\",\n display_name=\"Bulk Insert Overwrite Concurrency\",\n info=\"Optional concurrency level for bulk insert operations that overwrite existing data.\",\n advanced=True,\n ),\n IntInput(\n name=\"bulk_delete_concurrency\",\n display_name=\"Bulk Delete Concurrency\",\n info=\"Optional concurrency level for bulk delete operations.\",\n advanced=True,\n ),\n DropdownInput(\n name=\"setup_mode\",\n display_name=\"Setup Mode\",\n info=\"Configuration mode for setting up the vector store, with options like 'Sync', 'Async', or 'Off'.\",\n options=[\"Sync\", \"Async\", \"Off\"],\n advanced=True,\n value=\"Sync\",\n ),\n BoolInput(\n name=\"pre_delete_collection\",\n display_name=\"Pre Delete Collection\",\n info=\"Boolean flag to determine whether to delete the collection before creating a new one.\",\n advanced=True,\n ),\n StrInput(\n name=\"metadata_indexing_include\",\n display_name=\"Metadata Indexing Include\",\n info=\"Optional list of metadata fields to include in the indexing.\",\n advanced=True,\n ),\n HandleInput(\n name=\"embedding\",\n display_name=\"Embedding or Astra Vectorize\",\n input_types=[\"Embeddings\", \"dict\"],\n info=\"Allows either 
an embedding model or an Astra Vectorize configuration.\", # TODO: This should be optional, but need to refactor langchain-astradb first.\n ),\n StrInput(\n name=\"metadata_indexing_exclude\",\n display_name=\"Metadata Indexing Exclude\",\n info=\"Optional list of metadata fields to exclude from the indexing.\",\n advanced=True,\n ),\n StrInput(\n name=\"collection_indexing_policy\",\n display_name=\"Collection Indexing Policy\",\n info=\"Optional dictionary defining the indexing policy for the collection.\",\n advanced=True,\n ),\n IntInput(\n name=\"number_of_results\",\n display_name=\"Number of Results\",\n info=\"Number of results to return.\",\n advanced=True,\n value=4,\n ),\n DropdownInput(\n name=\"search_type\",\n display_name=\"Search Type\",\n info=\"Search type to use\",\n options=[\"Similarity\", \"Similarity with score threshold\", \"MMR (Max Marginal Relevance)\"],\n value=\"Similarity\",\n advanced=True,\n ),\n FloatInput(\n name=\"search_score_threshold\",\n display_name=\"Search Score Threshold\",\n info=\"Minimum similarity score threshold for search results. (when using 'Similarity with score threshold')\",\n value=0,\n advanced=True,\n ),\n DictInput(\n name=\"search_filter\",\n display_name=\"Search Metadata Filter\",\n info=\"Optional dictionary of filters to apply to the search query.\",\n advanced=True,\n is_list=True,\n ),\n ]\n\n @check_cached_vector_store\n def build_vector_store(self):\n try:\n from langchain_astradb import AstraDBVectorStore\n from langchain_astradb.utils.astradb import SetupMode\n except ImportError:\n raise ImportError(\n \"Could not import langchain Astra DB integration package. \"\n \"Please install it with `pip install langchain-astradb`.\"\n )\n\n try:\n if not self.setup_mode:\n self.setup_mode = self._inputs[\"setup_mode\"].options[0]\n\n setup_mode_value = SetupMode[self.setup_mode.upper()]\n except KeyError:\n raise ValueError(f\"Invalid setup mode: {self.setup_mode}\")\n\n if not isinstance(self.embedding, dict):\n embedding_dict = {\"embedding\": self.embedding}\n else:\n from astrapy.info import CollectionVectorServiceOptions\n\n dict_options = self.embedding.get(\"collection_vector_service_options\", {})\n dict_options[\"authentication\"] = {\n k: v for k, v in dict_options.get(\"authentication\", {}).items() if k and v\n }\n dict_options[\"parameters\"] = {k: v for k, v in dict_options.get(\"parameters\", {}).items() if k and v}\n embedding_dict = {\n \"collection_vector_service_options\": CollectionVectorServiceOptions.from_dict(dict_options)\n }\n collection_embedding_api_key = self.embedding.get(\"collection_embedding_api_key\")\n if collection_embedding_api_key:\n embedding_dict[\"collection_embedding_api_key\"] = collection_embedding_api_key\n\n vector_store_kwargs = {\n **embedding_dict,\n \"collection_name\": self.collection_name,\n \"token\": self.token,\n \"api_endpoint\": self.api_endpoint,\n \"namespace\": self.namespace or None,\n \"metric\": self.metric or None,\n \"batch_size\": self.batch_size or None,\n \"bulk_insert_batch_concurrency\": self.bulk_insert_batch_concurrency or None,\n \"bulk_insert_overwrite_concurrency\": self.bulk_insert_overwrite_concurrency or None,\n \"bulk_delete_concurrency\": self.bulk_delete_concurrency or None,\n \"setup_mode\": setup_mode_value,\n \"pre_delete_collection\": self.pre_delete_collection or False,\n }\n\n if self.metadata_indexing_include:\n vector_store_kwargs[\"metadata_indexing_include\"] = self.metadata_indexing_include\n elif self.metadata_indexing_exclude:\n 
vector_store_kwargs[\"metadata_indexing_exclude\"] = self.metadata_indexing_exclude\n elif self.collection_indexing_policy:\n vector_store_kwargs[\"collection_indexing_policy\"] = self.collection_indexing_policy\n\n try:\n vector_store = AstraDBVectorStore(**vector_store_kwargs)\n except Exception as e:\n raise ValueError(f\"Error initializing AstraDBVectorStore: {str(e)}\") from e\n\n self._add_documents_to_vector_store(vector_store)\n return vector_store\n\n def _add_documents_to_vector_store(self, vector_store):\n documents = []\n for _input in self.ingest_data or []:\n if isinstance(_input, Data):\n documents.append(_input.to_lc_document())\n else:\n raise ValueError(\"Vector Store Inputs must be Data objects.\")\n\n if documents:\n logger.debug(f\"Adding {len(documents)} documents to the Vector Store.\")\n try:\n vector_store.add_documents(documents)\n except Exception as e:\n raise ValueError(f\"Error adding documents to AstraDBVectorStore: {str(e)}\") from e\n else:\n logger.debug(\"No documents to add to the Vector Store.\")\n\n def _map_search_type(self):\n if self.search_type == \"Similarity with score threshold\":\n return \"similarity_score_threshold\"\n elif self.search_type == \"MMR (Max Marginal Relevance)\":\n return \"mmr\"\n else:\n return \"similarity\"\n\n def _build_search_args(self):\n args = {\n \"k\": self.number_of_results,\n \"score_threshold\": self.search_score_threshold,\n }\n\n if self.search_filter:\n clean_filter = {k: v for k, v in self.search_filter.items() if k and v}\n if len(clean_filter) > 0:\n args[\"filter\"] = clean_filter\n return args\n\n def search_documents(self) -> list[Data]:\n vector_store = self.build_vector_store()\n\n logger.debug(f\"Search input: {self.search_input}\")\n logger.debug(f\"Search type: {self.search_type}\")\n logger.debug(f\"Number of results: {self.number_of_results}\")\n\n if self.search_input and isinstance(self.search_input, str) and self.search_input.strip():\n try:\n search_type = self._map_search_type()\n search_args = self._build_search_args()\n\n docs = vector_store.search(query=self.search_input, search_type=search_type, **search_args)\n except Exception as e:\n raise ValueError(f\"Error performing search in AstraDBVectorStore: {str(e)}\") from e\n\n logger.debug(f\"Retrieved documents: {len(docs)}\")\n\n data = docs_to_data(docs)\n logger.debug(f\"Converted documents to data: {len(data)}\")\n self.status = data\n return data\n else:\n logger.debug(\"No search input provided. Skipping search.\")\n return []\n\n def get_retriever_kwargs(self):\n search_args = self._build_search_args()\n return {\n \"search_type\": self._map_search_type(),\n \"search_kwargs\": search_args,\n }\n" + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" }, - "collection_indexing_policy": { + "dimensions": { + "_input_type": "IntInput", "advanced": true, - "display_name": "Collection Indexing Policy", + "display_name": "Dimensions", "dynamic": false, - "info": "Optional dictionary defining the indexing policy for the collection.", + "info": "The number of dimensions the resulting output embeddings should have. 
Only supported by certain models.", "list": false, - "load_from_db": false, - "name": "collection_indexing_policy", + "name": "dimensions", "placeholder": "", "required": false, "show": true, "title_case": false, "trace_as_metadata": true, - "type": "str", + "type": "int", "value": "" }, - "collection_name": { - "advanced": false, - "display_name": "Collection Name", + "embedding_ctx_length": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Embedding Context Length", "dynamic": false, - "info": "The name of the collection within Astra DB where the vectors will be stored.", + "info": "", "list": false, - "load_from_db": false, - "name": "collection_name", + "name": "embedding_ctx_length", "placeholder": "", - "required": true, + "required": false, "show": true, "title_case": false, "trace_as_metadata": true, - "type": "str", - "value": "langflow" + "type": "int", + "value": 1536 }, - "embedding": { - "advanced": false, - "display_name": "Embedding or Astra Vectorize", + "max_retries": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Max Retries", "dynamic": false, - "info": "Allows either an embedding model or an Astra Vectorize configuration.", - "input_types": [ - "Embeddings", - "dict" - ], + "info": "", "list": false, - "name": "embedding", + "name": "max_retries", "placeholder": "", "required": false, "show": true, "title_case": false, "trace_as_metadata": true, - "type": "other", - "value": "" + "type": "int", + "value": 3 }, - "ingest_data": { + "model": { + "_input_type": "DropdownInput", "advanced": false, - "display_name": "Ingest Data", + "combobox": false, + "display_name": "Model", "dynamic": false, "info": "", - "input_types": [ - "Data" + "name": "model", + "options": [ + "text-embedding-3-small", + "text-embedding-3-large", + "text-embedding-ada-002" ], - "list": true, - "name": "ingest_data", "placeholder": "", "required": false, "show": true, "title_case": false, - "trace_as_input": true, + "tool_mode": false, "trace_as_metadata": true, - "type": "other", - "value": "" + "type": "str", + "value": "text-embedding-3-small" }, - "metadata_indexing_exclude": { + "model_kwargs": { + "_input_type": "DictInput", "advanced": true, - "display_name": "Metadata Indexing Exclude", + "display_name": "Model Kwargs", "dynamic": false, - "info": "Optional list of metadata fields to exclude from the indexing.", + "info": "", "list": false, - "load_from_db": false, - "name": "metadata_indexing_exclude", + "name": "model_kwargs", "placeholder": "", "required": false, "show": true, "title_case": false, - "trace_as_metadata": true, - "type": "str", - "value": "" + "trace_as_input": true, + "type": "dict", + "value": {} }, - "metadata_indexing_include": { + "openai_api_base": { + "_input_type": "MessageTextInput", "advanced": true, - "display_name": "Metadata Indexing Include", + "display_name": "OpenAI API Base", "dynamic": false, - "info": "Optional list of metadata fields to include in the indexing.", + "info": "", + "input_types": ["Message"], "list": false, "load_from_db": false, - "name": "metadata_indexing_include", + "name": "openai_api_base", "placeholder": "", "required": false, "show": true, "title_case": false, + "tool_mode": false, + "trace_as_input": true, "trace_as_metadata": true, "type": "str", "value": "" }, - "metric": { - "advanced": true, - "display_name": "Metric", + "openai_api_key": { + "_input_type": "SecretStrInput", + "advanced": false, + "display_name": "OpenAI API Key", "dynamic": false, - "info": "Optional distance metric 
for vector comparisons in the vector store.", - "name": "metric", - "options": [ - "cosine", - "dot_product", - "euclidean" - ], + "info": "", + "input_types": ["Message"], + "load_from_db": true, + "name": "openai_api_key", + "password": true, "placeholder": "", "required": false, "show": true, "title_case": false, - "trace_as_metadata": true, "type": "str", - "value": "" + "value": "OPENAI_API_KEY" }, - "namespace": { + "openai_api_type": { + "_input_type": "MessageTextInput", "advanced": true, - "display_name": "Namespace", + "display_name": "OpenAI API Type", "dynamic": false, - "info": "Optional namespace within Astra DB to use for the collection.", + "info": "", + "input_types": ["Message"], "list": false, "load_from_db": false, - "name": "namespace", + "name": "openai_api_type", "placeholder": "", "required": false, "show": true, "title_case": false, + "tool_mode": false, + "trace_as_input": true, "trace_as_metadata": true, "type": "str", "value": "" }, - "number_of_results": { + "openai_api_version": { + "_input_type": "MessageTextInput", "advanced": true, - "display_name": "Number of Results", + "display_name": "OpenAI API Version", "dynamic": false, - "info": "Number of results to return.", + "info": "", + "input_types": ["Message"], "list": false, - "name": "number_of_results", + "load_from_db": false, + "name": "openai_api_version", "placeholder": "", "required": false, "show": true, "title_case": false, + "tool_mode": false, + "trace_as_input": true, "trace_as_metadata": true, - "type": "int", - "value": 4 + "type": "str", + "value": "" }, - "pre_delete_collection": { + "openai_organization": { + "_input_type": "MessageTextInput", "advanced": true, - "display_name": "Pre Delete Collection", + "display_name": "OpenAI Organization", "dynamic": false, - "info": "Boolean flag to determine whether to delete the collection before creating a new one.", + "info": "", + "input_types": ["Message"], "list": false, - "name": "pre_delete_collection", + "load_from_db": false, + "name": "openai_organization", "placeholder": "", "required": false, "show": true, "title_case": false, + "tool_mode": false, + "trace_as_input": true, "trace_as_metadata": true, - "type": "bool", - "value": false + "type": "str", + "value": "" }, - "search_filter": { + "openai_proxy": { + "_input_type": "MessageTextInput", "advanced": true, - "display_name": "Search Metadata Filter", + "display_name": "OpenAI Proxy", "dynamic": false, - "info": "Optional dictionary of filters to apply to the search query.", - "list": true, - "name": "search_filter", + "info": "", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "openai_proxy", "placeholder": "", "required": false, "show": true, "title_case": false, + "tool_mode": false, "trace_as_input": true, - "type": "dict", - "value": {} + "trace_as_metadata": true, + "type": "str", + "value": "" }, - "search_input": { - "advanced": false, - "display_name": "Search Input", + "request_timeout": { + "_input_type": "FloatInput", + "advanced": true, + "display_name": "Request Timeout", "dynamic": false, "info": "", - "input_types": [ - "Message" - ], "list": false, - "load_from_db": false, - "multiline": true, - "name": "search_input", + "name": "request_timeout", "placeholder": "", "required": false, "show": true, "title_case": false, - "trace_as_input": true, "trace_as_metadata": true, - "type": "str", + "type": "float", "value": "" }, - "search_score_threshold": { + "show_progress_bar": { + "_input_type": "BoolInput", "advanced": true, - 
"display_name": "Search Score Threshold", + "display_name": "Show Progress Bar", "dynamic": false, - "info": "Minimum similarity score threshold for search results. (when using 'Similarity with score threshold')", + "info": "", "list": false, - "name": "search_score_threshold", + "name": "show_progress_bar", "placeholder": "", "required": false, "show": true, "title_case": false, "trace_as_metadata": true, - "type": "float", - "value": 0 + "type": "bool", + "value": false }, - "search_type": { + "skip_empty": { + "_input_type": "BoolInput", "advanced": true, - "display_name": "Search Type", + "display_name": "Skip Empty", "dynamic": false, - "info": "Search type to use", - "name": "search_type", - "options": [ - "Similarity", - "Similarity with score threshold", - "MMR (Max Marginal Relevance)" - ], + "info": "", + "list": false, + "name": "skip_empty", "placeholder": "", "required": false, "show": true, "title_case": false, "trace_as_metadata": true, - "type": "str", - "value": "Similarity" + "type": "bool", + "value": false }, - "setup_mode": { + "tiktoken_enable": { + "_input_type": "BoolInput", "advanced": true, - "display_name": "Setup Mode", + "display_name": "TikToken Enable", "dynamic": false, - "info": "Configuration mode for setting up the vector store, with options like 'Sync', 'Async', or 'Off'.", - "name": "setup_mode", - "options": [ - "Sync", - "Async", - "Off" - ], + "info": "If False, you must have transformers installed.", + "list": false, + "name": "tiktoken_enable", "placeholder": "", "required": false, "show": true, "title_case": false, "trace_as_metadata": true, - "type": "str", - "value": "Sync" + "type": "bool", + "value": true }, - "token": { - "advanced": false, - "display_name": "Astra DB Application Token", + "tiktoken_model_name": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "TikToken Model Name", "dynamic": false, - "info": "Authentication token for accessing Astra DB.", - "input_types": [ - "Message" - ], - "load_from_db": true, - "name": "token", - "password": true, + "info": "", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "tiktoken_model_name", "placeholder": "", - "required": true, + "required": false, "show": true, "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, "type": "str", "value": "" } - } + }, + "tool_mode": false }, - "type": "AstraVectorStoreComponent" + "type": "OpenAIEmbeddings" }, "dragging": false, - "height": 753, - "id": "AstraVectorStoreComponent-wvuVK", + "height": 322, + "id": "OpenAIEmbeddings-CeoV9", "position": { - "x": 2678.506138892635, - "y": 1267.3353646037478 + "x": 825.435626932521, + "y": 739.6327999745448 }, "positionAbsolute": { - "x": 2678.506138892635, - "y": 1267.3353646037478 + "x": 825.435626932521, + "y": 739.6327999745448 }, "selected": false, "type": "genericNode", - "width": 384 + "width": 320 }, { "data": { - "description": "Generate embeddings using OpenAI models.", - "display_name": "OpenAI Embeddings", - "id": "OpenAIEmbeddings-rQV2h", + "id": "AstraDB-laybz", "node": { - "base_classes": [ - "Embeddings" - ], + "base_classes": ["Data", "Retriever"], "beta": false, "conditional_paths": [], "custom_fields": {}, - "description": "Generate embeddings using OpenAI models.", - "display_name": "OpenAI Embeddings", - "documentation": "", + "description": "Implementation of Vector Store using Astra DB with search capabilities", + "display_name": "Astra DB", + "documentation": 
"https://docs.langflow.org/starter-projects-vector-store-rag", "edited": false, "field_order": [ - "default_headers", - "default_query", - "chunk_size", - "client", - "deployment", - "embedding_ctx_length", - "max_retries", - "model", - "model_kwargs", - "openai_api_base", - "openai_api_key", - "openai_api_type", - "openai_api_version", - "openai_organization", - "openai_proxy", - "request_timeout", - "show_progress_bar", - "skip_empty", - "tiktoken_model_name", - "tiktoken_enable", - "dimensions" + "token", + "api_endpoint", + "collection_name", + "search_input", + "ingest_data", + "namespace", + "embedding_service", + "embedding", + "metric", + "batch_size", + "bulk_insert_batch_concurrency", + "bulk_insert_overwrite_concurrency", + "bulk_delete_concurrency", + "setup_mode", + "pre_delete_collection", + "metadata_indexing_include", + "metadata_indexing_exclude", + "collection_indexing_policy", + "number_of_results", + "search_type", + "search_score_threshold", + "search_filter" ], "frozen": false, - "icon": "OpenAI", + "icon": "AstraDB", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, "output_types": [], "outputs": [ { "cache": true, - "display_name": "Embeddings", - "method": "build_embeddings", - "name": "embeddings", - "selected": "Embeddings", - "types": [ - "Embeddings" - ], + "display_name": "Retriever", + "method": "build_base_retriever", + "name": "base_retriever", + "required_inputs": [], + "selected": "Retriever", + "types": ["Retriever"], + "value": "__UNDEFINED__" + }, + { + "cache": true, + "display_name": "Search Results", + "method": "search_documents", + "name": "search_results", + "required_inputs": ["api_endpoint", "collection_name", "token"], + "selected": "Data", + "types": ["Data"], "value": "__UNDEFINED__" } ], "pinned": false, "template": { "_type": "Component", - "chunk_size": { - "advanced": true, - "display_name": "Chunk Size", + "api_endpoint": { + "_input_type": "SecretStrInput", + "advanced": false, + "display_name": "API Endpoint", "dynamic": false, - "info": "", - "list": false, - "name": "chunk_size", + "info": "API endpoint URL for the Astra DB service.", + "input_types": ["Message"], + "load_from_db": true, + "name": "api_endpoint", + "password": true, "placeholder": "", - "required": false, + "required": true, "show": true, "title_case": false, - "trace_as_metadata": true, - "type": "int", - "value": 1000 + "type": "str", + "value": "ASTRA_DB_API_ENDPOINT" }, - "client": { + "batch_size": { + "_input_type": "IntInput", "advanced": true, - "display_name": "Client", + "display_name": "Batch Size", "dynamic": false, - "info": "", - "input_types": [ - "Message" - ], + "info": "Optional number of data to process in a single batch.", "list": false, - "load_from_db": false, - "name": "client", + "name": "batch_size", "placeholder": "", "required": false, "show": true, "title_case": false, - "trace_as_input": true, "trace_as_metadata": true, - "type": "str", + "type": "int", "value": "" }, - "code": { - "advanced": true, - "dynamic": true, - "fileTypes": [], - "file_path": "", - "info": "", - "list": false, - "load_from_db": false, - "multiline": true, - "name": "code", - "password": false, - "placeholder": "", - "required": true, - "show": true, - "title_case": false, - "type": "code", - "value": "from langchain_openai.embeddings.base import OpenAIEmbeddings\n\nfrom langflow.base.embeddings.model import LCEmbeddingsModel\nfrom langflow.base.models.openai_constants import OPENAI_EMBEDDING_MODEL_NAMES\nfrom langflow.field_typing import 
Embeddings\nfrom langflow.io import BoolInput, DictInput, DropdownInput, FloatInput, IntInput, MessageTextInput, SecretStrInput\n\n\nclass OpenAIEmbeddingsComponent(LCEmbeddingsModel):\n display_name = \"OpenAI Embeddings\"\n description = \"Generate embeddings using OpenAI models.\"\n icon = \"OpenAI\"\n name = \"OpenAIEmbeddings\"\n\n inputs = [\n DictInput(\n name=\"default_headers\",\n display_name=\"Default Headers\",\n advanced=True,\n info=\"Default headers to use for the API request.\",\n ),\n DictInput(\n name=\"default_query\",\n display_name=\"Default Query\",\n advanced=True,\n info=\"Default query parameters to use for the API request.\",\n ),\n IntInput(name=\"chunk_size\", display_name=\"Chunk Size\", advanced=True, value=1000),\n MessageTextInput(name=\"client\", display_name=\"Client\", advanced=True),\n MessageTextInput(name=\"deployment\", display_name=\"Deployment\", advanced=True),\n IntInput(name=\"embedding_ctx_length\", display_name=\"Embedding Context Length\", advanced=True, value=1536),\n IntInput(name=\"max_retries\", display_name=\"Max Retries\", value=3, advanced=True),\n DropdownInput(\n name=\"model\",\n display_name=\"Model\",\n advanced=False,\n options=OPENAI_EMBEDDING_MODEL_NAMES,\n value=\"text-embedding-3-small\",\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n SecretStrInput(name=\"openai_api_base\", display_name=\"OpenAI API Base\", advanced=True),\n SecretStrInput(name=\"openai_api_key\", display_name=\"OpenAI API Key\", value=\"OPENAI_API_KEY\"),\n SecretStrInput(name=\"openai_api_type\", display_name=\"OpenAI API Type\", advanced=True),\n MessageTextInput(name=\"openai_api_version\", display_name=\"OpenAI API Version\", advanced=True),\n MessageTextInput(\n name=\"openai_organization\",\n display_name=\"OpenAI Organization\",\n advanced=True,\n ),\n MessageTextInput(name=\"openai_proxy\", display_name=\"OpenAI Proxy\", advanced=True),\n FloatInput(name=\"request_timeout\", display_name=\"Request Timeout\", advanced=True),\n BoolInput(name=\"show_progress_bar\", display_name=\"Show Progress Bar\", advanced=True),\n BoolInput(name=\"skip_empty\", display_name=\"Skip Empty\", advanced=True),\n MessageTextInput(\n name=\"tiktoken_model_name\",\n display_name=\"TikToken Model Name\",\n advanced=True,\n ),\n BoolInput(\n name=\"tiktoken_enable\",\n display_name=\"TikToken Enable\",\n advanced=True,\n value=True,\n info=\"If False, you must have transformers installed.\",\n ),\n IntInput(\n name=\"dimensions\",\n display_name=\"Dimensions\",\n info=\"The number of dimensions the resulting output embeddings should have. 
Only supported by certain models.\",\n advanced=True,\n ),\n ]\n\n def build_embeddings(self) -> Embeddings:\n return OpenAIEmbeddings(\n tiktoken_enabled=self.tiktoken_enable,\n default_headers=self.default_headers,\n default_query=self.default_query,\n allowed_special=\"all\",\n disallowed_special=\"all\",\n chunk_size=self.chunk_size,\n deployment=self.deployment,\n embedding_ctx_length=self.embedding_ctx_length,\n max_retries=self.max_retries,\n model=self.model,\n model_kwargs=self.model_kwargs,\n base_url=self.openai_api_base,\n api_key=self.openai_api_key,\n openai_api_type=self.openai_api_type,\n api_version=self.openai_api_version,\n organization=self.openai_organization,\n openai_proxy=self.openai_proxy,\n timeout=self.request_timeout or None,\n show_progress_bar=self.show_progress_bar,\n skip_empty=self.skip_empty,\n tiktoken_model_name=self.tiktoken_model_name,\n dimensions=self.dimensions or None,\n )\n" - }, - "default_headers": { - "advanced": true, - "display_name": "Default Headers", - "dynamic": false, - "info": "Default headers to use for the API request.", - "list": false, - "name": "default_headers", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "type": "dict", - "value": {} - }, - "default_query": { + "bulk_delete_concurrency": { + "_input_type": "IntInput", "advanced": true, - "display_name": "Default Query", + "display_name": "Bulk Delete Concurrency", "dynamic": false, - "info": "Default query parameters to use for the API request.", + "info": "Optional concurrency level for bulk delete operations.", "list": false, - "name": "default_query", + "name": "bulk_delete_concurrency", "placeholder": "", "required": false, "show": true, "title_case": false, - "trace_as_input": true, - "type": "dict", - "value": {} + "trace_as_metadata": true, + "type": "int", + "value": "" }, - "deployment": { + "bulk_insert_batch_concurrency": { + "_input_type": "IntInput", "advanced": true, - "display_name": "Deployment", + "display_name": "Bulk Insert Batch Concurrency", "dynamic": false, - "info": "", - "input_types": [ - "Message" - ], + "info": "Optional concurrency level for bulk insert operations.", "list": false, - "load_from_db": false, - "name": "deployment", + "name": "bulk_insert_batch_concurrency", "placeholder": "", "required": false, "show": true, "title_case": false, - "trace_as_input": true, "trace_as_metadata": true, - "type": "str", + "type": "int", "value": "" }, - "dimensions": { + "bulk_insert_overwrite_concurrency": { + "_input_type": "IntInput", "advanced": true, - "display_name": "Dimensions", + "display_name": "Bulk Insert Overwrite Concurrency", "dynamic": false, - "info": "The number of dimensions the resulting output embeddings should have. 
Only supported by certain models.", + "info": "Optional concurrency level for bulk insert operations that overwrite existing data.", "list": false, - "name": "dimensions", + "name": "bulk_insert_overwrite_concurrency", "placeholder": "", "required": false, "show": true, @@ -2343,206 +2684,206 @@ "type": "int", "value": "" }, - "embedding_ctx_length": { + "code": { "advanced": true, - "display_name": "Embedding Context Length", - "dynamic": false, + "dynamic": true, + "fileTypes": [], + "file_path": "", "info": "", "list": false, - "name": "embedding_ctx_length", + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, "placeholder": "", - "required": false, + "required": true, "show": true, "title_case": false, - "trace_as_metadata": true, - "type": "int", - "value": 1536 + "type": "code", + "value": "import os\n\nimport orjson\nfrom astrapy.admin import parse_api_endpoint\n\nfrom langflow.base.vectorstores.model import LCVectorStoreComponent, check_cached_vector_store\nfrom langflow.helpers import docs_to_data\nfrom langflow.inputs import DictInput, FloatInput, MessageTextInput\nfrom langflow.io import (\n BoolInput,\n DataInput,\n DropdownInput,\n HandleInput,\n IntInput,\n MultilineInput,\n SecretStrInput,\n StrInput,\n)\nfrom langflow.schema import Data\n\n\nclass AstraVectorStoreComponent(LCVectorStoreComponent):\n display_name: str = \"Astra DB\"\n description: str = \"Implementation of Vector Store using Astra DB with search capabilities\"\n documentation: str = \"https://docs.langflow.org/starter-projects-vector-store-rag\"\n name = \"AstraDB\"\n icon: str = \"AstraDB\"\n\n VECTORIZE_PROVIDERS_MAPPING = {\n \"Azure OpenAI\": [\"azureOpenAI\", [\"text-embedding-3-small\", \"text-embedding-3-large\", \"text-embedding-ada-002\"]],\n \"Hugging Face - Dedicated\": [\"huggingfaceDedicated\", [\"endpoint-defined-model\"]],\n \"Hugging Face - Serverless\": [\n \"huggingface\",\n [\n \"sentence-transformers/all-MiniLM-L6-v2\",\n \"intfloat/multilingual-e5-large\",\n \"intfloat/multilingual-e5-large-instruct\",\n \"BAAI/bge-small-en-v1.5\",\n \"BAAI/bge-base-en-v1.5\",\n \"BAAI/bge-large-en-v1.5\",\n ],\n ],\n \"Jina AI\": [\n \"jinaAI\",\n [\n \"jina-embeddings-v2-base-en\",\n \"jina-embeddings-v2-base-de\",\n \"jina-embeddings-v2-base-es\",\n \"jina-embeddings-v2-base-code\",\n \"jina-embeddings-v2-base-zh\",\n ],\n ],\n \"Mistral AI\": [\"mistral\", [\"mistral-embed\"]],\n \"NVIDIA\": [\"nvidia\", [\"NV-Embed-QA\"]],\n \"OpenAI\": [\"openai\", [\"text-embedding-3-small\", \"text-embedding-3-large\", \"text-embedding-ada-002\"]],\n \"Upstage\": [\"upstageAI\", [\"solar-embedding-1-large\"]],\n \"Voyage AI\": [\n \"voyageAI\",\n [\"voyage-large-2-instruct\", \"voyage-law-2\", \"voyage-code-2\", \"voyage-large-2\", \"voyage-2\"],\n ],\n }\n\n inputs = [\n SecretStrInput(\n name=\"token\",\n display_name=\"Astra DB Application Token\",\n info=\"Authentication token for accessing Astra DB.\",\n value=\"ASTRA_DB_APPLICATION_TOKEN\",\n required=True,\n advanced=os.getenv(\"ASTRA_ENHANCED\", \"false\").lower() == \"true\",\n ),\n SecretStrInput(\n name=\"api_endpoint\",\n display_name=\"Database\" if os.getenv(\"ASTRA_ENHANCED\", \"false\").lower() == \"true\" else \"API Endpoint\",\n info=\"API endpoint URL for the Astra DB service.\",\n value=\"ASTRA_DB_API_ENDPOINT\",\n required=True,\n ),\n StrInput(\n name=\"collection_name\",\n display_name=\"Collection Name\",\n info=\"The name of the collection within Astra DB where the vectors will be stored.\",\n 
required=True,\n ),\n MultilineInput(\n name=\"search_input\",\n display_name=\"Search Input\",\n ),\n DataInput(\n name=\"ingest_data\",\n display_name=\"Ingest Data\",\n is_list=True,\n ),\n StrInput(\n name=\"namespace\",\n display_name=\"Namespace\",\n info=\"Optional namespace within Astra DB to use for the collection.\",\n advanced=True,\n ),\n DropdownInput(\n name=\"embedding_choice\",\n display_name=\"Embedding Model or Astra Vectorize\",\n info=\"Determines whether to use Astra Vectorize for the collection.\",\n options=[\"Embedding Model\", \"Astra Vectorize\"],\n real_time_refresh=True,\n value=\"Embedding Model\",\n ),\n HandleInput(\n name=\"embedding\",\n display_name=\"Embedding Model\",\n input_types=[\"Embeddings\"],\n info=\"Allows an embedding model configuration.\",\n ),\n DropdownInput(\n name=\"metric\",\n display_name=\"Metric\",\n info=\"Optional distance metric for vector comparisons in the vector store.\",\n options=[\"cosine\", \"dot_product\", \"euclidean\"],\n value=\"cosine\",\n advanced=True,\n ),\n IntInput(\n name=\"batch_size\",\n display_name=\"Batch Size\",\n info=\"Optional number of data to process in a single batch.\",\n advanced=True,\n ),\n IntInput(\n name=\"bulk_insert_batch_concurrency\",\n display_name=\"Bulk Insert Batch Concurrency\",\n info=\"Optional concurrency level for bulk insert operations.\",\n advanced=True,\n ),\n IntInput(\n name=\"bulk_insert_overwrite_concurrency\",\n display_name=\"Bulk Insert Overwrite Concurrency\",\n info=\"Optional concurrency level for bulk insert operations that overwrite existing data.\",\n advanced=True,\n ),\n IntInput(\n name=\"bulk_delete_concurrency\",\n display_name=\"Bulk Delete Concurrency\",\n info=\"Optional concurrency level for bulk delete operations.\",\n advanced=True,\n ),\n DropdownInput(\n name=\"setup_mode\",\n display_name=\"Setup Mode\",\n info=\"Configuration mode for setting up the vector store, with options like 'Sync' or 'Off'.\",\n options=[\"Sync\", \"Off\"],\n advanced=True,\n value=\"Sync\",\n ),\n BoolInput(\n name=\"pre_delete_collection\",\n display_name=\"Pre Delete Collection\",\n info=\"Boolean flag to determine whether to delete the collection before creating a new one.\",\n advanced=True,\n ),\n StrInput(\n name=\"metadata_indexing_include\",\n display_name=\"Metadata Indexing Include\",\n info=\"Optional list of metadata fields to include in the indexing.\",\n is_list=True,\n advanced=True,\n ),\n StrInput(\n name=\"metadata_indexing_exclude\",\n display_name=\"Metadata Indexing Exclude\",\n info=\"Optional list of metadata fields to exclude from the indexing.\",\n is_list=True,\n advanced=True,\n ),\n StrInput(\n name=\"collection_indexing_policy\",\n display_name=\"Collection Indexing Policy\",\n info='Optional JSON string for the \"indexing\" field of the collection. '\n \"See https://docs.datastax.com/en/astra-db-serverless/api-reference/collections.html#the-indexing-option\",\n advanced=True,\n ),\n IntInput(\n name=\"number_of_results\",\n display_name=\"Number of Results\",\n info=\"Number of results to return.\",\n advanced=True,\n value=4,\n ),\n DropdownInput(\n name=\"search_type\",\n display_name=\"Search Type\",\n info=\"Search type to use\",\n options=[\"Similarity\", \"Similarity with score threshold\", \"MMR (Max Marginal Relevance)\"],\n value=\"Similarity\",\n advanced=True,\n ),\n FloatInput(\n name=\"search_score_threshold\",\n display_name=\"Search Score Threshold\",\n info=\"Minimum similarity score threshold for search results. 
\"\n \"(when using 'Similarity with score threshold')\",\n value=0,\n advanced=True,\n ),\n DictInput(\n name=\"search_filter\",\n display_name=\"Search Metadata Filter\",\n info=\"Optional dictionary of filters to apply to the search query.\",\n advanced=True,\n is_list=True,\n ),\n ]\n\n def del_fields(self, build_config, field_list):\n for field in field_list:\n if field in build_config:\n del build_config[field]\n\n return build_config\n\n def insert_in_dict(self, build_config, field_name, new_parameters):\n # Insert the new key-value pair after the found key\n for new_field_name, new_parameter in new_parameters.items():\n # Get all the items as a list of tuples (key, value)\n items = list(build_config.items())\n\n # Find the index of the key to insert after\n idx = len(items)\n for i, (key, _) in enumerate(items):\n if key == field_name:\n idx = i + 1\n break\n\n items.insert(idx, (new_field_name, new_parameter))\n\n # Clear the original dictionary and update with the modified items\n build_config.clear()\n build_config.update(items)\n\n return build_config\n\n def update_build_config(self, build_config: dict, field_value: str, field_name: str | None = None):\n if field_name == \"embedding_choice\":\n if field_value == \"Astra Vectorize\":\n self.del_fields(build_config, [\"embedding\"])\n\n new_parameter = DropdownInput(\n name=\"embedding_provider\",\n display_name=\"Embedding Provider\",\n options=self.VECTORIZE_PROVIDERS_MAPPING.keys(),\n value=\"\",\n required=True,\n real_time_refresh=True,\n ).to_dict()\n\n self.insert_in_dict(build_config, \"embedding_choice\", {\"embedding_provider\": new_parameter})\n else:\n self.del_fields(\n build_config,\n [\n \"embedding_provider\",\n \"model\",\n \"z_01_model_parameters\",\n \"z_02_api_key_name\",\n \"z_03_provider_api_key\",\n \"z_04_authentication\",\n ],\n )\n\n new_parameter = HandleInput(\n name=\"embedding\",\n display_name=\"Embedding Model\",\n input_types=[\"Embeddings\"],\n info=\"Allows an embedding model configuration.\",\n ).to_dict()\n\n self.insert_in_dict(build_config, \"embedding_choice\", {\"embedding\": new_parameter})\n\n elif field_name == \"embedding_provider\":\n self.del_fields(\n build_config,\n [\"model\", \"z_01_model_parameters\", \"z_02_api_key_name\", \"z_03_provider_api_key\", \"z_04_authentication\"],\n )\n\n model_options = self.VECTORIZE_PROVIDERS_MAPPING[field_value][1]\n\n new_parameter = DropdownInput(\n name=\"model\",\n display_name=\"Model\",\n info=\"The embedding model to use for the selected provider. Each provider has a different set of \"\n \"models available (full list at \"\n \"https://docs.datastax.com/en/astra-db-serverless/databases/embedding-generation.html):\\n\\n\"\n f\"{', '.join(model_options)}\",\n options=model_options,\n value=None,\n required=True,\n real_time_refresh=True,\n ).to_dict()\n\n self.insert_in_dict(build_config, \"embedding_provider\", {\"model\": new_parameter})\n\n elif field_name == \"model\":\n self.del_fields(\n build_config,\n [\"z_01_model_parameters\", \"z_02_api_key_name\", \"z_03_provider_api_key\", \"z_04_authentication\"],\n )\n\n new_parameter_1 = DictInput(\n name=\"z_01_model_parameters\",\n display_name=\"Model Parameters\",\n is_list=True,\n ).to_dict()\n\n new_parameter_2 = MessageTextInput(\n name=\"z_02_api_key_name\",\n display_name=\"API Key Name\",\n info=\"The name of the embeddings provider API key stored on Astra. 
\"\n \"If set, it will override the 'ProviderKey' in the authentication parameters.\",\n ).to_dict()\n\n new_parameter_3 = SecretStrInput(\n load_from_db=False,\n name=\"z_03_provider_api_key\",\n display_name=\"Provider API Key\",\n info=\"An alternative to the Astra Authentication that passes an API key for the provider \"\n \"with each request to Astra DB. \"\n \"This may be used when Vectorize is configured for the collection, \"\n \"but no corresponding provider secret is stored within Astra's key management system.\",\n ).to_dict()\n\n new_parameter_4 = DictInput(\n name=\"z_04_authentication\",\n display_name=\"Authentication Parameters\",\n is_list=True,\n ).to_dict()\n\n self.insert_in_dict(\n build_config,\n \"model\",\n {\n \"z_01_model_parameters\": new_parameter_1,\n \"z_02_api_key_name\": new_parameter_2,\n \"z_03_provider_api_key\": new_parameter_3,\n \"z_04_authentication\": new_parameter_4,\n },\n )\n\n return build_config\n\n def build_vectorize_options(self, **kwargs):\n for attribute in [\n \"embedding_provider\",\n \"model\",\n \"z_01_model_parameters\",\n \"z_02_api_key_name\",\n \"z_03_provider_api_key\",\n \"z_04_authentication\",\n ]:\n if not hasattr(self, attribute):\n setattr(self, attribute, None)\n\n # Fetch values from kwargs if any self.* attributes are None\n provider_value = self.VECTORIZE_PROVIDERS_MAPPING.get(self.embedding_provider, [None])[0] or kwargs.get(\n \"embedding_provider\"\n )\n model_name = self.model or kwargs.get(\"model\")\n authentication = {**(self.z_04_authentication or kwargs.get(\"z_04_authentication\", {}))}\n parameters = self.z_01_model_parameters or kwargs.get(\"z_01_model_parameters\", {})\n\n # Set the API key name if provided\n api_key_name = self.z_02_api_key_name or kwargs.get(\"z_02_api_key_name\")\n provider_key = self.z_03_provider_api_key or kwargs.get(\"z_03_provider_api_key\")\n if api_key_name:\n authentication[\"providerKey\"] = api_key_name\n\n # Set authentication and parameters to None if no values are provided\n if not authentication:\n authentication = None\n if not parameters:\n parameters = None\n\n return {\n # must match astrapy.info.CollectionVectorServiceOptions\n \"collection_vector_service_options\": {\n \"provider\": provider_value,\n \"modelName\": model_name,\n \"authentication\": authentication,\n \"parameters\": parameters,\n },\n \"collection_embedding_api_key\": provider_key,\n }\n\n @check_cached_vector_store\n def build_vector_store(self, vectorize_options=None):\n try:\n from langchain_astradb import AstraDBVectorStore\n from langchain_astradb.utils.astradb import SetupMode\n except ImportError as e:\n msg = (\n \"Could not import langchain Astra DB integration package. 
\"\n \"Please install it with `pip install langchain-astradb`.\"\n )\n raise ImportError(msg) from e\n\n try:\n if not self.setup_mode:\n self.setup_mode = self._inputs[\"setup_mode\"].options[0]\n\n setup_mode_value = SetupMode[self.setup_mode.upper()]\n except KeyError as e:\n msg = f\"Invalid setup mode: {self.setup_mode}\"\n raise ValueError(msg) from e\n\n if self.embedding_choice == \"Embedding Model\":\n embedding_dict = {\"embedding\": self.embedding}\n else:\n from astrapy.info import CollectionVectorServiceOptions\n\n # Fetch values from kwargs if any self.* attributes are None\n dict_options = vectorize_options or self.build_vectorize_options()\n\n # Set the embedding dictionary\n embedding_dict = {\n \"collection_vector_service_options\": CollectionVectorServiceOptions.from_dict(\n dict_options.get(\"collection_vector_service_options\")\n ),\n \"collection_embedding_api_key\": dict_options.get(\"collection_embedding_api_key\"),\n }\n\n try:\n vector_store = AstraDBVectorStore(\n collection_name=self.collection_name,\n token=self.token,\n api_endpoint=self.api_endpoint,\n namespace=self.namespace or None,\n environment=parse_api_endpoint(self.api_endpoint).environment if self.api_endpoint else None,\n metric=self.metric or None,\n batch_size=self.batch_size or None,\n bulk_insert_batch_concurrency=self.bulk_insert_batch_concurrency or None,\n bulk_insert_overwrite_concurrency=self.bulk_insert_overwrite_concurrency or None,\n bulk_delete_concurrency=self.bulk_delete_concurrency or None,\n setup_mode=setup_mode_value,\n pre_delete_collection=self.pre_delete_collection,\n metadata_indexing_include=[s for s in self.metadata_indexing_include if s] or None,\n metadata_indexing_exclude=[s for s in self.metadata_indexing_exclude if s] or None,\n collection_indexing_policy=orjson.dumps(self.collection_indexing_policy)\n if self.collection_indexing_policy\n else None,\n **embedding_dict,\n )\n except Exception as e:\n msg = f\"Error initializing AstraDBVectorStore: {e}\"\n raise ValueError(msg) from e\n\n self._add_documents_to_vector_store(vector_store)\n\n return vector_store\n\n def _add_documents_to_vector_store(self, vector_store) -> None:\n documents = []\n for _input in self.ingest_data or []:\n if isinstance(_input, Data):\n documents.append(_input.to_lc_document())\n else:\n msg = \"Vector Store Inputs must be Data objects.\"\n raise TypeError(msg)\n\n if documents:\n self.log(f\"Adding {len(documents)} documents to the Vector Store.\")\n try:\n vector_store.add_documents(documents)\n except Exception as e:\n msg = f\"Error adding documents to AstraDBVectorStore: {e}\"\n raise ValueError(msg) from e\n else:\n self.log(\"No documents to add to the Vector Store.\")\n\n def _map_search_type(self) -> str:\n if self.search_type == \"Similarity with score threshold\":\n return \"similarity_score_threshold\"\n if self.search_type == \"MMR (Max Marginal Relevance)\":\n return \"mmr\"\n return \"similarity\"\n\n def _build_search_args(self):\n args = {\n \"k\": self.number_of_results,\n \"score_threshold\": self.search_score_threshold,\n }\n\n if self.search_filter:\n clean_filter = {k: v for k, v in self.search_filter.items() if k and v}\n if len(clean_filter) > 0:\n args[\"filter\"] = clean_filter\n return args\n\n def search_documents(self, vector_store=None) -> list[Data]:\n if not vector_store:\n vector_store = self.build_vector_store()\n\n self.log(f\"Search input: {self.search_input}\")\n self.log(f\"Search type: {self.search_type}\")\n self.log(f\"Number of results: 
{self.number_of_results}\")\n\n if self.search_input and isinstance(self.search_input, str) and self.search_input.strip():\n try:\n search_type = self._map_search_type()\n search_args = self._build_search_args()\n\n docs = vector_store.search(query=self.search_input, search_type=search_type, **search_args)\n except Exception as e:\n msg = f\"Error performing search in AstraDBVectorStore: {e}\"\n raise ValueError(msg) from e\n\n self.log(f\"Retrieved documents: {len(docs)}\")\n\n data = docs_to_data(docs)\n self.log(f\"Converted documents to data: {len(data)}\")\n self.status = data\n return data\n self.log(\"No search input provided. Skipping search.\")\n return []\n\n def get_retriever_kwargs(self):\n search_args = self._build_search_args()\n return {\n \"search_type\": self._map_search_type(),\n \"search_kwargs\": search_args,\n }\n" }, - "max_retries": { + "collection_indexing_policy": { + "_input_type": "StrInput", "advanced": true, - "display_name": "Max Retries", + "display_name": "Collection Indexing Policy", "dynamic": false, - "info": "", + "info": "Optional JSON string for the \"indexing\" field of the collection. See https://docs.datastax.com/en/astra-db-serverless/api-reference/collections.html#the-indexing-option", "list": false, - "name": "max_retries", + "load_from_db": false, + "name": "collection_indexing_policy", "placeholder": "", "required": false, "show": true, "title_case": false, "trace_as_metadata": true, - "type": "int", - "value": 3 + "type": "str", + "value": "" }, - "model": { + "collection_name": { + "_input_type": "StrInput", "advanced": false, - "display_name": "Model", + "display_name": "Collection Name", "dynamic": false, - "info": "", - "name": "model", - "options": [ - "text-embedding-3-small", - "text-embedding-3-large", - "text-embedding-ada-002" - ], + "info": "The name of the collection within Astra DB where the vectors will be stored.", + "list": false, + "load_from_db": false, + "name": "collection_name", "placeholder": "", - "required": false, + "required": true, "show": true, "title_case": false, "trace_as_metadata": true, "type": "str", - "value": "text-embedding-3-small" + "value": "test" }, - "model_kwargs": { - "advanced": true, - "display_name": "Model Kwargs", + "embedding": { + "_input_type": "HandleInput", + "advanced": false, + "display_name": "Embedding Model", "dynamic": false, - "info": "", + "info": "Allows an embedding model configuration.", + "input_types": ["Embeddings"], "list": false, - "name": "model_kwargs", + "name": "embedding", "placeholder": "", "required": false, "show": true, "title_case": false, - "trace_as_input": true, - "type": "dict", - "value": {} + "trace_as_metadata": true, + "type": "other", + "value": "" }, - "openai_api_base": { - "advanced": true, - "display_name": "OpenAI API Base", + "embedding_choice": { + "_input_type": "DropdownInput", + "advanced": false, + "combobox": false, + "display_name": "Embedding Model or Astra Vectorize", "dynamic": false, - "info": "", - "input_types": [ - "Message" - ], - "load_from_db": true, - "name": "openai_api_base", - "password": true, + "info": "Determines whether to use Astra Vectorize for the collection.", + "name": "embedding_choice", + "options": ["Embedding Model", "Astra Vectorize"], "placeholder": "", + "real_time_refresh": true, "required": false, "show": true, "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, "type": "str", - "value": "" + "value": "Embedding Model" }, - "openai_api_key": { + "ingest_data": { + "_input_type": "DataInput", 
"advanced": false, - "display_name": "OpenAI API Key", + "display_name": "Ingest Data", "dynamic": false, "info": "", - "input_types": [ - "Message" - ], - "load_from_db": true, - "name": "openai_api_key", - "password": true, + "input_types": ["Data"], + "list": true, + "name": "ingest_data", "placeholder": "", "required": false, "show": true, "title_case": false, - "type": "str", - "value": "OPENAI_API_KEY" + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "other", + "value": "" }, - "openai_api_type": { + "metadata_indexing_exclude": { + "_input_type": "StrInput", "advanced": true, - "display_name": "OpenAI API Type", + "display_name": "Metadata Indexing Exclude", "dynamic": false, - "info": "", - "input_types": [ - "Message" - ], - "load_from_db": true, - "name": "openai_api_type", - "password": true, + "info": "Optional list of metadata fields to exclude from the indexing.", + "list": true, + "load_from_db": false, + "name": "metadata_indexing_exclude", "placeholder": "", "required": false, "show": true, "title_case": false, + "trace_as_metadata": true, "type": "str", "value": "" }, - "openai_api_version": { + "metadata_indexing_include": { + "_input_type": "StrInput", "advanced": true, - "display_name": "OpenAI API Version", + "display_name": "Metadata Indexing Include", "dynamic": false, - "info": "", - "input_types": [ - "Message" - ], - "list": false, + "info": "Optional list of metadata fields to include in the indexing.", + "list": true, "load_from_db": false, - "name": "openai_api_version", + "name": "metadata_indexing_include", "placeholder": "", "required": false, "show": true, "title_case": false, - "trace_as_input": true, "trace_as_metadata": true, "type": "str", "value": "" }, - "openai_organization": { + "metric": { + "_input_type": "DropdownInput", "advanced": true, - "display_name": "OpenAI Organization", + "combobox": false, + "display_name": "Metric", "dynamic": false, - "info": "", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "name": "openai_organization", + "info": "Optional distance metric for vector comparisons in the vector store.", + "name": "metric", + "options": ["cosine", "dot_product", "euclidean"], "placeholder": "", "required": false, "show": true, "title_case": false, - "trace_as_input": true, + "tool_mode": false, "trace_as_metadata": true, "type": "str", - "value": "" + "value": "cosine" }, - "openai_proxy": { + "namespace": { + "_input_type": "StrInput", "advanced": true, - "display_name": "OpenAI Proxy", + "display_name": "Namespace", "dynamic": false, - "info": "", - "input_types": [ - "Message" - ], + "info": "Optional namespace within Astra DB to use for the collection.", "list": false, "load_from_db": false, - "name": "openai_proxy", + "name": "namespace", "placeholder": "", "required": false, "show": true, "title_case": false, - "trace_as_input": true, "trace_as_metadata": true, "type": "str", "value": "" }, - "request_timeout": { + "number_of_results": { + "_input_type": "IntInput", "advanced": true, - "display_name": "Request Timeout", + "display_name": "Number of Results", "dynamic": false, - "info": "", + "info": "Number of results to return.", "list": false, - "name": "request_timeout", + "name": "number_of_results", "placeholder": "", "required": false, "show": true, "title_case": false, "trace_as_metadata": true, - "type": "float", - "value": "" + "type": "int", + "value": 4 }, - "show_progress_bar": { + "pre_delete_collection": { + "_input_type": "BoolInput", 
"advanced": true, - "display_name": "Show Progress Bar", + "display_name": "Pre Delete Collection", "dynamic": false, - "info": "", + "info": "Boolean flag to determine whether to delete the collection before creating a new one.", "list": false, - "name": "show_progress_bar", + "name": "pre_delete_collection", "placeholder": "", "required": false, "show": true, @@ -2551,84 +2892,141 @@ "type": "bool", "value": false }, - "skip_empty": { + "search_filter": { + "_input_type": "DictInput", "advanced": true, - "display_name": "Skip Empty", + "display_name": "Search Metadata Filter", + "dynamic": false, + "info": "Optional dictionary of filters to apply to the search query.", + "list": true, + "name": "search_filter", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "search_input": { + "_input_type": "MultilineInput", + "advanced": false, + "display_name": "Search Input", "dynamic": false, "info": "", + "input_types": ["Message"], "list": false, - "name": "skip_empty", + "load_from_db": false, + "multiline": true, + "name": "search_input", "placeholder": "", "required": false, "show": true, "title_case": false, + "tool_mode": false, + "trace_as_input": true, "trace_as_metadata": true, - "type": "bool", - "value": false + "type": "str", + "value": "" }, - "tiktoken_enable": { + "search_score_threshold": { + "_input_type": "FloatInput", "advanced": true, - "display_name": "TikToken Enable", + "display_name": "Search Score Threshold", "dynamic": false, - "info": "If False, you must have transformers installed.", + "info": "Minimum similarity score threshold for search results. (when using 'Similarity with score threshold')", "list": false, - "name": "tiktoken_enable", + "name": "search_score_threshold", "placeholder": "", "required": false, "show": true, "title_case": false, "trace_as_metadata": true, - "type": "bool", - "value": true + "type": "float", + "value": 0 }, - "tiktoken_model_name": { + "search_type": { + "_input_type": "DropdownInput", "advanced": true, - "display_name": "TikToken Model Name", + "combobox": false, + "display_name": "Search Type", "dynamic": false, - "info": "", - "input_types": [ - "Message" + "info": "Search type to use", + "name": "search_type", + "options": [ + "Similarity", + "Similarity with score threshold", + "MMR (Max Marginal Relevance)" ], - "list": false, - "load_from_db": false, - "name": "tiktoken_model_name", "placeholder": "", "required": false, "show": true, "title_case": false, - "trace_as_input": true, + "tool_mode": false, "trace_as_metadata": true, "type": "str", - "value": "" + "value": "Similarity" + }, + "setup_mode": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Setup Mode", + "dynamic": false, + "info": "Configuration mode for setting up the vector store, with options like 'Sync' or 'Off'.", + "name": "setup_mode", + "options": ["Sync", "Off"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "Sync" + }, + "token": { + "_input_type": "SecretStrInput", + "advanced": false, + "display_name": "Astra DB Application Token", + "dynamic": false, + "info": "Authentication token for accessing Astra DB.", + "input_types": ["Message"], + "load_from_db": true, + "name": "token", + "password": true, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "str", + 
"value": "ASTRA_DB_APPLICATION_TOKEN" } - } + }, + "tool_mode": false }, - "type": "OpenAIEmbeddings" + "type": "AstraDB" }, "dragging": false, - "height": 394, - "id": "OpenAIEmbeddings-rQV2h", + "height": 749, + "id": "AstraDB-laybz", "position": { - "x": 2044.683126356786, - "y": 1785.2283494456522 + "x": 2090.491421890006, + "y": 1351.6194724621473 }, "positionAbsolute": { - "x": 2044.683126356786, - "y": 1785.2283494456522 + "x": 2090.491421890006, + "y": 1351.6194724621473 }, - "selected": true, + "selected": false, "type": "genericNode", - "width": 384 + "width": 320 }, { "data": { - "description": "Generate embeddings using OpenAI models.", - "display_name": "OpenAI Embeddings", - "id": "OpenAIEmbeddings-EJT2O", + "id": "OpenAIEmbeddings-ANgku", "node": { - "base_classes": [ - "Embeddings" - ], + "base_classes": ["Embeddings"], "beta": false, "conditional_paths": [], "custom_fields": {}, @@ -2646,8 +3044,8 @@ "max_retries", "model", "model_kwargs", - "openai_api_base", "openai_api_key", + "openai_api_base", "openai_api_type", "openai_api_version", "openai_organization", @@ -2661,6 +3059,9 @@ ], "frozen": false, "icon": "OpenAI", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, "output_types": [], "outputs": [ { @@ -2668,10 +3069,9 @@ "display_name": "Embeddings", "method": "build_embeddings", "name": "embeddings", + "required_inputs": [], "selected": "Embeddings", - "types": [ - "Embeddings" - ], + "types": ["Embeddings"], "value": "__UNDEFINED__" } ], @@ -2679,6 +3079,7 @@ "template": { "_type": "Component", "chunk_size": { + "_input_type": "IntInput", "advanced": true, "display_name": "Chunk Size", "dynamic": false, @@ -2694,13 +3095,12 @@ "value": 1000 }, "client": { + "_input_type": "MessageTextInput", "advanced": true, "display_name": "Client", "dynamic": false, "info": "", - "input_types": [ - "Message" - ], + "input_types": ["Message"], "list": false, "load_from_db": false, "name": "client", @@ -2708,6 +3108,7 @@ "required": false, "show": true, "title_case": false, + "tool_mode": false, "trace_as_input": true, "trace_as_metadata": true, "type": "str", @@ -2729,9 +3130,10 @@ "show": true, "title_case": false, "type": "code", - "value": "from langchain_openai.embeddings.base import OpenAIEmbeddings\n\nfrom langflow.base.embeddings.model import LCEmbeddingsModel\nfrom langflow.base.models.openai_constants import OPENAI_EMBEDDING_MODEL_NAMES\nfrom langflow.field_typing import Embeddings\nfrom langflow.io import BoolInput, DictInput, DropdownInput, FloatInput, IntInput, MessageTextInput, SecretStrInput\n\n\nclass OpenAIEmbeddingsComponent(LCEmbeddingsModel):\n display_name = \"OpenAI Embeddings\"\n description = \"Generate embeddings using OpenAI models.\"\n icon = \"OpenAI\"\n name = \"OpenAIEmbeddings\"\n\n inputs = [\n DictInput(\n name=\"default_headers\",\n display_name=\"Default Headers\",\n advanced=True,\n info=\"Default headers to use for the API request.\",\n ),\n DictInput(\n name=\"default_query\",\n display_name=\"Default Query\",\n advanced=True,\n info=\"Default query parameters to use for the API request.\",\n ),\n IntInput(name=\"chunk_size\", display_name=\"Chunk Size\", advanced=True, value=1000),\n MessageTextInput(name=\"client\", display_name=\"Client\", advanced=True),\n MessageTextInput(name=\"deployment\", display_name=\"Deployment\", advanced=True),\n IntInput(name=\"embedding_ctx_length\", display_name=\"Embedding Context Length\", advanced=True, value=1536),\n IntInput(name=\"max_retries\", display_name=\"Max Retries\", value=3, 
advanced=True),\n DropdownInput(\n name=\"model\",\n display_name=\"Model\",\n advanced=False,\n options=OPENAI_EMBEDDING_MODEL_NAMES,\n value=\"text-embedding-3-small\",\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n SecretStrInput(name=\"openai_api_base\", display_name=\"OpenAI API Base\", advanced=True),\n SecretStrInput(name=\"openai_api_key\", display_name=\"OpenAI API Key\", value=\"OPENAI_API_KEY\"),\n SecretStrInput(name=\"openai_api_type\", display_name=\"OpenAI API Type\", advanced=True),\n MessageTextInput(name=\"openai_api_version\", display_name=\"OpenAI API Version\", advanced=True),\n MessageTextInput(\n name=\"openai_organization\",\n display_name=\"OpenAI Organization\",\n advanced=True,\n ),\n MessageTextInput(name=\"openai_proxy\", display_name=\"OpenAI Proxy\", advanced=True),\n FloatInput(name=\"request_timeout\", display_name=\"Request Timeout\", advanced=True),\n BoolInput(name=\"show_progress_bar\", display_name=\"Show Progress Bar\", advanced=True),\n BoolInput(name=\"skip_empty\", display_name=\"Skip Empty\", advanced=True),\n MessageTextInput(\n name=\"tiktoken_model_name\",\n display_name=\"TikToken Model Name\",\n advanced=True,\n ),\n BoolInput(\n name=\"tiktoken_enable\",\n display_name=\"TikToken Enable\",\n advanced=True,\n value=True,\n info=\"If False, you must have transformers installed.\",\n ),\n IntInput(\n name=\"dimensions\",\n display_name=\"Dimensions\",\n info=\"The number of dimensions the resulting output embeddings should have. Only supported by certain models.\",\n advanced=True,\n ),\n ]\n\n def build_embeddings(self) -> Embeddings:\n return OpenAIEmbeddings(\n tiktoken_enabled=self.tiktoken_enable,\n default_headers=self.default_headers,\n default_query=self.default_query,\n allowed_special=\"all\",\n disallowed_special=\"all\",\n chunk_size=self.chunk_size,\n deployment=self.deployment,\n embedding_ctx_length=self.embedding_ctx_length,\n max_retries=self.max_retries,\n model=self.model,\n model_kwargs=self.model_kwargs,\n base_url=self.openai_api_base,\n api_key=self.openai_api_key,\n openai_api_type=self.openai_api_type,\n api_version=self.openai_api_version,\n organization=self.openai_organization,\n openai_proxy=self.openai_proxy,\n timeout=self.request_timeout or None,\n show_progress_bar=self.show_progress_bar,\n skip_empty=self.skip_empty,\n tiktoken_model_name=self.tiktoken_model_name,\n dimensions=self.dimensions or None,\n )\n" + "value": "from langchain_openai import OpenAIEmbeddings\n\nfrom langflow.base.embeddings.model import LCEmbeddingsModel\nfrom langflow.base.models.openai_constants import OPENAI_EMBEDDING_MODEL_NAMES\nfrom langflow.field_typing import Embeddings\nfrom langflow.io import BoolInput, DictInput, DropdownInput, FloatInput, IntInput, MessageTextInput, SecretStrInput\n\n\nclass OpenAIEmbeddingsComponent(LCEmbeddingsModel):\n display_name = \"OpenAI Embeddings\"\n description = \"Generate embeddings using OpenAI models.\"\n icon = \"OpenAI\"\n name = \"OpenAIEmbeddings\"\n\n inputs = [\n DictInput(\n name=\"default_headers\",\n display_name=\"Default Headers\",\n advanced=True,\n info=\"Default headers to use for the API request.\",\n ),\n DictInput(\n name=\"default_query\",\n display_name=\"Default Query\",\n advanced=True,\n info=\"Default query parameters to use for the API request.\",\n ),\n IntInput(name=\"chunk_size\", display_name=\"Chunk Size\", advanced=True, value=1000),\n MessageTextInput(name=\"client\", display_name=\"Client\", advanced=True),\n 
MessageTextInput(name=\"deployment\", display_name=\"Deployment\", advanced=True),\n IntInput(name=\"embedding_ctx_length\", display_name=\"Embedding Context Length\", advanced=True, value=1536),\n IntInput(name=\"max_retries\", display_name=\"Max Retries\", value=3, advanced=True),\n DropdownInput(\n name=\"model\",\n display_name=\"Model\",\n advanced=False,\n options=OPENAI_EMBEDDING_MODEL_NAMES,\n value=\"text-embedding-3-small\",\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n SecretStrInput(name=\"openai_api_key\", display_name=\"OpenAI API Key\", value=\"OPENAI_API_KEY\"),\n MessageTextInput(name=\"openai_api_base\", display_name=\"OpenAI API Base\", advanced=True),\n MessageTextInput(name=\"openai_api_type\", display_name=\"OpenAI API Type\", advanced=True),\n MessageTextInput(name=\"openai_api_version\", display_name=\"OpenAI API Version\", advanced=True),\n MessageTextInput(\n name=\"openai_organization\",\n display_name=\"OpenAI Organization\",\n advanced=True,\n ),\n MessageTextInput(name=\"openai_proxy\", display_name=\"OpenAI Proxy\", advanced=True),\n FloatInput(name=\"request_timeout\", display_name=\"Request Timeout\", advanced=True),\n BoolInput(name=\"show_progress_bar\", display_name=\"Show Progress Bar\", advanced=True),\n BoolInput(name=\"skip_empty\", display_name=\"Skip Empty\", advanced=True),\n MessageTextInput(\n name=\"tiktoken_model_name\",\n display_name=\"TikToken Model Name\",\n advanced=True,\n ),\n BoolInput(\n name=\"tiktoken_enable\",\n display_name=\"TikToken Enable\",\n advanced=True,\n value=True,\n info=\"If False, you must have transformers installed.\",\n ),\n IntInput(\n name=\"dimensions\",\n display_name=\"Dimensions\",\n info=\"The number of dimensions the resulting output embeddings should have. 
\"\n \"Only supported by certain models.\",\n advanced=True,\n ),\n ]\n\n def build_embeddings(self) -> Embeddings:\n return OpenAIEmbeddings(\n client=self.client or None,\n model=self.model,\n dimensions=self.dimensions or None,\n deployment=self.deployment or None,\n api_version=self.openai_api_version or None,\n base_url=self.openai_api_base or None,\n openai_api_type=self.openai_api_type or None,\n openai_proxy=self.openai_proxy or None,\n embedding_ctx_length=self.embedding_ctx_length,\n api_key=self.openai_api_key or None,\n organization=self.openai_organization or None,\n allowed_special=\"all\",\n disallowed_special=\"all\",\n chunk_size=self.chunk_size,\n max_retries=self.max_retries,\n timeout=self.request_timeout or None,\n tiktoken_enabled=self.tiktoken_enable,\n tiktoken_model_name=self.tiktoken_model_name or None,\n show_progress_bar=self.show_progress_bar,\n model_kwargs=self.model_kwargs,\n skip_empty=self.skip_empty,\n default_headers=self.default_headers or None,\n default_query=self.default_query or None,\n )\n" }, "default_headers": { + "_input_type": "DictInput", "advanced": true, "display_name": "Default Headers", "dynamic": false, @@ -2747,6 +3149,7 @@ "value": {} }, "default_query": { + "_input_type": "DictInput", "advanced": true, "display_name": "Default Query", "dynamic": false, @@ -2762,13 +3165,12 @@ "value": {} }, "deployment": { + "_input_type": "MessageTextInput", "advanced": true, "display_name": "Deployment", "dynamic": false, "info": "", - "input_types": [ - "Message" - ], + "input_types": ["Message"], "list": false, "load_from_db": false, "name": "deployment", @@ -2776,12 +3178,14 @@ "required": false, "show": true, "title_case": false, + "tool_mode": false, "trace_as_input": true, "trace_as_metadata": true, "type": "str", "value": "" }, "dimensions": { + "_input_type": "IntInput", "advanced": true, "display_name": "Dimensions", "dynamic": false, @@ -2797,6 +3201,7 @@ "value": "" }, "embedding_ctx_length": { + "_input_type": "IntInput", "advanced": true, "display_name": "Embedding Context Length", "dynamic": false, @@ -2812,6 +3217,7 @@ "value": 1536 }, "max_retries": { + "_input_type": "IntInput", "advanced": true, "display_name": "Max Retries", "dynamic": false, @@ -2827,7 +3233,9 @@ "value": 3 }, "model": { + "_input_type": "DropdownInput", "advanced": false, + "combobox": false, "display_name": "Model", "dynamic": false, "info": "", @@ -2841,11 +3249,13 @@ "required": false, "show": true, "title_case": false, + "tool_mode": false, "trace_as_metadata": true, "type": "str", "value": "text-embedding-3-small" }, "model_kwargs": { + "_input_type": "DictInput", "advanced": true, "display_name": "Model Kwargs", "dynamic": false, @@ -2861,31 +3271,32 @@ "value": {} }, "openai_api_base": { + "_input_type": "MessageTextInput", "advanced": true, "display_name": "OpenAI API Base", "dynamic": false, "info": "", - "input_types": [ - "Message" - ], - "load_from_db": true, + "input_types": ["Message"], + "list": false, + "load_from_db": false, "name": "openai_api_base", - "password": true, "placeholder": "", "required": false, "show": true, "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, "type": "str", "value": "" }, "openai_api_key": { + "_input_type": "SecretStrInput", "advanced": false, "display_name": "OpenAI API Key", "dynamic": false, "info": "", - "input_types": [ - "Message" - ], + "input_types": ["Message"], "load_from_db": true, "name": "openai_api_key", "password": true, @@ -2897,31 +3308,32 @@ "value": 
"OPENAI_API_KEY" }, "openai_api_type": { + "_input_type": "MessageTextInput", "advanced": true, "display_name": "OpenAI API Type", "dynamic": false, "info": "", - "input_types": [ - "Message" - ], - "load_from_db": true, + "input_types": ["Message"], + "list": false, + "load_from_db": false, "name": "openai_api_type", - "password": true, "placeholder": "", "required": false, "show": true, "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, "type": "str", "value": "" }, "openai_api_version": { + "_input_type": "MessageTextInput", "advanced": true, "display_name": "OpenAI API Version", "dynamic": false, "info": "", - "input_types": [ - "Message" - ], + "input_types": ["Message"], "list": false, "load_from_db": false, "name": "openai_api_version", @@ -2929,19 +3341,19 @@ "required": false, "show": true, "title_case": false, + "tool_mode": false, "trace_as_input": true, "trace_as_metadata": true, "type": "str", "value": "" }, "openai_organization": { + "_input_type": "MessageTextInput", "advanced": true, "display_name": "OpenAI Organization", "dynamic": false, "info": "", - "input_types": [ - "Message" - ], + "input_types": ["Message"], "list": false, "load_from_db": false, "name": "openai_organization", @@ -2949,19 +3361,19 @@ "required": false, "show": true, "title_case": false, + "tool_mode": false, "trace_as_input": true, "trace_as_metadata": true, "type": "str", "value": "" }, "openai_proxy": { + "_input_type": "MessageTextInput", "advanced": true, "display_name": "OpenAI Proxy", "dynamic": false, "info": "", - "input_types": [ - "Message" - ], + "input_types": ["Message"], "list": false, "load_from_db": false, "name": "openai_proxy", @@ -2969,12 +3381,14 @@ "required": false, "show": true, "title_case": false, + "tool_mode": false, "trace_as_input": true, "trace_as_metadata": true, "type": "str", "value": "" }, "request_timeout": { + "_input_type": "FloatInput", "advanced": true, "display_name": "Request Timeout", "dynamic": false, @@ -2990,6 +3404,7 @@ "value": "" }, "show_progress_bar": { + "_input_type": "BoolInput", "advanced": true, "display_name": "Show Progress Bar", "dynamic": false, @@ -3005,6 +3420,7 @@ "value": false }, "skip_empty": { + "_input_type": "BoolInput", "advanced": true, "display_name": "Skip Empty", "dynamic": false, @@ -3020,6 +3436,7 @@ "value": false }, "tiktoken_enable": { + "_input_type": "BoolInput", "advanced": true, "display_name": "TikToken Enable", "dynamic": false, @@ -3035,13 +3452,12 @@ "value": true }, "tiktoken_model_name": { + "_input_type": "MessageTextInput", "advanced": true, "display_name": "TikToken Model Name", "dynamic": false, "info": "", - "input_types": [ - "Message" - ], + "input_types": ["Message"], "list": false, "load_from_db": false, "name": "tiktoken_model_name", @@ -3049,109 +3465,69 @@ "required": false, "show": true, "title_case": false, + "tool_mode": false, "trace_as_input": true, "trace_as_metadata": true, "type": "str", "value": "" } - } + }, + "tool_mode": false }, "type": "OpenAIEmbeddings" }, "dragging": false, - "height": 394, - "id": "OpenAIEmbeddings-EJT2O", + "height": 322, + "id": "OpenAIEmbeddings-ANgku", "position": { - "x": 628.9252513328779, - "y": 648.6750537749285 + "x": 1690.9220896443658, + "y": 1866.483269483266 }, "positionAbsolute": { - "x": 628.9252513328779, - "y": 648.6750537749285 + "x": 1690.9220896443658, + "y": 1866.483269483266 }, "selected": false, "type": "genericNode", - "width": 384 + "width": 320 }, { "data": { - "description": "Generates text using 
OpenAI LLMs.", - "display_name": "OpenAI", - "id": "OpenAIModel-DUuku", + "id": "File-FJIuH", "node": { - "base_classes": [ - "LanguageModel", - "Message" - ], + "base_classes": ["Data"], "beta": false, "conditional_paths": [], "custom_fields": {}, - "description": "Generates text using OpenAI LLMs.", - "display_name": "OpenAI", + "description": "Load a file to be used in your project.", + "display_name": "File", "documentation": "", "edited": false, "field_order": [ - "input_value", - "system_message", - "stream", - "max_tokens", - "model_kwargs", - "json_mode", - "output_schema", - "model_name", - "openai_api_base", - "api_key", - "temperature", - "seed" + "path", + "silent_errors", + "use_multithreading", + "concurrency_multithreading" ], "frozen": false, - "icon": "OpenAI", + "icon": "file-text", + "legacy": false, + "metadata": {}, "output_types": [], "outputs": [ { "cache": true, - "display_name": "Text", - "method": "text_response", - "name": "text_output", - "selected": "Message", - "types": [ - "Message" - ], - "value": "__UNDEFINED__" - }, - { - "cache": true, - "display_name": "Language Model", - "method": "build_model", - "name": "model_output", - "selected": "LanguageModel", - "types": [ - "LanguageModel" - ], + "display_name": "Data", + "method": "load_file", + "name": "data", + "selected": "Data", + "types": ["Data"], "value": "__UNDEFINED__" } ], "pinned": false, "template": { "_type": "Component", - "api_key": { - "advanced": false, - "display_name": "OpenAI API Key", - "dynamic": false, - "info": "The OpenAI API Key to use for the OpenAI model.", - "input_types": [ - "Message" - ], - "load_from_db": true, - "name": "api_key", - "password": true, - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "type": "str", - "value": "OPENAI_API_KEY" - }, "code": { "advanced": true, "dynamic": true, @@ -3168,150 +3544,70 @@ "show": true, "title_case": false, "type": "code", - "value": "import operator\nfrom functools import reduce\n\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. 
If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature if temperature is not None else 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output # type: ignore\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" - }, - "input_value": { - "advanced": false, - "display_name": "Input", - "dynamic": false, - "info": "", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "name": "input_value", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "json_mode": { - "advanced": true, - "display_name": "JSON Mode", - "dynamic": false, - "info": "If True, it will output JSON regardless of passing a schema.", - "list": false, - "name": "json_mode", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "bool", - "value": false + "value": "from pathlib import Path\nfrom tempfile import NamedTemporaryFile\nfrom zipfile import ZipFile, is_zipfile\n\nfrom langflow.base.data.utils import TEXT_FILE_TYPES, parallel_load_data, parse_text_file_to_data\nfrom langflow.custom import Component\nfrom langflow.io import BoolInput, FileInput, IntInput, Output\nfrom langflow.schema import 
Data\n\n\nclass FileComponent(Component):\n \"\"\"Handles loading of individual or zipped text files.\n\n Processes multiple valid files within a zip archive if provided.\n\n Attributes:\n display_name: Display name of the component.\n description: Brief component description.\n icon: Icon to represent the component.\n name: Identifier for the component.\n inputs: Inputs required by the component.\n outputs: Output of the component after processing files.\n \"\"\"\n\n display_name = \"File\"\n description = \"Load a file to be used in your project.\"\n icon = \"file-text\"\n name = \"File\"\n\n inputs = [\n FileInput(\n name=\"path\",\n display_name=\"Path\",\n file_types=[*TEXT_FILE_TYPES, \"zip\"],\n info=f\"Supported file types: {', '.join([*TEXT_FILE_TYPES, 'zip'])}\",\n ),\n BoolInput(\n name=\"silent_errors\",\n display_name=\"Silent Errors\",\n advanced=True,\n info=\"If true, errors will not raise an exception.\",\n ),\n BoolInput(\n name=\"use_multithreading\",\n display_name=\"Use Multithreading\",\n advanced=True,\n info=\"If true, parallel processing will be enabled for zip files.\",\n ),\n IntInput(\n name=\"concurrency_multithreading\",\n display_name=\"Multithreading Concurrency\",\n advanced=True,\n info=\"The maximum number of workers to use, if concurrency is enabled\",\n value=4,\n ),\n ]\n\n outputs = [Output(display_name=\"Data\", name=\"data\", method=\"load_file\")]\n\n def load_file(self) -> Data:\n \"\"\"Load and parse file(s) from a zip archive.\n\n Raises:\n ValueError: If no file is uploaded or file path is invalid.\n\n Returns:\n Data: Parsed data from file(s).\n \"\"\"\n # Check if the file path is provided\n if not self.path:\n self.log(\"File path is missing.\")\n msg = \"Please upload a file for processing.\"\n\n raise ValueError(msg)\n\n resolved_path = Path(self.resolve_path(self.path))\n try:\n # Check if the file is a zip archive\n if is_zipfile(resolved_path):\n self.log(f\"Processing zip file: {resolved_path.name}.\")\n\n return self._process_zip_file(\n resolved_path,\n silent_errors=self.silent_errors,\n parallel=self.use_multithreading,\n )\n\n self.log(f\"Processing single file: {resolved_path.name}.\")\n\n return self._process_single_file(resolved_path, silent_errors=self.silent_errors)\n except FileNotFoundError:\n self.log(f\"File not found: {resolved_path.name}.\")\n\n raise\n\n def _process_zip_file(self, zip_path: Path, *, silent_errors: bool = False, parallel: bool = False) -> Data:\n \"\"\"Process text files within a zip archive.\n\n Args:\n zip_path: Path to the zip file.\n silent_errors: Suppresses errors if True.\n parallel: Enables parallel processing if True.\n\n Returns:\n list[Data]: Combined data from all valid files.\n\n Raises:\n ValueError: If no valid files found in the archive.\n \"\"\"\n data: list[Data] = []\n with ZipFile(zip_path, \"r\") as zip_file:\n # Filter file names based on extensions in TEXT_FILE_TYPES and ignore hidden files\n valid_files = [\n name\n for name in zip_file.namelist()\n if (\n any(name.endswith(ext) for ext in TEXT_FILE_TYPES)\n and not name.startswith(\"__MACOSX\")\n and not name.startswith(\".\")\n )\n ]\n\n # Raise an error if no valid files found\n if not valid_files:\n self.log(\"No valid files in the zip archive.\")\n\n # Return empty data if silent_errors is True\n if silent_errors:\n return data # type: ignore[return-value]\n\n # Raise an error if no valid files found\n msg = \"No valid files in the zip archive.\"\n raise ValueError(msg)\n\n # Define a function to process each file\n def 
process_file(file_name, silent_errors=silent_errors):\n with NamedTemporaryFile(delete=False) as temp_file:\n temp_path = Path(temp_file.name).with_name(file_name)\n with zip_file.open(file_name) as file_content:\n temp_path.write_bytes(file_content.read())\n try:\n return self._process_single_file(temp_path, silent_errors=silent_errors)\n finally:\n temp_path.unlink()\n\n # Process files in parallel if specified\n if parallel:\n self.log(\n f\"Initializing parallel Thread Pool Executor with max workers: \"\n f\"{self.concurrency_multithreading}.\"\n )\n\n # Process files in parallel\n initial_data = parallel_load_data(\n valid_files,\n silent_errors=silent_errors,\n load_function=process_file,\n max_concurrency=self.concurrency_multithreading,\n )\n\n # Filter out empty data\n data = list(filter(None, initial_data))\n else:\n # Sequential processing\n data = [process_file(file_name) for file_name in valid_files]\n\n self.log(f\"Successfully processed zip file: {zip_path.name}.\")\n\n return data # type: ignore[return-value]\n\n def _process_single_file(self, file_path: Path, *, silent_errors: bool = False) -> Data:\n \"\"\"Process a single file.\n\n Args:\n file_path: Path to the file.\n silent_errors: Suppresses errors if True.\n\n Returns:\n Data: Parsed data from the file.\n\n Raises:\n ValueError: For unsupported file formats.\n \"\"\"\n # Check if the file type is supported\n if not any(file_path.suffix == ext for ext in [\".\" + f for f in TEXT_FILE_TYPES]):\n self.log(f\"Unsupported file type: {file_path.suffix}\")\n\n # Return empty data if silent_errors is True\n if silent_errors:\n return Data()\n\n msg = f\"Unsupported file type: {file_path.suffix}\"\n raise ValueError(msg)\n\n try:\n # Parse the text file as appropriate\n data = parse_text_file_to_data(str(file_path), silent_errors=silent_errors) # type: ignore[assignment]\n if not data:\n data = Data()\n\n self.log(f\"Successfully processed file: {file_path.name}.\")\n except Exception as e:\n self.log(f\"Error processing file {file_path.name}: {e}\")\n\n # Return empty data if silent_errors is True\n if not silent_errors:\n raise\n\n data = Data()\n\n return data\n" }, - "max_tokens": { + "concurrency_multithreading": { + "_input_type": "IntInput", "advanced": true, - "display_name": "Max Tokens", + "display_name": "Multithreading Concurrency", "dynamic": false, - "info": "The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.", + "info": "The maximum number of workers to use, if concurrency is enabled", "list": false, - "name": "max_tokens", + "name": "concurrency_multithreading", "placeholder": "", "required": false, "show": true, "title_case": false, "trace_as_metadata": true, "type": "int", - "value": "" - }, - "model_kwargs": { - "advanced": true, - "display_name": "Model Kwargs", - "dynamic": false, - "info": "", - "list": false, - "name": "model_kwargs", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "type": "dict", - "value": {} + "value": 4 }, - "model_name": { + "path": { + "_input_type": "FileInput", "advanced": false, - "display_name": "Model Name", + "display_name": "Path", "dynamic": false, - "info": "", - "load_from_db": false, - "name": "model_name", - "options": [ - "gpt-4o-mini", - "gpt-4o", - "gpt-4-turbo", - "gpt-4-turbo-preview", - "gpt-4", - "gpt-3.5-turbo", - "gpt-3.5-turbo-0125" + "fileTypes": [ + "txt", + "md", + "mdx", + "csv", + "json", + "yaml", + "yml", + "xml", + "html", + "htm", + "pdf", + "docx", + "py", + "sh", + "sql", + "js", + "ts", + "tsx", + "zip" ], - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "str", - "value": "gpt-4o" - }, - "openai_api_base": { - "advanced": true, - "display_name": "OpenAI API Base", - "dynamic": false, - "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.", + "file_path": "", + "info": "Supported file types: txt, md, mdx, csv, json, yaml, yml, xml, html, htm, pdf, docx, py, sh, sql, js, ts, tsx, zip", "list": false, - "load_from_db": false, - "name": "openai_api_base", + "name": "path", "placeholder": "", "required": false, "show": true, "title_case": false, "trace_as_metadata": true, - "type": "str", + "type": "file", "value": "" }, - "output_schema": { - "advanced": true, - "display_name": "Schema", - "dynamic": false, - "info": "The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.", - "list": true, - "name": "output_schema", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "type": "dict", - "value": {} - }, - "seed": { - "advanced": true, - "display_name": "Seed", - "dynamic": false, - "info": "The seed controls the reproducibility of the job.", - "list": false, - "name": "seed", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_metadata": true, - "type": "int", - "value": 1 - }, - "stream": { + "silent_errors": { + "_input_type": "BoolInput", "advanced": true, - "display_name": "Stream", + "display_name": "Silent Errors", "dynamic": false, - "info": "Stream the response from the model. 
Streaming works only in Chat.", + "info": "If true, errors will not raise an exception.", "list": false, - "name": "stream", + "name": "silent_errors", "placeholder": "", "required": false, "show": true, @@ -3320,70 +3616,56 @@ "type": "bool", "value": false }, - "system_message": { + "use_multithreading": { + "_input_type": "BoolInput", "advanced": true, - "display_name": "System Message", - "dynamic": false, - "info": "System message to pass to the model.", - "input_types": [ - "Message" - ], - "list": false, - "load_from_db": false, - "name": "system_message", - "placeholder": "", - "required": false, - "show": true, - "title_case": false, - "trace_as_input": true, - "trace_as_metadata": true, - "type": "str", - "value": "" - }, - "temperature": { - "advanced": false, - "display_name": "Temperature", + "display_name": "Use Multithreading", "dynamic": false, - "info": "", + "info": "If true, parallel processing will be enabled for zip files.", "list": false, - "name": "temperature", + "name": "use_multithreading", "placeholder": "", "required": false, "show": true, "title_case": false, "trace_as_metadata": true, - "type": "float", - "value": 0.1 + "type": "bool", + "value": false } - } + }, + "tool_mode": false }, - "type": "OpenAIModel" + "type": "File" }, "dragging": false, - "height": 621, - "id": "OpenAIModel-DUuku", + "height": 232, + "id": "File-FJIuH", "position": { - "x": 3138.7638747868177, - "y": 413.0859233500825 + "x": 1318.9043936921921, + "y": 1486.3263312921847 }, "positionAbsolute": { - "x": 3138.7638747868177, - "y": 413.0859233500825 + "x": 1318.9043936921921, + "y": 1486.3263312921847 }, "selected": false, "type": "genericNode", - "width": 384 + "width": 320 } ], "viewport": { - "x": -113.2164904186335, - "y": -51.497463142696176, - "zoom": 0.32587982414714645 + "x": -298.6563130974548, + "y": -137.6024801797489, + "zoom": 0.5239796558908366 } }, - "description": "Visit https://docs.langflow.org/tutorials/rag-with-astradb for a detailed guide of this project.\nThis project give you both Ingestion and RAG in a single file. You'll need to visit https://astra.datastax.com/ to create an Astra DB instance, your Token and grab an API Endpoint.\nRunning this project requires you to add a file in the Files component, then define a Collection Name and click on the Play icon on the Astra DB component. 
\n\nAfter the ingestion ends you are ready to click on the Run button at the lower left corner and start asking questions about your data.", - "id": "74a6c5d0-9d9c-41c4-b6f1-92b20856d0de", + "description": "Get started with Retrieval-Augmented Generation (RAG) by ingesting data from documents and retrieving relevant chunks through vector similarity to provide contextual answers.", + "endpoint_name": null, + "icon": "Database", + "id": "c63bc197-85d6-4f39-87dc-2bc35523ec4e", + "gradient": "5", "is_component": false, - "last_tested_version": "1.0.12", - "name": "Vector Store RAG" -} \ No newline at end of file + "last_tested_version": "1.0.19.post2", + "name": "Vector Store RAG", + "tags": ["openai", "astradb", "rag", "q-a"] +} diff --git a/src/backend/base/langflow/initial_setup/starter_projects/YouTube Transcript Q&A.json b/src/backend/base/langflow/initial_setup/starter_projects/YouTube Transcript Q&A.json new file mode 100644 index 000000000000..5aa0bf98a9d6 --- /dev/null +++ b/src/backend/base/langflow/initial_setup/starter_projects/YouTube Transcript Q&A.json @@ -0,0 +1,1433 @@ +{ + "data": { + "edges": [ + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "ChatInput", + "id": "ChatInput-B1nYa", + "name": "message", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "input_value", + "id": "Agent-EGSx3", + "inputTypes": ["Message"], + "type": "str" + } + }, + "id": "reactflow__edge-ChatInput-B1nYa{œdataTypeœ:œChatInputœ,œidœ:œChatInput-B1nYaœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Agent-EGSx3{œfieldNameœ:œinput_valueœ,œidœ:œAgent-EGSx3œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "selected": false, + "source": "ChatInput-B1nYa", + "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-B1nYaœ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}", + "target": "Agent-EGSx3", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œAgent-EGSx3œ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "Agent", + "id": "Agent-EGSx3", + "name": "response", + "output_types": ["Message"] + }, + "targetHandle": { + "fieldName": "input_value", + "id": "ChatOutput-zUzVK", + "inputTypes": ["Message"], + "type": "str" + } + }, + "id": "reactflow__edge-Agent-EGSx3{œdataTypeœ:œAgentœ,œidœ:œAgent-EGSx3œ,œnameœ:œresponseœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-zUzVK{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-zUzVKœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "source": "Agent-EGSx3", + "sourceHandle": "{œdataTypeœ: œAgentœ, œidœ: œAgent-EGSx3œ, œnameœ: œresponseœ, œoutput_typesœ: [œMessageœ]}", + "target": "ChatOutput-zUzVK", + "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-zUzVKœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "YouTubeTranscriptsComponent", + "id": "YouTubeTranscriptsComponent-n8Z9Y", + "name": "transcripts_tool", + "output_types": ["Tool"] + }, + "targetHandle": { + "fieldName": "tools", + "id": "Agent-EGSx3", + "inputTypes": ["Tool", "BaseTool", "StructuredTool"], + "type": "other" + } + }, + "id": "reactflow__edge-YouTubeTranscriptsComponent-n8Z9Y{œdataTypeœ:œYouTubeTranscriptsComponentœ,œidœ:œYouTubeTranscriptsComponent-n8Z9Yœ,œnameœ:œtranscripts_toolœ,œoutput_typesœ:[œToolœ]}-Agent-EGSx3{œfieldNameœ:œtoolsœ,œidœ:œAgent-EGSx3œ,œinputTypesœ:[œToolœ,œBaseToolœ,œStructuredToolœ],œtypeœ:œotherœ}", + "source": 
"YouTubeTranscriptsComponent-n8Z9Y", + "sourceHandle": "{œdataTypeœ: œYouTubeTranscriptsComponentœ, œidœ: œYouTubeTranscriptsComponent-n8Z9Yœ, œnameœ: œtranscripts_toolœ, œoutput_typesœ: [œToolœ]}", + "target": "Agent-EGSx3", + "targetHandle": "{œfieldNameœ: œtoolsœ, œidœ: œAgent-EGSx3œ, œinputTypesœ: [œToolœ, œBaseToolœ, œStructuredToolœ], œtypeœ: œotherœ}" + } + ], + "nodes": [ + { + "data": { + "description": "Define the agent's instructions, then enter a task to complete using tools.", + "display_name": "Agent", + "id": "Agent-EGSx3", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Define the agent's instructions, then enter a task to complete using tools.", + "display_name": "Agent", + "documentation": "", + "edited": false, + "field_order": [ + "agent_llm", + "max_tokens", + "model_kwargs", + "json_mode", + "output_schema", + "model_name", + "openai_api_base", + "api_key", + "temperature", + "seed", + "output_parser", + "system_prompt", + "tools", + "input_value", + "handle_parsing_errors", + "verbose", + "max_iterations", + "agent_description", + "memory", + "sender", + "sender_name", + "n_messages", + "session_id", + "order", + "template", + "add_current_date_tool" + ], + "frozen": false, + "icon": "bot", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Response", + "method": "message_response", + "name": "response", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "add_current_date_tool": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Add tool Current Date", + "dynamic": false, + "info": "If true, will add a tool to the agent that returns the current date.", + "list": false, + "name": "add_current_date_tool", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "agent_description": { + "_input_type": "MultilineInput", + "advanced": true, + "display_name": "Agent Description", + "dynamic": false, + "info": "The description of the agent. This is only used when in Tool Mode. 
Defaults to 'A helpful assistant with access to the following tools:' and tools are added dynamically.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "agent_description", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "A helpful assistant with access to the following tools:" + }, + "agent_llm": { + "_input_type": "DropdownInput", + "advanced": false, + "combobox": false, + "display_name": "Model Provider", + "dynamic": false, + "info": "The provider of the language model that the agent will use to generate responses.", + "input_types": [], + "name": "agent_llm", + "options": [ + "Amazon Bedrock", + "Anthropic", + "Azure OpenAI", + "Groq", + "NVIDIA", + "OpenAI", + "Custom" + ], + "placeholder": "", + "real_time_refresh": true, + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "OpenAI" + }, + "api_key": { + "_input_type": "SecretStrInput", + "advanced": false, + "display_name": "OpenAI API Key", + "dynamic": false, + "info": "The OpenAI API Key to use for the OpenAI model.", + "input_types": ["Message"], + "load_from_db": true, + "name": "api_key", + "password": true, + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "OPENAI_API_KEY" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langchain_core.tools import StructuredTool\n\nfrom langflow.base.agents.agent import LCToolsAgentComponent\nfrom langflow.base.models.model_input_constants import ALL_PROVIDER_FIELDS, MODEL_PROVIDERS_DICT\nfrom langflow.base.models.model_utils import get_model_name\nfrom langflow.components.helpers import CurrentDateComponent\nfrom langflow.components.helpers.memory import MemoryComponent\nfrom langflow.components.langchain_utilities.tool_calling import ToolCallingAgentComponent\nfrom langflow.io import BoolInput, DropdownInput, MultilineInput, Output\nfrom langflow.schema.dotdict import dotdict\nfrom langflow.schema.message import Message\n\n\ndef set_advanced_true(component_input):\n component_input.advanced = True\n return component_input\n\n\nclass AgentComponent(ToolCallingAgentComponent):\n display_name: str = \"Agent\"\n description: str = \"Define the agent's instructions, then enter a task to complete using tools.\"\n icon = \"bot\"\n beta = False\n name = \"Agent\"\n\n memory_inputs = [set_advanced_true(component_input) for component_input in MemoryComponent().inputs]\n\n inputs = [\n DropdownInput(\n name=\"agent_llm\",\n display_name=\"Model Provider\",\n info=\"The provider of the language model that the agent will use to generate responses.\",\n options=[*sorted(MODEL_PROVIDERS_DICT.keys()), \"Custom\"],\n value=\"OpenAI\",\n real_time_refresh=True,\n input_types=[],\n ),\n *MODEL_PROVIDERS_DICT[\"OpenAI\"][\"inputs\"],\n MultilineInput(\n name=\"system_prompt\",\n display_name=\"Agent Instructions\",\n info=\"System Prompt: Initial instructions and context provided to guide the agent's behavior.\",\n value=\"You are a helpful assistant that can use tools to answer questions and perform 
tasks.\",\n advanced=False,\n ),\n *LCToolsAgentComponent._base_inputs,\n *memory_inputs,\n BoolInput(\n name=\"add_current_date_tool\",\n display_name=\"Add tool Current Date\",\n advanced=True,\n info=\"If true, will add a tool to the agent that returns the current date.\",\n value=True,\n ),\n ]\n outputs = [Output(name=\"response\", display_name=\"Response\", method=\"message_response\")]\n\n async def message_response(self) -> Message:\n llm_model, display_name = self.get_llm()\n self.model_name = get_model_name(llm_model, display_name=display_name)\n if llm_model is None:\n msg = \"No language model selected\"\n raise ValueError(msg)\n self.chat_history = self.get_memory_data()\n\n if self.add_current_date_tool:\n if not isinstance(self.tools, list): # type: ignore[has-type]\n self.tools = []\n # Convert CurrentDateComponent to a StructuredTool\n current_date_tool = CurrentDateComponent().to_toolkit()[0]\n if isinstance(current_date_tool, StructuredTool):\n self.tools.append(current_date_tool)\n else:\n msg = \"CurrentDateComponent must be converted to a StructuredTool\"\n raise ValueError(msg)\n\n if not self.tools:\n msg = \"Tools are required to run the agent.\"\n raise ValueError(msg)\n self.set(\n llm=llm_model,\n tools=self.tools,\n chat_history=self.chat_history,\n input_value=self.input_value,\n system_prompt=self.system_prompt,\n )\n agent = self.create_agent_runnable()\n return await self.run_agent(agent)\n\n def get_memory_data(self):\n memory_kwargs = {\n component_input.name: getattr(self, f\"{component_input.name}\") for component_input in self.memory_inputs\n }\n\n return MemoryComponent().set(**memory_kwargs).retrieve_messages()\n\n def get_llm(self):\n if isinstance(self.agent_llm, str):\n try:\n provider_info = MODEL_PROVIDERS_DICT.get(self.agent_llm)\n if provider_info:\n component_class = provider_info.get(\"component_class\")\n display_name = component_class.display_name\n inputs = provider_info.get(\"inputs\")\n prefix = provider_info.get(\"prefix\", \"\")\n return self._build_llm_model(component_class, inputs, prefix), display_name\n except Exception as e:\n msg = f\"Error building {self.agent_llm} language model\"\n raise ValueError(msg) from e\n return self.agent_llm, None\n\n def _build_llm_model(self, component, inputs, prefix=\"\"):\n model_kwargs = {input_.name: getattr(self, f\"{prefix}{input_.name}\") for input_ in inputs}\n return component.set(**model_kwargs).build_model()\n\n def delete_fields(self, build_config: dotdict, fields: dict | list[str]) -> None:\n \"\"\"Delete specified fields from build_config.\"\"\"\n for field in fields:\n build_config.pop(field, None)\n\n def update_input_types(self, build_config: dotdict) -> dotdict:\n \"\"\"Update input types for all fields in build_config.\"\"\"\n for key, value in build_config.items():\n if isinstance(value, dict):\n if value.get(\"input_types\") is None:\n build_config[key][\"input_types\"] = []\n elif hasattr(value, \"input_types\") and value.input_types is None:\n value.input_types = []\n return build_config\n\n def update_build_config(self, build_config: dotdict, field_value: str, field_name: str | None = None) -> dotdict:\n # Iterate over all providers in the MODEL_PROVIDERS_DICT\n # Existing logic for updating build_config\n if field_name == \"agent_llm\":\n provider_info = MODEL_PROVIDERS_DICT.get(field_value)\n if provider_info:\n component_class = provider_info.get(\"component_class\")\n if component_class and hasattr(component_class, \"update_build_config\"):\n # Call the component 
class's update_build_config method\n build_config = component_class.update_build_config(build_config, field_value, field_name)\n\n provider_configs: dict[str, tuple[dict, list[dict]]] = {\n provider: (\n MODEL_PROVIDERS_DICT[provider][\"fields\"],\n [\n MODEL_PROVIDERS_DICT[other_provider][\"fields\"]\n for other_provider in MODEL_PROVIDERS_DICT\n if other_provider != provider\n ],\n )\n for provider in MODEL_PROVIDERS_DICT\n }\n if field_value in provider_configs:\n fields_to_add, fields_to_delete = provider_configs[field_value]\n\n # Delete fields from other providers\n for fields in fields_to_delete:\n self.delete_fields(build_config, fields)\n\n # Add provider-specific fields\n if field_value == \"OpenAI\" and not any(field in build_config for field in fields_to_add):\n build_config.update(fields_to_add)\n else:\n build_config.update(fields_to_add)\n # Reset input types for agent_llm\n build_config[\"agent_llm\"][\"input_types\"] = []\n elif field_value == \"Custom\":\n # Delete all provider fields\n self.delete_fields(build_config, ALL_PROVIDER_FIELDS)\n # Update with custom component\n custom_component = DropdownInput(\n name=\"agent_llm\",\n display_name=\"Language Model\",\n options=[*sorted(MODEL_PROVIDERS_DICT.keys()), \"Custom\"],\n value=\"Custom\",\n real_time_refresh=True,\n input_types=[\"LanguageModel\"],\n )\n build_config.update({\"agent_llm\": custom_component.to_dict()})\n # Update input types for all fields\n build_config = self.update_input_types(build_config)\n\n # Validate required keys\n default_keys = [\n \"code\",\n \"_type\",\n \"agent_llm\",\n \"tools\",\n \"input_value\",\n \"add_current_date_tool\",\n \"system_prompt\",\n \"agent_description\",\n \"max_iterations\",\n \"handle_parsing_errors\",\n \"verbose\",\n ]\n missing_keys = [key for key in default_keys if key not in build_config]\n if missing_keys:\n msg = f\"Missing required keys in build_config: {missing_keys}\"\n raise ValueError(msg)\n if isinstance(self.agent_llm, str) and self.agent_llm in MODEL_PROVIDERS_DICT:\n provider_info = MODEL_PROVIDERS_DICT.get(self.agent_llm)\n if provider_info:\n component_class = provider_info.get(\"component_class\")\n prefix = provider_info.get(\"prefix\")\n if component_class and hasattr(component_class, \"update_build_config\"):\n # Call each component class's update_build_config method\n # remove the prefix from the field_name\n if isinstance(field_name, str) and isinstance(prefix, str):\n field_name = field_name.replace(prefix, \"\")\n build_config = component_class.update_build_config(build_config, field_value, field_name)\n\n return build_config\n" + }, + "handle_parsing_errors": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Handle Parse Errors", + "dynamic": false, + "info": "Should the Agent fix errors when reading user input for better processing?", + "list": false, + "name": "handle_parsing_errors", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "input_value": { + "_input_type": "MessageTextInput", + "advanced": false, + "display_name": "Input", + "dynamic": false, + "info": "The input provided by the user for the agent to process.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": true, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + 
"json_mode": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "JSON Mode", + "dynamic": false, + "info": "If True, it will output JSON regardless of passing a schema.", + "list": false, + "name": "json_mode", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + }, + "max_iterations": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Max Iterations", + "dynamic": false, + "info": "The maximum number of attempts the agent can make to complete its task before it stops.", + "list": false, + "name": "max_iterations", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 15 + }, + "max_tokens": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Max Tokens", + "dynamic": false, + "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.", + "list": false, + "name": "max_tokens", + "placeholder": "", + "range_spec": { + "max": 128000, + "min": 0, + "step": 0.1, + "step_type": "float" + }, + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": "" + }, + "memory": { + "_input_type": "HandleInput", + "advanced": true, + "display_name": "External Memory", + "dynamic": false, + "info": "Retrieve messages from an external memory. If empty, it will use the Langflow tables.", + "input_types": ["BaseChatMessageHistory"], + "list": false, + "name": "memory", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "model_kwargs": { + "_input_type": "DictInput", + "advanced": true, + "display_name": "Model Kwargs", + "dynamic": false, + "info": "Additional keyword arguments to pass to the model.", + "list": false, + "name": "model_kwargs", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "model_name": { + "_input_type": "DropdownInput", + "advanced": false, + "combobox": false, + "display_name": "Model Name", + "dynamic": false, + "info": "", + "name": "model_name", + "options": [ + "gpt-4o-mini", + "gpt-4o", + "gpt-4-turbo", + "gpt-4-turbo-preview", + "gpt-4", + "gpt-3.5-turbo", + "gpt-3.5-turbo-0125" + ], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "gpt-4o-mini" + }, + "n_messages": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Number of Messages", + "dynamic": false, + "info": "Number of messages to retrieve.", + "list": false, + "name": "n_messages", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 100 + }, + "openai_api_base": { + "_input_type": "StrInput", + "advanced": true, + "display_name": "OpenAI API Base", + "dynamic": false, + "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. 
You can change this to use other APIs like JinaChat, LocalAI and Prem.", + "list": false, + "load_from_db": false, + "name": "openai_api_base", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "order": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Order", + "dynamic": false, + "info": "Order of the messages.", + "name": "order", + "options": ["Ascending", "Descending"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "Ascending" + }, + "output_parser": { + "_input_type": "HandleInput", + "advanced": true, + "display_name": "Output Parser", + "dynamic": false, + "info": "The parser to use to parse the output of the model", + "input_types": ["OutputParser"], + "list": false, + "name": "output_parser", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "output_schema": { + "_input_type": "DictInput", + "advanced": true, + "display_name": "Schema", + "dynamic": false, + "info": "The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled. [DEPRECATED]", + "list": true, + "name": "output_schema", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "seed": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Seed", + "dynamic": false, + "info": "The seed controls the reproducibility of the job.", + "list": false, + "name": "seed", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 1 + }, + "sender": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Sender Type", + "dynamic": false, + "info": "Filter by sender type.", + "name": "sender", + "options": ["Machine", "User", "Machine and User"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "Machine and User" + }, + "sender_name": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Sender Name", + "dynamic": false, + "info": "Filter by sender name.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "sender_name", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "session_id": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Session ID", + "dynamic": false, + "info": "The session ID of the chat. 
If empty, the current session ID parameter will be used.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "session_id", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "system_prompt": { + "_input_type": "MultilineInput", + "advanced": false, + "display_name": "Agent Instructions", + "dynamic": false, + "info": "System Prompt: Initial instructions and context provided to guide the agent's behavior.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "system_prompt", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "You are an AI assistant capable of fetching and analyzing YouTube video transcripts to answer user questions. You have access to a YouTube Transcripts tool that can extract spoken content from YouTube videos.\n\n\n1. First, attempt to fetch the transcript using the following settings:\n \n YouTube_Transcripts(\n url=\"{{VIDEO_URL}}\",\n transcript_format=\"text\",\n language=\"\",\n translation=\"\"\n )\n \n\n2. If you receive an error indicating that only a specific language is available (e.g., \"only pt is available\"), retry the function call with the correct language setting:\n \n YouTube_Transcripts(\n url=\"{{VIDEO_URL}}\",\n transcript_format=\"text\",\n language=\"[specified_language_code]\",\n translation=\"\"\n )\n \n\n3. Once you have successfully retrieved the transcript, analyze its content to answer the user's question.\n\n4. If you need to refer to specific parts of the video, you can make an additional call to get chunked transcripts:\n \n YouTube_Transcripts(\n url=\"{{VIDEO_URL}}\",\n transcript_format=\"chunks\",\n chunk_size_seconds=60,\n language=\"[language_used_in_successful_call]\",\n translation=\"\"\n )\n \n\nWhen answering the user's question:\n- Provide a clear and concise answer based on the information in the transcript.\n- If the question cannot be answered using the transcript alone, state this clearly and explain why.\n- If you need to quote the transcript, use quotation marks and provide context.\n- If referring to specific timestamps, mention them in your answer (only if you've retrieved chunked transcripts).\n\nRemember, your primary goal is to accurately answer the user's question using the information available in the video transcript. If you encounter any issues or if the question cannot be answered based on the transcript, explain this clearly in your response." + }, + "temperature": { + "_input_type": "FloatInput", + "advanced": true, + "display_name": "Temperature", + "dynamic": false, + "info": "", + "list": false, + "name": "temperature", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "float", + "value": 0.1 + }, + "template": { + "_input_type": "MultilineInput", + "advanced": true, + "display_name": "Template", + "dynamic": false, + "info": "The template to use for formatting the data. 
It can contain the keys {text}, {sender} or any other key in the message data.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "{sender_name}: {text}" + }, + "tools": { + "_input_type": "HandleInput", + "advanced": false, + "display_name": "Tools", + "dynamic": false, + "info": "These are the tools that the agent can use to help with tasks.", + "input_types": ["Tool", "BaseTool", "StructuredTool"], + "list": true, + "name": "tools", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "verbose": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Verbose", + "dynamic": false, + "info": "", + "list": false, + "name": "verbose", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + } + }, + "tool_mode": false + }, + "type": "Agent" + }, + "dragging": false, + "height": 650, + "id": "Agent-EGSx3", + "position": { + "x": -631.1849833420482, + "y": -1088.2379740335518 + }, + "positionAbsolute": { + "x": -631.1849833420482, + "y": -1088.2379740335518 + }, + "selected": true, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "id": "ChatInput-B1nYa", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Get chat inputs from the Playground.", + "display_name": "Chat Input", + "documentation": "", + "edited": false, + "field_order": [ + "input_value", + "should_store_message", + "sender", + "sender_name", + "session_id", + "files", + "background_color", + "chat_icon", + "text_color" + ], + "frozen": false, + "icon": "MessagesSquare", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Message", + "method": "message_response", + "name": "message", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "background_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Background Color", + "dynamic": false, + "info": "The background color of the icon.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "background_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "chat_icon": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Icon", + "dynamic": false, + "info": "The icon of the message.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "chat_icon", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + 
"title_case": false, + "type": "code", + "value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_USER, MESSAGE_SENDER_USER\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n _background_color = self.background_color\n _text_color = self.text_color\n _icon = self.chat_icon\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\"background_color\": _background_color, \"text_color\": _text_color, \"icon\": _icon},\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n" + }, + "files": { + "_input_type": "FileInput", + "advanced": true, + "display_name": "Files", + "dynamic": false, + "fileTypes": [ + "txt", + "md", + "mdx", + "csv", + "json", + "yaml", + "yml", + "xml", + "html", + "htm", + "pdf", + "docx", + "py", + "sh", + "sql", + "js", + "ts", + "tsx", + "jpg", + "jpeg", + "png", + "bmp", + "image" + ], + "file_path": "", + "info": "Files to be sent with the message.", + "list": true, + "name": "files", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "file", + "value": "" + }, + "input_value": { + "_input_type": "MultilineInput", + "advanced": false, + "display_name": "Text", + "dynamic": 
false, + "info": "Message to be passed as input.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "can you see the transcript from this video and tell me what this is about? https://www.youtube.com/watch?v=UkV79sJAvz8" + }, + "sender": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Sender Type", + "dynamic": false, + "info": "Type of sender.", + "name": "sender", + "options": ["Machine", "User"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "User" + }, + "sender_name": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Sender Name", + "dynamic": false, + "info": "Name of the sender.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "sender_name", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "User" + }, + "session_id": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Session ID", + "dynamic": false, + "info": "The session ID of the chat. If empty, the current session ID parameter will be used.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "session_id", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "should_store_message": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Store Messages", + "dynamic": false, + "info": "Store the message in the history.", + "list": false, + "name": "should_store_message", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "text_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Text Color", + "dynamic": false, + "info": "The text color of the name", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "text_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + } + }, + "tool_mode": false + }, + "type": "ChatInput" + }, + "dragging": false, + "height": 234, + "id": "ChatInput-B1nYa", + "position": { + "x": -996.5201766752447, + "y": -667.1593168473087 + }, + "positionAbsolute": { + "x": -996.5201766752447, + "y": -667.1593168473087 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "description": "Display a chat message in the Playground.", + "display_name": "Chat Output", + "id": "ChatOutput-zUzVK", + "node": { + "base_classes": ["Message"], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Display a chat message in the Playground.", + "display_name": "Chat Output", + "documentation": "", + "edited": false, + "field_order": [ + "input_value", + "should_store_message", + "sender", + "sender_name", + "session_id", + "data_template", + 
"background_color", + "chat_icon", + "text_color" + ], + "frozen": false, + "icon": "MessagesSquare", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Message", + "method": "message_response", + "name": "message", + "selected": "Message", + "types": ["Message"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "background_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Background Color", + "dynamic": false, + "info": "The background color of the icon.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "background_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "chat_icon": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Icon", + "dynamic": false, + "info": "The icon of the message.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "chat_icon", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageInput, MessageTextInput, Output\nfrom langflow.schema.message import Message\nfrom langflow.schema.properties import Source\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.\",\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(\n display_name=\"Message\",\n name=\"message\",\n method=\"message_response\",\n ),\n ]\n\n def _build_source(self, _id: str | None, display_name: str | None, source: str | None) -> Source:\n source_dict = {}\n if _id:\n source_dict[\"id\"] = _id\n if display_name:\n source_dict[\"display_name\"] = display_name\n if source:\n source_dict[\"source\"] = source\n return Source(**source_dict)\n\n def message_response(self) -> Message:\n _source, _icon, _display_name, _source_id = self.get_properties_from_source_component()\n _background_color = self.background_color\n _text_color = self.text_color\n if self.chat_icon:\n _icon = self.chat_icon\n message = self.input_value if isinstance(self.input_value, Message) else Message(text=self.input_value)\n message.sender = self.sender\n message.sender_name = self.sender_name\n message.session_id = self.session_id\n message.flow_id = self.graph.flow_id if hasattr(self, \"graph\") else None\n message.properties.source = self._build_source(_source_id, _display_name, _source)\n message.properties.icon = _icon\n message.properties.background_color = _background_color\n message.properties.text_color = _text_color\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n" + }, + "data_template": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Data Template", + "dynamic": false, + "info": "Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "data_template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "{text}" + }, + "input_value": { + "_input_type": "MessageInput", + "advanced": false, + "display_name": "Text", + "dynamic": false, + "info": "Message to be passed as output.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "sender": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Sender Type", + "dynamic": false, + "info": "Type of sender.", + "name": "sender", + "options": ["Machine", "User"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "Machine" + }, + "sender_name": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Sender Name", + "dynamic": false, + "info": "Name of the sender.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "sender_name", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "AI" + }, + "session_id": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Session ID", + "dynamic": false, + "info": "The session ID of the chat. 
If empty, the current session ID parameter will be used.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "session_id", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "should_store_message": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Store Messages", + "dynamic": false, + "info": "Store the message in the history.", + "list": false, + "name": "should_store_message", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "text_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Text Color", + "dynamic": false, + "info": "The text color of the name", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "name": "text_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + } + }, + "tool_mode": false + }, + "type": "ChatOutput" + }, + "dragging": false, + "height": 234, + "id": "ChatOutput-zUzVK", + "position": { + "x": -261.7796479132812, + "y": -677.0769470644535 + }, + "positionAbsolute": { + "x": -261.7796479132812, + "y": -677.0769470644535 + }, + "selected": false, + "type": "genericNode", + "width": 320 + }, + { + "data": { + "id": "note-f5vkx", + "node": { + "description": "# YouTube Transcript Q&A\nA quick way to ask questions about YouTube video content through transcript analysis!\n\n## Instructions\n1. **Input Your Query**\n - Paste YouTube video URL\n - Add your question about the content\n - Format: \"What is this video about? [URL]\"\n\n2. **Get Analysis**\n - System extracts video transcript\n - AI processes your question\n - Provides detailed answer from content\n\n3. **Additional Features**\n - Handles multiple languages\n - Can reference specific timestamps\n - Supports follow-up questions\n\n4. **Best Practices**\n - Ensure video has captions/subtitles\n - Ask specific questions\n - For timing details, mention timestamps\n\n5. **Common Uses**\n - Content summaries\n - Finding specific information\n - Understanding key points\n - Fact checking video claims\n\nRemember: Quality depends on available transcripts! 
🎥💬", + "display_name": "", + "documentation": "", + "template": {} + }, + "type": "note" + }, + "dragging": false, + "height": 800, + "id": "note-f5vkx", + "position": { + "x": -1627.783352547462, + "y": -1212.3239149398214 + }, + "positionAbsolute": { + "x": -1627.783352547462, + "y": -1212.3239149398214 + }, + "resizing": false, + "selected": false, + "style": { + "height": 800, + "width": 600 + }, + "type": "noteNode", + "width": 600 + }, + { + "data": { + "id": "YouTubeTranscriptsComponent-n8Z9Y", + "node": { + "base_classes": ["Data", "Tool"], + "beta": false, + "category": "tools", + "conditional_paths": [], + "custom_fields": {}, + "description": "Extracts spoken content from YouTube videos as transcripts.", + "display_name": "YouTube Transcripts", + "documentation": "", + "edited": false, + "field_order": [ + "url", + "transcript_format", + "chunk_size_seconds", + "language", + "translation" + ], + "frozen": false, + "icon": "YouTube", + "key": "YouTubeTranscriptsComponent", + "legacy": false, + "lf_version": "1.0.19.post2", + "metadata": {}, + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Data", + "method": "build_youtube_transcripts", + "name": "transcripts", + "selected": "Data", + "types": ["Data"], + "value": "__UNDEFINED__" + }, + { + "cache": true, + "display_name": "Tool", + "method": "build_youtube_tool", + "name": "transcripts_tool", + "selected": "Tool", + "types": ["Tool"], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "chunk_size_seconds": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Chunk Size (seconds)", + "dynamic": false, + "info": "The size of each transcript chunk in seconds. Only applicable when 'Transcript Format' is set to 'chunks'.", + "list": false, + "name": "chunk_size_seconds", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 60 + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langchain.tools import StructuredTool\nfrom langchain_community.document_loaders import YoutubeLoader\nfrom langchain_community.document_loaders.youtube import TranscriptFormat\nfrom langchain_core.tools import ToolException\nfrom pydantic import BaseModel, Field\n\nfrom langflow.base.langchain_utilities.model import LCToolComponent\nfrom langflow.field_typing import Tool\nfrom langflow.inputs import DropdownInput, IntInput, MultilineInput\nfrom langflow.schema import Data\nfrom langflow.template import Output\n\n\nclass YoutubeApiSchema(BaseModel):\n \"\"\"Schema to define the input structure for the tool.\"\"\"\n\n url: str = Field(..., description=\"The YouTube URL to get transcripts from.\")\n transcript_format: TranscriptFormat = Field(\n TranscriptFormat.TEXT,\n description=\"The format of the transcripts. Either 'text' for a single \"\n \"text output or 'chunks' for timestamped chunks.\",\n )\n chunk_size_seconds: int = Field(\n 120,\n description=\"The size of each transcript chunk in seconds. Only \"\n \"applicable when 'Transcript Format' is set to 'chunks'.\",\n )\n language: str = Field(\n \"\",\n description=\"A comma-separated list of language codes in descending \" \"priority. 
Leave empty for default.\",\n )\n translation: str = Field(\n \"\", description=\"Translate the transcripts to the specified language. \" \"Leave empty for no translation.\"\n )\n\n\nclass YouTubeTranscriptsComponent(LCToolComponent):\n \"\"\"A component that extracts spoken content from YouTube videos as transcripts.\"\"\"\n\n display_name: str = \"YouTube Transcripts\"\n description: str = \"Extracts spoken content from YouTube videos as transcripts.\"\n icon: str = \"YouTube\"\n\n inputs = [\n MultilineInput(\n name=\"url\", display_name=\"Video URL\", info=\"Enter the YouTube video URL to get transcripts from.\"\n ),\n DropdownInput(\n name=\"transcript_format\",\n display_name=\"Transcript Format\",\n options=[\"text\", \"chunks\"],\n value=\"text\",\n info=\"The format of the transcripts. Either 'text' for a single output \"\n \"or 'chunks' for timestamped chunks.\",\n ),\n IntInput(\n name=\"chunk_size_seconds\",\n display_name=\"Chunk Size (seconds)\",\n value=60,\n advanced=True,\n info=\"The size of each transcript chunk in seconds. Only applicable when \"\n \"'Transcript Format' is set to 'chunks'.\",\n ),\n MultilineInput(\n name=\"language\",\n display_name=\"Language\",\n info=\"A comma-separated list of language codes in descending priority. \" \"Leave empty for default.\",\n ),\n DropdownInput(\n name=\"translation\",\n display_name=\"Translation Language\",\n advanced=True,\n options=[\"\", \"en\", \"es\", \"fr\", \"de\", \"it\", \"pt\", \"ru\", \"ja\", \"ko\", \"hi\", \"ar\", \"id\"],\n info=\"Translate the transcripts to the specified language. \" \"Leave empty for no translation.\",\n ),\n ]\n\n outputs = [\n Output(name=\"transcripts\", display_name=\"Data\", method=\"build_youtube_transcripts\"),\n Output(name=\"transcripts_tool\", display_name=\"Tool\", method=\"build_youtube_tool\"),\n ]\n\n def build_youtube_transcripts(self) -> Data | list[Data]:\n \"\"\"Method to build transcripts from the provided YouTube URL.\n\n Returns:\n Data | list[Data]: The transcripts of the video, either as a single\n Data object or a list of Data objects.\n \"\"\"\n try:\n loader = YoutubeLoader.from_youtube_url(\n self.url,\n transcript_format=TranscriptFormat.TEXT\n if self.transcript_format == \"text\"\n else TranscriptFormat.CHUNKS,\n chunk_size_seconds=self.chunk_size_seconds,\n language=self.language.split(\",\") if self.language else [\"en\"],\n translation=self.translation if self.translation else None,\n )\n\n transcripts = loader.load()\n\n if self.transcript_format == \"text\":\n # Extract only the page_content from the Document\n return Data(data={\"transcripts\": transcripts[0].page_content})\n # For chunks, extract page_content and metadata separately\n return [Data(data={\"content\": doc.page_content, \"metadata\": doc.metadata}) for doc in transcripts]\n\n except Exception as exc: # noqa: BLE001\n # Using a specific error type for the return value\n return Data(data={\"error\": f\"Failed to get YouTube transcripts: {exc!s}\"})\n\n def youtube_transcripts(\n self,\n url: str = \"\",\n transcript_format: TranscriptFormat = TranscriptFormat.TEXT,\n chunk_size_seconds: int = 120,\n language: str = \"\",\n translation: str = \"\",\n ) -> Data | list[Data]:\n \"\"\"Helper method to handle transcripts outside of component calls.\n\n Args:\n url: The YouTube URL to get transcripts from.\n transcript_format: Format of transcripts ('text' or 'chunks').\n chunk_size_seconds: Size of each transcript chunk in seconds.\n language: Comma-separated list of language codes.\n 
translation: Target language for translation.\n\n Returns:\n Data | list[Data]: Video transcripts as single Data or list of Data.\n \"\"\"\n try:\n if isinstance(transcript_format, str):\n transcript_format = TranscriptFormat(transcript_format)\n loader = YoutubeLoader.from_youtube_url(\n url,\n transcript_format=TranscriptFormat.TEXT\n if transcript_format == TranscriptFormat.TEXT\n else TranscriptFormat.CHUNKS,\n chunk_size_seconds=chunk_size_seconds,\n language=language.split(\",\") if language else [\"en\"],\n translation=translation if translation else None,\n )\n\n transcripts = loader.load()\n if transcript_format == TranscriptFormat.TEXT and len(transcripts) > 0:\n return Data(data={\"transcript\": transcripts[0].page_content})\n return [Data(data={\"content\": doc.page_content, \"metadata\": doc.metadata}) for doc in transcripts]\n except Exception as exc:\n msg = f\"Failed to get YouTube transcripts: {exc!s}\"\n raise ToolException(msg) from exc\n\n def build_youtube_tool(self) -> Tool:\n \"\"\"Method to build the transcripts tool.\n\n Returns:\n Tool: A structured tool that uses the transcripts method.\n\n Raises:\n RuntimeError: If tool creation fails.\n \"\"\"\n try:\n return StructuredTool.from_function(\n name=\"youtube_transcripts\",\n description=\"Get transcripts from YouTube videos.\",\n func=self.youtube_transcripts,\n args_schema=YoutubeApiSchema,\n )\n\n except Exception as exc:\n msg = f\"Failed to build the YouTube transcripts tool: {exc!s}\"\n raise RuntimeError(msg) from exc\n" + }, + "language": { + "_input_type": "MultilineInput", + "advanced": false, + "display_name": "Language", + "dynamic": false, + "info": "A comma-separated list of language codes in descending priority. Leave empty for default.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "language", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "transcript_format": { + "_input_type": "DropdownInput", + "advanced": false, + "combobox": false, + "display_name": "Transcript Format", + "dynamic": false, + "info": "The format of the transcripts. Either 'text' for a single output or 'chunks' for timestamped chunks.", + "name": "transcript_format", + "options": ["text", "chunks"], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "chunks" + }, + "translation": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "display_name": "Translation Language", + "dynamic": false, + "info": "Translate the transcripts to the specified language. 
Leave empty for no translation.", + "name": "translation", + "options": [ + "", + "en", + "es", + "fr", + "de", + "it", + "pt", + "ru", + "ja", + "ko", + "hi", + "ar", + "id" + ], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "url": { + "_input_type": "MultilineInput", + "advanced": false, + "display_name": "Video URL", + "dynamic": false, + "info": "Enter the YouTube video URL to get transcripts from.", + "input_types": ["Message"], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "url", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + } + }, + "tool_mode": false + }, + "type": "YouTubeTranscriptsComponent" + }, + "dragging": false, + "height": 475, + "id": "YouTubeTranscriptsComponent-n8Z9Y", + "position": { + "x": -996.7508450845454, + "y": -1169.8625689107164 + }, + "positionAbsolute": { + "x": -996.7508450845454, + "y": -1169.8625689107164 + }, + "selected": false, + "type": "genericNode", + "width": 320 + } + ], + "viewport": { + "x": 1337.7257173603357, + "y": 1096.2297230048744, + "zoom": 0.7807596345995297 + } + }, + "description": "Quickly get detailed answers to questions about YouTube videos by analyzing their transcripts.", + "endpoint_name": null, + "icon": "Youtube", + "id": "3b33c431-9b8b-4ba1-9372-04b785e590d3", + "gradient": "3", + "is_component": false, + "last_tested_version": "1.0.19.post2", + "name": "YouTube Transcript Q&A", + "tags": ["agents", "content-generation", "rag", "q-a"] +} diff --git a/src/backend/base/langflow/initial_setup/starter_projects/__init__.py b/src/backend/base/langflow/initial_setup/starter_projects/__init__.py index b8770da14e4e..c9bdb7d42120 100644 --- a/src/backend/base/langflow/initial_setup/starter_projects/__init__.py +++ b/src/backend/base/langflow/initial_setup/starter_projects/__init__.py @@ -1,19 +1,19 @@ from .basic_prompting import basic_prompting_graph from .blog_writer import blog_writer_graph +from .complex_agent import complex_agent_graph from .document_qa import document_qa_graph from .hierarchical_tasks_agent import hierarchical_tasks_agent_graph from .memory_chatbot import memory_chatbot_graph from .sequential_tasks_agent import sequential_tasks_agent_graph from .vector_store_rag import vector_store_rag_graph -from .complex_agent import complex_agent_graph __all__ = [ + "basic_prompting_graph", "blog_writer_graph", + "complex_agent_graph", "document_qa_graph", + "hierarchical_tasks_agent_graph", "memory_chatbot_graph", - "vector_store_rag_graph", - "basic_prompting_graph", "sequential_tasks_agent_graph", - "hierarchical_tasks_agent_graph", - "complex_agent_graph", + "vector_store_rag_graph", ] diff --git a/src/backend/base/langflow/initial_setup/starter_projects/basic_prompting.py b/src/backend/base/langflow/initial_setup/starter_projects/basic_prompting.py index 0f0f81ba7a2c..657138ee6b32 100644 --- a/src/backend/base/langflow/initial_setup/starter_projects/basic_prompting.py +++ b/src/backend/base/langflow/initial_setup/starter_projects/basic_prompting.py @@ -1,8 +1,8 @@ -from langflow.components.inputs.ChatInput import ChatInput -from langflow.components.models.OpenAIModel import OpenAIModelComponent -from langflow.components.outputs.ChatOutput import ChatOutput -from langflow.components.prompts.Prompt import PromptComponent -from 
langflow.graph.graph.base import Graph +from langflow.components.inputs import ChatInput +from langflow.components.models import OpenAIModelComponent +from langflow.components.outputs import ChatOutput +from langflow.components.prompts import PromptComponent +from langflow.graph import Graph def basic_prompting_graph(template: str | None = None): @@ -26,5 +26,4 @@ def basic_prompting_graph(template: str | None = None): chat_output = ChatOutput() chat_output.set(input_value=openai_component.text_response) - graph = Graph(start=chat_input, end=chat_output) - return graph + return Graph(start=chat_input, end=chat_output) diff --git a/src/backend/base/langflow/initial_setup/starter_projects/blog_writer.py b/src/backend/base/langflow/initial_setup/starter_projects/blog_writer.py index 9396ee651dc4..5ca700754983 100644 --- a/src/backend/base/langflow/initial_setup/starter_projects/blog_writer.py +++ b/src/backend/base/langflow/initial_setup/starter_projects/blog_writer.py @@ -1,12 +1,12 @@ from textwrap import dedent -from langflow.components.data.URL import URLComponent -from langflow.components.helpers.ParseData import ParseDataComponent -from langflow.components.inputs.TextInput import TextInputComponent -from langflow.components.models.OpenAIModel import OpenAIModelComponent -from langflow.components.outputs.ChatOutput import ChatOutput -from langflow.components.prompts.Prompt import PromptComponent -from langflow.graph.graph.base import Graph +from langflow.components.data import URLComponent +from langflow.components.inputs import TextInputComponent +from langflow.components.models import OpenAIModelComponent +from langflow.components.outputs import ChatOutput +from langflow.components.processing import ParseDataComponent +from langflow.components.prompts import PromptComponent +from langflow.graph import Graph def blog_writer_graph(template: str | None = None): @@ -28,7 +28,8 @@ def blog_writer_graph(template: str | None = None): text_input = TextInputComponent(_display_name="Instructions") text_input.set( - input_value="Use the references above for style to write a new blog/tutorial about Langflow and AI. Suggest non-covered topics." + input_value="Use the references above for style to write a new blog/tutorial about Langflow and AI. " + "Suggest non-covered topics." 
) prompt_component = PromptComponent() @@ -44,5 +45,4 @@ def blog_writer_graph(template: str | None = None): chat_output = ChatOutput() chat_output.set(input_value=openai_component.text_response) - graph = Graph(start=text_input, end=chat_output) - return graph + return Graph(start=text_input, end=chat_output) diff --git a/src/backend/base/langflow/initial_setup/starter_projects/complex_agent.py b/src/backend/base/langflow/initial_setup/starter_projects/complex_agent.py index 21dfc2ea928d..73dee2caf982 100644 --- a/src/backend/base/langflow/initial_setup/starter_projects/complex_agent.py +++ b/src/backend/base/langflow/initial_setup/starter_projects/complex_agent.py @@ -1,13 +1,12 @@ -from langflow.components.agents.CrewAIAgent import CrewAIAgentComponent -from langflow.components.agents.HierarchicalCrew import HierarchicalCrewComponent -from langflow.components.helpers.HierarchicalTask import HierarchicalTaskComponent -from langflow.components.inputs.ChatInput import ChatInput -from langflow.components.models.OpenAIModel import OpenAIModelComponent -from langflow.components.outputs.ChatOutput import ChatOutput -from langflow.components.prompts.Prompt import PromptComponent -from langflow.components.tools.SearchAPI import SearchAPIComponent -from langflow.components.tools.YfinanceTool import YfinanceToolComponent -from langflow.graph.graph.base import Graph +from langflow.components.crewai.crewai import CrewAIAgentComponent +from langflow.components.crewai.hierarchical_crew import HierarchicalCrewComponent +from langflow.components.crewai.hierarchical_task import HierarchicalTaskComponent +from langflow.components.inputs import ChatInput +from langflow.components.models import OpenAIModelComponent +from langflow.components.outputs import ChatOutput +from langflow.components.prompts import PromptComponent +from langflow.components.tools import SearchAPIComponent, YfinanceToolComponent +from langflow.graph import Graph def complex_agent_graph(): @@ -67,7 +66,8 @@ def complex_agent_graph(): template="""User's query: {query} -Respond to the user with as much as information as you can about the topic. Delete if needed. If it is just a general query (e.g a greeting) you can respond them directly.""", +Respond to the user with as much as information as you can about the topic. Delete if needed. 
+If it is just a general query (e.g a greeting) you can respond them directly.""", query=chat_input.message_response, ) manager_agent = CrewAIAgentComponent() @@ -89,10 +89,9 @@ def complex_agent_graph(): chat_output = ChatOutput() chat_output.set(input_value=crew_component.build_output) - graph = Graph( + return Graph( start=chat_input, end=chat_output, flow_name="Sequential Tasks Agent", description="This Agent runs tasks in a predefined sequence.", ) - return graph diff --git a/src/backend/base/langflow/initial_setup/starter_projects/document_qa.py b/src/backend/base/langflow/initial_setup/starter_projects/document_qa.py index b1d98a182c6e..562c94e90ba1 100644 --- a/src/backend/base/langflow/initial_setup/starter_projects/document_qa.py +++ b/src/backend/base/langflow/initial_setup/starter_projects/document_qa.py @@ -1,10 +1,10 @@ -from langflow.components.data.File import FileComponent -from langflow.components.helpers.ParseData import ParseDataComponent -from langflow.components.inputs.ChatInput import ChatInput -from langflow.components.models.OpenAIModel import OpenAIModelComponent -from langflow.components.outputs.ChatOutput import ChatOutput -from langflow.components.prompts.Prompt import PromptComponent -from langflow.graph.graph.base import Graph +from langflow.components.data import FileComponent +from langflow.components.inputs import ChatInput +from langflow.components.models import OpenAIModelComponent +from langflow.components.outputs import ChatOutput +from langflow.components.processing import ParseDataComponent +from langflow.components.prompts import PromptComponent +from langflow.graph import Graph def document_qa_graph(template: str | None = None): @@ -40,5 +40,4 @@ def document_qa_graph(template: str | None = None): chat_output = ChatOutput() chat_output.set(input_value=openai_component.text_response) - graph = Graph(start=chat_input, end=chat_output) - return graph + return Graph(start=chat_input, end=chat_output) diff --git a/src/backend/base/langflow/initial_setup/starter_projects/hierarchical_tasks_agent.py b/src/backend/base/langflow/initial_setup/starter_projects/hierarchical_tasks_agent.py index 9b00800e563c..5ecff69c302a 100644 --- a/src/backend/base/langflow/initial_setup/starter_projects/hierarchical_tasks_agent.py +++ b/src/backend/base/langflow/initial_setup/starter_projects/hierarchical_tasks_agent.py @@ -1,12 +1,12 @@ -from langflow.components.agents.CrewAIAgent import CrewAIAgentComponent -from langflow.components.agents.HierarchicalCrew import HierarchicalCrewComponent -from langflow.components.helpers.HierarchicalTask import HierarchicalTaskComponent -from langflow.components.inputs.ChatInput import ChatInput -from langflow.components.models.OpenAIModel import OpenAIModelComponent -from langflow.components.outputs.ChatOutput import ChatOutput -from langflow.components.prompts.Prompt import PromptComponent -from langflow.components.tools.SearchAPI import SearchAPIComponent -from langflow.graph.graph.base import Graph +from langflow.components.crewai.crewai import CrewAIAgentComponent +from langflow.components.crewai.hierarchical_crew import HierarchicalCrewComponent +from langflow.components.crewai.hierarchical_task import HierarchicalTaskComponent +from langflow.components.inputs import ChatInput +from langflow.components.models import OpenAIModelComponent +from langflow.components.outputs import ChatOutput +from langflow.components.prompts import PromptComponent +from langflow.components.tools import SearchAPIComponent +from langflow.graph import 
Graph def hierarchical_tasks_agent_graph(): @@ -37,7 +37,8 @@ def hierarchical_tasks_agent_graph(): template="""User's query: {query} -Respond to the user with as much as information as you can about the topic. Delete if needed. If it is just a general query (e.g a greeting) you can respond them directly.""", +Respond to the user with as much as information as you can about the topic. Delete if needed. +If it is just a general query (e.g a greeting) you can respond them directly.""", query=chat_input.message_response, ) manager_agent = CrewAIAgentComponent() @@ -61,10 +62,9 @@ def hierarchical_tasks_agent_graph(): chat_output = ChatOutput() chat_output.set(input_value=crew_component.build_output) - graph = Graph( + return Graph( start=chat_input, end=chat_output, flow_name="Sequential Tasks Agent", description="This Agent runs tasks in a predefined sequence.", ) - return graph diff --git a/src/backend/base/langflow/initial_setup/starter_projects/memory_chatbot.py b/src/backend/base/langflow/initial_setup/starter_projects/memory_chatbot.py index 1b0f44b9de14..ea3cbd32fa2a 100644 --- a/src/backend/base/langflow/initial_setup/starter_projects/memory_chatbot.py +++ b/src/backend/base/langflow/initial_setup/starter_projects/memory_chatbot.py @@ -1,8 +1,8 @@ -from langflow.components.helpers.Memory import MemoryComponent -from langflow.components.inputs.ChatInput import ChatInput -from langflow.components.models.OpenAIModel import OpenAIModelComponent -from langflow.components.outputs.ChatOutput import ChatOutput -from langflow.components.prompts.Prompt import PromptComponent +from langflow.components.helpers.memory import MemoryComponent +from langflow.components.inputs import ChatInput +from langflow.components.models import OpenAIModelComponent +from langflow.components.outputs import ChatOutput +from langflow.components.prompts import PromptComponent from langflow.graph import Graph @@ -24,5 +24,4 @@ def memory_chatbot_graph(template: str | None = None): chat_output = ChatOutput() chat_output.set(input_value=openai_component.text_response) - graph = Graph(chat_input, chat_output) - return graph + return Graph(chat_input, chat_output) diff --git a/src/backend/base/langflow/initial_setup/starter_projects/sequential_tasks_agent.py b/src/backend/base/langflow/initial_setup/starter_projects/sequential_tasks_agent.py index 7af7c4ec6374..5b0664f9eada 100644 --- a/src/backend/base/langflow/initial_setup/starter_projects/sequential_tasks_agent.py +++ b/src/backend/base/langflow/initial_setup/starter_projects/sequential_tasks_agent.py @@ -1,11 +1,11 @@ -from langflow.components.agents.SequentialCrew import SequentialCrewComponent -from langflow.components.agents.SequentialTaskAgent import SequentialTaskAgentComponent -from langflow.components.inputs.TextInput import TextInputComponent -from langflow.components.models.OpenAIModel import OpenAIModelComponent -from langflow.components.outputs.ChatOutput import ChatOutput -from langflow.components.prompts.Prompt import PromptComponent -from langflow.components.tools.SearchAPI import SearchAPIComponent -from langflow.graph.graph.base import Graph +from langflow.components.crewai.sequential_crew import SequentialCrewComponent +from langflow.components.crewai.sequential_task_agent import SequentialTaskAgentComponent +from langflow.components.inputs import TextInputComponent +from langflow.components.models import OpenAIModelComponent +from langflow.components.outputs import ChatOutput +from langflow.components.prompts import PromptComponent +from 
langflow.components.tools import SearchAPIComponent +from langflow.graph import Graph def sequential_tasks_agent_graph(): @@ -49,7 +49,8 @@ def sequential_tasks_agent_graph(): editor_task_agent = SequentialTaskAgentComponent() editor_task_agent.set( role="Editor", - goal="You should edit the information provided by the Researcher to make it more palatable and to not contain misleading information.", + goal="You should edit the information provided by the Researcher to make it more palatable and to not contain " + "misleading information.", backstory="You are the editor of the most reputable journal in the world.", llm=llm.build_model, task_description=revision_prompt_component.build_prompt, @@ -71,7 +72,8 @@ def sequential_tasks_agent_graph(): comedian_task_agent.set( role="Comedian", goal="You write comedic content based on the information provided by the editor.", - backstory="Your formal occupation is Comedian-in-Chief. You write jokes, do standup comedy, and write funny articles.", + backstory="Your formal occupation is Comedian-in-Chief. " + "You write jokes, do standup comedy, and write funny articles.", llm=llm.build_model, task_description=blog_prompt_component.build_prompt, expected_output="A small blog about the topic.", @@ -88,11 +90,9 @@ def sequential_tasks_agent_graph(): chat_output.set(input_value=crew_component.build_output) # Create the graph - graph = Graph( + return Graph( start=text_input, end=chat_output, flow_name="Sequential Tasks Agent", description="This Agent runs tasks in a predefined sequence.", ) - - return graph diff --git a/src/backend/base/langflow/initial_setup/starter_projects/vector_store_rag.py b/src/backend/base/langflow/initial_setup/starter_projects/vector_store_rag.py index b8679ad21cae..c7fe0ef0636d 100644 --- a/src/backend/base/langflow/initial_setup/starter_projects/vector_store_rag.py +++ b/src/backend/base/langflow/initial_setup/starter_projects/vector_store_rag.py @@ -1,15 +1,15 @@ from textwrap import dedent -from langflow.components.data.File import FileComponent -from langflow.components.embeddings.OpenAIEmbeddings import OpenAIEmbeddingsComponent -from langflow.components.helpers.ParseData import ParseDataComponent -from langflow.components.helpers.SplitText import SplitTextComponent -from langflow.components.inputs.ChatInput import ChatInput -from langflow.components.models.OpenAIModel import OpenAIModelComponent -from langflow.components.outputs.ChatOutput import ChatOutput -from langflow.components.prompts.Prompt import PromptComponent -from langflow.components.vectorstores.AstraDB import AstraVectorStoreComponent -from langflow.graph.graph.base import Graph +from langflow.components.data import FileComponent +from langflow.components.embeddings import OpenAIEmbeddingsComponent +from langflow.components.inputs import ChatInput +from langflow.components.models import OpenAIModelComponent +from langflow.components.outputs import ChatOutput +from langflow.components.processing import ParseDataComponent +from langflow.components.processing.split_text import SplitTextComponent +from langflow.components.prompts import PromptComponent +from langflow.components.vectorstores import AstraVectorStoreComponent +from langflow.graph import Graph def ingestion_graph(): @@ -24,8 +24,7 @@ def ingestion_graph(): ingest_data=text_splitter.split_text, ) - ingestion_graph = Graph(file_component, vector_store) - return ingestion_graph + return Graph(file_component, vector_store) def rag_graph(): @@ -57,8 +56,7 @@ def rag_graph(): chat_output = ChatOutput() 
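# Illustrative aside, not part of the vector_store_rag.py hunk: the starter-project
# refactors in this PR all follow the same pattern -- import components from package
# roots (e.g. langflow.components.inputs) instead of deep module paths, and return
# the Graph directly rather than binding it to a temporary name. A minimal sketch of
# that composition style, mirroring the basic_prompting hunk earlier in this diff:

from langflow.components.inputs import ChatInput
from langflow.components.models import OpenAIModelComponent
from langflow.components.outputs import ChatOutput
from langflow.graph import Graph


def minimal_chat_graph() -> Graph:
    chat_input = ChatInput()
    llm = OpenAIModelComponent()
    llm.set(input_value=chat_input.message_response)  # wire ChatInput into the model
    chat_output = ChatOutput()
    chat_output.set(input_value=llm.text_response)
    return Graph(start=chat_input, end=chat_output)
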
chat_output.set(input_value=openai_component.text_response) - graph = Graph(start=chat_input, end=chat_output) - return graph + return Graph(start=chat_input, end=chat_output) def vector_store_rag_graph(): diff --git a/src/backend/base/langflow/inputs/__init__.py b/src/backend/base/langflow/inputs/__init__.py index a68597da9676..b659ed3a985b 100644 --- a/src/backend/base/langflow/inputs/__init__.py +++ b/src/backend/base/langflow/inputs/__init__.py @@ -1,5 +1,6 @@ from .inputs import ( BoolInput, + CodeInput, DataInput, DefaultPromptField, DictInput, @@ -9,6 +10,7 @@ HandleInput, Input, IntInput, + LinkInput, MessageInput, MessageTextInput, MultilineInput, @@ -17,29 +19,33 @@ NestedDictInput, PromptInput, SecretStrInput, + SliderInput, StrInput, TableInput, ) __all__ = [ "BoolInput", + "CodeInput", "DataInput", + "DefaultPromptField", "DictInput", "DropdownInput", - "MultiselectInput", "FileInput", "FloatInput", "HandleInput", + "Input", "IntInput", + "LinkInput", "MessageInput", + "MessageTextInput", "MultilineInput", "MultilineSecretInput", + "MultiselectInput", "NestedDictInput", "PromptInput", "SecretStrInput", + "SliderInput", "StrInput", - "MessageTextInput", "TableInput", - "Input", - "DefaultPromptField", ] diff --git a/src/backend/base/langflow/inputs/input_mixin.py b/src/backend/base/langflow/inputs/input_mixin.py index 5e885c1a794a..b5a2098f8680 100644 --- a/src/backend/base/langflow/inputs/input_mixin.py +++ b/src/backend/base/langflow/inputs/input_mixin.py @@ -1,7 +1,14 @@ from enum import Enum -from typing import Annotated, Any, Optional +from typing import Annotated, Any -from pydantic import BaseModel, ConfigDict, Field, PlainSerializer, field_validator, model_serializer +from pydantic import ( + BaseModel, + ConfigDict, + Field, + PlainSerializer, + field_validator, + model_serializer, +) from langflow.field_typing.range_spec import RangeSpec from langflow.inputs.validators import CoalesceBool @@ -11,22 +18,25 @@ class FieldTypes(str, Enum): TEXT = "str" INTEGER = "int" - PASSWORD = "str" + PASSWORD = "str" # noqa: PIE796, S105 FLOAT = "float" BOOLEAN = "bool" DICT = "dict" NESTED_DICT = "NestedDict" FILE = "file" PROMPT = "prompt" + CODE = "code" OTHER = "other" TABLE = "table" + LINK = "link" + SLIDER = "slider" SerializableFieldTypes = Annotated[FieldTypes, PlainSerializer(lambda v: v.value, return_type=str)] # Base mixin for common input field attributes and methods -class BaseInputMixin(BaseModel, validate_assignment=True): # type: ignore +class BaseInputMixin(BaseModel, validate_assignment=True): # type: ignore[call-arg] model_config = ConfigDict( arbitrary_types_allowed=True, extra="forbid", @@ -50,27 +60,27 @@ class BaseInputMixin(BaseModel, validate_assignment=True): # type: ignore value: Any = "" """The value of the field. Default is an empty string.""" - display_name: Optional[str] = None + display_name: str | None = None """Display name of the field. Defaults to None.""" advanced: bool = False """Specifies if the field will be an advanced parameter (hidden). Defaults to False.""" - input_types: Optional[list[str]] = None + input_types: list[str] | None = None """List of input types for the handle when the field has more than one type. Default is an empty list.""" dynamic: bool = False """Specifies if the field is dynamic. Defaults to False.""" - info: Optional[str] = "" + info: str | None = "" """Additional information about the field to be shown in the tooltip. 
Defaults to an empty string.""" - real_time_refresh: Optional[bool] = None + real_time_refresh: bool | None = None """Specifies if the field should have real time refresh. `refresh_button` must be False. Defaults to None.""" - refresh_button: Optional[bool] = None + refresh_button: bool | None = None """Specifies if the field should have a refresh button. Defaults to False.""" - refresh_button_text: Optional[str] = None + refresh_button_text: str | None = None """Specifies the text for the refresh button. Defaults to None.""" title_case: bool = False @@ -96,6 +106,10 @@ def serialize_model(self, handler): return dump +class ToolModeMixin(BaseModel): + tool_mode: bool = False + + class InputTraceMixin(BaseModel): trace_as_input: bool = True @@ -116,29 +130,32 @@ class DatabaseLoadMixin(BaseModel): # Specific mixin for fields needing file interaction class FileMixin(BaseModel): - file_path: Optional[str] = Field(default="") + file_path: str | None = Field(default="") file_types: list[str] = Field(default=[], alias="fileTypes") @field_validator("file_types") @classmethod def validate_file_types(cls, v): if not isinstance(v, list): - raise ValueError("file_types must be a list") + msg = "file_types must be a list" + raise ValueError(msg) # noqa: TRY004 # types should be a list of extensions without the dot for file_type in v: if not isinstance(file_type, str): - raise ValueError("file_types must be a list of strings") + msg = "file_types must be a list of strings" + raise ValueError(msg) # noqa: TRY004 if file_type.startswith("."): - raise ValueError("file_types should not start with a dot") + msg = "file_types should not start with a dot" + raise ValueError(msg) return v class RangeMixin(BaseModel): - range_spec: Optional[RangeSpec] = None + range_spec: RangeSpec | None = None class DropDownMixin(BaseModel): - options: Optional[list[str]] = None + options: list[str] | None = None """List of options for the field. Only used when is_list=True. 
Default is an empty list.""" combobox: CoalesceBool = False """Variable that defines if the user can insert custom values in the dropdown.""" @@ -148,8 +165,25 @@ class MultilineMixin(BaseModel): multiline: CoalesceBool = True +class LinkMixin(BaseModel): + icon: str | None = None + """Icon to be displayed in the link.""" + text: str | None = None + """Text to be displayed in the link.""" + + +class SliderMixin(BaseModel): + min_label: str = Field(default="") + max_label: str = Field(default="") + min_label_icon: str = Field(default="") + max_label_icon: str = Field(default="") + slider_buttons: bool = Field(default=False) + slider_buttons_options: list[str] = Field(default=[]) + slider_input: bool = Field(default=False) + + class TableMixin(BaseModel): - table_schema: Optional[TableSchema | list[Column]] = None + table_schema: TableSchema | list[Column] | None = None @field_validator("table_schema") @classmethod @@ -158,4 +192,5 @@ def validate_table_schema(cls, v): return TableSchema(columns=v) if isinstance(v, TableSchema): return v - raise ValueError("table_schema must be a TableSchema or a list of Columns") + msg = "table_schema must be a TableSchema or a list of Columns" + raise ValueError(msg) diff --git a/src/backend/base/langflow/inputs/inputs.py b/src/backend/base/langflow/inputs/inputs.py index d2c4f848de99..a7346b3f20bd 100644 --- a/src/backend/base/langflow/inputs/inputs.py +++ b/src/backend/base/langflow/inputs/inputs.py @@ -1,6 +1,8 @@ import warnings -from typing import Any, AsyncIterator, Iterator, Optional, Union, get_args +from collections.abc import AsyncIterator, Iterator +from typing import Any, get_args +from pandas import DataFrame from pydantic import Field, field_validator from langflow.inputs.validators import CoalesceBool @@ -15,12 +17,15 @@ FieldTypes, FileMixin, InputTraceMixin, + LinkMixin, ListableInputMixin, MetadataTraceMixin, MultilineMixin, RangeMixin, SerializableFieldTypes, + SliderMixin, TableMixin, + ToolModeMixin, ) @@ -32,20 +37,24 @@ class TableInput(BaseInputMixin, MetadataTraceMixin, TableMixin, ListableInputMi @classmethod def validate_value(cls, v: Any, _info): # Check if value is a list of dicts + if isinstance(v, DataFrame): + v = v.to_dict(orient="records") if not isinstance(v, list): - raise ValueError(f"TableInput value must be a list of dictionaries or Data. Value '{v}' is not a list.") + msg = f"TableInput value must be a list of dictionaries or Data. Value '{v}' is not a list." + raise ValueError(msg) # noqa: TRY004 for item in v: - if not isinstance(item, (dict, Data)): - raise ValueError( - f"TableInput value must be a list of dictionaries or Data. Item '{item}' is not a dictionary or Data." + if not isinstance(item, dict | Data): + msg = ( + "TableInput value must be a list of dictionaries or Data. " + f"Item '{item}' is not a dictionary or Data." ) + raise ValueError(msg) # noqa: TRY004 return v class HandleInput(BaseInputMixin, ListableInputMixin, MetadataTraceMixin): - """ - Represents an Input that has a Handle to a specific type (e.g. BaseLanguageModel, BaseRetriever, etc.) + """Represents an Input that has a Handle to a specific type (e.g. BaseLanguageModel, BaseRetriever, etc.). This class inherits from the `BaseInputMixin` and `ListableInputMixin` classes. 
@@ -58,9 +67,8 @@ class HandleInput(BaseInputMixin, ListableInputMixin, MetadataTraceMixin): field_type: SerializableFieldTypes = FieldTypes.OTHER -class DataInput(HandleInput, InputTraceMixin, ListableInputMixin): - """ - Represents an Input that has a Handle that receives a Data object. +class DataInput(HandleInput, InputTraceMixin, ListableInputMixin, ToolModeMixin): + """Represents an Input that has a Handle that receives a Data object. Attributes: input_types (list[str]): A list of input types supported by this data input. @@ -69,10 +77,14 @@ class DataInput(HandleInput, InputTraceMixin, ListableInputMixin): input_types: list[str] = ["Data"] -class PromptInput(BaseInputMixin, ListableInputMixin, InputTraceMixin): +class PromptInput(BaseInputMixin, ListableInputMixin, InputTraceMixin, ToolModeMixin): field_type: SerializableFieldTypes = FieldTypes.PROMPT +class CodeInput(BaseInputMixin, ListableInputMixin, InputTraceMixin): + field_type: SerializableFieldTypes = FieldTypes.CODE + + # Applying mixins to a specific input type class StrInput(BaseInputMixin, ListableInputMixin, DatabaseLoadMixin, MetadataTraceMixin): field_type: SerializableFieldTypes = FieldTypes.TEXT @@ -81,8 +93,7 @@ class StrInput(BaseInputMixin, ListableInputMixin, DatabaseLoadMixin, MetadataTr @staticmethod def _validate_value(v: Any, _info): - """ - Validates the given value and returns the processed value. + """Validates the given value and returns the processed value. Args: v (Any): The value to be validated. @@ -98,17 +109,21 @@ def _validate_value(v: Any, _info): # Keep the warning for now, but we should change it to an error if _info.data.get("input_types") and v.__class__.__name__ not in _info.data.get("input_types"): warnings.warn( - f"Invalid value type {type(v)} for input {_info.data.get('name')}. Expected types: {_info.data.get('input_types')}" + f"Invalid value type {type(v)} for input {_info.data.get('name')}. " + f"Expected types: {_info.data.get('input_types')}", + stacklevel=4, ) else: - warnings.warn(f"Invalid value type {type(v)} for input {_info.data.get('name')}.") + warnings.warn( + f"Invalid value type {type(v)} for input {_info.data.get('name')}.", + stacklevel=4, + ) return v @field_validator("value") @classmethod def validate_value(cls, v: Any, _info): - """ - Validates the given value and returns the processed value. + """Validates the given value and returns the processed value. Args: v (Any): The value to be validated. @@ -121,12 +136,7 @@ def validate_value(cls, v: Any, _info): ValueError: If the value is not of a valid type or if the input is missing a required key. """ is_list = _info.data["is_list"] - value = None - if is_list: - value = [cls._validate_value(vv, _info) for vv in v] - else: - value = cls._validate_value(v, _info) - return value + return [cls._validate_value(vv, _info) for vv in v] if is_list else cls._validate_value(v, _info) class MessageInput(StrInput, InputTraceMixin): @@ -139,27 +149,28 @@ def _validate_value(v: Any, _info): return Message(**v) if isinstance(v, Message): return v - if isinstance(v, str): + if isinstance(v, str | AsyncIterator | Iterator): return Message(text=v) - raise ValueError(f"Invalid value type {type(v)}") + msg = f"Invalid value type {type(v)}" + raise ValueError(msg) -class MessageTextInput(StrInput, MetadataTraceMixin, InputTraceMixin): - """ - Represents a text input component for the Langflow system. 
+class MessageTextInput(StrInput, MetadataTraceMixin, InputTraceMixin, ToolModeMixin): + """Represents a text input component for the Langflow system. - This component is used to handle text inputs in the Langflow system. It provides methods for validating and processing text values. + This component is used to handle text inputs in the Langflow system. + It provides methods for validating and processing text values. Attributes: - input_types (list[str]): A list of input types that this component supports. In this case, it supports the "Message" input type. + input_types (list[str]): A list of input types that this component supports. + In this case, it supports the "Message" input type. """ input_types: list[str] = ["Message"] @staticmethod def _validate_value(v: Any, _info): - """ - Validates the given value and returns the processed value. + """Validates the given value and returns the processed value. Args: v (Any): The value to be validated. @@ -184,20 +195,22 @@ def _validate_value(v: Any, _info): else: keys = ", ".join(v.data.keys()) input_name = _info.data["name"] - raise ValueError( + msg = ( f"The input to '{input_name}' must contain the key '{v.text_key}'." - f"You can set `text_key` to one of the following keys: {keys} or set the value using another Component." + f"You can set `text_key` to one of the following keys: {keys} " + "or set the value using another Component." ) - elif isinstance(v, (AsyncIterator, Iterator)): + raise ValueError(msg) + elif isinstance(v, AsyncIterator | Iterator): value = v else: - raise ValueError(f"Invalid value type {type(v)}") + msg = f"Invalid value type {type(v)}" + raise ValueError(msg) # noqa: TRY004 return value -class MultilineInput(MessageTextInput, MultilineMixin, InputTraceMixin): - """ - Represents a multiline input field. +class MultilineInput(MessageTextInput, MultilineMixin, InputTraceMixin, ToolModeMixin): + """Represents a multiline input field. Attributes: field_type (SerializableFieldTypes): The type of the field. Defaults to FieldTypes.TEXT. @@ -209,8 +222,7 @@ class MultilineInput(MessageTextInput, MultilineMixin, InputTraceMixin): class MultilineSecretInput(MessageTextInput, MultilineMixin, InputTraceMixin): - """ - Represents a multiline input field. + """Represents a multiline input field. Attributes: field_type (SerializableFieldTypes): The type of the field. Defaults to FieldTypes.TEXT. @@ -223,8 +235,7 @@ class MultilineSecretInput(MessageTextInput, MultilineMixin, InputTraceMixin): class SecretStrInput(BaseInputMixin, DatabaseLoadMixin): - """ - Represents a field with password field type. + """Represents a field with password field type. This class inherits from `BaseInputMixin` and `DatabaseLoadMixin`. @@ -242,8 +253,7 @@ class SecretStrInput(BaseInputMixin, DatabaseLoadMixin): @field_validator("value") @classmethod def validate_value(cls, v: Any, _info): - """ - Validates the given value and returns the processed value. + """Validates the given value and returns the processed value. Args: v (Any): The value to be validated. @@ -266,20 +276,24 @@ def validate_value(cls, v: Any, _info): else: keys = ", ".join(v.data.keys()) input_name = _info.data["name"] - raise ValueError( + msg = ( f"The input to '{input_name}' must contain the key '{v.text_key}'." - f"You can set `text_key` to one of the following keys: {keys} or set the value using another Component." + f"You can set `text_key` to one of the following keys: {keys} " + "or set the value using another Component." 
) - elif isinstance(v, (AsyncIterator, Iterator)): + raise ValueError(msg) + elif isinstance(v, AsyncIterator | Iterator): value = v + elif v is None: + value = None else: - raise ValueError(f"Invalid value type `{type(v)}` for input `{_info.data['name']}`") + msg = f"Invalid value type `{type(v)}` for input `{_info.data['name']}`" + raise ValueError(msg) return value class IntInput(BaseInputMixin, ListableInputMixin, RangeMixin, MetadataTraceMixin): - """ - Represents an integer field. + """Represents an integer field. This class represents an integer input and provides functionality for handling integer values. It inherits from the `BaseInputMixin`, `ListableInputMixin`, and `RangeMixin` classes. @@ -293,8 +307,7 @@ class IntInput(BaseInputMixin, ListableInputMixin, RangeMixin, MetadataTraceMixi @field_validator("value") @classmethod def validate_value(cls, v: Any, _info): - """ - Validates the given value and returns the processed value. + """Validates the given value and returns the processed value. Args: v (Any): The value to be validated. @@ -306,17 +319,16 @@ def validate_value(cls, v: Any, _info): Raises: ValueError: If the value is not of a valid type or if the input is missing a required key. """ - - if v and not isinstance(v, (int, float)): - raise ValueError(f"Invalid value type {type(v)} for input {_info.data.get('name')}.") + if v and not isinstance(v, int | float): + msg = f"Invalid value type {type(v)} for input {_info.data.get('name')}." + raise ValueError(msg) if isinstance(v, float): v = int(v) return v class FloatInput(BaseInputMixin, ListableInputMixin, RangeMixin, MetadataTraceMixin): - """ - Represents a float field. + """Represents a float field. This class represents a float input and provides functionality for handling float values. It inherits from the `BaseInputMixin`, `ListableInputMixin`, and `RangeMixin` classes. @@ -330,8 +342,7 @@ class FloatInput(BaseInputMixin, ListableInputMixin, RangeMixin, MetadataTraceMi @field_validator("value") @classmethod def validate_value(cls, v: Any, _info): - """ - Validates the given value and returns the processed value. + """Validates the given value and returns the processed value. Args: v (Any): The value to be validated. @@ -343,16 +354,16 @@ def validate_value(cls, v: Any, _info): Raises: ValueError: If the value is not of a valid type or if the input is missing a required key. """ - if v and not isinstance(v, (int, float)): - raise ValueError(f"Invalid value type {type(v)} for input {_info.data.get('name')}.") + if v and not isinstance(v, int | float): + msg = f"Invalid value type {type(v)} for input {_info.data.get('name')}." + raise ValueError(msg) if isinstance(v, int): v = float(v) return v class BoolInput(BaseInputMixin, ListableInputMixin, MetadataTraceMixin): - """ - Represents a boolean field. + """Represents a boolean field. This class represents a boolean input and provides functionality for handling boolean values. It inherits from the `BaseInputMixin` and `ListableInputMixin` classes. @@ -367,8 +378,7 @@ class BoolInput(BaseInputMixin, ListableInputMixin, MetadataTraceMixin): class NestedDictInput(BaseInputMixin, ListableInputMixin, MetadataTraceMixin, InputTraceMixin): - """ - Represents a nested dictionary field. + """Represents a nested dictionary field. This class represents a nested dictionary input and provides functionality for handling dictionary values. It inherits from the `BaseInputMixin` and `ListableInputMixin` classes. 
@@ -379,12 +389,11 @@ class NestedDictInput(BaseInputMixin, ListableInputMixin, MetadataTraceMixin, In """ field_type: SerializableFieldTypes = FieldTypes.NESTED_DICT - value: Optional[dict | Data] = {} + value: dict | Data | None = {} class DictInput(BaseInputMixin, ListableInputMixin, InputTraceMixin): - """ - Represents a dictionary field. + """Represents a dictionary field. This class represents a dictionary input and provides functionality for handling dictionary values. It inherits from the `BaseInputMixin` and `ListableInputMixin` classes. @@ -395,12 +404,11 @@ class DictInput(BaseInputMixin, ListableInputMixin, InputTraceMixin): """ field_type: SerializableFieldTypes = FieldTypes.DICT - value: Optional[dict] = {} + value: dict | None = {} -class DropdownInput(BaseInputMixin, DropDownMixin, MetadataTraceMixin): - """ - Represents a dropdown input field. +class DropdownInput(BaseInputMixin, DropDownMixin, MetadataTraceMixin, ToolModeMixin): + """Represents a dropdown input field. This class represents a dropdown input field and provides functionality for handling dropdown values. It inherits from the `BaseInputMixin` and `DropDownMixin` classes. @@ -417,8 +425,7 @@ class DropdownInput(BaseInputMixin, DropDownMixin, MetadataTraceMixin): class MultiselectInput(BaseInputMixin, ListableInputMixin, DropDownMixin, MetadataTraceMixin): - """ - Represents a multiselect input field. + """Represents a multiselect input field. This class represents a multiselect input field and provides functionality for handling multiselect values. It inherits from the `BaseInputMixin`, `ListableInputMixin` and `DropDownMixin` classes. @@ -439,16 +446,17 @@ class MultiselectInput(BaseInputMixin, ListableInputMixin, DropDownMixin, Metada def validate_value(cls, v: Any, _info): # Check if value is a list of dicts if not isinstance(v, list): - raise ValueError(f"MultiselectInput value must be a list. Value: '{v}'") + msg = f"MultiselectInput value must be a list. Value: '{v}'" + raise ValueError(msg) # noqa: TRY004 for item in v: if not isinstance(item, str): - raise ValueError(f"MultiselectInput value must be a list of strings. Item: '{item}' is not a string") + msg = f"MultiselectInput value must be a list of strings. Item: '{item}' is not a string" + raise ValueError(msg) # noqa: TRY004 return v class FileInput(BaseInputMixin, ListableInputMixin, FileMixin, MetadataTraceMixin): - """ - Represents a file field. + """Represents a file field. This class represents a file input and provides functionality for handling file values. It inherits from the `BaseInputMixin`, `ListableInputMixin`, and `FileMixin` classes. 
@@ -460,42 +468,52 @@ class FileInput(BaseInputMixin, ListableInputMixin, FileMixin, MetadataTraceMixi field_type: SerializableFieldTypes = FieldTypes.FILE +class LinkInput(BaseInputMixin, LinkMixin): + field_type: SerializableFieldTypes = FieldTypes.LINK + + +class SliderInput(BaseInputMixin, RangeMixin, SliderMixin): + field_type: SerializableFieldTypes = FieldTypes.SLIDER + + DEFAULT_PROMPT_INTUT_TYPES = ["Message", "Text"] class DefaultPromptField(Input): name: str - display_name: Optional[str] = None + display_name: str | None = None field_type: str = "str" - advanced: bool = False multiline: bool = True input_types: list[str] = DEFAULT_PROMPT_INTUT_TYPES value: Any = "" # Set the value to empty string -InputTypes = Union[ - Input, - DefaultPromptField, - BoolInput, - DataInput, - DictInput, - DropdownInput, - MultiselectInput, - FileInput, - FloatInput, - HandleInput, - IntInput, - MultilineInput, - MultilineSecretInput, - NestedDictInput, - PromptInput, - SecretStrInput, - StrInput, - MessageTextInput, - MessageInput, - TableInput, -] +InputTypes = ( + Input + | DefaultPromptField + | BoolInput + | DataInput + | DictInput + | DropdownInput + | MultiselectInput + | FileInput + | FloatInput + | HandleInput + | IntInput + | MultilineInput + | MultilineSecretInput + | NestedDictInput + | PromptInput + | CodeInput + | SecretStrInput + | StrInput + | MessageTextInput + | MessageInput + | TableInput + | LinkInput + | SliderInput +) InputTypesMap: dict[str, type[InputTypes]] = {t.__name__: t for t in get_args(InputTypes)} @@ -507,5 +525,5 @@ def instantiate_input(input_type: str, data: dict) -> InputTypes: data["field_type"] = data.pop("type") if input_type_class: return input_type_class(**data) - else: - raise ValueError(f"Invalid input type: {input_type}") + msg = f"Invalid input type: {input_type}" + raise ValueError(msg) diff --git a/src/backend/base/langflow/inputs/utils.py b/src/backend/base/langflow/inputs/utils.py new file mode 100644 index 000000000000..8bca38efe43b --- /dev/null +++ b/src/backend/base/langflow/inputs/utils.py @@ -0,0 +1,32 @@ +from typing import TYPE_CHECKING, Any + +if TYPE_CHECKING: + from langflow.inputs.inputs import InputTypes, InputTypesMap +else: + InputTypes = Any + InputTypesMap = Any + +# Lazy import for InputTypesMap +_InputTypesMap: dict[str, type["InputTypes"]] | None = None + + +def get_input_types_map(): + global _InputTypesMap # noqa: PLW0603 + if _InputTypesMap is None: + from langflow.inputs.inputs import InputTypesMap + + _InputTypesMap = InputTypesMap + return _InputTypesMap + + +def instantiate_input(input_type: str, data: dict) -> InputTypes: + input_types_map = get_input_types_map() + + input_type_class = input_types_map.get(input_type) + if "type" in data: + # Replace with field_type + data["field_type"] = data.pop("type") + if input_type_class: + return input_type_class(**data) + msg = f"Invalid input type: {input_type}" + raise ValueError(msg) diff --git a/src/backend/base/langflow/inputs/validators.py b/src/backend/base/langflow/inputs/validators.py index 7056265f89a3..467bd77d6f29 100644 --- a/src/backend/base/langflow/inputs/validators.py +++ b/src/backend/base/langflow/inputs/validators.py @@ -3,7 +3,7 @@ from pydantic import PlainValidator -def validate_boolean(value: bool) -> bool: +def validate_boolean(value: bool) -> bool: # noqa: FBT001 valid_trues = ["True", "true", "1", "yes"] valid_falses = ["False", "false", "0", "no"] if value in valid_trues: @@ -12,8 +12,8 @@ def validate_boolean(value: bool) -> bool: return False if 
isinstance(value, bool): return value - else: - raise ValueError("Value must be a boolean") + msg = "Value must be a boolean" + raise ValueError(msg) CoalesceBool = Annotated[bool, PlainValidator(validate_boolean)] diff --git a/src/backend/base/langflow/interface/importing/utils.py b/src/backend/base/langflow/interface/importing/utils.py index 963a51ccb8a3..537a5c4e934e 100644 --- a/src/backend/base/langflow/interface/importing/utils.py +++ b/src/backend/base/langflow/interface/importing/utils.py @@ -5,7 +5,7 @@ def import_module(module_path: str) -> Any: - """Import module from module path""" + """Import module from module path.""" if "from" not in module_path: # Import the module using the module path return importlib.import_module(module_path) @@ -19,7 +19,7 @@ def import_module(module_path: str) -> Any: def import_class(class_path: str) -> Any: - """Import class from class path""" + """Import class from class path.""" module_path, class_name = class_path.rsplit(".", 1) module = import_module(module_path) return getattr(module, class_name) diff --git a/src/backend/base/langflow/interface/initialize/loading.py b/src/backend/base/langflow/interface/initialize/loading.py index f0ae915adc39..9bd831112e11 100644 --- a/src/backend/base/langflow/interface/initialize/loading.py +++ b/src/backend/base/langflow/interface/initialize/loading.py @@ -1,7 +1,9 @@ +from __future__ import annotations + import inspect import os import warnings -from typing import TYPE_CHECKING, Any, Type +from typing import TYPE_CHECKING, Any import orjson from loguru import logger @@ -14,52 +16,58 @@ if TYPE_CHECKING: from langflow.custom import Component, CustomComponent + from langflow.events.event_manager import EventManager from langflow.graph.vertex.base import Vertex -async def instantiate_class( - vertex: "Vertex", +def instantiate_class( + vertex: Vertex, user_id=None, + event_manager: EventManager | None = None, ) -> Any: - """Instantiate class from module type and key, and params""" - + """Instantiate class from module type and key, and params.""" vertex_type = vertex.vertex_type base_type = vertex.base_type logger.debug(f"Instantiating {vertex_type} of type {base_type}") if not base_type: - raise ValueError("No base type provided for vertex") + msg = "No base type provided for vertex" + raise ValueError(msg) custom_params = get_params(vertex.params) code = custom_params.pop("code") - class_object: Type["CustomComponent" | "Component"] = eval_custom_component_code(code) - custom_component: "CustomComponent" | "Component" = class_object( + class_object: type[CustomComponent | Component] = eval_custom_component_code(code) + custom_component: CustomComponent | Component = class_object( _user_id=user_id, _parameters=custom_params, _vertex=vertex, _tracing_service=get_tracing_service(), + _id=vertex.id, ) + if hasattr(custom_component, "set_event_manager"): + custom_component.set_event_manager(event_manager) return custom_component, custom_params async def get_instance_results( custom_component, custom_params: dict, - vertex: "Vertex", + vertex: Vertex, + *, fallback_to_env_vars: bool = False, base_type: str = "component", ): custom_params = update_params_with_load_from_db_fields( - custom_component, custom_params, vertex.load_from_db_fields, fallback_to_env_vars + custom_component, custom_params, vertex.load_from_db_fields, fallback_to_env_vars=fallback_to_env_vars ) with warnings.catch_warnings(): warnings.filterwarnings("ignore", category=PydanticDeprecatedSince20) if base_type == "custom_components": return 
await build_custom_component(params=custom_params, custom_component=custom_component) - elif base_type == "component": + if base_type == "component": return await build_component(params=custom_params, custom_component=custom_component) - else: - raise ValueError(f"Base type {base_type} not found.") + msg = f"Base type {base_type} not found." + raise ValueError(msg) def get_params(vertex_params): @@ -70,7 +78,7 @@ def get_params(vertex_params): def convert_params_to_sets(params): - """Convert certain params to sets""" + """Convert certain params to sets.""" if "allowed_special" in params: params["allowed_special"] = set(params["allowed_special"]) if "disallowed_special" in params: @@ -82,12 +90,11 @@ def convert_kwargs(params): # Loop through items to avoid repeated lookups items_to_remove = [] for key, value in params.items(): - if "kwargs" in key or "config" in key: - if isinstance(value, str): - try: - params[key] = orjson.loads(value) - except orjson.JSONDecodeError: - items_to_remove.append(key) + if ("kwargs" in key or "config" in key) and isinstance(value, str): + try: + params[key] = orjson.loads(value) + except orjson.JSONDecodeError: + items_to_remove.append(key) # Remove invalid keys outside the loop to avoid modifying dict during iteration for key in items_to_remove: @@ -97,50 +104,41 @@ def convert_kwargs(params): def update_params_with_load_from_db_fields( - custom_component: "CustomComponent", + custom_component: CustomComponent, params, load_from_db_fields, + *, fallback_to_env_vars=False, ): - # For each field in load_from_db_fields, we will check if it's in the params - # and if it is, we will get the value from the custom_component.keys(name) - # and update the params with the value for field in load_from_db_fields: - if field in params: - try: - key = None - try: - key = custom_component.variables(params[field], field) - except ValueError as e: - # check if "User id is not set" is in the error message - if "User id is not set" in str(e) and not fallback_to_env_vars: - raise e - logger.debug(str(e)) - if fallback_to_env_vars and key is None: - var = os.getenv(params[field]) - if var is None: - raise ValueError(f"Environment variable {params[field]} is not set.") - key = var - logger.info(f"Using environment variable {params[field]} for {field}") - if key is None: - logger.warning(f"Could not get value for {field}. Setting it to None.") - - params[field] = key - - except TypeError as exc: - raise exc - - except Exception as exc: - logger.error(f"Failed to get value for {field} from custom component. Setting it to None. Error: {exc}") - - params[field] = None + if field not in params: + continue + + try: + key = custom_component.variables(params[field], field) + except ValueError as e: + if any(reason in str(e) for reason in ["User id is not set", "variable not found."]): + raise + logger.debug(str(e)) + key = None + + if fallback_to_env_vars and key is None: + key = os.getenv(params[field]) + if key: + logger.info(f"Using environment variable {params[field]} for {field}") + else: + logger.error(f"Environment variable {params[field]} is not set.") + + params[field] = key if key is not None else None + if key is None: + logger.warning(f"Could not get value for {field}. 
Setting it to None.") return params async def build_component( params: dict, - custom_component: "Component", + custom_component: Component, ): # Now set the params as attributes of the custom_component custom_component.set_attributes(params) @@ -149,7 +147,7 @@ async def build_component( return custom_component, build_results, artifacts -async def build_custom_component(params: dict, custom_component: "CustomComponent"): +async def build_custom_component(params: dict, custom_component: CustomComponent): if "retriever" in params and hasattr(params["retriever"], "as_retriever"): params["retriever"] = params["retriever"].as_retriever() @@ -169,7 +167,7 @@ async def build_custom_component(params: dict, custom_component: "CustomComponen # Call the build method directly if it's sync build_result = custom_component.build(**params) custom_repr = custom_component.custom_repr() - if custom_repr is None and isinstance(build_result, (dict, Data, str)): + if custom_repr is None and isinstance(build_result, dict | Data | str): custom_repr = build_result if not isinstance(custom_repr, str): custom_repr = str(custom_repr) @@ -179,7 +177,7 @@ async def build_custom_component(params: dict, custom_component: "CustomComponen elif hasattr(raw, "model_dump") and raw is not None: raw = raw.model_dump() - if raw is None and isinstance(build_result, (dict, Data, str)): + if raw is None and isinstance(build_result, dict | Data | str): raw = build_result.data if isinstance(build_result, Data) else build_result artifact_type = get_artifact_type(custom_component.repr_value or raw, build_result) @@ -191,4 +189,5 @@ async def build_custom_component(params: dict, custom_component: "CustomComponen custom_component._results = {custom_component._vertex.outputs[0].get("name"): build_result} return custom_component, build_result, artifact - raise ValueError("Custom component does not have a vertex") + msg = "Custom component does not have a vertex" + raise ValueError(msg) diff --git a/src/backend/base/langflow/interface/listing.py b/src/backend/base/langflow/interface/listing.py index a51e676dbe13..d7f88b311037 100644 --- a/src/backend/base/langflow/interface/listing.py +++ b/src/backend/base/langflow/interface/listing.py @@ -3,13 +3,9 @@ class AllTypesDict(LazyLoadDictBase): - def __init__(self): + def __init__(self) -> None: self._all_types_dict = None - @property - def ALL_TYPES_DICT(self): - return self.all_types_dict - def _build_dict(self): langchain_types_dict = self.get_type_dict() return { diff --git a/src/backend/base/langflow/interface/run.py b/src/backend/base/langflow/interface/run.py index 283c752e8216..aa2051f7da3c 100644 --- a/src/backend/base/langflow/interface/run.py +++ b/src/backend/base/langflow/interface/run.py @@ -2,7 +2,8 @@ def get_memory_key(langchain_object): - """ + """Get the memory key from the LangChain object's memory attribute. + Given a LangChain object, this function retrieves the current memory key from the object's memory attribute. It then checks if the key exists in a dictionary of known memory keys and returns the corresponding key, or None if the current key is not recognized. 
@@ -15,31 +16,29 @@ def get_memory_key(langchain_object): if hasattr(langchain_object.memory, "memory_key"): memory_key = langchain_object.memory.memory_key return mem_key_dict.get(memory_key) - else: - return None # or some other default value or action + return None # or some other default value or action -def update_memory_keys(langchain_object, possible_new_mem_key): - """ +def update_memory_keys(langchain_object, possible_new_mem_key) -> None: + """Update the memory keys in the LangChain object's memory attribute. + Given a LangChain object and a possible new memory key, this function updates the input and output keys in the object's memory attribute to exclude the current memory key and the possible new key. It then sets the memory key to the possible new key. """ - input_key = [ + input_key = next( key for key in langchain_object.input_keys - if key not in [langchain_object.memory.memory_key, possible_new_mem_key] - ][0] + if key not in {langchain_object.memory.memory_key, possible_new_mem_key} + ) - output_key = [ + output_key = next( key for key in langchain_object.output_keys - if key not in [langchain_object.memory.memory_key, possible_new_mem_key] - ][0] + if key not in {langchain_object.memory.memory_key, possible_new_mem_key} + ) - keys = [input_key, output_key, possible_new_mem_key] - attrs = ["input_key", "output_key", "memory_key"] - for key, attr in zip(keys, attrs): + for key, attr in [(input_key, "input_key"), (output_key, "output_key"), (possible_new_mem_key, "memory_key")]: try: setattr(langchain_object.memory, attr, key) except ValueError as exc: diff --git a/src/backend/base/langflow/interface/types.py b/src/backend/base/langflow/interface/types.py index efb516ee174c..904025a30414 100644 --- a/src/backend/base/langflow/interface/types.py +++ b/src/backend/base/langflow/interface/types.py @@ -1,26 +1,24 @@ -import asyncio +from __future__ import annotations + import json from typing import TYPE_CHECKING from loguru import logger + from langflow.custom.utils import abuild_custom_components, build_custom_components -from langflow.services.cache.base import AsyncBaseCacheService if TYPE_CHECKING: - from langflow.services.cache.base import CacheService from langflow.services.settings.service import SettingsService async def aget_all_types_dict(components_paths): """Get all types dictionary combining native and custom components.""" - custom_components_from_file = await abuild_custom_components(components_paths=components_paths) - return custom_components_from_file + return await abuild_custom_components(components_paths=components_paths) def get_all_types_dict(components_paths): """Get all types dictionary combining native and custom components.""" - custom_components_from_file = build_custom_components(components_paths=components_paths) - return custom_components_from_file + return build_custom_components(components_paths=components_paths) # TypeError: unhashable type: 'list' @@ -29,7 +27,7 @@ def key_func(*args, **kwargs): return json.dumps(args) + json.dumps(kwargs) -async def aget_all_components(components_paths, as_dict=False): +async def aget_all_components(components_paths, *, as_dict=False): """Get all components names combining native and custom components.""" all_types_dict = await aget_all_types_dict(components_paths) components = {} if as_dict else [] @@ -43,7 +41,7 @@ async def aget_all_components(components_paths, as_dict=False): return components -def get_all_components(components_paths, as_dict=False): +def get_all_components(components_paths, *, 
as_dict=False): """Get all components names combining native and custom components.""" all_types_dict = get_all_types_dict(components_paths) components = [] if not as_dict else {} @@ -57,47 +55,15 @@ def get_all_components(components_paths, as_dict=False): return components +all_types_dict_cache = None + + async def get_and_cache_all_types_dict( - settings_service: "SettingsService", - cache_service: "CacheService", - force_refresh: bool = False, - lock: asyncio.Lock | None = None, + settings_service: SettingsService, ): - async def get_from_cache(key): - """ - Retrieves a value from the cache based on the given key. - - Args: - key: The key to retrieve the value from the cache. - - Returns: - The value associated with the given key in the cache. - - Raises: - None. - """ - return await cache_service.get(key=key, lock=lock) - - async def set_in_cache(key, value): - """ - Sets the given key-value pair in the cache. - - Parameters: - - key: The key to set in the cache. - - value: The value to associate with the key in the cache. - - Returns: - None - """ - if isinstance(cache_service, AsyncBaseCacheService): - await cache_service.set(key=key, value=value, lock=lock) - else: - cache_service.set(key=key, value=value, lock=lock) - - all_types_dict = await get_from_cache("all_types_dict") - if not all_types_dict or force_refresh: + global all_types_dict_cache # noqa: PLW0603 + if all_types_dict_cache is None: logger.debug("Building langchain types dict") - all_types_dict = await aget_all_types_dict(settings_service.settings.components_path) - await set_in_cache("all_types_dict", all_types_dict) + all_types_dict_cache = await aget_all_types_dict(settings_service.settings.components_path) - return all_types_dict + return all_types_dict_cache diff --git a/src/backend/base/langflow/interface/utils.py b/src/backend/base/langflow/interface/utils.py index dfee360215f7..900560ea9321 100644 --- a/src/backend/base/langflow/interface/utils.py +++ b/src/backend/base/langflow/interface/utils.py @@ -3,6 +3,7 @@ import os import re from io import BytesIO +from pathlib import Path import yaml from langchain_core.language_models import BaseLanguageModel @@ -14,18 +15,21 @@ def load_file_into_dict(file_path: str) -> dict: - if not os.path.exists(file_path): - raise FileNotFoundError(f"File not found: {file_path}") + _file_path = Path(file_path) + if not _file_path.exists(): + msg = f"File not found: {file_path}" + raise FileNotFoundError(msg) # Files names are UUID, so we can't find the extension - with open(file_path, "r") as file: + with _file_path.open(encoding="utf-8") as file: try: data = json.load(file) except json.JSONDecodeError: file.seek(0) data = yaml.safe_load(file) except ValueError as exc: - raise ValueError("Invalid file type. Expected .json or .yaml.") from exc + msg = "Invalid file type. Expected .json or .yaml." + raise ValueError(msg) from exc return data @@ -67,11 +71,9 @@ def extract_input_variables_from_prompt(prompt: str) -> list[str]: if not match: break - # Extract the variable name from either the single or double brace match - if match.group(1): # Match found in double braces - variable_name = "{{" + match.group(1) + "}}" # Re-add single braces for JSON strings - else: # Match found in single braces - variable_name = match.group(2) + # Extract the variable name from either the single or double brace match. + # If match found in double braces, re-add single braces for JSON strings. 
+ variable_name = "{{" + match.group(1) + "}}" if match.group(1) else match.group(2) if variable_name is not None: # This means there is a match # but there is nothing inside the braces @@ -87,18 +89,18 @@ def extract_input_variables_from_prompt(prompt: str) -> list[str]: return variables -def setup_llm_caching(): +def setup_llm_caching() -> None: """Setup LLM caching.""" settings_service = get_settings_service() try: set_langchain_cache(settings_service.settings) except ImportError: logger.warning(f"Could not import {settings_service.settings.cache_type}. ") - except Exception as exc: - logger.warning(f"Could not setup LLM caching. Error: {exc}") + except Exception: # noqa: BLE001 + logger.opt(exception=True).warning("Could not setup LLM caching.") -def set_langchain_cache(settings): +def set_langchain_cache(settings) -> None: from langchain.globals import set_llm_cache from langflow.interface.importing.utils import import_class diff --git a/src/backend/base/langflow/io/__init__.py b/src/backend/base/langflow/io/__init__.py index 8d26e7ed75eb..6a9bf3fae4b2 100644 --- a/src/backend/base/langflow/io/__init__.py +++ b/src/backend/base/langflow/io/__init__.py @@ -1,45 +1,53 @@ from langflow.inputs import ( BoolInput, + CodeInput, DataInput, + DefaultPromptField, DictInput, DropdownInput, - MultiselectInput, FileInput, FloatInput, HandleInput, IntInput, + LinkInput, MessageInput, MessageTextInput, MultilineInput, MultilineSecretInput, + MultiselectInput, NestedDictInput, PromptInput, SecretStrInput, + SliderInput, StrInput, TableInput, - DefaultPromptField, ) from langflow.template import Output __all__ = [ "BoolInput", + "CodeInput", "DataInput", + "DefaultPromptField", + "DefaultPromptField", "DictInput", "DropdownInput", - "MultiselectInput", "FileInput", "FloatInput", "HandleInput", "IntInput", + "LinkInput", + "LinkInput", "MessageInput", + "MessageTextInput", "MultilineInput", "MultilineSecretInput", + "MultiselectInput", "NestedDictInput", + "Output", "PromptInput", "SecretStrInput", + "SliderInput", "StrInput", - "MessageTextInput", - "Output", "TableInput", - "DefaultPromptField", ] diff --git a/src/backend/base/langflow/io/schema.py b/src/backend/base/langflow/io/schema.py index b5d1f8add7be..8672d9789a4d 100644 --- a/src/backend/base/langflow/io/schema.py +++ b/src/backend/base/langflow/io/schema.py @@ -1,10 +1,10 @@ -from typing import TYPE_CHECKING, List, Literal, Type +from typing import TYPE_CHECKING, Literal from pydantic import BaseModel, Field, create_model from langflow.inputs.inputs import FieldTypes -_convert_field_type_to_type: dict[FieldTypes, Type] = { +_convert_field_type_to_type: dict[FieldTypes, type] = { FieldTypes.TEXT: str, FieldTypes.INTEGER: int, FieldTypes.FLOAT: float, @@ -14,40 +14,46 @@ FieldTypes.TABLE: dict, FieldTypes.FILE: str, FieldTypes.PROMPT: str, + FieldTypes.OTHER: str, } if TYPE_CHECKING: from langflow.inputs.inputs import InputTypes -def create_input_schema(inputs: list["InputTypes"]) -> Type[BaseModel]: +def create_input_schema(inputs: list["InputTypes"]) -> type[BaseModel]: if not isinstance(inputs, list): - raise TypeError("inputs must be a list of Inputs") + msg = "inputs must be a list of Inputs" + raise TypeError(msg) fields = {} for input_model in inputs: # Create a Pydantic Field for each input field field_type = input_model.field_type if isinstance(field_type, FieldTypes): field_type = _convert_field_type_to_type[field_type] + else: + msg = f"Invalid field type: {field_type}" + raise TypeError(msg) if hasattr(input_model, "options") and 
isinstance(input_model.options, list) and input_model.options: literal_string = f"Literal{input_model.options}" # validate that the literal_string is a valid literal - field_type = eval(literal_string, {"Literal": Literal}) # type: ignore + field_type = eval(literal_string, {"Literal": Literal}) # noqa: S307 if hasattr(input_model, "is_list") and input_model.is_list: - field_type = List[field_type] # type: ignore + field_type = list[field_type] # type: ignore[valid-type] if input_model.name: name = input_model.name.replace("_", " ").title() elif input_model.display_name: name = input_model.display_name else: - raise ValueError("Input name or display_name is required") + msg = "Input name or display_name is required" + raise ValueError(msg) field_dict = { "title": name, "description": input_model.info or "", } if input_model.required is False: - field_dict["default"] = input_model.value # type: ignore + field_dict["default"] = input_model.value # type: ignore[assignment] pydantic_field = Field(**field_dict) fields[input_model.name] = (field_type, pydantic_field) diff --git a/src/backend/base/langflow/load/__init__.py b/src/backend/base/langflow/load/__init__.py index 59dbdf6e029b..7c3369761813 100644 --- a/src/backend/base/langflow/load/__init__.py +++ b/src/backend/base/langflow/load/__init__.py @@ -1,4 +1,4 @@ from .load import load_flow_from_json, run_flow_from_json -from .utils import upload_file, get_flow +from .utils import get_flow, upload_file -__all__ = ["load_flow_from_json", "run_flow_from_json", "upload_file", "get_flow"] +__all__ = ["get_flow", "load_flow_from_json", "run_flow_from_json", "upload_file"] diff --git a/src/backend/base/langflow/load/load.py b/src/backend/base/langflow/load/load.py index 972c2c7e7d5f..13d0ab051a9f 100644 --- a/src/backend/base/langflow/load/load.py +++ b/src/backend/base/langflow/load/load.py @@ -1,28 +1,28 @@ +import asyncio import json from pathlib import Path -from typing import List, Optional, Union from dotenv import load_dotenv from loguru import logger from langflow.graph import Graph from langflow.graph.schema import RunOutputs -from langflow.processing.process import process_tweaks, run_graph from langflow.logging.logger import configure +from langflow.processing.process import process_tweaks, run_graph from langflow.utils.util import update_settings def load_flow_from_json( - flow: Union[Path, str, dict], - tweaks: Optional[dict] = None, - log_level: Optional[str] = None, - log_file: Optional[str] = None, - env_file: Optional[str] = None, - cache: Optional[str] = None, - disable_logs: Optional[bool] = True, + flow: Path | str | dict, + *, + tweaks: dict | None = None, + log_level: str | None = None, + log_file: str | None = None, + env_file: str | None = None, + cache: str | None = None, + disable_logs: bool | None = True, ) -> Graph: - """ - Load a flow graph from a JSON file or a JSON object. + """Load a flow graph from a JSON file or a JSON object. Args: flow (Union[Path, str, dict]): The flow to load. 
It can be a file path (str or Path object) @@ -44,7 +44,7 @@ def load_flow_from_json( """ # If input is a file path, load JSON from the file log_file_path = Path(log_file) if log_file else None - configure(log_level=log_level, log_file=log_file_path, disable=disable_logs) + configure(log_level=log_level, log_file=log_file_path, disable=disable_logs, async_file=True) # override env variables with .env file if env_file: @@ -53,43 +53,45 @@ def load_flow_from_json( # Update settings with cache and components path update_settings(cache=cache) - if isinstance(flow, (str, Path)): - with open(flow, "r", encoding="utf-8") as f: + if isinstance(flow, str | Path): + with Path(flow).open(encoding="utf-8") as f: flow_graph = json.load(f) # If input is a dictionary, assume it's a JSON object elif isinstance(flow, dict): flow_graph = flow else: - raise TypeError("Input must be either a file path (str) or a JSON object (dict)") + msg = "Input must be either a file path (str) or a JSON object (dict)" + raise TypeError(msg) graph_data = flow_graph["data"] if tweaks is not None: graph_data = process_tweaks(graph_data, tweaks) - graph = Graph.from_payload(graph_data) - return graph + return Graph.from_payload(graph_data) -def run_flow_from_json( - flow: Union[Path, str, dict], +async def arun_flow_from_json( + flow: Path | str | dict, input_value: str, - tweaks: Optional[dict] = None, + *, + session_id: str | None = None, + tweaks: dict | None = None, input_type: str = "chat", output_type: str = "chat", - output_component: Optional[str] = None, - log_level: Optional[str] = None, - log_file: Optional[str] = None, - env_file: Optional[str] = None, - cache: Optional[str] = None, - disable_logs: Optional[bool] = True, + output_component: str | None = None, + log_level: str | None = None, + log_file: str | None = None, + env_file: str | None = None, + cache: str | None = None, + disable_logs: bool | None = True, fallback_to_env_vars: bool = False, -) -> List[RunOutputs]: - """ - Run a flow from a JSON file or dictionary. +) -> list[RunOutputs]: + """Run a flow from a JSON file or dictionary. Args: flow (Union[Path, str, dict]): The path to the JSON file or the JSON dictionary representing the flow. input_value (str): The input value to be processed by the flow. + session_id (str | None, optional): The session ID to be used for the flow. Defaults to None. tweaks (Optional[dict], optional): Optional tweaks to be applied to the flow. Defaults to None. input_type (str, optional): The type of the input value. Defaults to "chat". output_type (str, optional): The type of the output value. Defaults to "chat". @@ -99,22 +101,17 @@ def run_flow_from_json( env_file (Optional[str], optional): The environment file to load. Defaults to None. cache (Optional[str], optional): The cache directory to use. Defaults to None. disable_logs (Optional[bool], optional): Whether to disable logs. Defaults to True. - fallback_to_env_vars (bool, optional): Whether Global Variables should fallback to environment variables if not found. Defaults to False. + fallback_to_env_vars (bool, optional): Whether Global Variables should fallback to environment variables if + not found. Defaults to False. Returns: List[RunOutputs]: A list of RunOutputs objects representing the results of running the flow. 
""" - # Set all streaming to false - try: - import nest_asyncio # type: ignore - - nest_asyncio.apply() - except Exception as e: - logger.warning(f"Could not apply nest_asyncio: {e}") if tweaks is None: tweaks = {} tweaks["stream"] = False - graph = load_flow_from_json( + graph = await asyncio.to_thread( + load_flow_from_json, flow=flow, tweaks=tweaks, log_level=log_level, @@ -123,12 +120,54 @@ def run_flow_from_json( cache=cache, disable_logs=disable_logs, ) - result = run_graph( + result = await run_graph( graph=graph, + session_id=session_id, input_value=input_value, input_type=input_type, output_type=output_type, output_component=output_component, fallback_to_env_vars=fallback_to_env_vars, ) + await logger.complete() return result + + +def run_flow_from_json( + flow: Path | str | dict, + input_value: str, + *, + session_id: str | None = None, + tweaks: dict | None = None, + input_type: str = "chat", + output_type: str = "chat", + output_component: str | None = None, + log_level: str | None = None, + log_file: str | None = None, + env_file: str | None = None, + cache: str | None = None, + disable_logs: bool | None = True, + fallback_to_env_vars: bool = False, +) -> list[RunOutputs]: + coro = arun_flow_from_json( + flow, + input_value, + session_id=session_id, + tweaks=tweaks, + input_type=input_type, + output_type=output_type, + output_component=output_component, + log_level=log_level, + log_file=log_file, + env_file=env_file, + cache=cache, + disable_logs=disable_logs, + fallback_to_env_vars=fallback_to_env_vars, + ) + + try: + loop = asyncio.get_running_loop() + except RuntimeError: + return asyncio.run(coro) + + return loop.run_until_complete(coro) diff --git a/src/backend/base/langflow/load/utils.py b/src/backend/base/langflow/load/utils.py index 11c04c6c4d3e..6db7628c23cf 100644 --- a/src/backend/base/langflow/load/utils.py +++ b/src/backend/base/langflow/load/utils.py @@ -1,11 +1,16 @@ +from pathlib import Path + import httpx from langflow.services.database.models.flow.model import FlowBase -def upload(file_path, host, flow_id): - """ - Upload a file to Langflow and return the file path. +class UploadError(Exception): + """Raised when an error occurs during the upload process.""" + + +def upload(file_path: str, host: str, flow_id: str): + """Upload a file to Langflow and return the file path. Args: file_path (str): The path to the file to be uploaded. @@ -16,23 +21,24 @@ def upload(file_path, host, flow_id): dict: A dictionary containing the file path. Raises: - Exception: If an error occurs during the upload process. + UploadError: If an error occurs during the upload process. """ try: url = f"{host}/api/v1/upload/{flow_id}" - with open(file_path, "rb") as file: + with Path(file_path).open("rb") as file: response = httpx.post(url, files={"file": file}) - if response.status_code == 200 or response.status_code == 201: + if response.status_code in {httpx.codes.OK, httpx.codes.CREATED}: return response.json() - else: - raise Exception(f"Error uploading file: {response.status_code}") except Exception as e: - raise Exception(f"Error uploading file: {e}") + msg = f"Error uploading file: {e}" + raise UploadError(msg) from e + + msg = f"Error uploading file: {response.status_code}" + raise UploadError(msg) def upload_file(file_path: str, host: str, flow_id: str, components: list[str], tweaks: dict | None = None): - """ - Upload a file to Langflow and return the file path. + """Upload a file to Langflow and return the file path. Args: file_path (str): The path to the file to be uploaded. 
@@ -46,23 +52,27 @@ def upload_file(file_path: str, host: str, flow_id: str, components: list[str], dict: A dictionary containing the file path and any tweaks that were applied. Raises: - Exception: If an error occurs during the upload process. + UploadError: If an error occurs during the upload process. """ - if not tweaks: - tweaks = {} try: response = upload(file_path, host, flow_id) - if response["file_path"]: - for component in components: - if isinstance(component, str): - tweaks[component] = {"path": response["file_path"]} - else: - raise ValueError(f"Component ID or name must be a string. Got {type(component)}") - return tweaks - else: - raise ValueError("Error uploading file") except Exception as e: - raise ValueError(f"Error uploading file: {e}") + msg = f"Error uploading file: {e}" + raise UploadError(msg) from e + + if not tweaks: + tweaks = {} + if response["file_path"]: + for component in components: + if isinstance(component, str): + tweaks[component] = {"path": response["file_path"]} + else: + msg = f"Error uploading file: component ID or name must be a string. Got {type(component)}" + raise UploadError(msg) + return tweaks + + msg = "Error uploading file" + raise UploadError(msg) def get_flow(url: str, flow_id: str): @@ -77,16 +87,17 @@ def get_flow(url: str, flow_id: str): dict: A dictionary containing the details of the flow. Raises: - Exception: If an error occurs during the retrieval process. + UploadError: If an error occurs during the retrieval process. """ try: flow_url = f"{url}/api/v1/flows/{flow_id}" response = httpx.get(flow_url) - if response.status_code == 200: + if response.status_code == httpx.codes.OK: json_response = response.json() - flow = FlowBase(**json_response).model_dump() - return flow - else: - raise Exception(f"Error retrieving flow: {response.status_code}") + return FlowBase(**json_response).model_dump() except Exception as e: - raise Exception(f"Error retrieving flow: {e}") + msg = f"Error retrieving flow: {e}" + raise UploadError(msg) from e + + msg = f"Error retrieving flow: {response.status_code}" + raise UploadError(msg) diff --git a/src/backend/base/langflow/logging/__init__.py b/src/backend/base/langflow/logging/__init__.py index b7d7bb5a1cf5..cc1b11cb1851 100644 --- a/src/backend/base/langflow/logging/__init__.py +++ b/src/backend/base/langflow/logging/__init__.py @@ -1,4 +1,4 @@ from .logger import configure, logger from .setup import disable_logging, enable_logging -__all__ = ["configure", "logger", "disable_logging", "enable_logging"] +__all__ = ["configure", "disable_logging", "enable_logging", "logger"] diff --git a/src/backend/base/langflow/logging/logger.py b/src/backend/base/langflow/logging/logger.py index 01b146b68afe..080ccd1b38b7 100644 --- a/src/backend/base/langflow/logging/logger.py +++ b/src/backend/base/langflow/logging/logger.py @@ -1,3 +1,4 @@ +import asyncio import json import logging import os @@ -5,10 +6,13 @@ from collections import deque from pathlib import Path from threading import Lock, Semaphore -from typing import Optional, TypedDict +from typing import TypedDict import orjson -from loguru import logger +from loguru import _defaults, logger +from loguru._error_interceptor import ErrorInterceptor +from loguru._file_sink import FileSink +from loguru._simple_sinks import AsyncSink from platformdirs import user_cache_dir from rich.logging import RichHandler from typing_extensions import NotRequired @@ -23,26 +27,22 @@ def __init__( self, max_readers: int = 20, # max number of concurrent readers for the buffer ): 
- """ - a buffer for storing log messages for the log retrieval API - the buffer can be overwritten by an env variable LANGFLOW_LOG_RETRIEVER_BUFFER_SIZE - because the logger is initialized before the settings_service are loaded - """ - self.max: int = 0 - env_buffer_size = os.getenv("LANGFLOW_LOG_RETRIEVER_BUFFER_SIZE", "0") - if env_buffer_size.isdigit(): - self.max = int(env_buffer_size) + """A buffer for storing log messages for the log retrieval API. + The buffer can be overwritten by an env variable LANGFLOW_LOG_RETRIEVER_BUFFER_SIZE + because the logger is initialized before the settings_service are loaded. + """ self.buffer: deque = deque() self._max_readers = max_readers self._wlock = Lock() self._rsemaphore = Semaphore(max_readers) + self._max = 0 def get_write_lock(self) -> Lock: return self._wlock - def write(self, message: str): + def write(self, message: str) -> None: record = json.loads(message) log_entry = record["text"] epoch = int(record["record"]["time"]["timestamp"] * 1000) @@ -52,11 +52,11 @@ def write(self, message: str): self.buffer.popleft() self.buffer.append((epoch, log_entry)) - def __len__(self): + def __len__(self) -> int: return len(self.buffer) def get_after_timestamp(self, timestamp: int, lines: int = 5) -> dict[int, str]: - rc = dict() + rc = {} self._rsemaphore.acquire() try: @@ -77,22 +77,18 @@ def get_before_timestamp(self, timestamp: int, lines: int = 5) -> dict[int, str] try: with self._wlock: as_list = list(self.buffer) - i = 0 max_index = -1 - for ts, msg in as_list: + for i, (ts, _) in enumerate(as_list): if ts >= timestamp: max_index = i break - i += 1 if max_index == -1: return self.get_last_n(lines) rc = {} - i = 0 start_from = max(max_index - lines, 0) - for ts, msg in as_list: + for i, (ts, msg) in enumerate(as_list): if start_from <= i < max_index: rc[ts] = msg - i += 1 return rc finally: self._rsemaphore.release() @@ -102,13 +98,23 @@ def get_last_n(self, last_idx: int) -> dict[int, str]: try: with self._wlock: as_list = list(self.buffer) - rc = {} - for ts, msg in as_list[-last_idx:]: - rc[ts] = msg - return rc + return dict(as_list[-last_idx:]) finally: self._rsemaphore.release() + @property + def max(self) -> int: + # Get it dynamically to allow for env variable changes + if self._max == 0: + env_buffer_size = os.getenv("LANGFLOW_LOG_RETRIEVER_BUFFER_SIZE", "0") + if env_buffer_size.isdigit(): + self._max = int(env_buffer_size) + return self._max + + @max.setter + def max(self, value: int) -> None: + self._max = value + def enabled(self) -> bool: return self.max > 0 @@ -130,7 +136,7 @@ def serialize_log(record): return orjson.dumps(subset) -def patching(record): +def patching(record) -> None: record["extra"]["serialized"] = serialize_log(record) if DEV is False: record.pop("exception", None) @@ -143,12 +149,31 @@ class LogConfig(TypedDict): log_env: NotRequired[str] +class AsyncFileSink(AsyncSink): + def __init__(self, file): + self._sink = FileSink( + path=file, + rotation="10 MB", # Log rotation based on file size + ) + super().__init__(self.write_async, None, ErrorInterceptor(_defaults.LOGURU_CATCH, -1)) + + async def complete(self): + await asyncio.to_thread(self._sink.stop) + for task in self._tasks: + await self._complete_task(task) + + async def write_async(self, message): + await asyncio.to_thread(self._sink.write, message) + + def configure( - log_level: Optional[str] = None, - log_file: Optional[Path] = None, - disable: Optional[bool] = False, - log_env: Optional[str] = None, -): + *, + log_level: str | None = None, + log_file: 
Path | None = None, + disable: bool | None = False, + log_env: str | None = None, + async_file: bool = False, +) -> None: if disable and log_level is None and log_file is None: logger.disable("langflow") if os.getenv("LANGFLOW_LOG_LEVEL", "").upper() in VALID_LOG_LEVELS and log_level is None: @@ -156,6 +181,10 @@ def configure( if log_level is None: log_level = "ERROR" + if log_file is None: + env_log_file = os.getenv("LANGFLOW_LOG_FILE", "") + log_file = Path(env_log_file) if env_log_file else None + if log_env is None: log_env = os.getenv("LANGFLOW_LOG_ENV", "") @@ -189,18 +218,16 @@ def configure( log_file = cache_dir / "langflow.log" logger.debug(f"Log file: {log_file}") try: - log_file = Path(log_file) log_file.parent.mkdir(parents=True, exist_ok=True) logger.add( - sink=str(log_file), + sink=AsyncFileSink(log_file) if async_file else log_file, level=log_level.upper(), format=log_format, - rotation="10 MB", # Log rotation based on file size serialize=True, ) - except Exception as exc: - logger.error(f"Error setting up log file: {exc}") + except Exception: # noqa: BLE001 + logger.exception("Error setting up log file") if log_buffer.enabled(): logger.add(sink=log_buffer.write, format="{time} {level} {message}", serialize=True) @@ -211,25 +238,25 @@ def configure( setup_gunicorn_logger() -def setup_uvicorn_logger(): +def setup_uvicorn_logger() -> None: loggers = (logging.getLogger(name) for name in logging.root.manager.loggerDict if name.startswith("uvicorn.")) for uvicorn_logger in loggers: uvicorn_logger.handlers = [] logging.getLogger("uvicorn").handlers = [InterceptHandler()] -def setup_gunicorn_logger(): +def setup_gunicorn_logger() -> None: logging.getLogger("gunicorn.error").handlers = [InterceptHandler()] logging.getLogger("gunicorn.access").handlers = [InterceptHandler()] class InterceptHandler(logging.Handler): - """ - Default handler from examples in loguru documentaion. - See https://loguru.readthedocs.io/en/stable/overview.html#entirely-compatible-with-standard-logging + """Default handler from examples in loguru documentation. + + See https://loguru.readthedocs.io/en/stable/overview.html#entirely-compatible-with-standard-logging. 
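[Editor's note] configure() above now resolves its defaults from LANGFLOW_LOG_LEVEL, LANGFLOW_LOG_FILE, and LANGFLOW_LOG_ENV, and can route file output through AsyncFileSink, which pushes each blocking FileSink.write onto a worker thread via asyncio.to_thread. The same non-blocking idea can be sketched with loguru's own support for coroutine sinks (a sketch, not the AsyncFileSink API; the log path is assumed):

    import asyncio

    from loguru import logger

    def blocking_write(text: str) -> None:
        with open("langflow.log", "a", encoding="utf-8") as f:  # assumed path
            f.write(text)

    async def async_sink(message) -> None:
        # Offload the blocking write to a worker thread, mirroring the
        # asyncio.to_thread call in AsyncFileSink.write_async above.
        await asyncio.to_thread(blocking_write, str(message))

    async def main() -> None:
        logger.add(async_sink, level="INFO")
        logger.info("logged without blocking the event loop")
        await logger.complete()  # drain pending writes, as AsyncFileSink.complete does

    asyncio.run(main())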
""" - def emit(self, record): + def emit(self, record) -> None: # Get corresponding Loguru level if it exists try: level = logger.level(record.levelname).name @@ -238,7 +265,7 @@ def emit(self, record): # Find caller from where originated the logged message frame, depth = logging.currentframe(), 2 - while frame.f_code.co_filename == logging.__file__: + while frame.f_code.co_filename == logging.__file__ and frame.f_back: frame = frame.f_back depth += 1 diff --git a/src/backend/base/langflow/logging/setup.py b/src/backend/base/langflow/logging/setup.py index fdf1e22b6945..2d207b28f8ac 100644 --- a/src/backend/base/langflow/logging/setup.py +++ b/src/backend/base/langflow/logging/setup.py @@ -3,14 +3,14 @@ LOGGING_CONFIGURED = False -def disable_logging(): - global LOGGING_CONFIGURED +def disable_logging() -> None: + global LOGGING_CONFIGURED # noqa: PLW0603 if not LOGGING_CONFIGURED: logger.disable("langflow") LOGGING_CONFIGURED = True -def enable_logging(): - global LOGGING_CONFIGURED +def enable_logging() -> None: + global LOGGING_CONFIGURED # noqa: PLW0603 logger.enable("langflow") LOGGING_CONFIGURED = True diff --git a/src/backend/base/langflow/main.py b/src/backend/base/langflow/main.py index b79f092bcf76..6edd353f4e27 100644 --- a/src/backend/base/langflow/main.py +++ b/src/backend/base/langflow/main.py @@ -1,15 +1,14 @@ import asyncio import json import os +import re import warnings from contextlib import asynccontextmanager from http import HTTPStatus from pathlib import Path -from typing import Optional from urllib.parse import urlencode -import nest_asyncio # type: ignore -from fastapi import FastAPI, HTTPException, Request, Response +from fastapi import FastAPI, HTTPException, Request, Response, status from fastapi.middleware.cors import CORSMiddleware from fastapi.responses import FileResponse, JSONResponse from fastapi.staticfiles import StaticFiles @@ -28,17 +27,19 @@ ) from langflow.interface.types import get_and_cache_all_types_dict from langflow.interface.utils import setup_llm_caching -from langflow.services.deps import get_cache_service, get_settings_service, get_telemetry_service -from langflow.services.plugins.langfuse_plugin import LangfuseInstance -from langflow.services.utils import initialize_services, teardown_services from langflow.logging.logger import configure +from langflow.services.deps import get_settings_service, get_telemetry_service +from langflow.services.utils import initialize_services, teardown_services # Ignore Pydantic deprecation warnings from Langchain warnings.filterwarnings("ignore", category=PydanticDeprecatedSince20) +MAX_PORT = 65535 + + class RequestCancelledMiddleware(BaseHTTPMiddleware): - def __init__(self, app): + def __init__(self, app) -> None: super().__init__(app) async def dispatch(self, request: Request, call_next): @@ -60,8 +61,7 @@ async def cancel_handler(): if cancel_task in done: return Response("Request was cancelled", status_code=499) - else: - return await handler_task + return await handler_task class JavaScriptMIMETypeMiddleware(BaseHTTPMiddleware): @@ -70,53 +70,68 @@ async def dispatch(self, request: Request, call_next): response = await call_next(request) except Exception as exc: if isinstance(exc, PydanticSerializationError): - message = "Something went wrong while serializing the response. Please share this error on our GitHub repository." + message = ( + "Something went wrong while serializing the response. " + "Please share this error on our GitHub repository." 
+ ) error_messages = json.dumps([message, str(exc)]) raise HTTPException(status_code=HTTPStatus.INTERNAL_SERVER_ERROR, detail=error_messages) from exc - raise exc - if "files/" not in request.url.path and request.url.path.endswith(".js") and response.status_code == 200: + raise + if ( + "files/" not in request.url.path + and request.url.path.endswith(".js") + and response.status_code == HTTPStatus.OK + ): response.headers["Content-Type"] = "text/javascript" return response -def get_lifespan(fix_migration=False, socketio_server=None, version=None): +def get_lifespan(*, fix_migration=False, version=None): + telemetry_service = get_telemetry_service() + + def _initialize(): + initialize_services(fix_migration=fix_migration) + setup_llm_caching() + initialize_super_user_if_needed() + @asynccontextmanager - async def lifespan(app: FastAPI): - nest_asyncio.apply() + async def lifespan(_app: FastAPI): + configure(async_file=True) + # Startup message if version: rprint(f"[bold green]Starting Langflow v{version}...[/bold green]") else: rprint("[bold green]Starting Langflow...[/bold green]") try: - initialize_services(fix_migration=fix_migration, socketio_server=socketio_server) - setup_llm_caching() - LangfuseInstance.update() - initialize_super_user_if_needed() - task = asyncio.create_task(get_and_cache_all_types_dict(get_settings_service(), get_cache_service())) - await create_or_update_starter_projects(task) - asyncio.create_task(get_telemetry_service().start()) - load_flows_from_directory() + await asyncio.to_thread(_initialize) + all_types_dict = await get_and_cache_all_types_dict(get_settings_service()) + await asyncio.to_thread(create_or_update_starter_projects, all_types_dict) + telemetry_service.start() + await asyncio.to_thread(load_flows_from_directory) + yield + except Exception as exc: if "langflow migration --fix" not in str(exc): - logger.error(exc) + logger.exception(exc) raise - # Shutdown message - rprint("[bold red]Shutting down Langflow...[/bold red]") - await teardown_services() + finally: + # Clean shutdown + logger.info("Cleaning up resources...") + await teardown_services() + await logger.complete() + # Final message + rprint("[bold red]Langflow shutdown complete[/bold red]") return lifespan def create_app(): """Create the FastAPI app and include the router.""" - try: - from langflow.version import __version__ # type: ignore - except ImportError: - from importlib.metadata import version + from langflow.utils.version import get_version_info - __version__ = version("langflow-base") + __version__ = get_version_info()["version"] configure() lifespan = get_lifespan(version=__version__) @@ -132,8 +147,38 @@ def create_app(): allow_headers=["*"], ) app.add_middleware(JavaScriptMIMETypeMiddleware) - # ! 
Deactivating this until we find a better solution - # app.add_middleware(RequestCancelledMiddleware) + + @app.middleware("http") + async def check_boundary(request: Request, call_next): + if "/api/v1/files/upload" in request.url.path: + content_type = request.headers.get("Content-Type") + + if not content_type or "multipart/form-data" not in content_type or "boundary=" not in content_type: + return JSONResponse( + status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, + content={"detail": "Content-Type header must be 'multipart/form-data' with a boundary parameter."}, + ) + + boundary = content_type.split("boundary=")[-1].strip() + + if not re.match(r"^[\w\-]{1,70}$", boundary): + return JSONResponse( + status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, + content={"detail": "Invalid boundary format"}, + ) + + body = await request.body() + + boundary_start = f"--{boundary}".encode() + boundary_end = f"--{boundary}--\r\n".encode() + + if not body.startswith(boundary_start) or not body.endswith(boundary_end): + return JSONResponse( + status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, + content={"detail": "Invalid multipart formatting"}, + ) + + return await call_next(request) @app.middleware("http") async def flatten_query_string_lists(request: Request, call_next): @@ -149,15 +194,16 @@ async def flatten_query_string_lists(request: Request, call_next): if prome_port_str := os.environ.get("LANGFLOW_PROMETHEUS_PORT"): # set here for create_app() entry point prome_port = int(prome_port_str) - if prome_port > 0 or prome_port < 65535: + if prome_port > 0 and prome_port < MAX_PORT: rprint(f"[bold green]Starting Prometheus server on port {prome_port}...[/bold green]") settings.prometheus_enabled = True settings.prometheus_port = prome_port else: - raise ValueError(f"Invalid port number {prome_port_str}") + msg = f"Invalid port number {prome_port_str}" + raise ValueError(msg) if settings.prometheus_enabled: - from prometheus_client import start_http_server # type: ignore + from prometheus_client import start_http_server start_http_server(settings.prometheus_port) @@ -166,26 +212,25 @@ async def flatten_query_string_lists(request: Request, call_next): app.include_router(log_router) @app.exception_handler(Exception) - async def exception_handler(request: Request, exc: Exception): + async def exception_handler(_request: Request, exc: Exception): if isinstance(exc, HTTPException): - logger.error(f"HTTPException: {exc.detail}") + logger.error(f"HTTPException: {exc}", exc_info=exc) return JSONResponse( status_code=exc.status_code, content={"message": str(exc.detail)}, ) - else: - logger.error(f"unhandled error: {exc}") - return JSONResponse( - status_code=HTTPStatus.INTERNAL_SERVER_ERROR, - content={"message": str(exc)}, - ) + logger.error(f"unhandled error: {exc}", exc_info=exc) + return JSONResponse( + status_code=HTTPStatus.INTERNAL_SERVER_ERROR, + content={"message": str(exc)}, + ) FastAPIInstrumentor.instrument_app(app) return app -def setup_sentry(app: FastAPI): +def setup_sentry(app: FastAPI) -> None: settings = get_settings_service().settings if settings.sentry_dsn: import sentry_sdk @@ -199,12 +244,12 @@ def setup_sentry(app: FastAPI): app.add_middleware(SentryAsgiMiddleware) -def setup_static_files(app: FastAPI, static_files_dir: Path): - """ - Setup the static files directory. +def setup_static_files(app: FastAPI, static_files_dir: Path) -> None: + """Setup the static files directory. + Args: app (FastAPI): FastAPI app. - path (str): Path to the static files directory.
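[Editor's note] The check_boundary middleware added above enforces three things on /api/v1/files/upload: a multipart Content-Type carrying a boundary parameter, a boundary matching ^[\w\-]{1,70}$, and a body framed by that boundary. A request body that passes all three checks looks like this (field and file names are hypothetical):

    import re

    boundary = "example-boundary-123"  # must match ^[\w\-]{1,70}$
    assert re.match(r"^[\w\-]{1,70}$", boundary)

    content_type = f"multipart/form-data; boundary={boundary}"
    body = (
        f"--{boundary}\r\n"
        'Content-Disposition: form-data; name="file"; filename="a.txt"\r\n'
        "Content-Type: text/plain\r\n\r\n"
        "hello\r\n"
        f"--{boundary}--\r\n"
    ).encode()

    # The middleware's framing checks, verbatim:
    assert body.startswith(f"--{boundary}".encode())
    assert body.endswith(f"--{boundary}--\r\n".encode())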
+ static_files_dir (str): Path to the static files directory. """ app.mount( "/", @@ -213,11 +258,12 @@ def setup_static_files(app: FastAPI, static_files_dir: Path): ) @app.exception_handler(404) - async def custom_404_handler(request, __): + async def custom_404_handler(_request, _exc): path = static_files_dir / "index.html" if not path.exists(): - raise RuntimeError(f"File at path {path} does not exist.") + msg = f"File at path {path} does not exist." + raise RuntimeError(msg) return FileResponse(path) @@ -227,7 +273,7 @@ def get_static_files_dir(): return frontend_path / "frontend" -def setup_app(static_files_dir: Optional[Path] = None, backend_only: bool = False) -> FastAPI: +def setup_app(static_files_dir: Path | None = None, *, backend_only: bool = False) -> FastAPI: """Setup the FastAPI app.""" # get the directory of the current file logger.info(f"Setting up app with static files directory {static_files_dir}") @@ -235,7 +281,8 @@ def setup_app(static_files_dir: Optional[Path] = None, backend_only: bool = Fals static_files_dir = get_static_files_dir() if not backend_only and (not static_files_dir or not static_files_dir.exists()): - raise RuntimeError(f"Static files directory {static_files_dir} does not exist.") + msg = f"Static files directory {static_files_dir} does not exist." + raise RuntimeError(msg) app = create_app() if not backend_only and static_files_dir is not None: setup_static_files(app, static_files_dir) diff --git a/src/backend/base/langflow/memory.py b/src/backend/base/langflow/memory.py index 7561e22c694f..61461fe24a85 100644 --- a/src/backend/base/langflow/memory.py +++ b/src/backend/base/langflow/memory.py @@ -1,7 +1,9 @@ -import warnings -from typing import List, Sequence +import json +from collections.abc import Sequence from uuid import UUID +from langchain_core.chat_history import BaseChatMessageHistory +from langchain_core.messages import BaseMessage from loguru import logger from sqlalchemy import delete from sqlmodel import Session, col, select @@ -9,8 +11,6 @@ from langflow.schema.message import Message from langflow.services.database.models.message.model import MessageRead, MessageTable from langflow.services.deps import session_scope -from langflow.field_typing import BaseChatMessageHistory -from langchain_core.messages import BaseMessage def get_messages( @@ -21,23 +21,23 @@ def get_messages( order: str | None = "DESC", flow_id: UUID | None = None, limit: int | None = None, -) -> List[Message]: - """ - Retrieves messages from the monitor service based on the provided filters. +) -> list[Message]: + """Retrieves messages from the monitor service based on the provided filters. Args: sender (Optional[str]): The sender of the messages (e.g., "Machine" or "User") sender_name (Optional[str]): The name of the sender. session_id (Optional[str]): The session ID associated with the messages. order_by (Optional[str]): The field to order the messages by. Defaults to "timestamp". + order (Optional[str]): The order in which to retrieve the messages. Defaults to "DESC". + flow_id (Optional[UUID]): The flow ID associated with the messages. limit (Optional[int]): The maximum number of messages to retrieve. Returns: List[Data]: A list of Data objects representing the retrieved messages. 
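[Editor's note] get_messages now excludes error messages at the SQL level (the MessageTable.error == False filter) and collapses the ordering branches into one expression. Typical usage, with a hypothetical session id:

    from langflow.memory import get_messages

    # Last 10 non-error messages for one session, oldest first.
    history = get_messages(
        session_id="my-session-id",  # hypothetical id
        order_by="timestamp",
        order="ASC",
        limit=10,
    )
    for message in history:
        print(message.sender_name, message.text)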
""" - messages_read: list[Message] = [] with session_scope() as session: - stmt = select(MessageTable) + stmt = select(MessageTable).where(MessageTable.error == False) # noqa: E712 if sender: stmt = stmt.where(MessageTable.sender == sender) if sender_name: @@ -47,40 +47,51 @@ def get_messages( if flow_id: stmt = stmt.where(MessageTable.flow_id == flow_id) if order_by: - if order == "DESC": - col = getattr(MessageTable, order_by).desc() - else: - col = getattr(MessageTable, order_by).asc() + col = getattr(MessageTable, order_by).desc() if order == "DESC" else getattr(MessageTable, order_by).asc() stmt = stmt.order_by(col) if limit: stmt = stmt.limit(limit) messages = session.exec(stmt) - messages_read = [Message(**d.model_dump()) for d in messages] - - return messages_read + return [Message(**d.model_dump()) for d in messages] def add_messages(messages: Message | list[Message], flow_id: str | None = None): - """ - Add a message to the monitor service. - """ - try: - if not isinstance(messages, list): - messages = [messages] + """Add a message to the monitor service.""" + if not isinstance(messages, list): + messages = [messages] - if not all(isinstance(message, Message) for message in messages): - types = ", ".join([str(type(message)) for message in messages]) - raise ValueError(f"The messages must be instances of Message. Found: {types}") + if not all(isinstance(message, Message) for message in messages): + types = ", ".join([str(type(message)) for message in messages]) + msg = f"The messages must be instances of Message. Found: {types}" + raise ValueError(msg) - messages_models: list[MessageTable] = [] - for msg in messages: - messages_models.append(MessageTable.from_message(msg, flow_id=flow_id)) + try: + messages_models = [MessageTable.from_message(msg, flow_id=flow_id) for msg in messages] with session_scope() as session: messages_models = add_messagetables(messages_models, session) return [Message(**message.model_dump()) for message in messages_models] except Exception as e: logger.exception(e) - raise e + raise + + +def update_messages(messages: Message | list[Message]) -> list[Message]: + if not isinstance(messages, list): + messages = [messages] + + with session_scope() as session: + updated_messages: list[MessageTable] = [] + for message in messages: + msg = session.get(MessageTable, message.id) + if msg: + msg.sqlmodel_update(message.model_dump(exclude_unset=True, exclude_none=True)) + session.add(msg) + session.commit() + session.refresh(msg) + updated_messages.append(msg) + else: + logger.warning(f"Message with id {message.id} not found") + return [MessageRead.model_validate(message, from_attributes=True) for message in updated_messages] def add_messagetables(messages: list[MessageTable], session: Session): @@ -91,13 +102,20 @@ def add_messagetables(messages: list[MessageTable], session: Session): session.refresh(message) except Exception as e: logger.exception(e) - raise e - return [MessageRead.model_validate(message, from_attributes=True) for message in messages] + raise + new_messages = [] + for msg in messages: + msg.properties = json.loads(msg.properties) if isinstance(msg.properties, str) else msg.properties # type: ignore[arg-type] + msg.content_blocks = [json.loads(j) if isinstance(j, str) else j for j in msg.content_blocks] # type: ignore[arg-type] + msg.category = msg.category or "" + new_messages.append(msg) -def delete_messages(session_id: str): - """ - Delete messages from the monitor service based on the provided session ID. 
+ return [MessageRead.model_validate(message, from_attributes=True) for message in new_messages] + + +def delete_messages(session_id: str) -> None: + """Delete messages from the monitor service based on the provided session ID. Args: session_id (str): The session ID associated with the messages to delete. @@ -108,19 +126,31 @@ def delete_messages(session_id: str): .where(col(MessageTable.session_id) == session_id) .execution_options(synchronize_session="fetch") ) - session.commit() + + +def delete_message(id_: str) -> None: + """Delete a message from the monitor service based on the provided ID. + + Args: + id_ (str): The ID of the message to delete. + """ + with session_scope() as session: + message = session.get(MessageTable, id_) + if message: + session.delete(message) + session.commit() def store_message( message: Message, flow_id: str | None = None, ) -> list[Message]: - """ - Stores a message in the memory. + """Stores a message in the memory. Args: message (Message): The message to store. - flow_id (Optional[str]): The flow ID associated with the message. When running from the CustomComponent you can access this using `self.graph.flow_id`. + flow_id (Optional[str]): The flow ID associated with the message. + When running from the CustomComponent you can access this using `self.graph.flow_id`. Returns: List[Message]: A list of data containing the stored message. @@ -129,12 +159,14 @@ def store_message( ValueError: If any of the required parameters (session_id, sender, sender_name) is not provided. """ if not message: - warnings.warn("No message provided.") + logger.warning("No message provided.") return [] if not message.session_id or not message.sender or not message.sender_name: - raise ValueError("All of session_id, sender, and sender_name must be provided.") - + msg = "All of session_id, sender, and sender_name must be provided." 
+ raise ValueError(msg) + if hasattr(message, "id") and message.id: + return update_messages([message]) return add_messages([message], flow_id=flow_id) @@ -148,11 +180,11 @@ def __init__( self.session_id = session_id @property - def messages(self) -> List[BaseMessage]: + def messages(self) -> list[BaseMessage]: messages = get_messages( session_id=self.session_id, ) - return [m.to_lc_message() for m in messages] + return [m.to_lc_message() for m in messages if not m.error] # Exclude error messages def add_messages(self, messages: Sequence[BaseMessage]) -> None: for lc_message in messages: diff --git a/src/backend/base/langflow/processing/base.py b/src/backend/base/langflow/processing/base.py index 26da99842c2f..3ef0909dbbdc 100644 --- a/src/backend/base/langflow/processing/base.py +++ b/src/backend/base/langflow/processing/base.py @@ -1,16 +1,18 @@ -from typing import TYPE_CHECKING, List, Union +from __future__ import annotations + +from typing import TYPE_CHECKING -from langchain_core.callbacks import BaseCallbackHandler from loguru import logger from langflow.services.deps import get_plugins_service if TYPE_CHECKING: - from langfuse.callback import CallbackHandler # type: ignore + from langchain_core.callbacks import BaseCallbackHandler + from langfuse.callback import CallbackHandler -def setup_callbacks(sync, trace_id, **kwargs): - """Setup callbacks for langchain object""" +def setup_callbacks(trace_id): + """Setup callbacks for langchain object.""" callbacks = [] plugin_service = get_plugins_service() plugin_callbacks = plugin_service.get_callbacks(_id=trace_id) @@ -28,16 +30,14 @@ def get_langfuse_callback(trace_id): try: trace = langfuse.trace(name="langflow-" + trace_id, id=trace_id) return trace.getNewHandler() - except Exception as exc: - logger.error(f"Error initializing langfuse callback: {exc}") + except Exception: # noqa: BLE001 + logger.exception("Error initializing langfuse callback") return None -def flush_langfuse_callback_if_present(callbacks: List[Union[BaseCallbackHandler, "CallbackHandler"]]): - """ - If langfuse callback is present, run callback.langfuse.flush() - """ +def flush_langfuse_callback_if_present(callbacks: list[BaseCallbackHandler | CallbackHandler]) -> None: + """If langfuse callback is present, run callback.langfuse.flush().""" for callback in callbacks: if hasattr(callback, "langfuse") and hasattr(callback.langfuse, "flush"): callback.langfuse.flush() diff --git a/src/backend/base/langflow/processing/process.py b/src/backend/base/langflow/processing/process.py index 3b4684ec6aff..2bf41d8fa5a1 100644 --- a/src/backend/base/langflow/processing/process.py +++ b/src/backend/base/langflow/processing/process.py @@ -1,17 +1,20 @@ -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union, cast +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, cast from loguru import logger from pydantic import BaseModel -from langflow.graph.graph.base import Graph -from langflow.graph.schema import RunOutputs from langflow.graph.vertex.base import Vertex +from langflow.processing.utils import validate_and_repair_json from langflow.schema.graph import InputValue, Tweaks from langflow.schema.schema import INPUT_FIELD_NAME from langflow.services.deps import get_settings_service if TYPE_CHECKING: from langflow.api.v1.schemas import InputValueRequest + from langflow.graph.graph.base import Graph + from langflow.graph.schema import RunOutputs class Result(BaseModel): @@ -20,19 +23,17 @@ class Result(BaseModel): async def run_graph_internal( - 
graph: "Graph", + graph: Graph, flow_id: str, + *, stream: bool = False, - session_id: Optional[str] = None, - inputs: Optional[List["InputValueRequest"]] = None, - outputs: Optional[List[str]] = None, -) -> tuple[List[RunOutputs], str]: - """Run the graph and generate the result""" + session_id: str | None = None, + inputs: list[InputValueRequest] | None = None, + outputs: list[str] | None = None, +) -> tuple[list[RunOutputs], str]: + """Run the graph and generate the result.""" inputs = inputs or [] - if session_id is None: - session_id_str = flow_id - else: - session_id_str = session_id + effective_session_id = session_id or flow_id components = [] inputs_list = [] types = [] @@ -45,35 +46,39 @@ async def run_graph_internal( types.append(input_value_request.type) fallback_to_env_vars = get_settings_service().settings.fallback_to_env_var - + graph.session_id = effective_session_id run_outputs = await graph.arun( inputs=inputs_list, inputs_components=components, types=types, outputs=outputs or [], stream=stream, - session_id=session_id_str or "", + session_id=effective_session_id or "", fallback_to_env_vars=fallback_to_env_vars, ) - return run_outputs, session_id_str + return run_outputs, effective_session_id -def run_graph( - graph: "Graph", +async def run_graph( + graph: Graph, input_value: str, input_type: str, output_type: str, + *, + session_id: str | None = None, fallback_to_env_vars: bool = False, - output_component: Optional[str] = None, -) -> List[RunOutputs]: - """ - Runs the given Langflow Graph with the specified input and returns the outputs. + output_component: str | None = None, +) -> list[RunOutputs]: + """Runs the given Langflow Graph with the specified input and returns the outputs. Args: graph (Graph): The graph to be executed. input_value (str): The input value to be passed to the graph. input_type (str): The type of the input value. output_type (str): The type of the desired output. + session_id (str | None, optional): The session ID to be used for the flow. Defaults to None. + fallback_to_env_vars (bool, optional): Whether to fallback to environment variables. + Defaults to False. output_component (Optional[str], optional): The specific output component to retrieve. Defaults to None. 
Returns: @@ -100,33 +105,34 @@ def run_graph( components.append(input_value_request.components or []) inputs_list.append({INPUT_FIELD_NAME: input_value_request.input_value}) types.append(input_value_request.type) - run_outputs = graph.run( + return await graph.arun( inputs_list, - components, - types, - outputs or [], + inputs_components=components, + types=types, + outputs=outputs or [], stream=False, - session_id="", + session_id=session_id, fallback_to_env_vars=fallback_to_env_vars, ) - return run_outputs def validate_input( - graph_data: Dict[str, Any], tweaks: Union["Tweaks", Dict[str, str | Dict[str, Any]]] -) -> List[Dict[str, Any]]: + graph_data: dict[str, Any], tweaks: Tweaks | dict[str, str | dict[str, Any]] +) -> list[dict[str, Any]]: if not isinstance(graph_data, dict) or not isinstance(tweaks, dict): - raise ValueError("graph_data and tweaks should be dictionaries") + msg = "graph_data and tweaks should be dictionaries" + raise TypeError(msg) nodes = graph_data.get("data", {}).get("nodes") or graph_data.get("nodes") if not isinstance(nodes, list): - raise ValueError("graph_data should contain a list of nodes under 'data' key or directly under 'nodes' key") + msg = "graph_data should contain a list of nodes under 'data' key or directly under 'nodes' key" + raise TypeError(msg) return nodes -def apply_tweaks(node: Dict[str, Any], node_tweaks: Dict[str, Any]) -> None: +def apply_tweaks(node: dict[str, Any], node_tweaks: dict[str, Any]) -> None: template_data = node.get("data", {}).get("node", {}).get("template") if not isinstance(template_data, dict): @@ -137,52 +143,49 @@ def apply_tweaks(node: Dict[str, Any], node_tweaks: Dict[str, Any]) -> None: if tweak_name not in template_data: continue if tweak_name in template_data: - if isinstance(tweak_value, dict): + if template_data[tweak_name]["type"] == "NestedDict": + value = validate_and_repair_json(tweak_value) + template_data[tweak_name]["value"] = value + elif isinstance(tweak_value, dict): for k, v in tweak_value.items(): - k = "file_path" if template_data[tweak_name]["type"] == "file" else k - template_data[tweak_name][k] = v + _k = "file_path" if template_data[tweak_name]["type"] == "file" else k + template_data[tweak_name][_k] = v else: key = "file_path" if template_data[tweak_name]["type"] == "file" else "value" template_data[tweak_name][key] = tweak_value -def apply_tweaks_on_vertex(vertex: Vertex, node_tweaks: Dict[str, Any]) -> None: +def apply_tweaks_on_vertex(vertex: Vertex, node_tweaks: dict[str, Any]) -> None: for tweak_name, tweak_value in node_tweaks.items(): if tweak_name and tweak_value and tweak_name in vertex.params: vertex.params[tweak_name] = tweak_value def process_tweaks( - graph_data: Dict[str, Any], tweaks: Union["Tweaks", Dict[str, Dict[str, Any]]], stream: bool = False -) -> Dict[str, Any]: - """ - This function is used to tweak the graph data using the node id and the tweaks dict. + graph_data: dict[str, Any], tweaks: Tweaks | dict[str, dict[str, Any]], *, stream: bool = False +) -> dict[str, Any]: + """This function is used to tweak the graph data using the node id and the tweaks dict. :param graph_data: The dictionary containing the graph data. It must contain a 'data' key with 'nodes' as its child or directly contain 'nodes' key. Each node should have an 'id' and 'data'. :param tweaks: The dictionary containing the tweaks. The keys can be the node id or the name of the tweak. The values can be a dictionary containing the tweaks for the node or the value of the tweak. 
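[Editor's note] process_tweaks matches each tweak key against node ids first and display names second (the merged walrus condition below), and NestedDict fields are now routed through validate_and_repair_json. A sketch with a hypothetical component id:

    from langflow.processing.process import process_tweaks

    graph_data = {
        "data": {
            "nodes": [
                {
                    "id": "OpenAIModel-abc12",  # hypothetical node id
                    "data": {
                        "node": {
                            "display_name": "OpenAI",
                            "template": {
                                "temperature": {"type": "float", "value": 0.1},
                            },
                        }
                    },
                }
            ]
        }
    }

    tweaks = {"OpenAIModel-abc12": {"temperature": 0.7}}
    graph_data = process_tweaks(graph_data, tweaks)

    template = graph_data["data"]["nodes"][0]["data"]["node"]["template"]
    assert template["temperature"]["value"] == 0.7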
- :param stream: A boolean flag indicating whether streaming should be deactivated across all components or not. Default is False. + :param stream: A boolean flag indicating whether streaming should be deactivated across all components or not. + Default is False. :return: The modified graph_data dictionary. :raises ValueError: If the input is not in the expected format. """ - tweaks_dict = {} - if not isinstance(tweaks, dict): - tweaks_dict = cast(Dict[str, Any], tweaks.model_dump()) - else: - tweaks_dict = tweaks + tweaks_dict = cast(dict[str, Any], tweaks.model_dump()) if not isinstance(tweaks, dict) else tweaks if "stream" not in tweaks_dict: tweaks_dict |= {"stream": stream} - nodes = validate_input(graph_data, cast(Dict[str, str | Dict[str, Any]], tweaks_dict)) + nodes = validate_input(graph_data, cast(dict[str, str | dict[str, Any]], tweaks_dict)) nodes_map = {node.get("id"): node for node in nodes} nodes_display_name_map = {node.get("data", {}).get("node", {}).get("display_name"): node for node in nodes} all_nodes_tweaks = {} for key, value in tweaks_dict.items(): if isinstance(value, dict): - if node := nodes_map.get(key): - apply_tweaks(node, value) - elif node := nodes_display_name_map.get(key): + if (node := nodes_map.get(key)) or (node := nodes_display_name_map.get(key)): apply_tweaks(node, value) else: all_nodes_tweaks[key] = value @@ -193,7 +196,7 @@ def process_tweaks( return graph_data -def process_tweaks_on_graph(graph: Graph, tweaks: Dict[str, Dict[str, Any]]): +def process_tweaks_on_graph(graph: Graph, tweaks: dict[str, dict[str, Any]]): for vertex in graph.vertices: if isinstance(vertex, Vertex) and isinstance(vertex.id, str): node_id = vertex.id diff --git a/src/backend/base/langflow/processing/utils.py b/src/backend/base/langflow/processing/utils.py new file mode 100644 index 000000000000..02051c62a6db --- /dev/null +++ b/src/backend/base/langflow/processing/utils.py @@ -0,0 +1,25 @@ +import json +from typing import Any + +from json_repair import repair_json + + +def validate_and_repair_json(json_str: str | dict) -> dict[str, Any] | str: + """Validates a JSON string and attempts to repair it if invalid. 
+ + Args: + json_str (str): The JSON string to validate/repair + + Returns: + Union[Dict[str, Any], str]: The parsed JSON dict if valid/repairable, + otherwise returns the original string + """ + if not isinstance(json_str, str): + return json_str + try: + # If invalid, attempt repair + repaired = repair_json(json_str) + return json.loads(repaired) + except (json.JSONDecodeError, ImportError): + # Return original if repair fails or module not found + return json_str diff --git a/src/backend/base/langflow/schema/__init__.py b/src/backend/base/langflow/schema/__init__.py index ae65fd05afab..e84ff7dfc1c8 100644 --- a/src/backend/base/langflow/schema/__init__.py +++ b/src/backend/base/langflow/schema/__init__.py @@ -1,4 +1,4 @@ -from .dotdict import dotdict from .data import Data +from .dotdict import dotdict __all__ = ["Data", "dotdict"] diff --git a/src/backend/base/langflow/schema/artifact.py b/src/backend/base/langflow/schema/artifact.py index 27eb3c52c0b5..0e95b041d262 100644 --- a/src/backend/base/langflow/schema/artifact.py +++ b/src/backend/base/langflow/schema/artifact.py @@ -1,12 +1,14 @@ +from collections.abc import Generator from enum import Enum -from typing import Generator from fastapi.encoders import jsonable_encoder +from loguru import logger from pydantic import BaseModel -from langflow.schema import Data +from langflow.schema.data import Data +from langflow.schema.encoders import CUSTOM_ENCODERS from langflow.schema.message import Message -from langflow.schema.schema import recursive_serialize_or_str +from langflow.schema.serialize import recursive_serialize_or_str class ArtifactType(str, Enum): @@ -41,11 +43,11 @@ def get_artifact_type(value, build_result=None) -> str: case list(): result = ArtifactType.ARRAY - if result == ArtifactType.UNKNOWN: - if build_result and isinstance(build_result, Generator): - result = ArtifactType.STREAM - elif isinstance(value, Message) and isinstance(value.text, Generator): - result = ArtifactType.STREAM + if result == ArtifactType.UNKNOWN and ( + (build_result and isinstance(build_result, Generator)) + or (isinstance(value, Message) and isinstance(value.text, Generator)) + ): + result = ArtifactType.STREAM return result.value @@ -56,19 +58,18 @@ def post_process_raw(raw, artifact_type: str): elif artifact_type == ArtifactType.ARRAY.value: _raw = [] for item in raw: - if hasattr(item, "dict"): - _raw.append(recursive_serialize_or_str(item)) - elif hasattr(item, "model_dump"): + if hasattr(item, "dict") or hasattr(item, "model_dump"): _raw.append(recursive_serialize_or_str(item)) else: _raw.append(str(item)) raw = _raw elif artifact_type == ArtifactType.UNKNOWN.value and raw is not None: - if isinstance(raw, (BaseModel, dict)): + if isinstance(raw, BaseModel | dict): try: - raw = jsonable_encoder(raw) + raw = jsonable_encoder(raw, custom_encoder=CUSTOM_ENCODERS) artifact_type = ArtifactType.OBJECT.value - except Exception: + except Exception: # noqa: BLE001 + logger.opt(exception=True).debug(f"Error converting to json: {raw} ({type(raw)})") raw = "Built Successfully ✨" else: raw = "Built Successfully ✨" diff --git a/src/backend/base/langflow/schema/content_block.py b/src/backend/base/langflow/schema/content_block.py new file mode 100644 index 000000000000..6df7ef6e8ba9 --- /dev/null +++ b/src/backend/base/langflow/schema/content_block.py @@ -0,0 +1,62 @@ +from typing import Annotated + +from pydantic import BaseModel, Discriminator, Field, Tag, field_serializer, field_validator +from typing_extensions import TypedDict + +from .content_types 
import CodeContent, ErrorContent, JSONContent, MediaContent, TextContent, ToolContent + + +def _get_type(d: dict | BaseModel) -> str | None: + if isinstance(d, dict): + return d.get("type") + return getattr(d, "type", None) + + +# Create a union type of all content types +ContentType = Annotated[ + Annotated[ToolContent, Tag("tool_use")] + | Annotated[ErrorContent, Tag("error")] + | Annotated[TextContent, Tag("text")] + | Annotated[MediaContent, Tag("media")] + | Annotated[CodeContent, Tag("code")] + | Annotated[JSONContent, Tag("json")], + Discriminator(_get_type), +] + + +class ContentBlock(BaseModel): + """A block of content that can contain different types of content.""" + + title: str + contents: list[ContentType] + allow_markdown: bool = Field(default=True) + media_url: list[str] | None = None + + def __init__(self, **data) -> None: + super().__init__(**data) + schema_dict = self.__pydantic_core_schema__["schema"] + if "fields" in schema_dict: + fields = schema_dict["fields"] + elif "schema" in schema_dict: + fields = schema_dict["schema"]["fields"] + fields_with_default = (f for f, d in fields.items() if "default" in d["schema"]) + self.model_fields_set.update(fields_with_default) + + @field_validator("contents", mode="before") + @classmethod + def validate_contents(cls, v) -> list[ContentType]: + if isinstance(v, dict): + msg = "Contents must be a list of ContentTypes" + raise TypeError(msg) + return [v] if isinstance(v, BaseModel) else v + + @field_serializer("contents") + def serialize_contents(self, value) -> list[dict]: + return [v.model_dump() for v in value] + + +class ContentBlockDict(TypedDict): + title: str + contents: list[dict] + allow_markdown: bool + media_url: list[str] | None diff --git a/src/backend/base/langflow/schema/content_types.py b/src/backend/base/langflow/schema/content_types.py new file mode 100644 index 000000000000..d3f580a36d71 --- /dev/null +++ b/src/backend/base/langflow/schema/content_types.py @@ -0,0 +1,91 @@ +from typing import Any, Literal + +from fastapi.encoders import jsonable_encoder +from pydantic import BaseModel, ConfigDict, Field, model_serializer +from typing_extensions import TypedDict + +from langflow.schema.encoders import CUSTOM_ENCODERS + + +class HeaderDict(TypedDict, total=False): + title: str | None + icon: str | None + + +class BaseContent(BaseModel): + """Base class for all content types.""" + + type: str = Field(..., description="Type of the content") + duration: int | None = None + header: HeaderDict | None = Field(default_factory=dict) + + def to_dict(self) -> dict[str, Any]: + return self.model_dump() + + @classmethod + def from_dict(cls, data: dict[str, Any]) -> "BaseContent": + return cls(**data) + + @model_serializer(mode="wrap") + def serialize_model(self, nxt) -> dict[str, Any]: + try: + dump = nxt(self) + return jsonable_encoder(dump, custom_encoder=CUSTOM_ENCODERS) + except Exception: # noqa: BLE001 + return nxt(self) + + +class ErrorContent(BaseContent): + """Content type for error messages.""" + + type: Literal["error"] = Field(default="error") + component: str | None = None + field: str | None = None + reason: str | None = None + solution: str | None = None + traceback: str | None = None + + +class TextContent(BaseContent): + """Content type for simple text content.""" + + type: Literal["text"] = Field(default="text") + text: str + duration: int | None = None + + +class MediaContent(BaseContent): + """Content type for media content.""" + + type: Literal["media"] = Field(default="media") + urls: list[str] + caption: 
str | None = None + + +class JSONContent(BaseContent): + """Content type for JSON content.""" + + type: Literal["json"] = Field(default="json") + data: dict[str, Any] + + +class CodeContent(BaseContent): + """Content type for code snippets.""" + + type: Literal["code"] = Field(default="code") + code: str + language: str + title: str | None = None + + +class ToolContent(BaseContent): + """Content type for tool start content.""" + + model_config = ConfigDict(populate_by_name=True) + + type: Literal["tool_use"] = Field(default="tool_use") + name: str | None = None + tool_input: dict[str, Any] = Field(default_factory=dict, alias="input") + output: Any | None = None + error: Any | None = None + duration: int | None = None diff --git a/src/backend/base/langflow/schema/data.py b/src/backend/base/langflow/schema/data.py index 4bc34e297f6b..65b120a03f4e 100644 --- a/src/backend/base/langflow/schema/data.py +++ b/src/backend/base/langflow/schema/data.py @@ -1,19 +1,21 @@ import copy import json -from typing import Optional, cast +from datetime import datetime +from decimal import Decimal +from typing import cast +from uuid import UUID from langchain_core.documents import Document from langchain_core.messages import AIMessage, BaseMessage, HumanMessage -from langchain_core.prompt_values import ImagePromptValue -from langchain_core.prompts.image import ImagePromptTemplate +from loguru import logger from pydantic import BaseModel, model_serializer, model_validator from langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_USER +from langflow.utils.image import create_data_url class Data(BaseModel): - """ - Represents a record with text and optional data. + """Represents a record with text and optional data. Attributes: data (dict, optional): Additional data associated with the record. @@ -21,13 +23,14 @@ class Data(BaseModel): text_key: str = "text" data: dict = {} - default_value: Optional[str] = "" + default_value: str | None = "" @model_validator(mode="before") @classmethod def validate_data(cls, values): if not isinstance(values, dict): - raise ValueError("Data must be a dictionary") + msg = "Data must be a dictionary" + raise ValueError(msg) # noqa: TRY004 if not values.get("data"): values["data"] = {} # Any other keyword should be added to the data dictionary @@ -38,12 +41,10 @@ def validate_data(cls, values): @model_serializer(mode="plain", when_used="json") def serialize_model(self): - data = {k: v.to_json() if hasattr(v, "to_json") else v for k, v in self.data.items()} - return data + return {k: v.to_json() if hasattr(v, "to_json") else v for k, v in self.data.items()} def get_text(self): - """ - Retrieves the text value from the data dictionary. + """Retrieves the text value from the data dictionary. If the text key is present in the data dictionary, the corresponding value is returned. Otherwise, the default value is returned. @@ -55,8 +56,7 @@ def get_text(self): @classmethod def from_document(cls, document: Document) -> "Data": - """ - Converts a Document to a Data. + """Converts a Document to a Data. Args: document (Document): The Document to convert. @@ -70,8 +70,7 @@ def from_document(cls, document: Document) -> "Data": @classmethod def from_lc_message(cls, message: BaseMessage) -> "Data": - """ - Converts a BaseMessage to a Data. + """Converts a BaseMessage to a Data. Args: message (BaseMessage): The BaseMessage to convert. 
@@ -84,7 +83,8 @@ def from_lc_message(cls, message: BaseMessage) -> "Data": return cls(data=data, text_key="text") def __add__(self, other: "Data") -> "Data": - """ + """Combines the data of two data by attempting to add values for overlapping keys. + Combines the data of two data by attempting to add values for overlapping keys for all types that support the addition operation. Falls back to the value from 'other' record when addition is not supported. @@ -105,8 +105,7 @@ def __add__(self, other: "Data") -> "Data": return Data(data=combined_data) def to_lc_document(self) -> Document: - """ - Converts the Data to a Document. + """Converts the Data to a Document. Returns: Document: The converted Document. @@ -118,8 +117,7 @@ def to_lc_document(self) -> Document: def to_lc_message( self, ) -> BaseMessage: - """ - Converts the Data to a BaseMessage. + """Converts the Data to a BaseMessage. Returns: BaseMessage: The converted BaseMessage. @@ -130,7 +128,8 @@ def to_lc_message( # But first we check if all required keys are present in the data dictionary # they are: "text", "sender" if not all(key in self.data for key in ["text", "sender"]): - raise ValueError(f"Missing required keys ('text', 'sender') in Data: {self.data}") + msg = f"Missing required keys ('text', 'sender') in Data: {self.data}" + raise ValueError(msg) sender = self.data.get("sender", MESSAGE_SENDER_AI) text = self.data.get("text", "") files = self.data.get("files", []) @@ -138,12 +137,9 @@ def to_lc_message( if files: contents = [{"type": "text", "text": text}] for file_path in files: - image_template = ImagePromptTemplate() - image_prompt_value: ImagePromptValue = image_template.invoke( - input={"path": file_path}, config={"callbacks": self.get_langchain_callbacks()} - ) # type: ignore - contents.append({"type": "image_url", "image_url": image_prompt_value.image_url}) - human_message = HumanMessage(content=contents) # type: ignore + image_url = create_data_url(file_path) + contents.append({"type": "image_url", "image_url": {"url": image_url}}) + human_message = HumanMessage(content=contents) else: human_message = HumanMessage( content=[{"type": "text", "text": text}], @@ -151,25 +147,25 @@ def to_lc_message( return human_message - return AIMessage(content=text) # type: ignore + return AIMessage(content=text) def __getattr__(self, key): - """ - Allows attribute-like access to the data dictionary. - """ + """Allows attribute-like access to the data dictionary.""" try: if key.startswith("__"): return self.__getattribute__(key) if key in {"data", "text_key"} or key.startswith("_"): return super().__getattr__(key) return self.data[key] - except KeyError: + except KeyError as e: # Fallback to default behavior to raise AttributeError for undefined attributes - raise AttributeError(f"'{type(self).__name__}' object has no attribute '{key}'") + msg = f"'{type(self).__name__}' object has no attribute '{key}'" + raise AttributeError(msg) from e - def __setattr__(self, key, value): - """ - Allows attribute-like setting of values in the data dictionary, + def __setattr__(self, key, value) -> None: + """Set attribute-like values in the data dictionary. + + Allows attribute-like setting of values in the data dictionary. while still allowing direct assignment to class attributes. """ if key in {"data", "text_key"} or key.startswith("_"): @@ -180,19 +176,15 @@ def __setattr__(self, key, value): else: self.data[key] = value - def __delattr__(self, key): - """ - Allows attribute-like deletion from the data dictionary. 
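[Editor's note] Data keeps its dict-backed attribute access, and __str__ now serializes through serialize_data/custom_serializer (defined just below), which understands datetime, Decimal, UUID, and nested pydantic models. A short sketch:

    from datetime import datetime, timezone

    from langflow.schema.data import Data

    record = Data(text="hello", created_at=datetime.now(timezone.utc))
    record.score = 0.9    # attribute writes land in record.data
    print(record.text)    # attribute reads come out of record.data
    print(str(record))    # JSON via serialize_data; datetime becomes ISO-8601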
- """ + def __delattr__(self, key) -> None: + """Allows attribute-like deletion from the data dictionary.""" if key in {"data", "text_key"} or key.startswith("_"): super().__delattr__(key) else: del self.data[key] def __deepcopy__(self, memo): - """ - Custom deepcopy implementation to handle copying of the Data object. - """ + """Custom deepcopy implementation to handle copying of the Data object.""" # Create a new Data object with a deep copy of the data dictionary return Data(data=copy.deepcopy(self.data, memo), text_key=self.text_key, default_value=self.default_value) @@ -204,12 +196,31 @@ def __str__(self) -> str: # return a JSON string representation of the Data atributes try: data = {k: v.to_json() if hasattr(v, "to_json") else v for k, v in self.data.items()} - return json.dumps(data, indent=4) - except Exception: + return serialize_data(data) # use the custom serializer + except Exception: # noqa: BLE001 + logger.opt(exception=True).debug("Error converting Data to JSON") return str(self.data) - def __contains__(self, key): + def __contains__(self, key) -> bool: return key in self.data def __eq__(self, other): return isinstance(other, Data) and self.data == other.data + + +def custom_serializer(obj): + if isinstance(obj, datetime): + return obj.astimezone().isoformat() + if isinstance(obj, Decimal): + return float(obj) + if isinstance(obj, UUID): + return str(obj) + if isinstance(obj, BaseModel): + return obj.model_dump() + # Add more custom serialization rules as needed + msg = f"Type {type(obj)} not serializable" + raise TypeError(msg) + + +def serialize_data(data): + return json.dumps(data, indent=4, default=custom_serializer) diff --git a/src/backend/base/langflow/schema/dotdict.py b/src/backend/base/langflow/schema/dotdict.py index f85c928bb0fe..93d57aec93f5 100644 --- a/src/backend/base/langflow/schema/dotdict.py +++ b/src/backend/base/langflow/schema/dotdict.py @@ -1,17 +1,17 @@ -class dotdict(dict): - """ - dotdict allows accessing dictionary elements using dot notation (e.g., dict.key instead of dict['key']). +class dotdict(dict): # noqa: N801 + """dotdict allows accessing dictionary elements using dot notation (e.g., dict.key instead of dict['key']). + It automatically converts nested dictionaries into dotdict instances, enabling dot notation on them as well. Note: - - Only keys that are valid attribute names (e.g., strings that could be variable names) are accessible via dot notation. + - Only keys that are valid attribute names (e.g., strings that could be variable names) are accessible via dot + notation. - Keys which are not valid Python attribute names or collide with the dict method names (like 'items', 'keys') should be accessed using the traditional dict['key'] notation. """ def __getattr__(self, attr): - """ - Override dot access to behave like dictionary lookup. Automatically convert nested dicts to dotdicts. + """Override dot access to behave like dictionary lookup. Automatically convert nested dicts to dotdicts. Args: attr (str): Attribute to access. 
@@ -27,13 +27,14 @@ def __getattr__(self, attr): if isinstance(value, dict) and not isinstance(value, dotdict): value = dotdict(value) self[attr] = value # Update self to nest dotdict for future accesses + except KeyError as e: + msg = f"'dotdict' object has no attribute '{attr}'" + raise AttributeError(msg) from e + else: return value - except KeyError: - raise AttributeError(f"'dotdict' object has no attribute '{attr}'") - def __setattr__(self, key, value): - """ - Override attribute setting to work as dictionary item assignment. + def __setattr__(self, key, value) -> None: + """Override attribute setting to work as dictionary item assignment. Args: key (str): The key under which to store the value. @@ -43,9 +44,8 @@ def __setattr__(self, key, value): value = dotdict(value) self[key] = value - def __delattr__(self, key): - """ - Override attribute deletion to work as dictionary item deletion. + def __delattr__(self, key) -> None: + """Override attribute deletion to work as dictionary item deletion. Args: key (str): The key of the item to delete from the dictionary. @@ -55,12 +55,12 @@ def __delattr__(self, key): """ try: del self[key] - except KeyError: - raise AttributeError(f"'dotdict' object has no attribute '{key}'") + except KeyError as e: + msg = f"'dotdict' object has no attribute '{key}'" + raise AttributeError(msg) from e def __missing__(self, key): - """ - Handle missing keys by returning an empty dotdict. This allows chaining access without raising KeyError. + """Handle missing keys by returning an empty dotdict. This allows chaining access without raising KeyError. Args: key: The missing key. diff --git a/src/backend/base/langflow/schema/encoders.py b/src/backend/base/langflow/schema/encoders.py new file mode 100644 index 000000000000..93b6af740ddc --- /dev/null +++ b/src/backend/base/langflow/schema/encoders.py @@ -0,0 +1,13 @@ +from collections.abc import Callable +from datetime import datetime + + +def encode_callable(obj: Callable): + return obj.__name__ if hasattr(obj, "__name__") else str(obj) + + +def encode_datetime(obj: datetime): + return obj.strftime("%Y-%m-%d %H:%M:%S %Z") + + +CUSTOM_ENCODERS = {Callable: encode_callable, datetime: encode_datetime} diff --git a/src/backend/base/langflow/schema/graph.py b/src/backend/base/langflow/schema/graph.py index 6b6cca66a0f8..0cbabf914a49 100644 --- a/src/backend/base/langflow/schema/graph.py +++ b/src/backend/base/langflow/schema/graph.py @@ -1,4 +1,4 @@ -from typing import Any, List, Optional, Union +from typing import Any from pydantic import BaseModel, Field, RootModel @@ -6,17 +6,20 @@ class InputValue(BaseModel): - components: Optional[List[str]] = [] - input_value: Optional[str] = None - type: Optional[InputType] = Field( + components: list[str] | None = [] + input_value: str | None = None + type: InputType | None = Field( "any", - description="Defines on which components the input value should be applied. 'any' applies to all input components.", + description="Defines on which components the input value should be applied. " + "'any' applies to all input components.", ) class Tweaks(RootModel): - root: dict[str, Union[str, dict[str, Any]]] = Field( - description="A dictionary of tweaks to adjust the flow's execution. Allows customizing flow behavior dynamically. All tweaks are overridden by the input values.", + root: dict[str, str | dict[str, Any]] = Field( + description="A dictionary of tweaks to adjust the flow's execution. " + "Allows customizing flow behavior dynamically. 
" + "All tweaks are overridden by the input values.", ) model_config = { "json_schema_extra": { @@ -34,10 +37,10 @@ class Tweaks(RootModel): def __getitem__(self, key): return self.root[key] - def __setitem__(self, key, value): + def __setitem__(self, key, value) -> None: self.root[key] = value - def __delitem__(self, key): + def __delitem__(self, key) -> None: del self.root[key] def items(self): diff --git a/src/backend/base/langflow/schema/image.py b/src/backend/base/langflow/schema/image.py index 552f75b8b614..c2b07225edf7 100644 --- a/src/backend/base/langflow/schema/image.py +++ b/src/backend/base/langflow/schema/image.py @@ -8,16 +8,16 @@ IMAGE_ENDPOINT = "/files/images/" -def is_image_file(file_path): +def is_image_file(file_path) -> bool: try: with PILImage.open(file_path) as img: img.verify() # Verify that it is, in fact, an image - return True - except (IOError, SyntaxError): + except (OSError, SyntaxError): return False + return True -async def get_file_paths(files: list[str]): +def get_file_paths(files: list[str]): storage_service = get_storage_service() file_paths = [] for file in files: @@ -28,6 +28,7 @@ async def get_file_paths(files: list[str]): async def get_files( file_paths: list[str], + *, convert_to_base64: bool = False, ): storage_service = get_storage_service() @@ -51,7 +52,8 @@ def to_base64(self): if self.path: files = get_files([self.path], convert_to_base64=True) return files[0] - raise ValueError("Image path is not set.") + msg = "Image path is not set." + raise ValueError(msg) def to_content_dict(self): return { @@ -59,5 +61,5 @@ def to_content_dict(self): "image_url": self.to_base64(), } - def get_url(self): + def get_url(self) -> str: return f"{IMAGE_ENDPOINT}{self.path}" diff --git a/src/backend/base/langflow/schema/log.py b/src/backend/base/langflow/schema/log.py index 4cc17544e830..e4a272d8cab1 100644 --- a/src/backend/base/langflow/schema/log.py +++ b/src/backend/base/langflow/schema/log.py @@ -1,10 +1,33 @@ -from typing import Optional, Union +from typing import Any, Literal, TypeAlias from pydantic import BaseModel from typing_extensions import Protocol -LoggableType = Union[str, dict, list, int, float, bool, None, BaseModel] +from langflow.schema.message import ContentBlock, Message +from langflow.schema.playground_events import PlaygroundEvent + +LoggableType: TypeAlias = str | dict | list | int | float | bool | None | BaseModel | PlaygroundEvent class LogFunctionType(Protocol): - def __call__(self, message: Union[LoggableType, list[LoggableType]], *, name: Optional[str] = None) -> None: ... + def __call__(self, message: LoggableType | list[LoggableType], *, name: str | None = None) -> None: ... + + +class SendMessageFunctionType(Protocol): + def __call__( + self, + message: Message | None = None, + text: str | None = None, + background_color: str | None = None, + text_color: str | None = None, + icon: str | None = None, + content_blocks: list[ContentBlock] | None = None, + format_type: Literal["default", "error", "warning", "info"] = "default", + id_: str | None = None, + *, + allow_markdown: bool = True, + ) -> Message: ... + + +class OnTokenFunctionType(Protocol): + def __call__(self, data: dict[str, Any]) -> None: ... 
diff --git a/src/backend/base/langflow/schema/message.py b/src/backend/base/langflow/schema/message.py index 53d7f014138b..5b4ddb2f5685 100644 --- a/src/backend/base/langflow/schema/message.py +++ b/src/backend/base/langflow/schema/message.py @@ -1,52 +1,55 @@ -import asyncio +from __future__ import annotations + +import json +import re +import traceback +from collections.abc import AsyncIterator, Iterator from datetime import datetime, timezone -from typing import Annotated, Any, AsyncIterator, Iterator, List, Optional +from typing import Annotated, Any, Literal from uuid import UUID from fastapi.encoders import jsonable_encoder from langchain_core.load import load from langchain_core.messages import AIMessage, BaseMessage, HumanMessage, SystemMessage -from langchain_core.prompt_values import ImagePromptValue from langchain_core.prompts import BaseChatPromptTemplate, ChatPromptTemplate, PromptTemplate -from langchain_core.prompts.image import ImagePromptTemplate from loguru import logger -from pydantic import BeforeValidator, ConfigDict, Field, field_serializer, field_validator +from pydantic import BaseModel, ConfigDict, Field, ValidationError, field_serializer, field_validator from langflow.base.prompts.utils import dict_values_to_string +from langflow.schema.content_block import ContentBlock +from langflow.schema.content_types import ErrorContent from langflow.schema.data import Data from langflow.schema.image import Image, get_file_paths, is_image_file +from langflow.schema.properties import Properties, Source +from langflow.schema.validators import timestamp_to_str_validator # noqa: TCH001 from langflow.utils.constants import ( MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_NAME_USER, MESSAGE_SENDER_USER, ) - - -def _timestamp_to_str(timestamp: datetime | str) -> str: - if isinstance(timestamp, str): - # Just check if the string is a valid datetime - try: - datetime.strptime(timestamp, "%Y-%m-%d %H:%M:%S") - return timestamp - except ValueError: - raise ValueError(f"Invalid timestamp: {timestamp}") - return timestamp.strftime("%Y-%m-%d %H:%M:%S") +from langflow.utils.image import create_data_url class Message(Data): model_config = ConfigDict(arbitrary_types_allowed=True) # Helper class to deal with image data text_key: str = "text" - text: Optional[str | AsyncIterator | Iterator] = Field(default="") - sender: Optional[str] = None - sender_name: Optional[str] = None - files: Optional[list[str | Image]] = Field(default=[]) - session_id: Optional[str] = Field(default="") - timestamp: Annotated[str, BeforeValidator(_timestamp_to_str)] = Field( - default_factory=lambda: datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M:%S") + text: str | AsyncIterator | Iterator | None = Field(default="") + sender: str | None = None + sender_name: str | None = None + files: list[str | Image] | None = Field(default=[]) + session_id: str | None = Field(default="") + timestamp: Annotated[str, timestamp_to_str_validator] = Field( + default_factory=lambda: datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M:%S %Z") ) - flow_id: Optional[str | UUID] = None + flow_id: str | UUID | None = None + error: bool = Field(default=False) + edit: bool = Field(default=False) + + properties: Properties = Field(default_factory=Properties) + category: Literal["message", "error", "warning", "info"] | None = "message" + content_blocks: list[ContentBlock] = Field(default_factory=list) @field_validator("flow_id", mode="before") @classmethod @@ -55,12 +58,43 @@ def validate_flow_id(cls, value): value = str(value) 
return value - @field_serializer("flow_id") - def serialize_flow_id(value): + @field_validator("content_blocks", mode="before") + @classmethod + def validate_content_blocks(cls, value): + # value may start with [ or not + if isinstance(value, list): + return [ + ContentBlock.model_validate_json(v) if isinstance(v, str) else ContentBlock.model_validate(v) + for v in value + ] + if isinstance(value, str): + value = json.loads(value) if value.startswith("[") else [ContentBlock.model_validate_json(value)] + return value + + @field_validator("properties", mode="before") + @classmethod + def validate_properties(cls, value): if isinstance(value, str): - return UUID(value) + value = Properties.model_validate_json(value) + elif isinstance(value, dict): + value = Properties.model_validate(value) return value + @field_serializer("flow_id") + def serialize_flow_id(self, value): + if isinstance(value, UUID): + return str(value) + return value + + @field_serializer("timestamp") + def serialize_timestamp(self, value): + try: + # Try parsing with timezone + return datetime.strptime(value.strip(), "%Y-%m-%d %H:%M:%S %Z").astimezone(timezone.utc) + except ValueError: + # Try parsing without timezone + return datetime.strptime(value.strip(), "%Y-%m-%d %H:%M:%S").replace(tzinfo=timezone.utc) + @field_validator("files", mode="before") @classmethod def validate_files(cls, value): @@ -71,7 +105,7 @@ def validate_files(cls, value): return value def model_post_init(self, __context: Any) -> None: - new_files: List[Any] = [] + new_files: list[Any] = [] for file in self.files or []: if is_image_file(file): new_files.append(Image(path=file)) @@ -81,14 +115,13 @@ def model_post_init(self, __context: Any) -> None: if "timestamp" not in self.data: self.data["timestamp"] = self.timestamp - def set_flow_id(self, flow_id: str): + def set_flow_id(self, flow_id: str) -> None: self.flow_id = flow_id def to_lc_message( self, ) -> BaseMessage: - """ - Converts the Data to a BaseMessage. + """Converts the Data to a BaseMessage. Returns: BaseMessage: The converted BaseMessage. @@ -100,24 +133,21 @@ def to_lc_message( # they are: "text", "sender" if self.text is None or not self.sender: logger.warning("Missing required keys ('text', 'sender') in Message, defaulting to HumanMessage.") - if not isinstance(self.text, str): - text = "" - else: - text = self.text + text = "" if not isinstance(self.text, str) else self.text if self.sender == MESSAGE_SENDER_USER or not self.sender: if self.files: contents = [{"type": "text", "text": text}] - contents.extend(self.sync_get_file_content_dicts()) - human_message = HumanMessage(content=contents) # type: ignore + contents.extend(self.get_file_content_dicts()) + human_message = HumanMessage(content=contents) else: human_message = HumanMessage(content=text) return human_message - return AIMessage(content=text) # type: ignore + return AIMessage(content=text) @classmethod - def from_lc_message(cls, lc_message: BaseMessage) -> "Message": + def from_lc_message(cls, lc_message: BaseMessage) -> Message: if lc_message.type == "human": sender = MESSAGE_SENDER_USER sender_name = MESSAGE_SENDER_NAME_USER @@ -134,17 +164,15 @@ def from_lc_message(cls, lc_message: BaseMessage) -> "Message": return cls(text=lc_message.content, sender=sender, sender_name=sender_name) @classmethod - def from_data(cls, data: "Data") -> "Message": - """ - Converts a BaseMessage to a Data. + def from_data(cls, data: Data) -> Message: + """Converts Data to a Message. Args: - record (BaseMessage): The BaseMessage to convert. 
+ data: The Data to convert. Returns: - Data: The converted Data. + The converted Message. """ - return cls( text=data.text, sender=data.sender, @@ -153,38 +181,33 @@ def from_data(cls, data: Data) -> Message: session_id=data.session_id, timestamp=data.timestamp, flow_id=data.flow_id, + error=data.error, + edit=data.edit, ) @field_serializer("text", mode="plain") def serialize_text(self, value): - if isinstance(value, AsyncIterator): - return "" - elif isinstance(value, Iterator): + if isinstance(value, AsyncIterator | Iterator): return "" return value - def sync_get_file_content_dicts(self): - coro = self.get_file_content_dicts() - loop = asyncio.get_event_loop() - return loop.run_until_complete(coro) - # Keep this async method for backwards compatibility - async def get_file_content_dicts(self): + def get_file_content_dicts(self): content_dicts = [] - files = await get_file_paths(self.files) + files = get_file_paths(self.files) for file in files: if isinstance(file, Image): content_dicts.append(file.to_content_dict()) else: - image_template = ImagePromptTemplate() - image_prompt_value: ImagePromptValue = image_template.invoke(input={"path": file}) # type: ignore - content_dicts.append({"type": "image_url", "image_url": image_prompt_value.image_url}) + image_url = create_data_url(file) + content_dicts.append({"type": "image_url", "image_url": {"url": image_url}}) return content_dicts def load_lc_prompt(self): if "prompt" not in self: - raise ValueError("Prompt is required.") + msg = "Prompt is required." + raise ValueError(msg) # self.prompt was passed through jsonable_encoder # so inner messages are not BaseMessage # we need to convert them to BaseMessage @@ -201,8 +224,7 @@ def load_lc_prompt(self): messages.append(AIMessage(content=message.get("content"))) self.prompt["kwargs"]["messages"] = messages - loaded_prompt = load(self.prompt) - return loaded_prompt + return load(self.prompt) @classmethod def from_lc_prompt( @@ -221,29 +243,166 @@ def format_text(self): @classmethod async def from_template_and_variables(cls, template: str, **variables): + # This method has to be async for backwards compatibility with versions + # >1.0.15, <1.1 + return cls.from_template(template, **variables) + + # Define a sync version for backwards compatibility with versions >1.0.15, <1.1 + @classmethod + def from_template(cls, template: str, **variables): instance = cls(template=template, variables=variables) text = instance.format_text() - # Get all Message instances from the kwargs message = HumanMessage(content=text) contents = [] for value in variables.values(): if isinstance(value, cls) and value.files: - content_dicts = await value.get_file_content_dicts() + content_dicts = value.get_file_content_dicts() contents.extend(content_dicts) if contents: - message = HumanMessage(content=[{"type": "text", "text": text}] + contents) + message = HumanMessage(content=[{"type": "text", "text": text}, *contents]) + + prompt_template = ChatPromptTemplate.from_messages([message]) - prompt_template = ChatPromptTemplate(messages=[message]) # type: ignore instance.prompt = jsonable_encoder(prompt_template.to_json()) instance.messages = instance.prompt.get("kwargs", {}).get("messages", []) return instance + +class DefaultModel(BaseModel): + class Config: + from_attributes = True + populate_by_name = True + json_encoders = { + datetime: lambda v: v.isoformat(), + } + + def json(self, **kwargs): + # Uses the custom serialization function + return super().model_dump_json(**kwargs, encoder=self.custom_encoder) + +
@staticmethod + def custom_encoder(obj): + if isinstance(obj, datetime): + return obj.isoformat() + msg = f"Object of type {obj.__class__.__name__} is not JSON serializable" + raise TypeError(msg) + + +class MessageResponse(DefaultModel): + id: str | UUID | None = Field(default=None) + flow_id: UUID | None = Field(default=None) + timestamp: datetime = Field(default_factory=lambda: datetime.now(timezone.utc)) + sender: str + sender_name: str + session_id: str + text: str + files: list[str] = [] + edit: bool + + properties: Properties | None = None + category: str | None = None + content_blocks: list[ContentBlock] | None = None + + @field_validator("files", mode="before") @classmethod - def sync_from_template_and_variables(cls, template: str, **variables): - # Run the async version in a sync way - try: - loop = asyncio.get_running_loop() - except RuntimeError: - return asyncio.run(cls.from_template_and_variables(template, **variables)) + def validate_files(cls, v): + if isinstance(v, str): + v = json.loads(v) + return v + + @field_serializer("timestamp") + @classmethod + def serialize_timestamp(cls, v): + v = v.replace(microsecond=0) + return v.strftime("%Y-%m-%d %H:%M:%S %Z") + + @field_serializer("files") + @classmethod + def serialize_files(cls, v): + if isinstance(v, list): + return json.dumps(v) + return v + + @classmethod + def from_message(cls, message: Message, flow_id: str | None = None): + # first check if the record has all the required fields + if message.text is None or not message.sender or not message.sender_name: + msg = "The message does not have the required fields (text, sender, sender_name)." + raise ValueError(msg) + return cls( + sender=message.sender, + sender_name=message.sender_name, + text=message.text, + session_id=message.session_id, + files=message.files or [], + timestamp=message.timestamp, + flow_id=flow_id, + ) + + +class ErrorMessage(Message): + """A message class specifically for error messages with predefined error-specific attributes.""" + + def __init__( + self, + exception: BaseException, + session_id: str, + source: Source, + trace_name: str | None = None, + flow_id: str | None = None, + ) -> None: + # This is done to avoid circular imports + if exception.__class__.__name__ == "ExceptionWithMessageError" and exception.__cause__ is not None: + exception = exception.__cause__ + # Get the error reason + reason = f"**{exception.__class__.__name__}**\n" + if hasattr(exception, "body") and "message" in exception.body: + reason += f" - **{exception.body.get('message')}**\n" + elif hasattr(exception, "code"): + reason += f" - **Code: {exception.code}**\n" + elif hasattr(exception, "args") and exception.args: + reason += f" - **Details: {exception.args[0]}**\n" + elif isinstance(exception, ValidationError): + reason += f" - **Details:**\n\n```python\n{exception!s}\n```\n" else: - return loop.run_until_complete(cls.from_template_and_variables(template, **variables)) + reason += " - **An unknown error occurred.**\n" + + # Get the sender ID + if trace_name: + match = re.search(r"\((.*?)\)", trace_name) + if match: + match.group(1) + + super().__init__( + session_id=session_id, + sender=source.display_name, + sender_name=source.display_name, + text=reason, + properties=Properties( + text_color="red", + background_color="red", + edited=False, + source=source, + icon="error", + allow_markdown=False, + targets=[], + ), + category="error", + error=True, + content_blocks=[ + ContentBlock( + title="Error", + contents=[ + ErrorContent( + type="error", + 
component=source.display_name, + field=str(exception.field) if hasattr(exception, "field") else None, + reason=reason, + solution=str(exception.solution) if hasattr(exception, "solution") else None, + traceback=traceback.format_exc(), + ) + ], + ) + ], + flow_id=flow_id, + ) diff --git a/src/backend/base/langflow/schema/playground_events.py b/src/backend/base/langflow/schema/playground_events.py new file mode 100644 index 000000000000..d7db91ddb929 --- /dev/null +++ b/src/backend/base/langflow/schema/playground_events.py @@ -0,0 +1,181 @@ +import inspect +from collections.abc import Callable +from datetime import datetime, timezone +from typing import Annotated, Literal +from uuid import UUID + +from pydantic import BaseModel, ConfigDict, Field, field_serializer, field_validator + +from langflow.schema.content_block import ContentBlock +from langflow.schema.content_types import ErrorContent +from langflow.schema.properties import Properties +from langflow.schema.validators import timestamp_to_str_validator +from langflow.utils.constants import MESSAGE_SENDER_USER + + +class PlaygroundEvent(BaseModel): + model_config = ConfigDict(extra="allow", populate_by_name=True) + properties: Properties | None = Field(default=None) + sender_name: str | None = Field(default=None) + content_blocks: list[ContentBlock] | None = Field(default=None) + format_type: Literal["default", "error", "warning", "info"] = Field(default="default") + files: list[str] | None = Field(default=None) + text: str | None = Field(default=None) + timestamp: Annotated[str, timestamp_to_str_validator] = Field( + default_factory=lambda: datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M:%S %Z") + ) + id_: UUID | str | None = Field(default=None, alias="id") + + @field_serializer("timestamp") + @classmethod + def serialize_timestamp(cls, v: str) -> str: + return v + + @field_validator("id_") + @classmethod + def validate_id(cls, v: UUID | str | None) -> str | None: + if isinstance(v, UUID): + return str(v) + return v + + +class MessageEvent(PlaygroundEvent): + category: Literal["message", "error", "warning", "info"] = "message" + format_type: Literal["default", "error", "warning", "info"] = Field(default="default") + session_id: str | None = Field(default=None) + error: bool = Field(default=False) + edit: bool = Field(default=False) + flow_id: UUID | str | None = Field(default=None) + sender: str = Field(default=MESSAGE_SENDER_USER) + sender_name: str = Field(default="User") + + @field_validator("flow_id") + @classmethod + def validate_flow_id(cls, v: UUID | str | None) -> str | None: + if isinstance(v, UUID): + return str(v) + return v + + +class ErrorEvent(MessageEvent): + background_color: str = Field(default="#FF0000") + text_color: str = Field(default="#FFFFFF") + format_type: Literal["default", "error", "warning", "info"] = Field(default="error") + allow_markdown: bool = Field(default=False) + category: Literal["error"] = "error" + + +class WarningEvent(PlaygroundEvent): + background_color: str = Field(default="#FFA500") + text_color: str = Field(default="#000000") + format_type: Literal["default", "error", "warning", "info"] = Field(default="warning") + + +class InfoEvent(PlaygroundEvent): + background_color: str = Field(default="#0000FF") + text_color: str = Field(default="#FFFFFF") + format_type: Literal["default", "error", "warning", "info"] = Field(default="info") + + +class TokenEvent(BaseModel): + chunk: str = Field(...) 
+ id: UUID | str | None = Field(alias="id") + timestamp: Annotated[str, timestamp_to_str_validator] = Field( + default_factory=lambda: datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M:%S %Z") + ) + + +# Factory functions first +def create_message( + text: str, + category: Literal["message", "error", "warning", "info"] = "message", + properties: dict | None = None, + content_blocks: list[ContentBlock] | None = None, + sender_name: str | None = None, + files: list[str] | None = None, + timestamp: str | None = None, + format_type: Literal["default", "error", "warning", "info"] = "default", + sender: str | None = None, + session_id: str | None = None, + id: UUID | str | None = None, # noqa: A002 + flow_id: UUID | str | None = None, + *, + error: bool = False, + edit: bool = False, +) -> MessageEvent: + return MessageEvent( + text=text, + properties=properties, + category=category, + content_blocks=content_blocks, + sender_name=sender_name, + files=files, + timestamp=timestamp, + format_type=format_type, + sender=sender, + id=id, + session_id=session_id, + error=error, + edit=edit, + flow_id=flow_id, + ) + + +def create_error( + text: str, + properties: dict | None = None, + traceback: str | None = None, + title: str = "Error", + timestamp: str | None = None, + id: UUID | str | None = None, # noqa: A002 + flow_id: UUID | str | None = None, + session_id: str | None = None, + content_blocks: list[ContentBlock] | None = None, +) -> ErrorEvent: + if traceback: + content_blocks = content_blocks or [] + content_blocks += [ContentBlock(title=title, contents=[ErrorContent(type="error", traceback=traceback)])] + return ErrorEvent( + text=text, + properties=properties, + content_blocks=content_blocks, + timestamp=timestamp, + id=id, + flow_id=flow_id, + session_id=session_id, + ) + + +def create_warning(message: str) -> WarningEvent: + return WarningEvent(text=message) + + +def create_info(message: str) -> InfoEvent: + return InfoEvent(text=message) + + +def create_token(chunk: str, id: str) -> TokenEvent: # noqa: A002 + return TokenEvent( + chunk=chunk, + id=id, + ) + + +_EVENT_CREATORS: dict[str, tuple[Callable, inspect.Signature]] = { + "message": (create_message, inspect.signature(create_message)), + "error": (create_error, inspect.signature(create_error)), + "warning": (create_warning, inspect.signature(create_warning)), + "info": (create_info, inspect.signature(create_info)), + "token": (create_token, inspect.signature(create_token)), +} + + +def create_event_by_type( + event_type: Literal["message", "error", "warning", "info", "token"], **kwargs +) -> PlaygroundEvent | dict: + if event_type not in _EVENT_CREATORS: + return kwargs + + creator_func, signature = _EVENT_CREATORS[event_type] + valid_params = {k: v for k, v in kwargs.items() if k in signature.parameters} + return creator_func(**valid_params) diff --git a/src/backend/base/langflow/schema/properties.py b/src/backend/base/langflow/schema/properties.py new file mode 100644 index 000000000000..1f54eb4739ac --- /dev/null +++ b/src/backend/base/langflow/schema/properties.py @@ -0,0 +1,36 @@ +from typing import Literal + +from pydantic import BaseModel, Field, field_serializer, field_validator + + +class Source(BaseModel): + id: str | None = Field(default=None, description="The id of the source component.") + display_name: str | None = Field(default=None, description="The display name of the source component.") + source: str | None = Field( + default=None, + description="The source of the message. 
Normally used to display the model name (e.g. 'gpt-4o')", + ) + + +class Properties(BaseModel): + text_color: str | None = None + background_color: str | None = None + edited: bool = False + source: Source = Field(default_factory=Source) + icon: str | None = None + allow_markdown: bool = False + state: Literal["partial", "complete"] = "complete" + targets: list = [] + + @field_validator("source", mode="before") + @classmethod + def validate_source(cls, v): + if isinstance(v, str): + return Source(id=v, display_name=v, source=v) + return v + + @field_serializer("source") + def serialize_source(self, value): + if isinstance(value, Source): + return value.model_dump() + return value diff --git a/src/backend/base/langflow/schema/schema.py b/src/backend/base/langflow/schema/schema.py index ba8ab9b3949e..875200655260 100644 --- a/src/backend/base/langflow/schema/schema.py +++ b/src/backend/base/langflow/schema/schema.py @@ -1,11 +1,13 @@ +from collections.abc import Generator from enum import Enum -from typing import AsyncIterator, Generator, Iterator, Literal, Union +from typing import Literal from pydantic import BaseModel from typing_extensions import TypedDict -from langflow.schema import Data +from langflow.schema.data import Data from langflow.schema.message import Message +from langflow.schema.serialize import recursive_serialize_or_str INPUT_FIELD_NAME = "input_value" @@ -33,7 +35,7 @@ class ErrorLog(TypedDict): class OutputValue(BaseModel): - message: Union[ErrorLog, StreamURL, dict, list, str] + message: ErrorLog | StreamURL | dict | list | str type: str @@ -55,12 +57,11 @@ def get_type(payload): case str(): result = LogType.TEXT - if result == LogType.UNKNOWN: - if payload and isinstance(payload, Generator): - result = LogType.STREAM - - elif isinstance(payload, Message) and isinstance(payload.text, Generator): - result = LogType.STREAM + if result == LogType.UNKNOWN and ( + (payload and isinstance(payload, Generator)) + or (isinstance(payload, Message) and isinstance(payload.text, Generator)) + ): + result = LogType.STREAM return result @@ -73,14 +74,14 @@ def get_message(payload): elif hasattr(payload, "model_dump"): message = payload.model_dump() - if message is None and isinstance(payload, (dict, str, Data)): + if message is None and isinstance(payload, dict | str | Data): message = payload.data if isinstance(payload, Data) else payload return message or payload def build_output_logs(vertex, result) -> dict: - outputs: dict[str, OutputValue] = dict() + outputs: dict[str, OutputValue] = {} component_instance = result[0] for index, output in enumerate(vertex.outputs): if component_instance.status is None: @@ -111,35 +112,3 @@ def build_output_logs(vertex, result) -> dict: outputs |= {name: OutputValue(message=message, type=_type).model_dump()} return outputs - - -def recursive_serialize_or_str(obj): - try: - if isinstance(obj, dict): - return {k: recursive_serialize_or_str(v) for k, v in obj.items()} - elif isinstance(obj, list): - return [recursive_serialize_or_str(v) for v in obj] - elif isinstance(obj, BaseModel): - if hasattr(obj, "model_dump"): - obj_dict = obj.model_dump() - elif hasattr(obj, "dict"): - obj_dict = obj.dict() # type: ignore - return {k: recursive_serialize_or_str(v) for k, v in obj_dict.items()} - - elif isinstance(obj, (AsyncIterator, Generator, Iterator)): - # contain memory addresses - # without consuming the iterator - # return list(obj) consumes the iterator - # return f"{obj}" this generates '' - # it is not useful - return "Unconsumed Stream" - elif 
hasattr(obj, "dict"): - return {k: recursive_serialize_or_str(v) for k, v in obj.dict().items()} - elif hasattr(obj, "model_dump"): - return {k: recursive_serialize_or_str(v) for k, v in obj.model_dump().items()} - elif issubclass(obj, BaseModel): - # This a type BaseModel and not an instance of it - return repr(obj) - return str(obj) - except Exception: - return str(obj) diff --git a/src/backend/base/langflow/schema/serialize.py b/src/backend/base/langflow/schema/serialize.py new file mode 100644 index 000000000000..d441fe6868e0 --- /dev/null +++ b/src/backend/base/langflow/schema/serialize.py @@ -0,0 +1,43 @@ +from collections.abc import AsyncIterator, Generator, Iterator +from datetime import datetime + +from loguru import logger +from pydantic import BaseModel +from pydantic.v1 import BaseModel as BaseModelV1 + + +def recursive_serialize_or_str(obj): + try: + if isinstance(obj, type) and issubclass(obj, BaseModel | BaseModelV1): + # This a type BaseModel and not an instance of it + return repr(obj) + if isinstance(obj, str): + return obj + if isinstance(obj, datetime): + return obj.isoformat() + if isinstance(obj, dict): + return {k: recursive_serialize_or_str(v) for k, v in obj.items()} + if isinstance(obj, list): + return [recursive_serialize_or_str(v) for v in obj] + if isinstance(obj, BaseModel | BaseModelV1): + if hasattr(obj, "model_dump"): + obj_dict = obj.model_dump() + elif hasattr(obj, "dict"): + obj_dict = obj.dict() + return {k: recursive_serialize_or_str(v) for k, v in obj_dict.items()} + + if isinstance(obj, AsyncIterator | Generator | Iterator): + # contain memory addresses + # without consuming the iterator + # return list(obj) consumes the iterator + # return f"{obj}" this generates '' + # it is not useful + return "Unconsumed Stream" + if hasattr(obj, "dict") and not isinstance(obj, type): + return {k: recursive_serialize_or_str(v) for k, v in obj.dict().items()} + if hasattr(obj, "model_dump") and not isinstance(obj, type): + return {k: recursive_serialize_or_str(v) for k, v in obj.model_dump().items()} + return str(obj) + except Exception: # noqa: BLE001 + logger.debug(f"Cannot serialize object {obj}") + return str(obj) diff --git a/src/backend/base/langflow/schema/table.py b/src/backend/base/langflow/schema/table.py index a26dd737f205..0850369e431e 100644 --- a/src/backend/base/langflow/schema/table.py +++ b/src/backend/base/langflow/schema/table.py @@ -1,7 +1,8 @@ from enum import Enum -from typing import List, Optional -from pydantic import BaseModel, Field, field_validator +from pydantic import BaseModel, ConfigDict, Field, field_validator, model_validator + +VALID_TYPES = ["date", "number", "text", "json", "integer", "int", "float", "str", "string", "boolean"] class FormatterType(str, Enum): @@ -9,23 +10,41 @@ class FormatterType(str, Enum): text = "text" number = "number" json = "json" + boolean = "boolean" class Column(BaseModel): - display_name: str + model_config = ConfigDict(populate_by_name=True) name: str + display_name: str = Field(default="") sortable: bool = Field(default=True) filterable: bool = Field(default=True) - formatter: Optional[FormatterType | str] = None - - @field_validator("formatter") + formatter: FormatterType | str | None = Field(default=None, alias="type") + description: str | None = None + default: str | None = None + + @model_validator(mode="after") + def set_display_name(self): + if not self.display_name: + self.display_name = self.name + return self + + @field_validator("formatter", mode="before") + @classmethod def 
validate_formatter(cls, value): + if value in {"integer", "int", "float"}: + value = FormatterType.number + if value in {"str", "string"}: + value = FormatterType.text + if value == "dict": + value = FormatterType.json if isinstance(value, str): return FormatterType(value) if isinstance(value, FormatterType): return value - raise ValueError("Invalid formatter type") + msg = f"Invalid formatter type: {value}. Valid types are: {FormatterType}" + raise ValueError(msg) class TableSchema(BaseModel): - columns: List[Column] + columns: list[Column] diff --git a/src/backend/base/langflow/schema/validators.py b/src/backend/base/langflow/schema/validators.py new file mode 100644 index 000000000000..3c95bde51258 --- /dev/null +++ b/src/backend/base/langflow/schema/validators.py @@ -0,0 +1,35 @@ +from datetime import datetime + +from pydantic import BeforeValidator + + +def timestamp_to_str(timestamp: datetime | str) -> str: + if isinstance(timestamp, str): + # Just check if the string is a valid datetime + try: + datetime.strptime(timestamp, "%Y-%m-%d %H:%M:%S %Z") # noqa: DTZ007 + result = timestamp + except ValueError as e: + msg = f"Invalid timestamp: {timestamp}" + raise ValueError(msg) from e + else: + result = timestamp.strftime("%Y-%m-%d %H:%M:%S %Z") + return result + + +def timestamp_with_fractional_seconds(timestamp: datetime | str) -> str: + if isinstance(timestamp, str): + # Just check if the string is a valid datetime + try: + datetime.strptime(timestamp, "%Y-%m-%d %H:%M:%S.%f %Z") # noqa: DTZ007 + result = timestamp + except ValueError as e: + msg = f"Invalid timestamp: {timestamp}" + raise ValueError(msg) from e + else: + result = timestamp.strftime("%Y-%m-%d %H:%M:%S.%f %Z") + return result + + +timestamp_to_str_validator = BeforeValidator(timestamp_to_str) +timestamp_with_fractional_seconds_validator = BeforeValidator(timestamp_with_fractional_seconds) diff --git a/src/backend/base/langflow/server.py b/src/backend/base/langflow/server.py index 0c3a21a2e25f..fd8167fb0ad1 100644 --- a/src/backend/base/langflow/server.py +++ b/src/backend/base/langflow/server.py @@ -2,11 +2,11 @@ import logging import signal -from gunicorn import glogging # type: ignore -from gunicorn.app.base import BaseApplication # type: ignore +from gunicorn import glogging +from gunicorn.app.base import BaseApplication from uvicorn.workers import UvicornWorker -from langflow.logging.logger import InterceptHandler # type: ignore +from langflow.logging.logger import InterceptHandler class LangflowUvicornWorker(UvicornWorker): @@ -18,7 +18,6 @@ def _install_sigint_handler(self) -> None: - https://github.com/encode/uvicorn/issues/1116 - https://github.com/benoitc/gunicorn/issues/2604 """ - loop = asyncio.get_running_loop() loop.add_signal_handler(signal.SIGINT, self.handle_exit, signal.SIGINT, None) @@ -36,14 +35,14 @@ class Logger(glogging.Logger): gunicorn logs to loguru. 
""" - def __init__(self, cfg): + def __init__(self, cfg) -> None: super().__init__(cfg) logging.getLogger("gunicorn.error").handlers = [InterceptHandler()] logging.getLogger("gunicorn.access").handlers = [InterceptHandler()] class LangflowApplication(BaseApplication): - def __init__(self, app, options=None): + def __init__(self, app, options=None) -> None: self.options = options or {} self.options["worker_class"] = "langflow.server.LangflowUvicornWorker" @@ -51,7 +50,7 @@ def __init__(self, app, options=None): self.application = app super().__init__() - def load_config(self): + def load_config(self) -> None: config = {key: value for key, value in self.options.items() if key in self.cfg.settings and value is not None} for key, value in config.items(): self.cfg.set(key.lower(), value) diff --git a/src/backend/base/langflow/services/__init__.py b/src/backend/base/langflow/services/__init__.py index 8ac74b5b9157..a4c3bc2becb1 100644 --- a/src/backend/base/langflow/services/__init__.py +++ b/src/backend/base/langflow/services/__init__.py @@ -1,4 +1,4 @@ from .manager import service_manager from .schema import ServiceType -__all__ = ["service_manager", "ServiceType"] +__all__ = ["ServiceType", "service_manager"] diff --git a/src/backend/base/langflow/services/auth/factory.py b/src/backend/base/langflow/services/auth/factory.py index 63d5d2a6d17c..fbf734b3a8bf 100644 --- a/src/backend/base/langflow/services/auth/factory.py +++ b/src/backend/base/langflow/services/auth/factory.py @@ -5,7 +5,7 @@ class AuthServiceFactory(ServiceFactory): name = "auth_service" - def __init__(self): + def __init__(self) -> None: super().__init__(AuthService) def create(self, settings_service): diff --git a/src/backend/base/langflow/services/auth/service.py b/src/backend/base/langflow/services/auth/service.py index 5c3a89af6e22..fd7de2785f2a 100644 --- a/src/backend/base/langflow/services/auth/service.py +++ b/src/backend/base/langflow/services/auth/service.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import TYPE_CHECKING from langflow.services.base import Service @@ -9,5 +11,5 @@ class AuthService(Service): name = "auth_service" - def __init__(self, settings_service: "SettingsService"): + def __init__(self, settings_service: SettingsService): self.settings_service = settings_service diff --git a/src/backend/base/langflow/services/auth/utils.py b/src/backend/base/langflow/services/auth/utils.py index 61af0d1f2b46..71dec85ec74d 100644 --- a/src/backend/base/langflow/services/auth/utils.py +++ b/src/backend/base/langflow/services/auth/utils.py @@ -1,8 +1,10 @@ +import asyncio import base64 import random import warnings +from collections.abc import Coroutine from datetime import datetime, timedelta, timezone -from typing import Annotated, Coroutine, Optional, Union +from typing import Annotated from uuid import UUID from cryptography.fernet import Fernet @@ -17,7 +19,8 @@ from langflow.services.database.models.api_key.model import ApiKey from langflow.services.database.models.user.crud import get_user_by_id, get_user_by_username, update_user_last_login_at from langflow.services.database.models.user.model import User, UserRead -from langflow.services.deps import get_session, get_settings_service +from langflow.services.deps import get_db_service, get_session, get_settings_service +from langflow.services.settings.service import SettingsService oauth2_login = OAuth2PasswordBearer(tokenUrl="api/v1/login", auto_error=False) @@ -26,71 +29,74 @@ api_key_query = APIKeyQuery(name=API_KEY_NAME, 
scheme_name="API key query", auto_error=False) api_key_header = APIKeyHeader(name=API_KEY_NAME, scheme_name="API key header", auto_error=False) +MINIMUM_KEY_LENGTH = 32 + # Source: https://github.com/mrtolkien/fastapi_simple_security/blob/master/fastapi_simple_security/security_api_key.py -async def api_key_security( - query_param: str = Security(api_key_query), - header_param: str = Security(api_key_header), - db: Session = Depends(get_session), -) -> Optional[UserRead]: +def api_key_security( + query_param: Annotated[str, Security(api_key_query)], + header_param: Annotated[str, Security(api_key_header)], +) -> UserRead | None: settings_service = get_settings_service() - result: Optional[Union[ApiKey, User]] = None - if settings_service.auth_settings.AUTO_LOGIN: - # Get the first user - if not settings_service.auth_settings.SUPERUSER: - raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, - detail="Missing first superuser credentials", - ) + result: ApiKey | User | None = None - result = get_user_by_username(db, settings_service.auth_settings.SUPERUSER) + with get_db_service().with_session() as db: + if settings_service.auth_settings.AUTO_LOGIN: + # Get the first user + if not settings_service.auth_settings.SUPERUSER: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="Missing first superuser credentials", + ) - elif not query_param and not header_param: - raise HTTPException( - status_code=status.HTTP_403_FORBIDDEN, - detail="An API key must be passed as query or header", - ) + result = get_user_by_username(db, settings_service.auth_settings.SUPERUSER) - elif query_param: - result = check_key(db, query_param) + elif not query_param and not header_param: + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail="An API key must be passed as query or header", + ) - else: - result = check_key(db, header_param) + elif query_param: + result = check_key(db, query_param) - if not result: - raise HTTPException( - status_code=status.HTTP_403_FORBIDDEN, - detail="Invalid or missing API key", - ) - if isinstance(result, ApiKey): - return UserRead.model_validate(result.user, from_attributes=True) - elif isinstance(result, User): - return UserRead.model_validate(result, from_attributes=True) - raise ValueError("Invalid result type") + else: + result = check_key(db, header_param) + + if not result: + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail="Invalid or missing API key", + ) + if isinstance(result, ApiKey): + return UserRead.model_validate(result.user, from_attributes=True) + if isinstance(result, User): + return UserRead.model_validate(result, from_attributes=True) + msg = "Invalid result type" + raise ValueError(msg) async def get_current_user( - token: str = Security(oauth2_login), - query_param: str = Security(api_key_query), - header_param: str = Security(api_key_header), - db: Session = Depends(get_session), + token: Annotated[str, Security(oauth2_login)], + query_param: Annotated[str, Security(api_key_query)], + header_param: Annotated[str, Security(api_key_header)], + db: Annotated[Session, Depends(get_session)], ) -> User: if token: return await get_current_user_by_jwt(token, db) - else: - user = await api_key_security(query_param, header_param, db) - if user: - return user + user = await asyncio.to_thread(api_key_security, query_param, header_param) + if user: + return user - raise HTTPException( - status_code=status.HTTP_403_FORBIDDEN, - detail="Invalid or missing API key", - ) + raise HTTPException( + 
status_code=status.HTTP_403_FORBIDDEN, + detail="Invalid or missing API key", + ) async def get_current_user_by_jwt( token: Annotated[str, Depends(oauth2_login)], - db: Session = Depends(get_session), + db: Annotated[Session, Depends(get_session)], ) -> User: settings_service = get_settings_service() @@ -111,8 +117,8 @@ async def get_current_user_by_jwt( with warnings.catch_warnings(): warnings.simplefilter("ignore") payload = jwt.decode(token, secret_key, algorithms=[settings_service.auth_settings.ALGORITHM]) - user_id: UUID = payload.get("sub") # type: ignore - token_type: str = payload.get("type") # type: ignore + user_id: UUID = payload.get("sub") # type: ignore[assignment] + token_type: str = payload.get("type") # type: ignore[assignment] if expires := payload.get("exp", None): expires_datetime = datetime.fromtimestamp(expires, timezone.utc) if datetime.now(timezone.utc) > expires_datetime: @@ -131,8 +137,7 @@ async def get_current_user_by_jwt( headers={"WWW-Authenticate": "Bearer"}, ) except JWTError as e: - logger.error(f"JWT decoding error: {e}") - logger.exception(e) + logger.exception("JWT decoding error") raise HTTPException( status_code=status.HTTP_401_UNAUTHORIZED, detail="Could not validate credentials", @@ -152,26 +157,25 @@ async def get_current_user_by_jwt( async def get_current_user_for_websocket( websocket: WebSocket, - db: Session = Depends(get_session), - query_param: str = Security(api_key_query), -) -> Optional[User]: + db: Annotated[Session, Depends(get_session)], + query_param: Annotated[str, Security(api_key_query)], +) -> User | None: token = websocket.query_params.get("token") api_key = websocket.query_params.get("x-api-key") if token: return await get_current_user_by_jwt(token, db) - elif api_key: - return await api_key_security(api_key, query_param, db) - else: - return None + if api_key: + return await asyncio.to_thread(api_key_security, api_key, query_param) + return None -def get_current_active_user(current_user: Annotated[User, Depends(get_current_user)]): +async def get_current_active_user(current_user: Annotated[User, Depends(get_current_user)]): if not current_user.is_active: raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Inactive user") return current_user -def get_current_active_superuser(current_user: Annotated[User, Depends(get_current_user)]) -> User: +async def get_current_active_superuser(current_user: Annotated[User, Depends(get_current_user)]) -> User: if not current_user.is_active: raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Inactive user") if not current_user.is_superuser: @@ -206,7 +210,7 @@ def create_token(data: dict, expires_delta: timedelta): def create_super_user( username: str, password: str, - db: Session = Depends(get_session), + db: Session, ) -> User: super_user = get_user_by_username(db, username) @@ -226,7 +230,7 @@ def create_super_user( return super_user -def create_user_longterm_token(db: Session = Depends(get_session)) -> tuple[UUID, dict]: +def create_user_longterm_token(db: Session) -> tuple[UUID, dict]: settings_service = get_settings_service() username = settings_service.auth_settings.SUPERUSER @@ -266,7 +270,7 @@ def get_user_id_from_token(token: str) -> UUID: return UUID(int=0) -def create_user_tokens(user_id: UUID, db: Session = Depends(get_session), update_last_login: bool = False) -> dict: +def create_user_tokens(user_id: UUID, db: Session, *, update_last_login: bool = False) -> dict: settings_service = get_settings_service() access_token_expires = 
timedelta(seconds=settings_service.auth_settings.ACCESS_TOKEN_EXPIRE_SECONDS) @@ -292,7 +296,7 @@ def create_user_tokens(user_id: UUID, db: Session = Depends(get_session), update } -def create_refresh_token(refresh_token: str, db: Session = Depends(get_session)): +def create_refresh_token(refresh_token: str, db: Session): settings_service = get_settings_service() try: @@ -304,8 +308,8 @@ def create_refresh_token(refresh_token: str, db: Session = Depends(get_session)) settings_service.auth_settings.SECRET_KEY.get_secret_value(), algorithms=[settings_service.auth_settings.ALGORITHM], ) - user_id: UUID = payload.get("sub") # type: ignore - token_type: str = payload.get("type") # type: ignore + user_id: UUID = payload.get("sub") # type: ignore[assignment] + token_type: str = payload.get("type") # type: ignore[assignment] if user_id is None or token_type == "": raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid refresh token") @@ -318,14 +322,14 @@ def create_refresh_token(refresh_token: str, db: Session = Depends(get_session)) return create_user_tokens(user_id, db) except JWTError as e: - logger.error(f"JWT decoding error: {e}") + logger.exception("JWT decoding error") raise HTTPException( status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid refresh token", ) from e -def authenticate_user(username: str, password: str, db: Session = Depends(get_session)) -> Optional[User]: +def authenticate_user(username: str, password: str, db: Session) -> User | None: user = get_user_by_username(db, username) if not user: @@ -347,7 +351,7 @@ def add_padding(s): def ensure_valid_key(s: str) -> bytes: # If the key is too short, we'll use it as a seed to generate a valid key - if len(s) < 32: + if len(s) < MINIMUM_KEY_LENGTH: # Use the input as a seed for the random number generator random.seed(s) # Generate 32 random bytes @@ -358,27 +362,27 @@ def ensure_valid_key(s: str) -> bytes: return key -def get_fernet(settings_service=Depends(get_settings_service)): - SECRET_KEY: str = settings_service.auth_settings.SECRET_KEY.get_secret_value() - valid_key = ensure_valid_key(SECRET_KEY) - fernet = Fernet(valid_key) - return fernet +def get_fernet(settings_service: SettingsService): + secret_key: str = settings_service.auth_settings.SECRET_KEY.get_secret_value() + valid_key = ensure_valid_key(secret_key) + return Fernet(valid_key) -def encrypt_api_key(api_key: str, settings_service=Depends(get_settings_service)): +def encrypt_api_key(api_key: str, settings_service: SettingsService): fernet = get_fernet(settings_service) # Two-way encryption encrypted_key = fernet.encrypt(api_key.encode()) return encrypted_key.decode() -def decrypt_api_key(encrypted_api_key: str, settings_service=Depends(get_settings_service)): +def decrypt_api_key(encrypted_api_key: str, settings_service: SettingsService): fernet = get_fernet(settings_service) decrypted_key = "" # Two-way decryption if isinstance(encrypted_api_key, str): try: decrypted_key = fernet.decrypt(encrypted_api_key.encode()).decode() - except Exception: + except Exception: # noqa: BLE001 + logger.opt(exception=True).debug("Failed to decrypt API key") decrypted_key = fernet.decrypt(encrypted_api_key).decode() return decrypted_key diff --git a/src/backend/base/langflow/services/base.py b/src/backend/base/langflow/services/base.py index 430ecd11ec1c..a903332e1591 100644 --- a/src/backend/base/langflow/services/base.py +++ b/src/backend/base/langflow/services/base.py @@ -7,7 +7,6 @@ class Service(ABC): def get_schema(self): """Build a dictionary 
listing all methods, their parameters, types, return types and documentation.""" - schema = {} ignore = ["teardown", "set_ready"] for method in dir(self): @@ -22,8 +21,8 @@ def get_schema(self): } return schema - async def teardown(self): - pass + async def teardown(self) -> None: + return - def set_ready(self): + def set_ready(self) -> None: self.ready = True diff --git a/src/backend/base/langflow/services/cache/__init__.py b/src/backend/base/langflow/services/cache/__init__.py index 48fb9837abc4..72f74d7dadea 100644 --- a/src/backend/base/langflow/services/cache/__init__.py +++ b/src/backend/base/langflow/services/cache/__init__.py @@ -3,10 +3,10 @@ from . import factory, service __all__ = [ - "factory", - "service", - "ThreadingInMemoryCache", "AsyncInMemoryCache", "CacheService", "RedisCache", + "ThreadingInMemoryCache", + "factory", + "service", ] diff --git a/src/backend/base/langflow/services/cache/base.py b/src/backend/base/langflow/services/cache/base.py index 02d64518322f..7444767834d7 100644 --- a/src/backend/base/langflow/services/cache/base.py +++ b/src/backend/base/langflow/services/cache/base.py @@ -1,7 +1,7 @@ import abc import asyncio import threading -from typing import Generic, Optional, TypeVar +from typing import Generic, TypeVar from langflow.services.base import Service @@ -10,63 +10,69 @@ class CacheService(Service, Generic[LockType]): - """ - Abstract base class for a cache. - """ + """Abstract base class for a cache.""" name = "cache_service" @abc.abstractmethod - def get(self, key, lock: Optional[LockType] = None): - """ - Retrieve an item from the cache. + def get(self, key, lock: LockType | None = None): + """Retrieve an item from the cache. Args: key: The key of the item to retrieve. + lock: A lock to use for the operation. Returns: - The value associated with the key, or None if the key is not found. + The value associated with the key, or CACHE_MISS if the key is not found. """ @abc.abstractmethod - def set(self, key, value, lock: Optional[LockType] = None): - """ - Add an item to the cache. + def set(self, key, value, lock: LockType | None = None): + """Add an item to the cache. Args: key: The key of the item. value: The value to cache. + lock: A lock to use for the operation. """ @abc.abstractmethod - def upsert(self, key, value, lock: Optional[LockType] = None): - """ - Add an item to the cache if it doesn't exist, or update it if it does. + def upsert(self, key, value, lock: LockType | None = None): + """Add an item to the cache if it doesn't exist, or update it if it does. Args: key: The key of the item. value: The value to cache. + lock: A lock to use for the operation. """ @abc.abstractmethod - def delete(self, key, lock: Optional[LockType] = None): - """ - Remove an item from the cache. + def delete(self, key, lock: LockType | None = None): + """Remove an item from the cache. Args: key: The key of the item to remove. + lock: A lock to use for the operation. """ @abc.abstractmethod - def clear(self, lock: Optional[LockType] = None): - """ - Clear all items from the cache. - """ + def clear(self, lock: LockType | None = None): + """Clear all items from the cache.""" @abc.abstractmethod - def __contains__(self, key): + def contains(self, key) -> bool: + """Check if the key is in the cache. + + Args: + key: The key of the item to check. + + Returns: + True if the key is in the cache, False otherwise. """ - Check if the key is in the cache. + + @abc.abstractmethod + def __contains__(self, key) -> bool: + """Check if the key is in the cache. 
Args: key: The key of the item to check. @@ -77,17 +83,15 @@ def __contains__(self, key): @abc.abstractmethod def __getitem__(self, key): - """ - Retrieve an item from the cache using the square bracket notation. + """Retrieve an item from the cache using the square bracket notation. Args: key: The key of the item to retrieve. """ @abc.abstractmethod - def __setitem__(self, key, value): - """ - Add an item to the cache using the square bracket notation. + def __setitem__(self, key, value) -> None: + """Add an item to the cache using the square bracket notation. Args: key: The key of the item. @@ -95,9 +99,8 @@ def __setitem__(self, key, value): """ @abc.abstractmethod - def __delitem__(self, key): - """ - Remove an item from the cache using the square bracket notation. + def __delitem__(self, key) -> None: + """Remove an item from the cache using the square bracket notation. Args: key: The key of the item to remove. @@ -105,63 +108,58 @@ class AsyncBaseCacheService(Service, Generic[AsyncLockType]): - """ - Abstract base class for a async cache. - """ + """Abstract base class for an async cache.""" name = "cache_service" @abc.abstractmethod - async def get(self, key, lock: Optional[AsyncLockType] = None): - """ - Retrieve an item from the cache. + async def get(self, key, lock: AsyncLockType | None = None): + """Retrieve an item from the cache. Args: key: The key of the item to retrieve. + lock: A lock to use for the operation. Returns: - The value associated with the key, or None if the key is not found. + The value associated with the key, or CACHE_MISS if the key is not found. """ @abc.abstractmethod - async def set(self, key, value, lock: Optional[AsyncLockType] = None): - """ - Add an item to the cache. + async def set(self, key, value, lock: AsyncLockType | None = None): + """Add an item to the cache. Args: key: The key of the item. value: The value to cache. + lock: A lock to use for the operation. """ @abc.abstractmethod - async def upsert(self, key, value, lock: Optional[AsyncLockType] = None): - """ - Add an item to the cache if it doesn't exist, or update it if it does. + async def upsert(self, key, value, lock: AsyncLockType | None = None): + """Add an item to the cache if it doesn't exist, or update it if it does. Args: key: The key of the item. value: The value to cache. + lock: A lock to use for the operation. """ @abc.abstractmethod - async def delete(self, key, lock: Optional[AsyncLockType] = None): - """ - Remove an item from the cache. + async def delete(self, key, lock: AsyncLockType | None = None): + """Remove an item from the cache. Args: key: The key of the item to remove. + lock: A lock to use for the operation. """ @abc.abstractmethod - async def clear(self, lock: Optional[AsyncLockType] = None): - """ - Clear all items from the cache. - """ + async def clear(self, lock: AsyncLockType | None = None): + """Clear all items from the cache.""" @abc.abstractmethod - def __contains__(self, key): - """ - Check if the key is in the cache. + async def contains(self, key) -> bool: + """Check if the key is in the cache. Args: key: The key of the item to check.
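One behavioral point in the revised cache contract above is worth calling out: get() is now documented to return CACHE_MISS (a sentinel defined in langflow.services.cache.utils and imported by the cache implementations later in this diff) rather than None on a miss, so a legitimately cached None value is distinguishable from an absent key. A minimal sketch of the calling pattern this enables, written against the synchronous CacheService interface; the get_or_build helper is hypothetical, not part of the PR:

```python
from langflow.services.cache.utils import CACHE_MISS


def get_or_build(cache, key, build):
    """Return the cached value for key, computing and caching it on a miss."""
    value = cache.get(key)
    if value is CACHE_MISS:  # identity check: None is a valid cached value
        value = build()
        cache.set(key, value)
    return value
```

The identity comparison against the sentinel is the whole point of the design: a plain `if value:` or `if value is None:` test would misclassify cached falsy values as misses.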
diff --git a/src/backend/base/langflow/services/cache/disk.py b/src/backend/base/langflow/services/cache/disk.py index dbbd85f1335c..7b9eff338543 100644 --- a/src/backend/base/langflow/services/cache/disk.py +++ b/src/backend/base/langflow/services/cache/disk.py @@ -1,7 +1,7 @@ import asyncio import pickle import time -from typing import Generic, Optional +from typing import Generic from diskcache import Cache from loguru import logger @@ -10,8 +10,8 @@ from langflow.services.cache.utils import CACHE_MISS -class AsyncDiskCache(AsyncBaseCacheService, Generic[AsyncLockType]): # type: ignore - def __init__(self, cache_dir, max_size=None, expiration_time=3600): +class AsyncDiskCache(AsyncBaseCacheService, Generic[AsyncLockType]): + def __init__(self, cache_dir, max_size=None, expiration_time=3600) -> None: self.cache = Cache(cache_dir) # Let's clear the cache for now to maintain a similar # behavior as the in-memory cache @@ -23,74 +23,73 @@ def __init__(self, cache_dir, max_size=None, expiration_time=3600): self.max_size = max_size self.expiration_time = expiration_time - async def get(self, key, lock: Optional[asyncio.Lock] = None): + async def get(self, key, lock: asyncio.Lock | None = None): if not lock: async with self.lock: - return await self._get(key) + return await asyncio.to_thread(self._get, key) else: - return await self._get(key) + return await asyncio.to_thread(self._get, key) - async def _get(self, key): - item = await asyncio.to_thread(self.cache.get, key, default=None) + def _get(self, key): + item = self.cache.get(key, default=None) if item: if time.time() - item["time"] < self.expiration_time: - await asyncio.to_thread(self.cache.touch, key) # Refresh the expiry time + self.cache.touch(key) # Refresh the expiry time return pickle.loads(item["value"]) if isinstance(item["value"], bytes) else item["value"] - else: - logger.info(f"Cache item for key '{key}' has expired and will be deleted.") - await self._delete(key) # Log before deleting the expired item + logger.info(f"Cache item for key '{key}' has expired and will be deleted.") + self.cache.delete(key) # Delete the expired item (logged above) return CACHE_MISS - async def set(self, key, value, lock: Optional[asyncio.Lock] = None): + async def set(self, key, value, lock: asyncio.Lock | None = None) -> None: if not lock: async with self.lock: await self._set(key, value) else: await self._set(key, value) - async def _set(self, key, value): + async def _set(self, key, value) -> None: if self.max_size and len(self.cache) >= self.max_size: await asyncio.to_thread(self.cache.cull) - item = {"value": pickle.dumps(value) if not isinstance(value, (str, bytes)) else value, "time": time.time()} + item = {"value": pickle.dumps(value) if not isinstance(value, str | bytes) else value, "time": time.time()} await asyncio.to_thread(self.cache.set, key, item) - async def delete(self, key, lock: Optional[asyncio.Lock] = None): + async def delete(self, key, lock: asyncio.Lock | None = None) -> None: if not lock: async with self.lock: await self._delete(key) else: await self._delete(key) - async def _delete(self, key): + async def _delete(self, key) -> None: await asyncio.to_thread(self.cache.delete, key) - async def clear(self, lock: Optional[asyncio.Lock] = None): + async def clear(self, lock: asyncio.Lock | None = None) -> None: if not lock: async with self.lock: await self._clear() else: await self._clear() - async def _clear(self): + async def _clear(self) -> None: await asyncio.to_thread(self.cache.clear) - async def upsert(self, key, value, 
lock: Optional[asyncio.Lock] = None): + async def upsert(self, key, value, lock: asyncio.Lock | None = None) -> None: if not lock: async with self.lock: await self._upsert(key, value) else: await self._upsert(key, value) - async def _upsert(self, key, value): - existing_value = await self.get(key) + async def _upsert(self, key, value) -> None: + existing_value = await asyncio.to_thread(self._get, key) if existing_value is not CACHE_MISS and isinstance(existing_value, dict) and isinstance(value, dict): existing_value.update(value) value = existing_value await self.set(key, value) - def __contains__(self, key): - return asyncio.run(asyncio.to_thread(self.cache.__contains__, key)) + async def contains(self, key) -> bool: + return await asyncio.to_thread(self.cache.__contains__, key) - async def teardown(self): + async def teardown(self) -> None: # Clean up the cache directory self.cache.clear(retry=True) diff --git a/src/backend/base/langflow/services/cache/factory.py b/src/backend/base/langflow/services/cache/factory.py index 32bb94f872ff..3def8ebc1757 100644 --- a/src/backend/base/langflow/services/cache/factory.py +++ b/src/backend/base/langflow/services/cache/factory.py @@ -1,19 +1,21 @@ +from __future__ import annotations + from typing import TYPE_CHECKING +from langflow.logging.logger import logger from langflow.services.cache.disk import AsyncDiskCache from langflow.services.cache.service import AsyncInMemoryCache, CacheService, RedisCache, ThreadingInMemoryCache from langflow.services.factory import ServiceFactory -from langflow.logging.logger import logger if TYPE_CHECKING: from langflow.services.settings.service import SettingsService class CacheServiceFactory(ServiceFactory): - def __init__(self): + def __init__(self) -> None: super().__init__(CacheService) - def create(self, settings_service: "SettingsService"): + def create(self, settings_service: SettingsService): # Here you would have logic to create and configure a CacheService # based on the settings_service @@ -29,16 +31,17 @@ def create(self, settings_service: "SettingsService"): if redis_cache.is_connected(): logger.debug("Redis cache is connected") return redis_cache - else: - # do not attempt to fallback to another cache type - raise ConnectionError("Failed to connect to Redis cache") + # do not attempt to fallback to another cache type + msg = "Failed to connect to Redis cache" + raise ConnectionError(msg) - elif settings_service.settings.cache_type == "memory": + if settings_service.settings.cache_type == "memory": return ThreadingInMemoryCache(expiration_time=settings_service.settings.cache_expire) - elif settings_service.settings.cache_type == "async": + if settings_service.settings.cache_type == "async": return AsyncInMemoryCache(expiration_time=settings_service.settings.cache_expire) - elif settings_service.settings.cache_type == "disk": + if settings_service.settings.cache_type == "disk": return AsyncDiskCache( cache_dir=settings_service.settings.config_dir, expiration_time=settings_service.settings.cache_expire, ) + return None diff --git a/src/backend/base/langflow/services/cache/service.py b/src/backend/base/langflow/services/cache/service.py index 021c33f90281..f917be6a57a0 100644 --- a/src/backend/base/langflow/services/cache/service.py +++ b/src/backend/base/langflow/services/cache/service.py @@ -3,17 +3,17 @@ import threading import time from collections import OrderedDict -from typing import Generic, Optional +from typing import Generic, Union from loguru import logger +from typing_extensions import 
override from langflow.services.cache.base import AsyncBaseCacheService, AsyncLockType, CacheService, LockType from langflow.services.cache.utils import CACHE_MISS -class ThreadingInMemoryCache(CacheService, Generic[LockType]): # type: ignore - """ - A simple in-memory cache using an OrderedDict. +class ThreadingInMemoryCache(CacheService, Generic[LockType]): + """A simple in-memory cache using an OrderedDict. This cache supports setting a maximum size and expiration time for cached items. When the cache is full, it uses a Least Recently Used (LRU) eviction policy. @@ -24,7 +24,6 @@ class ThreadingInMemoryCache(CacheService, Generic[LockType]): # type: ignore expiration_time (int, optional): Time in seconds after which a cached item expires. Default is 1 hour. Example: - cache = InMemoryCache(max_size=3, expiration_time=5) # setting cache values @@ -37,59 +36,51 @@ class ThreadingInMemoryCache(CacheService, Generic[LockType]): # type: ignore b = cache["b"] """ - def __init__(self, max_size=None, expiration_time=60 * 60): - """ - Initialize a new InMemoryCache instance. + def __init__(self, max_size=None, expiration_time=60 * 60) -> None: + """Initialize a new InMemoryCache instance. Args: max_size (int, optional): Maximum number of items to store in the cache. expiration_time (int, optional): Time in seconds after which a cached item expires. Default is 1 hour. """ - self._cache = OrderedDict() + self._cache: OrderedDict = OrderedDict() self._lock = threading.RLock() self.max_size = max_size self.expiration_time = expiration_time - def get(self, key, lock: Optional[threading.Lock] = None): - """ - Retrieve an item from the cache. + def get(self, key, lock: Union[threading.Lock, None] = None): # noqa: UP007 + """Retrieve an item from the cache. Args: key: The key of the item to retrieve. + lock: A lock to use for the operation. Returns: - The value associated with the key, or None if the key is not found or the item has expired. + The value associated with the key, or CACHE_MISS if the key is not found or the item has expired. """ with lock or self._lock: return self._get_without_lock(key) def _get_without_lock(self, key): - """ - Retrieve an item from the cache without acquiring the lock. - """ + """Retrieve an item from the cache without acquiring the lock.""" if item := self._cache.get(key): if self.expiration_time is None or time.time() - item["time"] < self.expiration_time: # Move the key to the end to make it recently used self._cache.move_to_end(key) # Check if the value is pickled - if isinstance(item["value"], bytes): - value = pickle.loads(item["value"]) - else: - value = item["value"] - return value - else: - self.delete(key) - return None + return pickle.loads(item["value"]) if isinstance(item["value"], bytes) else item["value"] + self.delete(key) + return CACHE_MISS - def set(self, key, value, lock: Optional[threading.Lock] = None): - """ - Add an item to the cache. + def set(self, key, value, lock: Union[threading.Lock, None] = None) -> None: # noqa: UP007 + """Add an item to the cache. If the cache is full, the least recently used item is evicted. Args: key: The key of the item. value: The value to cache. + lock: A lock to use for the operation. """ with lock or self._lock: if key in self._cache: @@ -102,31 +93,33 @@ def set(self, key, value, lock: Optional[threading.Lock] = None): self._cache[key] = {"value": value, "time": time.time()} - def upsert(self, key, value, lock: Optional[threading.Lock] = None): - """ - Inserts or updates a value in the cache. 
+ def upsert(self, key, value, lock: Union[threading.Lock, None] = None) -> None: # noqa: UP007 + """Inserts or updates a value in the cache. + If the existing value and the new value are both dictionaries, they are merged. Args: key: The key of the item. value: The value to insert or update. + lock: A lock to use for the operation. """ with lock or self._lock: existing_value = self._get_without_lock(key) - if existing_value is not None and isinstance(existing_value, dict) and isinstance(value, dict): + if existing_value is not CACHE_MISS and isinstance(existing_value, dict) and isinstance(value, dict): existing_value.update(value) value = existing_value self.set(key, value) - def get_or_set(self, key, value, lock: Optional[threading.Lock] = None): - """ - Retrieve an item from the cache. If the item does not exist, - set it with the provided value. + def get_or_set(self, key, value, lock: Union[threading.Lock, None] = None): # noqa: UP007 + """Retrieve an item from the cache. + + If the item does not exist, set it with the provided value. Args: key: The key of the item. value: The value to cache if the item doesn't exist. + lock: A lock to use for the operation. Returns: The cached value associated with the key. @@ -137,51 +130,46 @@ def get_or_set(self, key, value, lock: Optional[threading.Lock] = None): self.set(key, value) return value - def delete(self, key, lock: Optional[threading.Lock] = None): - """ - Remove an item from the cache. - - Args: - key: The key of the item to remove. - """ + def delete(self, key, lock: Union[threading.Lock, None] = None) -> None: # noqa: UP007 with lock or self._lock: self._cache.pop(key, None) - def clear(self, lock: Optional[threading.Lock] = None): - """ - Clear all items from the cache. - """ + def clear(self, lock: Union[threading.Lock, None] = None) -> None: # noqa: UP007 + """Clear all items from the cache.""" with lock or self._lock: self._cache.clear() - def __contains__(self, key): + def contains(self, key) -> bool: """Check if the key is in the cache.""" return key in self._cache + def __contains__(self, key) -> bool: + """Check if the key is in the cache.""" + return self.contains(key) + def __getitem__(self, key): """Retrieve an item from the cache using the square bracket notation.""" return self.get(key) - def __setitem__(self, key, value): + def __setitem__(self, key, value) -> None: """Add an item to the cache using the square bracket notation.""" self.set(key, value) - def __delitem__(self, key): + def __delitem__(self, key) -> None: """Remove an item from the cache using the square bracket notation.""" self.delete(key) - def __len__(self): + def __len__(self) -> int: """Return the number of items in the cache.""" return len(self._cache) - def __repr__(self): + def __repr__(self) -> str: """Return a string representation of the InMemoryCache instance.""" return f"InMemoryCache(max_size={self.max_size}, expiration_time={self.expiration_time})" -class RedisCache(AsyncBaseCacheService, Generic[LockType]): # type: ignore - """ - A Redis-based cache implementation. +class RedisCache(AsyncBaseCacheService, Generic[LockType]): + """A Redis-based cache implementation. This cache supports setting an expiration time for cached items. @@ -189,7 +177,6 @@ class RedisCache(AsyncBaseCacheService, Generic[LockType]): # type: ignore expiration_time (int, optional): Time in seconds after which a cached item expires. Default is 1 hour. 
Example: - cache = RedisCache(expiration_time=5) # setting cache values @@ -202,87 +189,76 @@ class RedisCache(AsyncBaseCacheService, Generic[LockType]): # type: ignore b = cache["b"] """ - def __init__(self, host="localhost", port=6379, db=0, url=None, expiration_time=60 * 60): - """ - Initialize a new RedisCache instance. + def __init__(self, host="localhost", port=6379, db=0, url=None, expiration_time=60 * 60) -> None: + """Initialize a new RedisCache instance. Args: host (str, optional): Redis host. port (int, optional): Redis port. db (int, optional): Redis DB. + url (str, optional): Redis URL. expiration_time (int, optional): Time in seconds after which a - ached item expires. Default is 1 hour. + cached item expires. Default is 1 hour. """ try: - import redis + from redis.asyncio import StrictRedis except ImportError as exc: - raise ImportError( + msg = ( "RedisCache requires the redis-py package." " Please install Langflow with the deploy extra: pip install langflow[deploy]" - ) from exc + ) + raise ImportError(msg) from exc logger.warning( "RedisCache is an experimental feature and may not work as expected." " Please report any issues to our GitHub repository." ) if url: - self._client = redis.StrictRedis.from_url(url) + self._client = StrictRedis.from_url(url) else: - self._client = redis.StrictRedis(host=host, port=port, db=db) + self._client = StrictRedis(host=host, port=port, db=db) self.expiration_time = expiration_time # check connection - def is_connected(self): - """ - Check if the Redis client is connected. - """ + def is_connected(self) -> bool: + """Check if the Redis client is connected.""" import redis try: - self._client.ping() - return True - except redis.exceptions.ConnectionError as exc: - logger.error(f"RedisCache could not connect to the Redis server: {exc}") + asyncio.run(self._client.ping()) + except redis.exceptions.ConnectionError: + logger.exception("RedisCache could not connect to the Redis server") return False + return True + @override async def get(self, key, lock=None): - """ - Retrieve an item from the cache. - - Args: - key: The key of the item to retrieve. - - Returns: - The value associated with the key, or None if the key is not found. - """ if key is None: - return None - value = self._client.get(str(key)) - return pickle.loads(value) if value else None - - async def set(self, key, value, lock=None): - """ - Add an item to the cache. + return CACHE_MISS + value = await self._client.get(str(key)) + return pickle.loads(value) if value else CACHE_MISS - Args: - key: The key of the item. - value: The value to cache. - """ + @override + async def set(self, key, value, lock=None) -> None: try: if pickled := pickle.dumps(value): - result = self._client.setex(str(key), self.expiration_time, pickled) + result = await self._client.setex(str(key), self.expiration_time, pickled) if not result: - raise ValueError("RedisCache could not set the value.") + msg = "RedisCache could not set the value." + raise ValueError(msg) except TypeError as exc: - raise TypeError("RedisCache only accepts values that can be pickled. ") from exc + msg = "RedisCache only accepts values that can be pickled. " + raise TypeError(msg) from exc + + @override + async def upsert(self, key, value, lock=None) -> None: + """Inserts or updates a value in the cache. - async def upsert(self, key, value, lock=None): - """ - Inserts or updates a value in the cache. If the existing value and the new value are both dictionaries, they are merged. Args: key: The key of the item. 
value: The value to insert or update. + lock: A lock to use for the operation. """ if key is None: return @@ -293,51 +269,35 @@ async def upsert(self, key, value, lock=None): await self.set(key, value) - async def delete(self, key, lock=None): - """ - Remove an item from the cache. - - Args: - key: The key of the item to remove. - """ - self._client.delete(key) + @override + async def delete(self, key, lock=None) -> None: + await self._client.delete(key) - async def clear(self, lock=None): - """ - Clear all items from the cache. - """ - self._client.flushdb() + @override + async def clear(self, lock=None) -> None: + """Clear all items from the cache.""" + await self._client.flushdb() - def __contains__(self, key): + async def contains(self, key) -> bool: """Check if the key is in the cache.""" - return False if key is None else self._client.exists(str(key)) - - async def __getitem__(self, key): - """Retrieve an item from the cache using the square bracket notation.""" - return self.get(key) - - async def __setitem__(self, key, value): - """Add an item to the cache using the square bracket notation.""" - self.set(key, value) - - async def __delitem__(self, key): - """Remove an item from the cache using the square bracket notation.""" - self.delete(key) + if key is None: + return False + return bool(await self._client.exists(str(key))) - def __repr__(self): + def __repr__(self) -> str: """Return a string representation of the RedisCache instance.""" return f"RedisCache(expiration_time={self.expiration_time})" -class AsyncInMemoryCache(AsyncBaseCacheService, Generic[AsyncLockType]): # type: ignore - def __init__(self, max_size=None, expiration_time=3600): - self.cache = OrderedDict() +class AsyncInMemoryCache(AsyncBaseCacheService, Generic[AsyncLockType]): + def __init__(self, max_size=None, expiration_time=3600) -> None: + self.cache: OrderedDict = OrderedDict() self.lock = asyncio.Lock() self.max_size = max_size self.expiration_time = expiration_time - async def get(self, key, lock: Optional[asyncio.Lock] = None): + async def get(self, key, lock: asyncio.Lock | None = None): if not lock: async with self.lock: return await self._get(key) @@ -350,12 +310,11 @@ async def _get(self, key): if time.time() - item["time"] < self.expiration_time: self.cache.move_to_end(key) return pickle.loads(item["value"]) if isinstance(item["value"], bytes) else item["value"] - else: - logger.info(f"Cache item for key '{key}' has expired and will be deleted.") - await self._delete(key) # Log before deleting the expired item + logger.info(f"Cache item for key '{key}' has expired and will be deleted.") + await self._delete(key) # Log before deleting the expired item return CACHE_MISS - async def set(self, key, value, lock: Optional[asyncio.Lock] = None): + async def set(self, key, value, lock: asyncio.Lock | None = None) -> None: if not lock: async with self.lock: await self._set( @@ -368,46 +327,46 @@ async def set(self, key, value, lock: Optional[asyncio.Lock] = None): value, ) - async def _set(self, key, value): + async def _set(self, key, value) -> None: if self.max_size and len(self.cache) >= self.max_size: self.cache.popitem(last=False) self.cache[key] = {"value": value, "time": time.time()} self.cache.move_to_end(key) - async def delete(self, key, lock: Optional[asyncio.Lock] = None): + async def delete(self, key, lock: asyncio.Lock | None = None) -> None: if not lock: async with self.lock: await self._delete(key) else: await self._delete(key) - async def _delete(self, key): + async def _delete(self, key) -> 
None: if key in self.cache: del self.cache[key] - async def clear(self, lock: Optional[asyncio.Lock] = None): + async def clear(self, lock: asyncio.Lock | None = None) -> None: if not lock: async with self.lock: await self._clear() else: await self._clear() - async def _clear(self): + async def _clear(self) -> None: self.cache.clear() - async def upsert(self, key, value, lock: Optional[asyncio.Lock] = None): + async def upsert(self, key, value, lock: asyncio.Lock | None = None) -> None: if not lock: async with self.lock: await self._upsert(key, value) else: await self._upsert(key, value) - async def _upsert(self, key, value): + async def _upsert(self, key, value) -> None: existing_value = await self.get(key) if existing_value is not None and isinstance(existing_value, dict) and isinstance(value, dict): existing_value.update(value) value = existing_value await self.set(key, value) - def __contains__(self, key): + async def contains(self, key) -> bool: return key in self.cache diff --git a/src/backend/base/langflow/services/cache/utils.py b/src/backend/base/langflow/services/cache/utils.py index c2f3c961124c..5ca3e0ada0e2 100644 --- a/src/backend/base/langflow/services/cache/utils.py +++ b/src/backend/base/langflow/services/cache/utils.py @@ -1,10 +1,9 @@ import base64 import contextlib import hashlib -import os import tempfile from pathlib import Path -from typing import TYPE_CHECKING, Any, Dict +from typing import TYPE_CHECKING, Any from fastapi import UploadFile from platformdirs import user_cache_dir @@ -12,7 +11,7 @@ if TYPE_CHECKING: from langflow.api.v1.schemas import BuildStatus -CACHE: Dict[str, Any] = {} +CACHE: dict[str, Any] = {} CACHE_DIR = user_cache_dir("langflow", "langflow") @@ -20,10 +19,10 @@ class CacheMiss: - def __repr__(self): + def __repr__(self) -> str: return "" - def __bool__(self): + def __bool__(self) -> bool: return False @@ -33,7 +32,7 @@ def wrapper(*args, **kwargs): cache_path = Path(CACHE_DIR) / PREFIX # Create the destination folder if it doesn't exist - os.makedirs(cache_path, exist_ok=True) + cache_path.mkdir(parents=True, exist_ok=True) return func(*args, **kwargs) @@ -41,7 +40,7 @@ def wrapper(*args, **kwargs): @create_cache_folder -def clear_old_cache_files(max_cache_size: int = 3): +def clear_old_cache_files(max_cache_size: int = 3) -> None: cache_dir = Path(tempfile.gettempdir()) / PREFIX cache_files = list(cache_dir.glob("*.dill")) @@ -50,7 +49,7 @@ def clear_old_cache_files(max_cache_size: int = 3): for cache_file in cache_files_sorted_by_mtime[max_cache_size:]: with contextlib.suppress(OSError): - os.remove(cache_file) + cache_file.unlink() def filter_json(json_data): @@ -79,40 +78,40 @@ def filter_json(json_data): @create_cache_folder def save_binary_file(content: str, file_name: str, accepted_types: list[str]) -> str: - """ - Save a binary file to the specified folder. + """Save a binary file to the specified folder. Args: content: The content of the file as a bytes object. file_name: The name of the file, including its extension. + accepted_types: A list of accepted file types. Returns: The path to the saved file. """ if not any(file_name.endswith(suffix) for suffix in accepted_types): - raise ValueError(f"File {file_name} is not accepted") + msg = f"File {file_name} is not accepted" + raise ValueError(msg) # Get the destination folder cache_path = Path(CACHE_DIR) / PREFIX if not content: - raise ValueError("Please, reload the file in the loader.") + msg = "Please, reload the file in the loader." 
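# Illustrative sketch (not from the patch) of the AsyncInMemoryCache surface defined above;
# assumes a running event loop and the CACHE_MISS sentinel from langflow.services.cache.utils.
import asyncio

from langflow.services.cache.service import AsyncInMemoryCache
from langflow.services.cache.utils import CACHE_MISS

async def demo() -> None:
    cache = AsyncInMemoryCache(max_size=128, expiration_time=3600)
    await cache.set("run", {"status": "pending"})
    assert await cache.contains("run")
    assert (await cache.get("run"))["status"] == "pending"
    assert await cache.get("missing") is CACHE_MISS  # sentinel on a miss, never None

asyncio.run(demo())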
+ raise ValueError(msg) data = content.split(",")[1] decoded_bytes = base64.b64decode(data) # Create the full file path - file_path = os.path.join(cache_path, file_name) + file_path = cache_path / file_name # Save the binary content to the file - with open(file_path, "wb") as file: - file.write(decoded_bytes) + file_path.write_bytes(decoded_bytes) - return file_path + return str(file_path) @create_cache_folder def save_uploaded_file(file: UploadFile, folder_name): - """ - Save an uploaded file to the specified folder with a hash of its content as the file name. + """Save an uploaded file to the specified folder with a hash of its content as the file name. Args: file: The uploaded file object. @@ -124,10 +123,7 @@ def save_uploaded_file(file: UploadFile, folder_name): cache_path = Path(CACHE_DIR) folder_path = cache_path / folder_name filename = file.filename - if isinstance(filename, str) or isinstance(filename, Path): - file_extension = Path(filename).suffix - else: - file_extension = "" + file_extension = Path(filename).suffix if isinstance(filename, str | Path) else "" file_object = file.file # Create the folder if it doesn't exist @@ -151,17 +147,19 @@ def save_uploaded_file(file: UploadFile, folder_name): # Save the file with the hash as its name file_path = folder_path / file_name - with open(file_path, "wb") as new_file: + + with file_path.open("wb") as new_file: while chunk := file_object.read(8192): new_file.write(chunk) return file_path -def update_build_status(cache_service, flow_id: str, status: "BuildStatus"): +def update_build_status(cache_service, flow_id: str, status: "BuildStatus") -> None: cached_flow = cache_service[flow_id] if cached_flow is None: - raise ValueError(f"Flow {flow_id} not found in cache") + msg = f"Flow {flow_id} not found in cache" + raise ValueError(msg) cached_flow["status"] = status cache_service[flow_id] = cached_flow cached_flow["status"] = status diff --git a/src/backend/base/langflow/services/chat/cache.py b/src/backend/base/langflow/services/chat/cache.py index 959ea877c6a0..8943015e7bf8 100644 --- a/src/backend/base/langflow/services/chat/cache.py +++ b/src/backend/base/langflow/services/chat/cache.py @@ -1,5 +1,6 @@ +from collections.abc import Awaitable, Callable from contextlib import contextmanager -from typing import Any, Awaitable, Callable, List, Optional +from typing import Any import pandas as pd from PIL import Image @@ -10,18 +11,18 @@ class Subject: """Base class for implementing the observer pattern.""" - def __init__(self): - self.observers: List[Callable[[], None]] = [] + def __init__(self) -> None: + self.observers: list[Callable[[], None]] = [] - def attach(self, observer: Callable[[], None]): + def attach(self, observer: Callable[[], None]) -> None: """Attach an observer to the subject.""" self.observers.append(observer) - def detach(self, observer: Callable[[], None]): + def detach(self, observer: Callable[[], None]) -> None: """Detach an observer from the subject.""" self.observers.remove(observer) - def notify(self): + def notify(self) -> None: """Notify all observers about an event.""" for observer in self.observers: if observer is None: @@ -32,18 +33,18 @@ def notify(self): class AsyncSubject: """Base class for implementing the async observer pattern.""" - def __init__(self): - self.observers: List[Callable[[], Awaitable]] = [] + def __init__(self) -> None: + self.observers: list[Callable[[], Awaitable]] = [] - def attach(self, observer: Callable[[], Awaitable]): + def attach(self, observer: Callable[[], Awaitable]) -> 
None: """Attach an observer to the subject.""" self.observers.append(observer) - def detach(self, observer: Callable[[], Awaitable]): + def detach(self, observer: Callable[[], Awaitable]) -> None: """Detach an observer from the subject.""" self.observers.remove(observer) - async def notify(self): + async def notify(self) -> None: """Notify all observers about an event.""" for observer in self.observers: if observer is None: @@ -56,16 +57,15 @@ class CacheService(Subject, Service): name = "cache_service" - def __init__(self): + def __init__(self) -> None: super().__init__() - self._cache = {} - self.current_client_id = None - self.current_cache = {} + self._cache: dict[str, Any] = {} + self.current_client_id: str | None = None + self.current_cache: dict[str, Any] = {} @contextmanager def set_client_id(self, client_id: str): - """ - Context manager to set the current client_id and associated cache. + """Context manager to set the current client_id and associated cache. Args: client_id (str): The client identifier. @@ -77,25 +77,22 @@ def set_client_id(self, client_id: str): yield finally: self.current_client_id = previous_client_id - self.current_cache = self._cache.get(self.current_client_id, {}) + self.current_cache = self._cache.setdefault(previous_client_id, {}) if previous_client_id else {} - def add(self, name: str, obj: Any, obj_type: str, extension: Optional[str] = None): - """ - Add an object to the current client's cache. + def add(self, name: str, obj: Any, obj_type: str, extension: str | None = None) -> None: + """Add an object to the current client's cache. Args: name (str): The cache key. obj (Any): The object to cache. obj_type (str): The type of the object. + extension: The file extension of the object. """ object_extensions = { "image": "png", "pandas": "csv", } - if obj_type in object_extensions: - _extension = object_extensions[obj_type] - else: - _extension = type(obj).__name__.lower() + _extension = object_extensions[obj_type] if obj_type in object_extensions else type(obj).__name__.lower() self.current_cache[name] = { "obj": obj, "type": obj_type, @@ -103,35 +100,35 @@ def add(self, name: str, obj: Any, obj_type: str, extension: Optional[str] = Non } self.notify() - def add_pandas(self, name: str, obj: Any): - """ - Add a pandas DataFrame or Series to the current client's cache. + def add_pandas(self, name: str, obj: Any) -> None: + """Add a pandas DataFrame or Series to the current client's cache. Args: name (str): The cache key. obj (Any): The pandas DataFrame or Series object. """ - if isinstance(obj, (pd.DataFrame, pd.Series)): + if isinstance(obj, pd.DataFrame | pd.Series): self.add(name, obj.to_csv(), "pandas", extension="csv") else: - raise ValueError("Object is not a pandas DataFrame or Series") + msg = "Object is not a pandas DataFrame or Series" + raise TypeError(msg) - def add_image(self, name: str, obj: Any, extension: str = "png"): - """ - Add a PIL Image to the current client's cache. + def add_image(self, name: str, obj: Any, extension: str = "png") -> None: + """Add a PIL Image to the current client's cache. Args: name (str): The cache key. obj (Any): The PIL Image object. + extension: The file extension of the image. """ if isinstance(obj, Image.Image): self.add(name, obj, "image", extension=extension) else: - raise ValueError("Object is not a PIL Image") + msg = "Object is not a PIL Image" + raise TypeError(msg) def get(self, name: str): - """ - Get an object from the current client's cache. + """Get an object from the current client's cache. 
Args: name (str): The cache key. @@ -142,8 +139,7 @@ def get(self, name: str): return self.current_cache[name] def get_last(self): - """ - Get the last added item in the current client's cache. + """Get the last added item in the current client's cache. Returns: The last added item in the cache. diff --git a/src/backend/base/langflow/services/chat/factory.py b/src/backend/base/langflow/services/chat/factory.py index 337488e0f444..e554a34dafad 100644 --- a/src/backend/base/langflow/services/chat/factory.py +++ b/src/backend/base/langflow/services/chat/factory.py @@ -3,7 +3,7 @@ class ChatServiceFactory(ServiceFactory): - def __init__(self): + def __init__(self) -> None: super().__init__(ChatService) def create(self): diff --git a/src/backend/base/langflow/services/chat/service.py b/src/backend/base/langflow/services/chat/service.py index abf57e4e3ea8..2f73578eac92 100644 --- a/src/backend/base/langflow/services/chat/service.py +++ b/src/backend/base/langflow/services/chat/service.py @@ -1,78 +1,25 @@ import asyncio from collections import defaultdict from threading import RLock -from typing import Any, Optional +from typing import Any from langflow.services.base import Service -from langflow.services.cache.base import AsyncBaseCacheService +from langflow.services.cache.base import AsyncBaseCacheService, CacheService from langflow.services.deps import get_cache_service class ChatService(Service): - """ - Service class for managing chat-related operations. - """ + """Service class for managing chat-related operations.""" name = "chat_service" - def __init__(self): - self._async_cache_locks = defaultdict(asyncio.Lock) - self._sync_cache_locks = defaultdict(RLock) - self.cache_service = get_cache_service() + def __init__(self) -> None: + self.async_cache_locks: dict[str, asyncio.Lock] = defaultdict(asyncio.Lock) + self._sync_cache_locks: dict[str, RLock] = defaultdict(RLock) + self.cache_service: CacheService | AsyncBaseCacheService = get_cache_service() - def _get_lock(self, key: str): - """ - Retrieves the lock associated with the given key. - - Args: - key (str): The key to retrieve the lock for. - - Returns: - threading.Lock or asyncio.Lock: The lock associated with the given key. - """ - if isinstance(self.cache_service, AsyncBaseCacheService): - return self._async_cache_locks[key] - else: - return self._sync_cache_locks[key] - - async def _perform_cache_operation( - self, operation: str, key: str, data: Any = None, lock: Optional[asyncio.Lock] = None - ): - """ - Perform a cache operation based on the given operation type. - - Args: - operation (str): The type of cache operation to perform. Possible values are "upsert", "get", or "delete". - key (str): The key associated with the cache operation. - data (Any, optional): The data to be stored in the cache. Only applicable for "upsert" operation. Defaults to None. - lock (Optional[asyncio.Lock], optional): The lock to be used for the cache operation. Defaults to None. - - Returns: - Any: The result of the cache operation. Only applicable for "get" operation. 
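# The ChatService methods below replace _perform_cache_operation with a two-branch dispatch:
# async caches are awaited directly, while sync caches are offloaded with asyncio.to_thread
# so a blocking call cannot stall the event loop. A minimal, self-contained sketch of that
# offloading pattern (the function names here are illustrative, not from the patch):
import asyncio
import time

def blocking_get(key: str) -> str:
    time.sleep(0.1)  # stand-in for a blocking cache read
    return f"value-for-{key}"

async def non_blocking_get(key: str) -> str:
    # Runs blocking_get in a worker thread; the event loop stays responsive meanwhile.
    return await asyncio.to_thread(blocking_get, key)

print(asyncio.run(non_blocking_get("flow-123")))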
- - Raises: - None - - """ - lock = lock or self._get_lock(key) - if isinstance(self.cache_service, AsyncBaseCacheService): - if operation == "upsert": - await self.cache_service.upsert(str(key), data, lock=lock) - elif operation == "get": - return await self.cache_service.get(key, lock=lock) - elif operation == "delete": - await self.cache_service.delete(key, lock=lock) - else: - if operation == "upsert": - self.cache_service.upsert(str(key), data, lock=lock) - elif operation == "get": - return self.cache_service.get(key, lock=lock) - elif operation == "delete": - self.cache_service.delete(key, lock=lock) - - async def set_cache(self, key: str, data: Any, lock: Optional[asyncio.Lock] = None) -> bool: - """ - Set the cache for a client. + async def set_cache(self, key: str, data: Any, lock: asyncio.Lock | None = None) -> bool: + """Set the cache for a client. Args: key (str): The cache key. @@ -86,12 +33,16 @@ async def set_cache(self, key: str, data: Any, lock: Optional[asyncio.Lock] = No "result": data, "type": type(data), } - await self._perform_cache_operation("upsert", key, result_dict, lock) + if isinstance(self.cache_service, AsyncBaseCacheService): + await self.cache_service.upsert(str(key), result_dict, lock=lock or self.async_cache_locks[key]) + return await self.cache_service.contains(key) + await asyncio.to_thread( + self.cache_service.upsert, str(key), result_dict, lock=lock or self._sync_cache_locks[key] + ) return key in self.cache_service - async def get_cache(self, key: str, lock: Optional[asyncio.Lock] = None) -> Any: - """ - Get the cache for a client. + async def get_cache(self, key: str, lock: asyncio.Lock | None = None) -> Any: + """Get the cache for a client. Args: key (str): The cache key. @@ -100,14 +51,17 @@ async def get_cache(self, key: str, lock: Optional[asyncio.Lock] = None) -> Any: Returns: Any: The cached data. """ - return await self._perform_cache_operation("get", key, lock=lock or self._get_lock(key)) + if isinstance(self.cache_service, AsyncBaseCacheService): + return await self.cache_service.get(key, lock=lock or self.async_cache_locks[key]) + return await asyncio.to_thread(self.cache_service.get, key, lock=lock or self._sync_cache_locks[key]) - async def clear_cache(self, key: str, lock: Optional[asyncio.Lock] = None): - """ - Clear the cache for a client. + async def clear_cache(self, key: str, lock: asyncio.Lock | None = None) -> None: + """Clear the cache for a client. Args: key (str): The cache key. lock (Optional[asyncio.Lock], optional): The lock to use for the cache operation. Defaults to None. 
""" - await self._perform_cache_operation("delete", key, lock=lock or self._get_lock(key)) + if isinstance(self.cache_service, AsyncBaseCacheService): + return await self.cache_service.delete(key, lock=lock or self.async_cache_locks[key]) + return await asyncio.to_thread(self.cache_service.delete, key, lock=lock or self._sync_cache_locks[key]) diff --git a/src/backend/base/langflow/services/database/factory.py b/src/backend/base/langflow/services/database/factory.py index f9c269f12caf..8e469369ee0d 100644 --- a/src/backend/base/langflow/services/database/factory.py +++ b/src/backend/base/langflow/services/database/factory.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import TYPE_CHECKING from langflow.services.database.service import DatabaseService @@ -8,11 +10,12 @@ class DatabaseServiceFactory(ServiceFactory): - def __init__(self): + def __init__(self) -> None: super().__init__(DatabaseService) - def create(self, settings_service: "SettingsService"): + def create(self, settings_service: SettingsService): # Here you would have logic to create and configure a DatabaseService if not settings_service.settings.database_url: - raise ValueError("No database URL provided") + msg = "No database URL provided" + raise ValueError(msg) return DatabaseService(settings_service) diff --git a/src/backend/base/langflow/services/database/models/__init__.py b/src/backend/base/langflow/services/database/models/__init__.py index 60df9c648bc8..4419e7f1109e 100644 --- a/src/backend/base/langflow/services/database/models/__init__.py +++ b/src/backend/base/langflow/services/database/models/__init__.py @@ -2,8 +2,8 @@ from .flow import Flow from .folder import Folder from .message import MessageTable +from .transactions import TransactionTable from .user import User from .variable import Variable -from .transactions import TransactionTable -__all__ = ["Flow", "User", "ApiKey", "Variable", "Folder", "MessageTable", "TransactionTable"] +__all__ = ["ApiKey", "Flow", "Folder", "MessageTable", "TransactionTable", "User", "Variable"] diff --git a/src/backend/base/langflow/services/database/models/api_key/__init__.py b/src/backend/base/langflow/services/database/models/api_key/__init__.py index 001b0327e5f2..4dbd35026c87 100644 --- a/src/backend/base/langflow/services/database/models/api_key/__init__.py +++ b/src/backend/base/langflow/services/database/models/api_key/__init__.py @@ -1,3 +1,3 @@ -from .model import ApiKey, ApiKeyCreate, UnmaskedApiKeyRead, ApiKeyRead +from .model import ApiKey, ApiKeyCreate, ApiKeyRead, UnmaskedApiKeyRead -__all__ = ["ApiKey", "ApiKeyCreate", "UnmaskedApiKeyRead", "ApiKeyRead"] +__all__ = ["ApiKey", "ApiKeyCreate", "ApiKeyRead", "UnmaskedApiKeyRead"] diff --git a/src/backend/base/langflow/services/database/models/api_key/crud.py b/src/backend/base/langflow/services/database/models/api_key/crud.py index 37fe08cd15fc..faa210deaf66 100644 --- a/src/backend/base/langflow/services/database/models/api_key/crud.py +++ b/src/backend/base/langflow/services/database/models/api_key/crud.py @@ -1,22 +1,25 @@ import datetime import secrets import threading -from typing import List, Optional +from typing import TYPE_CHECKING from uuid import UUID from sqlmodel import Session, select -from sqlmodel.sql.expression import SelectOfScalar +from sqlmodel.ext.asyncio.session import AsyncSession from langflow.services.database.models.api_key import ApiKey, ApiKeyCreate, ApiKeyRead, UnmaskedApiKeyRead +if TYPE_CHECKING: + from sqlmodel.sql.expression import SelectOfScalar -def 
get_api_keys(session: Session, user_id: UUID) -> List[ApiKeyRead]: + +async def get_api_keys(session: AsyncSession, user_id: UUID) -> list[ApiKeyRead]: query: SelectOfScalar = select(ApiKey).where(ApiKey.user_id == user_id) - api_keys = session.exec(query).all() + api_keys = (await session.exec(query)).all() return [ApiKeyRead.model_validate(api_key) for api_key in api_keys] -def create_api_key(session: Session, api_key_create: ApiKeyCreate, user_id: UUID) -> UnmaskedApiKeyRead: +async def create_api_key(session: AsyncSession, api_key_create: ApiKeyCreate, user_id: UUID) -> UnmaskedApiKeyRead: # Generate a random API key with 32 bytes of randomness generated_api_key = f"sk-{secrets.token_urlsafe(32)}" @@ -28,25 +31,26 @@ def create_api_key(session: Session, api_key_create: ApiKeyCreate, user_id: UUID ) session.add(api_key) - session.commit() - session.refresh(api_key) + await session.commit() + await session.refresh(api_key) unmasked = UnmaskedApiKeyRead.model_validate(api_key, from_attributes=True) unmasked.api_key = generated_api_key return unmasked -def delete_api_key(session: Session, api_key_id: UUID) -> None: - api_key = session.get(ApiKey, api_key_id) +async def delete_api_key(session: AsyncSession, api_key_id: UUID) -> None: + api_key = await session.get(ApiKey, api_key_id) if api_key is None: - raise ValueError("API Key not found") - session.delete(api_key) - session.commit() + msg = "API Key not found" + raise ValueError(msg) + await session.delete(api_key) + await session.commit() -def check_key(session: Session, api_key: str) -> Optional[ApiKey]: +def check_key(session: Session, api_key: str) -> ApiKey | None: """Check if the API key is valid.""" query: SelectOfScalar = select(ApiKey).where(ApiKey.api_key == api_key) - api_key_object: Optional[ApiKey] = session.exec(query).first() + api_key_object: ApiKey | None = session.exec(query).first() if api_key_object is not None: threading.Thread( target=update_total_uses, @@ -66,7 +70,8 @@ def update_total_uses(session, api_key: ApiKey): with Session(session.get_bind()) as new_session: new_api_key = new_session.get(ApiKey, api_key.id) if new_api_key is None: - raise ValueError("API Key not found") + msg = "API Key not found" + raise ValueError(msg) new_api_key.total_uses += 1 new_api_key.last_used_at = datetime.datetime.now(datetime.timezone.utc) new_session.add(new_api_key) diff --git a/src/backend/base/langflow/services/database/models/api_key/model.py b/src/backend/base/langflow/services/database/models/api_key/model.py index 1a0baaef59f7..82c7d4ec6591 100644 --- a/src/backend/base/langflow/services/database/models/api_key/model.py +++ b/src/backend/base/langflow/services/database/models/api_key/model.py @@ -1,5 +1,5 @@ from datetime import datetime, timezone -from typing import TYPE_CHECKING, Optional +from typing import TYPE_CHECKING from uuid import UUID, uuid4 from pydantic import field_validator @@ -14,15 +14,15 @@ def utc_now(): class ApiKeyBase(SQLModel): - name: Optional[str] = Field(index=True, nullable=True, default=None) - last_used_at: Optional[datetime] = Field(default=None, nullable=True) + name: str | None = Field(index=True, nullable=True, default=None) + last_used_at: datetime | None = Field(default=None, nullable=True) total_uses: int = Field(default=0) is_active: bool = Field(default=True) -class ApiKey(ApiKeyBase, table=True): # type: ignore +class ApiKey(ApiKeyBase, table=True): # type: ignore[call-arg] id: UUID = Field(default_factory=uuid4, primary_key=True, unique=True) - created_at: Optional[datetime] = 
Field( + created_at: datetime | None = Field( default=None, sa_column=Column(DateTime(timezone=True), server_default=func.now(), nullable=False) ) api_key: str = Field(index=True, unique=True) @@ -35,9 +35,9 @@ class ApiKey(ApiKeyBase, table=True): # type: ignore class ApiKeyCreate(ApiKeyBase): - api_key: Optional[str] = None - user_id: Optional[UUID] = None - created_at: Optional[datetime] = Field(default_factory=utc_now) + api_key: str | None = None + user_id: UUID | None = None + created_at: datetime | None = Field(default_factory=utc_now) @field_validator("created_at", mode="before") @classmethod @@ -59,6 +59,6 @@ class ApiKeyRead(ApiKeyBase): @field_validator("api_key") @classmethod - def mask_api_key(cls, v): + def mask_api_key(cls, v) -> str: # This validator will always run, and will mask the API key return f"{v[:8]}{'*' * (len(v) - 8)}" diff --git a/src/backend/base/langflow/services/database/models/flow/model.py b/src/backend/base/langflow/services/database/models/flow/model.py index 22c2d2f904df..11092165d205 100644 --- a/src/backend/base/langflow/services/database/models/flow/model.py +++ b/src/backend/base/langflow/services/database/models/flow/model.py @@ -1,38 +1,42 @@ # Path: src/backend/langflow/services/database/models/flow/model.py import re -import warnings from datetime import datetime, timezone -from typing import TYPE_CHECKING, Dict, List, Optional +from typing import TYPE_CHECKING, Optional from uuid import UUID, uuid4 import emoji -from emoji import purely_emoji # type: ignore +from emoji import purely_emoji from fastapi import HTTPException, status -from pydantic import field_serializer, field_validator -from sqlalchemy import UniqueConstraint +from loguru import logger +from pydantic import BaseModel, field_serializer, field_validator +from sqlalchemy import Text, UniqueConstraint from sqlmodel import JSON, Column, Field, Relationship, SQLModel from langflow.schema import Data -from langflow.services.database.models.vertex_builds.model import VertexBuildTable if TYPE_CHECKING: + from langflow.services.database.models import TransactionTable from langflow.services.database.models.folder import Folder from langflow.services.database.models.message import MessageTable from langflow.services.database.models.user import User - from langflow.services.database.models import TransactionTable + from langflow.services.database.models.vertex_builds.model import VertexBuildTable + +HEX_COLOR_LENGTH = 7 class FlowBase(SQLModel): name: str = Field(index=True) - description: Optional[str] = Field(index=True, nullable=True, default=None) - icon: Optional[str] = Field(default=None, nullable=True) - icon_bg_color: Optional[str] = Field(default=None, nullable=True) - data: Optional[Dict] = Field(default=None, nullable=True) - is_component: Optional[bool] = Field(default=False, nullable=True) - updated_at: Optional[datetime] = Field(default_factory=lambda: datetime.now(timezone.utc), nullable=True) - webhook: Optional[bool] = Field(default=False, nullable=True, description="Can be used on the webhook endpoint") - endpoint_name: Optional[str] = Field(default=None, nullable=True, index=True) + description: str | None = Field(default=None, sa_column=Column(Text, index=True, nullable=True)) + icon: str | None = Field(default=None, nullable=True) + icon_bg_color: str | None = Field(default=None, nullable=True) + gradient: str | None = Field(default=None, nullable=True) + data: dict | None = Field(default=None, nullable=True) + is_component: bool | None = Field(default=False, 
nullable=True) + updated_at: datetime | None = Field(default_factory=lambda: datetime.now(timezone.utc), nullable=True) + webhook: bool | None = Field(default=False, nullable=True, description="Can be used on the webhook endpoint") + endpoint_name: str | None = Field(default=None, nullable=True, index=True) + tags: list[str] | None = None @field_validator("endpoint_name") @classmethod @@ -52,19 +56,24 @@ def validate_endpoint_name(cls, v): return v @field_validator("icon_bg_color") + @classmethod def validate_icon_bg_color(cls, v): if v is not None and not isinstance(v, str): - raise ValueError("Icon background color must be a string") + msg = "Icon background color must be a string" + raise ValueError(msg) # validate that it is a hex color if v and not v.startswith("#"): - raise ValueError("Icon background color must start with #") + msg = "Icon background color must start with #" + raise ValueError(msg) # validate that it is a valid hex color - if v and len(v) != 7: - raise ValueError("Icon background color must be 7 characters long") + if v and len(v) != HEX_COLOR_LENGTH: + msg = "Icon background color must be 7 characters long" + raise ValueError(msg) return v @field_validator("icon") + @classmethod def validate_icon_atr(cls, v): # const emojiRegex = /\p{Emoji}/u; # const isEmoji = emojiRegex.test(data?.node?.icon!); @@ -76,15 +85,15 @@ def validate_icon_atr(cls, v): if not v.startswith(":") and not v.endswith(":"): return v - elif not v.startswith(":") or not v.endswith(":"): + if not v.startswith(":") or not v.endswith(":"): # emoji should have both starting and ending colons # so if one of them is missing, we will raise - raise ValueError(f"Invalid emoji. {v} is not a valid emoji.") + msg = f"Invalid emoji. {v} is not a valid emoji." + raise ValueError(msg) emoji_value = emoji.emojize(v, variant="emoji_type") if v == emoji_value: - warnings.warn(f"Invalid emoji. {v} is not a valid emoji.") - icon = v + logger.warning(f"Invalid emoji. {v} is not a valid emoji.")
icon = emoji_value if purely_emoji(icon): @@ -92,32 +101,39 @@ def validate_icon_atr(cls, v): return icon # otherwise it should be a valid lucide icon if v is not None and not isinstance(v, str): - raise ValueError("Icon must be a string") + msg = "Icon must be a string" + raise ValueError(msg) # it should be lowercase and contain only letters and hyphens if v and not v.islower(): - raise ValueError("Icon must be lowercase") + msg = "Icon must be lowercase" + raise ValueError(msg) if v and not v.replace("-", "").isalpha(): - raise ValueError("Icon must contain only letters and hyphens") + msg = "Icon must contain only letters and hyphens" + raise ValueError(msg) return v @field_validator("data") - def validate_json(v): + @classmethod + def validate_json(cls, v): if not v: return v if not isinstance(v, dict): - raise ValueError("Flow must be a valid JSON") + msg = "Flow must be a valid JSON" + raise ValueError(msg) # noqa: TRY004 # data must contain nodes and edges - if "nodes" not in v.keys(): - raise ValueError("Flow must have nodes") + if "nodes" not in v: + msg = "Flow must have nodes" + raise ValueError(msg) - if "edges" not in v.keys(): - raise ValueError("Flow must have edges") + if "edges" not in v: + msg = "Flow must have edges" + raise ValueError(msg) return v # updated_at can be serialized to JSON @field_serializer("updated_at") - def serialize_datetime(value): + def serialize_datetime(self, value): if isinstance(value, datetime): # I'm getting 2024-05-29T17:57:17.631346 # and I want 2024-05-29T17:57:17-05:00 @@ -128,25 +144,28 @@ def serialize_datetime(value): return value @field_validator("updated_at", mode="before") + @classmethod def validate_dt(cls, v): if v is None: return v - elif isinstance(v, datetime): + if isinstance(v, datetime): return v return datetime.fromisoformat(v) -class Flow(FlowBase, table=True): # type: ignore +class Flow(FlowBase, table=True): # type: ignore[call-arg] id: UUID = Field(default_factory=uuid4, primary_key=True, unique=True) - data: Optional[Dict] = Field(default=None, sa_column=Column(JSON)) - user_id: Optional[UUID] = Field(index=True, foreign_key="user.id", nullable=True) + data: dict | None = Field(default=None, sa_column=Column(JSON)) + user_id: UUID | None = Field(index=True, foreign_key="user.id", nullable=True) user: "User" = Relationship(back_populates="flows") - folder_id: Optional[UUID] = Field(default=None, foreign_key="folder.id", nullable=True, index=True) + icon: str | None = Field(default=None, nullable=True) + tags: list[str] | None = Field(sa_column=Column(JSON), default=[]) + folder_id: UUID | None = Field(default=None, foreign_key="folder.id", nullable=True, index=True) folder: Optional["Folder"] = Relationship(back_populates="flows") - messages: List["MessageTable"] = Relationship(back_populates="flow") - transactions: List["TransactionTable"] = Relationship(back_populates="flow") - vertex_builds: List["VertexBuildTable"] = Relationship(back_populates="flow") + messages: list["MessageTable"] = Relationship(back_populates="flow") + transactions: list["TransactionTable"] = Relationship(back_populates="flow") + vertex_builds: list["VertexBuildTable"] = Relationship(back_populates="flow") def to_data(self): serialized = self.model_dump() @@ -157,8 +176,7 @@ def to_data(self): "description": serialized.pop("description"), "updated_at": serialized.pop("updated_at"), } - record = Data(data=data) - return record + return Data(data=data) __table_args__ = ( UniqueConstraint("user_id", "name",
name="unique_flow_name"), @@ -167,22 +185,49 @@ def to_data(self): class FlowCreate(FlowBase): - user_id: Optional[UUID] = None - folder_id: Optional[UUID] = None + user_id: UUID | None = None + folder_id: UUID | None = None class FlowRead(FlowBase): id: UUID - user_id: Optional[UUID] = Field() - folder_id: Optional[UUID] = Field() + user_id: UUID | None = Field() + folder_id: UUID | None = Field() + + +class FlowHeader(BaseModel): + """Model representing a header for a flow - Without the data. + + Attributes: + ----------- + id : UUID + Unique identifier for the flow. + name : str + The name of the flow. + folder_id : UUID | None, optional + The ID of the folder containing the flow. None if not associated with a folder. + is_component : bool | None, optional + Flag indicating whether the flow is a component. + endpoint_name : str | None, optional + The name of the endpoint associated with this flow. + description : str | None, optional + A description of the flow. + """ + + id: UUID + name: str + folder_id: UUID | None = None + is_component: bool | None = None + endpoint_name: str | None = None + description: str | None = None class FlowUpdate(SQLModel): - name: Optional[str] = None - description: Optional[str] = None - data: Optional[Dict] = None - folder_id: Optional[UUID] = None - endpoint_name: Optional[str] = None + name: str | None = None + description: str | None = None + data: dict | None = None + folder_id: UUID | None = None + endpoint_name: str | None = None @field_validator("endpoint_name") @classmethod diff --git a/src/backend/base/langflow/services/database/models/flow/schema.py b/src/backend/base/langflow/services/database/models/flow/schema.py new file mode 100644 index 000000000000..aa030caa1448 --- /dev/null +++ b/src/backend/base/langflow/services/database/models/flow/schema.py @@ -0,0 +1,6 @@ +from enum import Enum + + +class Tags(str, Enum): + CHATBOTS = "chatbots" + AGENTS = "agents" diff --git a/src/backend/base/langflow/services/database/models/flow/utils.py b/src/backend/base/langflow/services/database/models/flow/utils.py index 02c654a243be..b2e6fa71dea4 100644 --- a/src/backend/base/langflow/services/database/models/flow/utils.py +++ b/src/backend/base/langflow/services/database/models/flow/utils.py @@ -1,38 +1,25 @@ -from typing import Optional +from typing import Annotated from fastapi import Depends -from langflow.utils.version import get_version_info from sqlmodel import Session -from sqlalchemy import delete from langflow.services.deps import get_session +from langflow.utils.version import get_version_info from .model import Flow -from .. import TransactionTable, MessageTable -from loguru import logger -def get_flow_by_id(session: Session = Depends(get_session), flow_id: Optional[str] = None) -> Flow | None: +def get_flow_by_id(session: Annotated[Session, Depends(get_session)], flow_id: str | None = None) -> Flow | None: """Get flow by id.""" - if flow_id is None: - raise ValueError("Flow id is required.") + msg = "Flow id is required." 
+ raise ValueError(msg) return session.get(Flow, flow_id) -def delete_flow_by_id(flow_id: str, session: Session) -> None: - """Delete flow by id.""" - # Manually delete flow, transactions and messages because foreign key constraints might be disabled - session.exec(delete(Flow).where(Flow.id == flow_id)) # type: ignore - session.exec(delete(TransactionTable).where(TransactionTable.flow_id == flow_id)) # type: ignore - session.exec(delete(MessageTable).where(MessageTable.flow_id == flow_id)) # type: ignore - logger.info(f"Deleted flow {flow_id}") - - def get_webhook_component_in_flow(flow_data: dict): """Get webhook component in flow data.""" - for node in flow_data.get("nodes", []): if "Webhook" in node.get("id"): return node diff --git a/src/backend/base/langflow/services/database/models/folder/model.py b/src/backend/base/langflow/services/database/models/folder/model.py index 73ba6a6e54cc..82a2e6d56528 100644 --- a/src/backend/base/langflow/services/database/models/folder/model.py +++ b/src/backend/base/langflow/services/database/models/folder/model.py @@ -1,33 +1,30 @@ -from typing import TYPE_CHECKING, List, Optional +from typing import Optional from uuid import UUID, uuid4 -from sqlalchemy import UniqueConstraint -from sqlmodel import Field, Relationship, SQLModel +from sqlalchemy import Text, UniqueConstraint +from sqlmodel import Column, Field, Relationship, SQLModel -from langflow.services.database.models.flow.model import FlowRead - -if TYPE_CHECKING: - from langflow.services.database.models.flow.model import Flow - from langflow.services.database.models.user.model import User +from langflow.services.database.models.flow.model import Flow, FlowRead +from langflow.services.database.models.user.model import User class FolderBase(SQLModel): name: str = Field(index=True) - description: Optional[str] = Field(default=None) + description: str | None = Field(default=None, sa_column=Column(Text)) -class Folder(FolderBase, table=True): # type: ignore - id: Optional[UUID] = Field(default_factory=uuid4, primary_key=True) - parent_id: Optional[UUID] = Field(default=None, foreign_key="folder.id") +class Folder(FolderBase, table=True): # type: ignore[call-arg] + id: UUID | None = Field(default_factory=uuid4, primary_key=True) + parent_id: UUID | None = Field(default=None, foreign_key="folder.id") parent: Optional["Folder"] = Relationship( back_populates="children", - sa_relationship_kwargs=dict(remote_side="Folder.id"), + sa_relationship_kwargs={"remote_side": "Folder.id"}, ) - children: List["Folder"] = Relationship(back_populates="parent") - user_id: Optional[UUID] = Field(default=None, foreign_key="user.id") - user: "User" = Relationship(back_populates="folders") - flows: List["Flow"] = Relationship( + children: list["Folder"] = Relationship(back_populates="parent") + user_id: UUID | None = Field(default=None, foreign_key="user.id") + user: User = Relationship(back_populates="folders") + flows: list[Flow] = Relationship( back_populates="folder", sa_relationship_kwargs={"cascade": "all, delete, delete-orphan"} ) @@ -35,24 +32,24 @@ class Folder(FolderBase, table=True): # type: ignore class FolderCreate(FolderBase): - components_list: Optional[List[UUID]] = None - flows_list: Optional[List[UUID]] = None + components_list: list[UUID] | None = None + flows_list: list[UUID] | None = None class FolderRead(FolderBase): id: UUID - parent_id: Optional[UUID] = Field() + parent_id: UUID | None = Field() class FolderReadWithFlows(FolderBase): id: UUID - parent_id: Optional[UUID] = Field() - flows: 
List["FlowRead"] = Field(default=[]) + parent_id: UUID | None = Field() + flows: list[FlowRead] = Field(default=[]) class FolderUpdate(SQLModel): - name: Optional[str] = None - description: Optional[str] = None - parent_id: Optional[UUID] = None - components: List[UUID] = Field(default_factory=list) - flows: List[UUID] = Field(default_factory=list) + name: str | None = None + description: str | None = None + parent_id: UUID | None = None + components: list[UUID] = Field(default_factory=list) + flows: list[UUID] = Field(default_factory=list) diff --git a/src/backend/base/langflow/services/database/models/folder/pagination_model.py b/src/backend/base/langflow/services/database/models/folder/pagination_model.py new file mode 100644 index 000000000000..46dcdc687837 --- /dev/null +++ b/src/backend/base/langflow/services/database/models/folder/pagination_model.py @@ -0,0 +1,10 @@ +from fastapi_pagination import Page + +from langflow.helpers.base_model import BaseModel +from langflow.services.database.models.flow.model import Flow +from langflow.services.database.models.folder.model import FolderRead + + +class FolderWithPaginatedFlows(BaseModel): + folder: FolderRead + flows: Page[Flow] diff --git a/src/backend/base/langflow/services/database/models/folder/utils.py b/src/backend/base/langflow/services/database/models/folder/utils.py index 446efb029397..342920123864 100644 --- a/src/backend/base/langflow/services/database/models/folder/utils.py +++ b/src/backend/base/langflow/services/database/models/folder/utils.py @@ -1,4 +1,3 @@ -from typing import TYPE_CHECKING from uuid import UUID from sqlmodel import Session, and_, select, update @@ -8,22 +7,23 @@ from .constants import DEFAULT_FOLDER_DESCRIPTION, DEFAULT_FOLDER_NAME from .model import Folder -if TYPE_CHECKING: - pass - def create_default_folder_if_it_doesnt_exist(session: Session, user_id: UUID): folder = session.exec(select(Folder).where(Folder.user_id == user_id)).first() if not folder: - folder = Folder(name=DEFAULT_FOLDER_NAME, user_id=user_id, description=DEFAULT_FOLDER_DESCRIPTION) + folder = Folder( + name=DEFAULT_FOLDER_NAME, + user_id=user_id, + description=DEFAULT_FOLDER_DESCRIPTION, + ) session.add(folder) session.commit() session.refresh(folder) session.exec( - update(Flow) # type: ignore + update(Flow) .where( and_( - Flow.folder_id == None, # type: ignore # noqa + Flow.folder_id is None, Flow.user_id == user_id, ) ) @@ -31,3 +31,10 @@ def create_default_folder_if_it_doesnt_exist(session: Session, user_id: UUID): ) session.commit() return folder + + +def get_default_folder_id(session: Session, user_id: UUID): + folder = session.exec(select(Folder).where(Folder.name == DEFAULT_FOLDER_NAME, Folder.user_id == user_id)).first() + if not folder: + folder = create_default_folder_if_it_doesnt_exist(session, user_id) + return folder.id diff --git a/src/backend/base/langflow/services/database/models/message/__init__.py b/src/backend/base/langflow/services/database/models/message/__init__.py index 8cfb2ff4f584..90d0a37c8949 100644 --- a/src/backend/base/langflow/services/database/models/message/__init__.py +++ b/src/backend/base/langflow/services/database/models/message/__init__.py @@ -1,3 +1,3 @@ -from .model import MessageTable, MessageCreate, MessageRead, MessageUpdate +from .model import MessageCreate, MessageRead, MessageTable, MessageUpdate -__all__ = ["MessageTable", "MessageCreate", "MessageRead", "MessageUpdate"] +__all__ = ["MessageCreate", "MessageRead", "MessageTable", "MessageUpdate"] diff --git 
a/src/backend/base/langflow/services/database/models/message/crud.py b/src/backend/base/langflow/services/database/models/message/crud.py new file mode 100644 index 000000000000..840d015ba528 --- /dev/null +++ b/src/backend/base/langflow/services/database/models/message/crud.py @@ -0,0 +1,20 @@ +from uuid import UUID + +from langflow.services.database.models.message.model import MessageTable, MessageUpdate +from langflow.services.deps import session_scope + + +def update_message(message_id: UUID | str, message: MessageUpdate | dict): + if not isinstance(message, MessageUpdate): + message = MessageUpdate(**message) + with session_scope() as session: + db_message = session.get(MessageTable, message_id) + if not db_message: + msg = "Message not found" + raise ValueError(msg) + message_dict = message.model_dump(exclude_unset=True, exclude_none=True) + db_message.sqlmodel_update(message_dict) + session.add(db_message) + session.commit() + session.refresh(db_message) + return db_message diff --git a/src/backend/base/langflow/services/database/models/message/model.py b/src/backend/base/langflow/services/database/models/message/model.py index 0f2f675beefc..6c3008c14897 100644 --- a/src/backend/base/langflow/services/database/models/message/model.py +++ b/src/backend/base/langflow/services/database/models/message/model.py @@ -1,10 +1,15 @@ +import json from datetime import datetime, timezone -from typing import TYPE_CHECKING, List, Optional +from typing import TYPE_CHECKING from uuid import UUID, uuid4 -from pydantic import field_validator +from pydantic import field_serializer, field_validator +from sqlalchemy import Text from sqlmodel import JSON, Column, Field, Relationship, SQLModel +from langflow.schema.content_block import ContentBlock +from langflow.schema.properties import Properties + if TYPE_CHECKING: from langflow.schema.message import Message from langflow.services.database.models.flow.model import Flow @@ -15,8 +20,14 @@ class MessageBase(SQLModel): sender: str sender_name: str session_id: str - text: str + text: str = Field(sa_column=Column(Text)) files: list[str] = Field(default_factory=list) + error: bool = Field(default=False) + edit: bool = Field(default=False) + + properties: Properties = Field(default_factory=Properties) + category: str = Field(default="message") + content_blocks: list[ContentBlock] = Field(default_factory=list) @field_validator("files", mode="before") @classmethod @@ -29,30 +40,65 @@ def validate_files(cls, value): def from_message(cls, message: "Message", flow_id: str | UUID | None = None): # first check if the record has all the required fields if message.text is None or not message.sender or not message.sender_name: - raise ValueError("The message does not have the required fields (text, sender, sender_name).") + msg = "The message does not have the required fields (text, sender, sender_name)." 
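# Usage sketch (not from the patch) for the new update_message helper above. Assumes a
# configured database and the id of an existing message row; the UUID below is a placeholder.
from langflow.services.database.models.message.crud import update_message
from langflow.services.database.models.message.model import MessageUpdate

existing_id = "00000000-0000-0000-0000-000000000000"  # placeholder for a real message id
updated = update_message(existing_id, MessageUpdate(text="edited text", edit=True))
# A plain dict is also accepted and coerced via MessageUpdate(**message); unset and None
# fields are dropped by model_dump(exclude_unset=True, exclude_none=True) before the update.
updated = update_message(existing_id, {"text": "edited text", "edit": True})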
+ raise ValueError(msg) + if message.files: + image_paths = [] + for file in message.files: + if hasattr(file, "path") and hasattr(file, "url") and file.path: + session_id = message.session_id + image_paths.append(f"{session_id}{file.path.split(session_id)[1]}") + if image_paths: + message.files = image_paths + if isinstance(message.timestamp, str): - timestamp = datetime.fromisoformat(message.timestamp) + # Convert timestamp string in format "YYYY-MM-DD HH:MM:SS UTC" to datetime + try: + timestamp = datetime.strptime(message.timestamp, "%Y-%m-%d %H:%M:%S %Z").replace(tzinfo=timezone.utc) + except ValueError: + # Fallback for ISO format if the above fails + timestamp = datetime.fromisoformat(message.timestamp).replace(tzinfo=timezone.utc) else: timestamp = message.timestamp if not flow_id and message.flow_id: flow_id = message.flow_id + # If the text is not a string, it means it could be + # async iterator so we simply add it as an empty string + message_text = "" if not isinstance(message.text, str) else message.text + + properties = ( + message.properties.model_dump_json() + if hasattr(message.properties, "model_dump_json") + else message.properties + ) + content_blocks = [] + for content_block in message.content_blocks or []: + content = content_block.model_dump_json() if hasattr(content_block, "model_dump_json") else content_block + content_blocks.append(content) + return cls( sender=message.sender, sender_name=message.sender_name, - text=message.text, + text=message_text, session_id=message.session_id, files=message.files or [], timestamp=timestamp, flow_id=flow_id, + properties=properties, + category=message.category, + content_blocks=content_blocks, ) -class MessageTable(MessageBase, table=True): # type: ignore +class MessageTable(MessageBase, table=True): # type: ignore[call-arg] __tablename__ = "message" id: UUID = Field(default_factory=uuid4, primary_key=True) - flow_id: Optional[UUID] = Field(default=None, foreign_key="flow.id") + flow_id: UUID | None = Field(default=None, foreign_key="flow.id") flow: "Flow" = Relationship(back_populates="messages") - files: List[str] = Field(sa_column=Column(JSON)) + files: list[str] = Field(sa_column=Column(JSON)) + properties: Properties = Field(default_factory=lambda: Properties().model_dump(), sa_column=Column(JSON)) # type: ignore[assignment] + category: str = Field(sa_column=Column(Text)) + content_blocks: list[ContentBlock] = Field(default_factory=list, sa_column=Column(JSON)) # type: ignore[assignment] @field_validator("flow_id", mode="before") @classmethod @@ -63,6 +109,25 @@ def validate_flow_id(cls, value): value = UUID(value) return value + @field_validator("properties", "content_blocks") + @classmethod + def validate_properties_or_content_blocks(cls, value): + if isinstance(value, list): + return [cls.validate_properties_or_content_blocks(item) for item in value] + if hasattr(value, "model_dump"): + return value.model_dump() + if isinstance(value, str): + return json.loads(value) + return value + + @field_serializer("properties", "content_blocks") + def serialize_properties_or_content_blocks(self, value) -> dict | list[dict]: + if isinstance(value, list): + return [self.serialize_properties_or_content_blocks(item) for item in value] + if hasattr(value, "model_dump"): + return value.model_dump() + return value + # Needed for Column(JSON) class Config: arbitrary_types_allowed = True @@ -70,7 +135,7 @@ class Config: class MessageRead(MessageBase): id: UUID - flow_id: Optional[UUID] = Field() + flow_id: UUID | None = Field() class 
MessageCreate(MessageBase): @@ -78,8 +143,10 @@ class MessageCreate(MessageBase): class MessageUpdate(SQLModel): - text: Optional[str] = None - sender: Optional[str] = None - sender_name: Optional[str] = None - session_id: Optional[str] = None - files: Optional[list[str]] = None + text: str | None = None + sender: str | None = None + sender_name: str | None = None + session_id: str | None = None + files: list[str] | None = None + edit: bool | None = None + error: bool | None = None diff --git a/src/backend/base/langflow/services/database/models/transactions/crud.py b/src/backend/base/langflow/services/database/models/transactions/crud.py index d56260c79374..006e6d9bbfbd 100644 --- a/src/backend/base/langflow/services/database/models/transactions/crud.py +++ b/src/backend/base/langflow/services/database/models/transactions/crud.py @@ -1,13 +1,15 @@ -from typing import Optional from uuid import UUID from sqlalchemy.exc import IntegrityError -from sqlmodel import Session, select, col +from sqlmodel import Session, col, select +from sqlmodel.ext.asyncio.session import AsyncSession from langflow.services.database.models.transactions.model import TransactionBase, TransactionTable -def get_transactions_by_flow_id(db: Session, flow_id: UUID, limit: Optional[int] = 1000) -> list[TransactionTable]: +async def get_transactions_by_flow_id( + db: AsyncSession, flow_id: UUID, limit: int | None = 1000 +) -> list[TransactionTable]: stmt = ( select(TransactionTable) .where(TransactionTable.flow_id == flow_id) @@ -15,8 +17,8 @@ def get_transactions_by_flow_id(db: Session, flow_id: UUID, limit: Optional[int] .limit(limit) ) - transactions = db.exec(stmt) - return [t for t in transactions] + transactions = await db.exec(stmt) + return list(transactions) def log_transaction(db: Session, transaction: TransactionBase) -> TransactionTable: @@ -24,7 +26,7 @@ def log_transaction(db: Session, transaction: TransactionBase) -> TransactionTab db.add(table) try: db.commit() - return table - except IntegrityError as e: + except IntegrityError: db.rollback() - raise e + raise + return table diff --git a/src/backend/base/langflow/services/database/models/transactions/model.py b/src/backend/base/langflow/services/database/models/transactions/model.py index 2b8978d80dc8..9722849480ba 100644 --- a/src/backend/base/langflow/services/database/models/transactions/model.py +++ b/src/backend/base/langflow/services/database/models/transactions/model.py @@ -1,22 +1,24 @@ from datetime import datetime, timezone -from typing import TYPE_CHECKING, Optional +from typing import TYPE_CHECKING from uuid import UUID, uuid4 -from pydantic import field_validator +from pydantic import field_serializer, field_validator from sqlmodel import JSON, Column, Field, Relationship, SQLModel if TYPE_CHECKING: from langflow.services.database.models.flow.model import Flow +from langflow.utils.util_strings import truncate_long_strings + class TransactionBase(SQLModel): timestamp: datetime = Field(default_factory=lambda: datetime.now(timezone.utc)) vertex_id: str = Field(nullable=False) - target_id: Optional[str] = Field(default=None) - inputs: Optional[dict] = Field(default=None, sa_column=Column(JSON)) - outputs: Optional[dict] = Field(default=None, sa_column=Column(JSON)) + target_id: str | None = Field(default=None) + inputs: dict | None = Field(default=None, sa_column=Column(JSON)) + outputs: dict | None = Field(default=None, sa_column=Column(JSON)) status: str = Field(nullable=False) - error: Optional[str] = Field(default=None) + error: str | None = 
Field(default=None) flow_id: UUID = Field(foreign_key="flow.id") # Needed for Column(JSON) @@ -32,10 +34,14 @@ def validate_flow_id(cls, value): value = UUID(value) return value + @field_serializer("outputs") + def serialize_outputs(self, data) -> dict: + return truncate_long_strings(data) + -class TransactionTable(TransactionBase, table=True): # type: ignore +class TransactionTable(TransactionBase, table=True): # type: ignore[call-arg] __tablename__ = "transaction" - id: Optional[UUID] = Field(default_factory=uuid4, primary_key=True) + id: UUID | None = Field(default_factory=uuid4, primary_key=True) flow: "Flow" = Relationship(back_populates="transactions") diff --git a/src/backend/base/langflow/services/database/models/user/crud.py b/src/backend/base/langflow/services/database/models/user/crud.py index 5a948815eb13..f5b4f74f9d50 100644 --- a/src/backend/base/langflow/services/database/models/user/crud.py +++ b/src/backend/base/langflow/services/database/models/user/crud.py @@ -1,29 +1,28 @@ from datetime import datetime, timezone -from typing import Optional, Union from uuid import UUID -from fastapi import Depends, HTTPException, status +from fastapi import HTTPException, status +from loguru import logger from sqlalchemy.exc import IntegrityError from sqlalchemy.orm.attributes import flag_modified from sqlmodel import Session, select from langflow.services.database.models.user.model import User, UserUpdate -from langflow.services.deps import get_session -def get_user_by_username(db: Session, username: str) -> Union[User, None]: +def get_user_by_username(db: Session, username: str) -> User | None: return db.exec(select(User).where(User.username == username)).first() -def get_user_by_id(db: Session, id: UUID) -> Union[User, None]: - return db.exec(select(User).where(User.id == id)).first() +def get_user_by_id(db: Session, user_id: UUID) -> User | None: + return db.exec(select(User).where(User.id == user_id)).first() -def update_user(user_db: Optional[User], user: UserUpdate, db: Session = Depends(get_session)) -> User: +def update_user(user_db: User | None, user: UserUpdate, db: Session) -> User: if not user_db: raise HTTPException(status_code=404, detail="User not found") - # user_db_by_username = get_user_by_username(db, user.username) # type: ignore + # user_db_by_username = get_user_by_username(db, user.username) # if user_db_by_username and user_db_by_username.id != user_id: # raise HTTPException(status_code=409, detail="Username already exists") @@ -49,10 +48,10 @@ def update_user(user_db: Optional[User], user: UserUpdate, db: Session = Depends return user_db -def update_user_last_login_at(user_id: UUID, db: Session = Depends(get_session)): +def update_user_last_login_at(user_id: UUID, db: Session): try: - user_data = UserUpdate(last_login_at=datetime.now(timezone.utc)) # type: ignore + user_data = UserUpdate(last_login_at=datetime.now(timezone.utc)) user = get_user_by_id(db, user_id) return update_user(user, user_data, db) - except Exception: - pass + except Exception: # noqa: BLE001 + logger.opt(exception=True).debug("Error updating user last login at") diff --git a/src/backend/base/langflow/services/database/models/user/model.py b/src/backend/base/langflow/services/database/models/user/model.py index 0f71cfb4fa23..ea2fea8a7bea 100644 --- a/src/backend/base/langflow/services/database/models/user/model.py +++ b/src/backend/base/langflow/services/database/models/user/model.py @@ -1,31 +1,31 @@ from datetime import datetime, timezone -from typing import TYPE_CHECKING, Optional +from 
typing import TYPE_CHECKING from uuid import UUID, uuid4 from sqlmodel import Field, Relationship, SQLModel if TYPE_CHECKING: from langflow.services.database.models.api_key import ApiKey - from langflow.services.database.models.variable import Variable from langflow.services.database.models.flow import Flow from langflow.services.database.models.folder import Folder + from langflow.services.database.models.variable import Variable -class User(SQLModel, table=True): # type: ignore +class User(SQLModel, table=True): # type: ignore[call-arg] id: UUID = Field(default_factory=uuid4, primary_key=True, unique=True) username: str = Field(index=True, unique=True) password: str = Field() - profile_image: Optional[str] = Field(default=None, nullable=True) + profile_image: str | None = Field(default=None, nullable=True) is_active: bool = Field(default=False) is_superuser: bool = Field(default=False) create_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc)) updated_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc)) - last_login_at: Optional[datetime] = Field(default=None, nullable=True) + last_login_at: datetime | None = Field(default=None, nullable=True) api_keys: list["ApiKey"] = Relationship( back_populates="user", sa_relationship_kwargs={"cascade": "delete"}, ) - store_api_key: Optional[str] = Field(default=None, nullable=True) + store_api_key: str | None = Field(default=None, nullable=True) flows: list["Flow"] = Relationship(back_populates="user") variables: list["Variable"] = Relationship( back_populates="user", @@ -45,18 +45,19 @@ class UserCreate(SQLModel): class UserRead(SQLModel): id: UUID = Field(default_factory=uuid4) username: str = Field() - profile_image: Optional[str] = Field() + profile_image: str | None = Field() + store_api_key: str | None = Field(nullable=True) is_active: bool = Field() is_superuser: bool = Field() create_at: datetime = Field() updated_at: datetime = Field() - last_login_at: Optional[datetime] = Field(nullable=True) + last_login_at: datetime | None = Field(nullable=True) class UserUpdate(SQLModel): - username: Optional[str] = None - profile_image: Optional[str] = None - password: Optional[str] = None - is_active: Optional[bool] = None - is_superuser: Optional[bool] = None - last_login_at: Optional[datetime] = None + username: str | None = None + profile_image: str | None = None + password: str | None = None + is_active: bool | None = None + is_superuser: bool | None = None + last_login_at: datetime | None = None diff --git a/src/backend/base/langflow/services/database/models/variable/model.py b/src/backend/base/langflow/services/database/models/variable/model.py index 376c1c92ee74..77791cec1abf 100644 --- a/src/backend/base/langflow/services/database/models/variable/model.py +++ b/src/backend/base/langflow/services/database/models/variable/model.py @@ -1,9 +1,12 @@ from datetime import datetime, timezone -from typing import TYPE_CHECKING, List, Optional +from typing import TYPE_CHECKING from uuid import UUID, uuid4 +from pydantic import ValidationInfo, field_validator from sqlmodel import JSON, Column, DateTime, Field, Relationship, SQLModel, func +from langflow.services.variable.constants import CREDENTIAL_TYPE + if TYPE_CHECKING: from langflow.services.database.models.user.model import User @@ -15,48 +18,56 @@ def utc_now(): class VariableBase(SQLModel): name: str = Field(description="Name of the variable") value: str = Field(description="Encrypted value of the variable") - default_fields: Optional[List[str]] = 
Field(sa_column=Column(JSON)) - type: Optional[str] = Field(None, description="Type of the variable") + default_fields: list[str] | None = Field(sa_column=Column(JSON)) + type: str | None = Field(None, description="Type of the variable") -class Variable(VariableBase, table=True): # type: ignore - id: Optional[UUID] = Field( +class Variable(VariableBase, table=True): # type: ignore[call-arg] + id: UUID | None = Field( default_factory=uuid4, primary_key=True, description="Unique ID for the variable", ) # name is unique per user - created_at: Optional[datetime] = Field( + created_at: datetime | None = Field( default=None, sa_column=Column(DateTime(timezone=True), server_default=func.now(), nullable=True), description="Creation time of the variable", ) - updated_at: Optional[datetime] = Field( + updated_at: datetime | None = Field( default=None, sa_column=Column(DateTime(timezone=True), nullable=True), description="Last update time of the variable", ) - default_fields: Optional[List[str]] = Field(sa_column=Column(JSON)) + default_fields: list[str] | None = Field(sa_column=Column(JSON)) # foreign key to user table user_id: UUID = Field(description="User ID associated with this variable", foreign_key="user.id") user: "User" = Relationship(back_populates="variables") class VariableCreate(VariableBase): - created_at: Optional[datetime] = Field(default_factory=utc_now, description="Creation time of the variable") + created_at: datetime | None = Field(default_factory=utc_now, description="Creation time of the variable") - updated_at: Optional[datetime] = Field(default_factory=utc_now, description="Creation time of the variable") + updated_at: datetime | None = Field(default_factory=utc_now, description="Last update time of the variable") class VariableRead(SQLModel): id: UUID - name: Optional[str] = Field(None, description="Name of the variable") - type: Optional[str] = Field(None, description="Type of the variable") - default_fields: Optional[List[str]] = Field(None, description="Default fields for the variable") + name: str | None = Field(None, description="Name of the variable") + type: str | None = Field(None, description="Type of the variable") + value: str | None = Field(None, description="Encrypted value of the variable") + default_fields: list[str] | None = Field(None, description="Default fields for the variable") + + @field_validator("value") + @classmethod + def validate_value(cls, value: str, info: ValidationInfo): + if info.data.get("type") == CREDENTIAL_TYPE: + return None + return value class VariableUpdate(SQLModel): id: UUID # Include the ID for updating - name: Optional[str] = Field(None, description="Name of the variable") - value: Optional[str] = Field(None, description="Encrypted value of the variable") - default_fields: Optional[List[str]] = Field(None, description="Default fields for the variable") + name: str | None = Field(None, description="Name of the variable") + value: str | None = Field(None, description="Encrypted value of the variable") + default_fields: list[str] | None = Field(None, description="Default fields for the variable") diff --git a/src/backend/base/langflow/services/database/models/vertex_builds/crud.py b/src/backend/base/langflow/services/database/models/vertex_builds/crud.py index 972b3d45d5f3..286640465401 100644 --- a/src/backend/base/langflow/services/database/models/vertex_builds/crud.py +++ b/src/backend/base/langflow/services/database/models/vertex_builds/crud.py @@ -1,13 +1,15 @@ -from typing import Optional from uuid import UUID from sqlalchemy.exc
import IntegrityError from sqlmodel import Session, col, delete, select +from sqlmodel.ext.asyncio.session import AsyncSession from langflow.services.database.models.vertex_builds.model import VertexBuildBase, VertexBuildTable -def get_vertex_builds_by_flow_id(db: Session, flow_id: UUID, limit: Optional[int] = 1000) -> list[VertexBuildTable]: +async def get_vertex_builds_by_flow_id( + db: AsyncSession, flow_id: UUID, limit: int | None = 1000 +) -> list[VertexBuildTable]: stmt = ( select(VertexBuildTable) .where(VertexBuildTable.flow_id == flow_id) @@ -15,8 +17,8 @@ def get_vertex_builds_by_flow_id(db: Session, flow_id: UUID, limit: Optional[int .limit(limit) ) - builds = db.exec(stmt) - return [t for t in builds] + builds = await db.exec(stmt) + return list(builds) def log_vertex_build(db: Session, vertex_build: VertexBuildBase) -> VertexBuildTable: @@ -24,10 +26,10 @@ def log_vertex_build(db: Session, vertex_build: VertexBuildBase) -> VertexBuildT db.add(table) try: db.commit() - return table - except IntegrityError as e: + except IntegrityError: db.rollback() - raise e + raise + return table def delete_vertex_builds_by_flow_id(db: Session, flow_id: UUID) -> None: diff --git a/src/backend/base/langflow/services/database/models/vertex_builds/model.py b/src/backend/base/langflow/services/database/models/vertex_builds/model.py index e45a659a39f8..6302589c131b 100644 --- a/src/backend/base/langflow/services/database/models/vertex_builds/model.py +++ b/src/backend/base/langflow/services/database/models/vertex_builds/model.py @@ -1,21 +1,23 @@ from datetime import datetime, timezone -from typing import TYPE_CHECKING, Optional +from typing import TYPE_CHECKING from uuid import UUID, uuid4 -from pydantic import field_serializer, field_validator, BaseModel +from pydantic import BaseModel, field_serializer, field_validator +from sqlalchemy import Text from sqlmodel import JSON, Column, Field, Relationship, SQLModel - if TYPE_CHECKING: from langflow.services.database.models.flow.model import Flow +from langflow.utils.util_strings import truncate_long_strings + class VertexBuildBase(SQLModel): timestamp: datetime = Field(default_factory=lambda: datetime.now(timezone.utc)) id: str = Field(nullable=False) - data: Optional[dict] = Field(default=None, sa_column=Column(JSON)) - artifacts: Optional[dict] = Field(default=None, sa_column=Column(JSON)) - params: Optional[str] = Field(nullable=True) + data: dict | None = Field(default=None, sa_column=Column(JSON)) + artifacts: dict | None = Field(default=None, sa_column=Column(JSON)) + params: str | None = Field(default=None, sa_column=Column(Text, nullable=True)) valid: bool = Field(nullable=False) flow_id: UUID = Field(foreign_key="flow.id") @@ -39,10 +41,22 @@ def serialize_timestamp(cls, value): value = value.replace(tzinfo=timezone.utc) return value + @field_serializer("data") + def serialize_data(self, data: dict) -> dict: + return truncate_long_strings(data) + + @field_serializer("artifacts") + def serialize_artifacts(self, data) -> dict: + return truncate_long_strings(data) + + @field_serializer("params") + def serialize_params(self, data) -> str: + return truncate_long_strings(data) + -class VertexBuildTable(VertexBuildBase, table=True): # type: ignore +class VertexBuildTable(VertexBuildBase, table=True): # type: ignore[call-arg] __tablename__ = "vertex_build" - build_id: Optional[UUID] = Field(default_factory=uuid4, primary_key=True) + build_id: UUID | None = Field(default_factory=uuid4, primary_key=True) flow: "Flow" = 
Relationship(back_populates="vertex_builds") diff --git a/src/backend/base/langflow/services/database/service.py b/src/backend/base/langflow/services/database/service.py index 2befcd3c7bda..22d05f2e307d 100644 --- a/src/backend/base/langflow/services/database/service.py +++ b/src/backend/base/langflow/services/database/service.py @@ -1,7 +1,12 @@ +from __future__ import annotations + +import asyncio +import sqlite3 import time -from datetime import datetime +from contextlib import asynccontextmanager, contextmanager +from datetime import datetime, timezone from pathlib import Path -from typing import TYPE_CHECKING, Optional, Type +from typing import TYPE_CHECKING import sqlalchemy as sa from alembic import command, util @@ -10,43 +15,96 @@ from sqlalchemy import event, inspect from sqlalchemy.engine import Engine from sqlalchemy.exc import OperationalError +from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine from sqlmodel import Session, SQLModel, create_engine, select, text +from sqlmodel.ext.asyncio.session import AsyncSession from langflow.services.base import Service -from langflow.services.database import models # noqa +from langflow.services.database import models from langflow.services.database.models.user.crud import get_user_by_username from langflow.services.database.utils import ( Result, TableResults, - migrate_messages_from_monitor_service_to_database, - migrate_transactions_from_monitor_service_to_database, ) from langflow.services.deps import get_settings_service from langflow.services.utils import teardown_superuser if TYPE_CHECKING: - from sqlalchemy.engine import Engine - from langflow.services.settings.service import SettingsService class DatabaseService(Service): name = "database_service" - def __init__(self, settings_service: "SettingsService"): + def __init__(self, settings_service: SettingsService): self.settings_service = settings_service if settings_service.settings.database_url is None: - raise ValueError("No database URL provided") + msg = "No database URL provided" + raise ValueError(msg) self.database_url: str = settings_service.settings.database_url + self._sanitize_database_url() # This file is in langflow.services.database.manager.py # the ini is in langflow langflow_dir = Path(__file__).parent.parent.parent self.script_location = langflow_dir / "alembic" self.alembic_cfg_path = langflow_dir / "alembic.ini" + # register the event listener for sqlite as part of this class. + # Using decorator will make the method not able to use self + event.listen(Engine, "connect", self.on_connection) self.engine = self._create_engine() + self.async_engine = self._create_async_engine() + alembic_log_file = self.settings_service.settings.alembic_log_file + + # Check if the provided path is absolute, cross-platform. + if Path(alembic_log_file).is_absolute(): + # Use the absolute path directly. + self.alembic_log_path = Path(alembic_log_file) + else: + # Construct the path using the langflow directory. + self.alembic_log_path = Path(langflow_dir) / alembic_log_file + + def reload_engine(self) -> None: + self._sanitize_database_url() + self.engine = self._create_engine() + self.async_engine = self._create_async_engine() + + def _sanitize_database_url(self): + if self.database_url.startswith("postgres://"): + self.database_url = self.database_url.replace("postgres://", "postgresql://") + logger.warning( + "Fixed postgres dialect in database URL. Replacing postgres:// with postgresql://. " + "To avoid this warning, update the database URL." 
+ ) - def _create_engine(self) -> "Engine": + def _create_engine(self) -> Engine: """Create the engine for the database.""" + return create_engine( + self.database_url, + connect_args=self._get_connect_args(), + pool_size=self.settings_service.settings.pool_size, + max_overflow=self.settings_service.settings.max_overflow, + ) + + def _create_async_engine(self) -> AsyncEngine: + """Create the async engine for the database.""" + url_components = self.database_url.split("://", maxsplit=1) + if url_components[0].startswith("sqlite"): + database_url = "sqlite+aiosqlite://" + kwargs = {} + else: + kwargs = { + "pool_size": self.settings_service.settings.pool_size, + "max_overflow": self.settings_service.settings.max_overflow, + } + database_url = "postgresql+psycopg://" if url_components[0].startswith("postgresql") else url_components[0] + "://" + database_url += url_components[1] + return create_async_engine( + database_url, + connect_args=self._get_connect_args(), + **kwargs, + ) + + def _get_connect_args(self): if self.settings_service.settings.database_url and self.settings_service.settings.database_url.startswith( "sqlite" ): @@ -56,70 +114,45 @@ def _create_engine(self) -> "Engine": } else: connect_args = {} - try: - # register the event listener for sqlite as part of this class. - # Using decorator will make the method not able to use self - event.listen(Engine, "connect", self.on_connection) - - return create_engine( - self.database_url, - connect_args=connect_args, - pool_size=self.settings_service.settings.pool_size, - max_overflow=self.settings_service.settings.max_overflow, - ) - except sa.exc.NoSuchModuleError as exc: - if "postgres" in str(exc) and not self.database_url.startswith("postgresql"): - # https://stackoverflow.com/questions/62688256/sqlalchemy-exc-nosuchmoduleerror-cant-load-plugin-sqlalchemy-dialectspostgre - self.database_url = self.database_url.replace("postgres://", "postgresql://") - logger.warning( - "Fixed postgres dialect in database URL. Replacing postgres:// with postgresql://. To avoid this warning, update the database URL."
- ) - return self._create_engine() - raise RuntimeError("Error creating database engine") from exc - - def on_connection(self, dbapi_connection, connection_record): - from sqlite3 import Connection as sqliteConnection - - if isinstance(dbapi_connection, sqliteConnection): - pragmas: Optional[dict] = self.settings_service.settings.sqlite_pragmas + return connect_args + + def on_connection(self, dbapi_connection, _connection_record) -> None: + if isinstance( + dbapi_connection, sqlite3.Connection | sa.dialects.sqlite.aiosqlite.AsyncAdapt_aiosqlite_connection + ): + pragmas: dict = self.settings_service.settings.sqlite_pragmas or {} pragmas_list = [] - for key, val in pragmas.items() or {}: + for key, val in pragmas.items(): pragmas_list.append(f"PRAGMA {key} = {val}") - logger.info(f"sqlite connection, setting pragmas: {str(pragmas_list)}") + logger.info(f"sqlite connection, setting pragmas: {pragmas_list}") if pragmas_list: cursor = dbapi_connection.cursor() try: for pragma in pragmas_list: try: cursor.execute(pragma) - except OperationalError as oe: - logger.error(f"Failed to set PRAGMA {pragma}: ", {oe}) + except OperationalError: + logger.exception(f"Failed to set PRAGMA {pragma}") finally: cursor.close() - def __enter__(self): - self._session = Session(self.engine) - return self._session - - def __exit__(self, exc_type, exc_value, traceback): - if exc_type is not None: # If an exception has been raised - logger.error(f"Session rollback because of exception: {exc_type.__name__} {exc_value}") - self._session.rollback() - else: - self._session.commit() - self._session.close() - - def get_session(self): + @contextmanager + def with_session(self): with Session(self.engine) as session: yield session - def migrate_flows_if_auto_login(self): + @asynccontextmanager + async def with_async_session(self): + async with AsyncSession(self.async_engine) as session: + yield session + + def migrate_flows_if_auto_login(self) -> None: # if auto_login is enabled, we need to migrate the flows # to the default superuser if they don't have a user id # associated with them settings_service = get_settings_service() if settings_service.auth_settings.AUTO_LOGIN: - with Session(self.engine) as session: + with self.with_session() as session: flows = session.exec(select(models.Flow).where(models.Flow.user_id is None)).all() if flows: logger.debug("Migrating flows to default superuser") @@ -127,7 +160,8 @@ def migrate_flows_if_auto_login(self): user = get_user_by_username(session, username) if not user: logger.error("Default superuser not found") - raise RuntimeError("Default superuser not found") + msg = "Default superuser not found" + raise RuntimeError(msg) for flow in flows: flow.user_id = user.id session.commit() @@ -136,7 +170,7 @@ def migrate_flows_if_auto_login(self): def check_schema_health(self) -> bool: inspector = inspect(self.engine) - model_mapping: dict[str, Type[SQLModel]] = { + model_mapping: dict[str, type[SQLModel]] = { "flow": models.Flow, "user": models.User, "apikey": models.ApiKey, @@ -166,14 +200,14 @@ def check_schema_health(self) -> bool: return True - def init_alembic(self, alembic_cfg): + def init_alembic(self, alembic_cfg) -> None: logger.info("Initializing alembic") command.ensure_version(alembic_cfg) # alembic_cfg.attributes["connection"].commit() command.upgrade(alembic_cfg, "head") logger.info("Alembic initialized") - def run_migrations(self, fix=False): + def run_migrations(self, *, fix=False) -> None: # First we need to check if alembic has been initialized # If not, we need to 
initialize it # if not self.script_location.exists(): # this is not the correct way to check if alembic has been initialized @@ -183,70 +217,65 @@ def run_migrations(self, fix=False): # which is a buffer # I don't want to output anything # subprocess.DEVNULL is an int - with open(self.script_location / "alembic.log", "w") as buffer: + with self.alembic_log_path.open("w", encoding="utf-8") as buffer: alembic_cfg = Config(stdout=buffer) # alembic_cfg.attributes["connection"] = session alembic_cfg.set_main_option("script_location", str(self.script_location)) alembic_cfg.set_main_option("sqlalchemy.url", self.database_url.replace("%", "%%")) should_initialize_alembic = False - with Session(self.engine) as session: + with self.with_session() as session: # If the table does not exist it throws an error # so we need to catch it try: session.exec(text("SELECT * FROM alembic_version")) - except Exception: - logger.info("Alembic not initialized") + except Exception: # noqa: BLE001 + logger.opt(exception=True).info("Alembic not initialized") should_initialize_alembic = True if should_initialize_alembic: try: self.init_alembic(alembic_cfg) except Exception as exc: - logger.error(f"Error initializing alembic: {exc}") - raise RuntimeError("Error initializing alembic") from exc + msg = "Error initializing alembic" + logger.exception(msg) + raise RuntimeError(msg) from exc else: logger.info("Alembic already initialized") logger.info(f"Running DB migrations in {self.script_location}") try: - buffer.write(f"{datetime.now().isoformat()}: Checking migrations\n") + buffer.write(f"{datetime.now(tz=timezone.utc).astimezone().isoformat()}: Checking migrations\n") command.check(alembic_cfg) - except Exception as exc: - if isinstance(exc, (util.exc.CommandError, util.exc.AutogenerateDiffsDetected)): + except Exception as exc: # noqa: BLE001 + logger.opt(exception=True).debug("Error checking migrations") + if isinstance(exc, util.exc.CommandError | util.exc.AutogenerateDiffsDetected): command.upgrade(alembic_cfg, "head") time.sleep(3) try: - buffer.write(f"{datetime.now().isoformat()}: Checking migrations\n") + buffer.write(f"{datetime.now(tz=timezone.utc).astimezone()}: Checking migrations\n") command.check(alembic_cfg) except util.exc.AutogenerateDiffsDetected as exc: - logger.error(f"AutogenerateDiffsDetected: {exc}") + logger.exception("Error checking migrations") if not fix: - raise RuntimeError(f"There's a mismatch between the models and the database.\n{exc}") - try: - migrate_messages_from_monitor_service_to_database(session) - except Exception as exc: - logger.error(f"Error migrating messages from monitor service to database: {exc}") - try: - migrate_transactions_from_monitor_service_to_database(session) - except Exception as exc: - logger.error(f"Error migrating transactions from monitor service to database: {exc}") + msg = f"There's a mismatch between the models and the database.\n{exc}" + raise RuntimeError(msg) from exc if fix: self.try_downgrade_upgrade_until_success(alembic_cfg) - def try_downgrade_upgrade_until_success(self, alembic_cfg, retries=5): + def try_downgrade_upgrade_until_success(self, alembic_cfg, retries=5) -> None: # Try -1 then head, if it fails, try -2 then head, etc. 
# until we reach the number of retries for i in range(1, retries + 1): try: command.check(alembic_cfg) break - except util.exc.AutogenerateDiffsDetected as exc: + except util.exc.AutogenerateDiffsDetected: # downgrade to base and upgrade again - logger.warning(f"AutogenerateDiffsDetected: {exc}") + logger.opt(exception=True).warning("AutogenerateDiffsDetected") command.downgrade(alembic_cfg, f"-{i}") # wait for the database to be ready time.sleep(3) @@ -272,7 +301,7 @@ def check_table(self, model): available_columns = [col["name"] for col in inspector.get_columns(table_name)] results.append(Result(name=table_name, type="table", success=True)) except sa.exc.NoSuchTableError: - logger.error(f"Missing table: {table_name}") + logger.exception(f"Missing table: {table_name}") results.append(Result(name=table_name, type="table", success=False)) for column in expected_columns: @@ -283,7 +312,7 @@ def check_table(self, model): results.append(Result(name=column, type="column", success=True)) return results - def create_db_and_tables(self): + def create_db_and_tables(self) -> None: from sqlalchemy import inspect inspector = inspect(self.engine) @@ -302,8 +331,9 @@ def create_db_and_tables(self): except OperationalError as oe: logger.warning(f"Table {table} already exists, skipping. Exception: {oe}") except Exception as exc: - logger.error(f"Error creating table {table}: {exc}") - raise RuntimeError(f"Error creating table {table}") from exc + msg = f"Error creating table {table}" + logger.exception(msg) + raise RuntimeError(msg) from exc # Now check if the required tables exist, if not, something went wrong. inspector = inspect(self.engine) @@ -312,21 +342,25 @@ def create_db_and_tables(self): if table not in table_names: logger.error("Something went wrong creating the database and tables.") logger.error("Please check your database settings.") - raise RuntimeError("Something went wrong creating the database and tables.") + msg = "Something went wrong creating the database and tables." 
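Aside: `check_table` and `create_db_and_tables` above lean on SQLAlchemy's runtime inspector to compare the live schema against the SQLModel metadata. A self-contained sketch of that pattern, using an illustrative model and an in-memory SQLite database (names are not from the patch):

```python
# Compare a model's declared columns against what the database actually has.
import sqlalchemy as sa
from sqlmodel import Field, SQLModel, create_engine


class Demo(SQLModel, table=True):
    id: int | None = Field(default=None, primary_key=True)
    name: str


engine = create_engine("sqlite://")  # in-memory database for the sketch
SQLModel.metadata.create_all(engine)

inspector = sa.inspect(engine)
expected = [column.name for column in Demo.__table__.columns]
available = [column["name"] for column in inspector.get_columns("demo")]
missing = [name for name in expected if name not in available]
print(missing)  # [] when the schema matches the model
```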
+ raise RuntimeError(msg) logger.debug("Database and tables created successfully") - async def teardown(self): + def _teardown(self) -> None: logger.debug("Tearing down database") try: settings_service = get_settings_service() # remove the default superuser if auto_login is enabled # using the SUPERUSER to get the user - with Session(self.engine) as session: + with self.with_session() as session: teardown_superuser(settings_service, session) - except Exception as exc: - logger.error(f"Error tearing down database: {exc}") + except Exception: # noqa: BLE001 + logger.exception("Error tearing down database") self.engine.dispose() - self.engine.dispose() + + async def teardown(self) -> None: + await asyncio.to_thread(self._teardown) + await self.async_engine.dispose() diff --git a/src/backend/base/langflow/services/database/utils.py b/src/backend/base/langflow/services/database/utils.py index e35f70f3bcff..7337c5778d86 100644 --- a/src/backend/base/langflow/services/database/utils.py +++ b/src/backend/base/langflow/services/database/utils.py @@ -1,109 +1,37 @@ -import json +from __future__ import annotations + from contextlib import contextmanager from dataclasses import dataclass from typing import TYPE_CHECKING from alembic.util.exc import CommandError from loguru import logger -from sqlmodel import Session, select, text - -from langflow.services.database.models import TransactionTable -from langflow.services.deps import get_monitor_service +from sqlmodel import Session, text if TYPE_CHECKING: from langflow.services.database.service import DatabaseService -from typing import Dict, List - - -def migrate_messages_from_monitor_service_to_database(session: Session) -> bool: - from langflow.schema.message import Message - from langflow.services.database.models.message import MessageTable - - try: - monitor_service = get_monitor_service() - messages_df = monitor_service.get_messages() - except Exception as e: - if "Table with name messages does not exist" in str(e): - logger.debug(f"Error retrieving messages from monitor service: {e}") - else: - logger.warning(f"Error retrieving messages from monitor service: {e}") - return False - - if messages_df.empty: - logger.info("No messages to migrate.") - return True - - original_messages: List[Dict] = messages_df.to_dict(orient="records") - - db_messages = session.exec(select(MessageTable)).all() - db_messages = [msg[0] for msg in db_messages] # type: ignore - db_msg_dict = {(msg.text, msg.timestamp.isoformat(), str(msg.flow_id), msg.session_id): msg for msg in db_messages} - # Filter out messages that already exist in the database - original_messages_filtered = [] - for message in original_messages: - key = (message["text"], message["timestamp"].isoformat(), str(message["flow_id"]), message["session_id"]) - if key not in db_msg_dict: - original_messages_filtered.append(message) - if not original_messages_filtered: - logger.info("No messages to migrate.") - return True - try: - # Bulk insert messages - session.bulk_insert_mappings( - MessageTable, # type: ignore - [MessageTable.from_message(Message(**msg)).model_dump() for msg in original_messages_filtered], - ) - session.commit() - except Exception as e: - logger.error(f"Error during message insertion: {str(e)}") - session.rollback() - return False - - # Create a dictionary for faster lookup - - all_ok = True - for orig_msg in original_messages_filtered: - key = (orig_msg["text"], orig_msg["timestamp"].isoformat(), str(orig_msg["flow_id"]), orig_msg["session_id"]) - matching_db_msg = db_msg_dict.get(key) 
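Aside: the reworked teardown above splits the blocking cleanup into a synchronous `_teardown` and runs it off the event loop with `asyncio.to_thread` before disposing the async engine. A stripped-down sketch of that pattern (the class is a stand-in, not the real service):

```python
import asyncio


class FakeDatabaseService:
    def _teardown(self) -> None:
        # Blocking work (sync sessions, sync engine disposal) belongs here,
        # where it cannot stall the event loop.
        print("sync teardown done")

    async def teardown(self) -> None:
        await asyncio.to_thread(self._teardown)  # runs in a worker thread
        # The async engine's dispose() is itself awaitable, so it stays here.
        print("async engine disposed")


asyncio.run(FakeDatabaseService().teardown())
```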
- - if matching_db_msg is None: - logger.warning(f"Message not found in database: {orig_msg}") - all_ok = False - else: - # Validate other fields - if any(getattr(matching_db_msg, k) != v for k, v in orig_msg.items() if k != "index"): - logger.warning(f"Message mismatch in database: {orig_msg}") - all_ok = False - - if all_ok: - messages_ids = [message["index"] for message in original_messages] - monitor_service.delete_messages(messages_ids) - logger.info("Migration completed successfully. Original messages deleted.") - else: - logger.warning("Migration completed with errors. Original messages not deleted.") - - return all_ok - -def initialize_database(fix_migration: bool = False): +def initialize_database(*, fix_migration: bool = False) -> None: logger.debug("Initializing database") from langflow.services.deps import get_db_service - database_service: "DatabaseService" = get_db_service() + database_service: DatabaseService = get_db_service() try: database_service.create_db_and_tables() except Exception as exc: # if the exception involves tables already existing # we can ignore it if "already exists" not in str(exc): - logger.error(f"Error creating DB and tables: {exc}") - raise RuntimeError("Error creating DB and tables") from exc + msg = "Error creating DB and tables" + logger.exception(msg) + raise RuntimeError(msg) from exc try: database_service.check_schema_health() except Exception as exc: - logger.error(f"Error checking schema health: {exc}") - raise RuntimeError("Error checking schema health") from exc + msg = "Error checking schema health" + logger.exception(msg) + raise RuntimeError(msg) from exc try: database_service.run_migrations(fix=fix_migration) except CommandError as exc: @@ -112,7 +40,7 @@ def initialize_database(fix_migration: bool = False): if "overlaps with other requested revisions" not in str( exc ) and "Can't locate revision identified by" not in str(exc): - raise exc + raise # This means there's wrong revision in the DB # We need to delete the alembic_version table # and run the migrations again @@ -124,18 +52,18 @@ def initialize_database(fix_migration: bool = False): # if the exception involves tables already existing # we can ignore it if "already exists" not in str(exc): - logger.error(exc) - raise exc + logger.exception(exc) + raise logger.debug("Database initialized") @contextmanager -def session_getter(db_service: "DatabaseService"): +def session_getter(db_service: DatabaseService): try: session = Session(db_service.engine) yield session - except Exception as e: - logger.error("Session rollback because of exception:", e) + except Exception: + logger.exception("Session rollback because of exception") session.rollback() raise finally: @@ -153,39 +81,3 @@ class Result: class TableResults: table_name: str results: list[Result] - - -def migrate_transactions_from_monitor_service_to_database(session: Session) -> None: - try: - monitor_service = get_monitor_service() - batch = monitor_service.get_transactions() - except Exception as e: - if "Table with name transactions does not exist" in str(e): - logger.debug(f"Error retrieving transactions from monitor service: {e}") - else: - logger.warning(f"Error retrieving transactions from monitor service: {e}") - return - - if not batch: - logger.debug("No transactions to migrate.") - return - to_delete = [] - while batch: - logger.debug(f"Migrating {len(batch)} transactions") - for row in batch: - tt = TransactionTable( - flow_id=row["flow_id"], - status=row["status"], - error=row["error"], - timestamp=row["timestamp"], - 
vertex_id=row["vertex_id"], - inputs=json.loads(row["inputs"]) if row["inputs"] else None, - outputs=json.loads(row["outputs"]) if row["outputs"] else None, - target_id=row["target_id"], - ) - to_delete.append(row["index"]) - session.add(tt) - session.commit() - monitor_service.delete_transactions(to_delete) - batch = monitor_service.get_transactions() - logger.debug("Transactions migrations completed.") diff --git a/src/backend/base/langflow/services/deps.py b/src/backend/base/langflow/services/deps.py index 588d0ced5abc..0a7291199e67 100644 --- a/src/backend/base/langflow/services/deps.py +++ b/src/backend/base/langflow/services/deps.py @@ -1,15 +1,21 @@ -from contextlib import contextmanager -from typing import TYPE_CHECKING, Generator +from __future__ import annotations + +from contextlib import asynccontextmanager, contextmanager +from typing import TYPE_CHECKING + +from loguru import logger from langflow.services.schema import ServiceType if TYPE_CHECKING: + from collections.abc import AsyncGenerator, Generator + from sqlmodel import Session + from sqlmodel.ext.asyncio.session import AsyncSession - from langflow.services.cache.service import CacheService + from langflow.services.cache.service import AsyncBaseCacheService, CacheService from langflow.services.chat.service import ChatService from langflow.services.database.service import DatabaseService - from langflow.services.monitor.service import MonitorService from langflow.services.plugins.service import PluginService from langflow.services.session.service import SessionService from langflow.services.settings.service import SettingsService @@ -24,11 +30,12 @@ def get_service(service_type: ServiceType, default=None): - """ - Retrieves the service instance for the given service type. + """Retrieves the service instance for the given service type. Args: service_type (ServiceType): The type of service to retrieve. + default (ServiceFactory, optional): The default ServiceFactory to use if the service is not found. + Defaults to None. Returns: Any: The service instance. @@ -37,73 +44,67 @@ def get_service(service_type: ServiceType, default=None): from langflow.services.manager import service_manager if not service_manager.factories: - #! This is a workaround to ensure that the service manager is initialized - #! Not optimal, but it works for now + # ! This is a workaround to ensure that the service manager is initialized + # ! Not optimal, but it works for now service_manager.register_factories() - return service_manager.get(service_type, default) # type: ignore + return service_manager.get(service_type, default) -def get_telemetry_service() -> "TelemetryService": - """ - Retrieves the TelemetryService instance from the service manager. +def get_telemetry_service() -> TelemetryService: + """Retrieves the TelemetryService instance from the service manager. Returns: TelemetryService: The TelemetryService instance. """ from langflow.services.telemetry.factory import TelemetryServiceFactory - return get_service(ServiceType.TELEMETRY_SERVICE, TelemetryServiceFactory()) # type: ignore + return get_service(ServiceType.TELEMETRY_SERVICE, TelemetryServiceFactory()) -def get_tracing_service() -> "TracingService": - """ - Retrieves the TracingService instance from the service manager. +def get_tracing_service() -> TracingService: + """Retrieves the TracingService instance from the service manager. Returns: TracingService: The TracingService instance. 
""" from langflow.services.tracing.factory import TracingServiceFactory - return get_service(ServiceType.TRACING_SERVICE, TracingServiceFactory()) # type: ignore + return get_service(ServiceType.TRACING_SERVICE, TracingServiceFactory()) -def get_state_service() -> "StateService": - """ - Retrieves the StateService instance from the service manager. +def get_state_service() -> StateService: + """Retrieves the StateService instance from the service manager. Returns: The StateService instance. """ from langflow.services.state.factory import StateServiceFactory - return get_service(ServiceType.STATE_SERVICE, StateServiceFactory()) # type: ignore + return get_service(ServiceType.STATE_SERVICE, StateServiceFactory()) -def get_socket_service() -> "SocketIOService": - """ - Get the SocketIOService instance from the service manager. +def get_socket_service() -> SocketIOService: + """Get the SocketIOService instance from the service manager. Returns: SocketIOService: The SocketIOService instance. """ - return get_service(ServiceType.SOCKETIO_SERVICE) # type: ignore + return get_service(ServiceType.SOCKETIO_SERVICE) # type: ignore[attr-defined] -def get_storage_service() -> "StorageService": - """ - Retrieves the storage service instance. +def get_storage_service() -> StorageService: + """Retrieves the storage service instance. Returns: The storage service instance. """ from langflow.services.storage.factory import StorageServiceFactory - return get_service(ServiceType.STORAGE_SERVICE, default=StorageServiceFactory()) # type: ignore + return get_service(ServiceType.STORAGE_SERVICE, default=StorageServiceFactory()) -def get_variable_service() -> "VariableService": - """ - Retrieves the VariableService instance from the service manager. +def get_variable_service() -> VariableService: + """Retrieves the VariableService instance from the service manager. Returns: The VariableService instance. @@ -111,22 +112,20 @@ def get_variable_service() -> "VariableService": """ from langflow.services.variable.factory import VariableServiceFactory - return get_service(ServiceType.VARIABLE_SERVICE, VariableServiceFactory()) # type: ignore + return get_service(ServiceType.VARIABLE_SERVICE, VariableServiceFactory()) -def get_plugins_service() -> "PluginService": - """ - Get the PluginService instance from the service manager. +def get_plugins_service() -> PluginService: + """Get the PluginService instance from the service manager. Returns: PluginService: The PluginService instance. """ - return get_service(ServiceType.PLUGIN_SERVICE) # type: ignore + return get_service(ServiceType.PLUGIN_SERVICE) # type: ignore[attr-defined] -def get_settings_service() -> "SettingsService": - """ - Retrieves the SettingsService instance. +def get_settings_service() -> SettingsService: + """Retrieves the SettingsService instance. If the service is not yet initialized, it will be initialized before returning. @@ -138,12 +137,11 @@ def get_settings_service() -> "SettingsService": """ from langflow.services.settings.factory import SettingsServiceFactory - return get_service(ServiceType.SETTINGS_SERVICE, SettingsServiceFactory()) # type: ignore + return get_service(ServiceType.SETTINGS_SERVICE, SettingsServiceFactory()) -def get_db_service() -> "DatabaseService": - """ - Retrieves the DatabaseService instance from the service manager. +def get_db_service() -> DatabaseService: + """Retrieves the DatabaseService instance from the service manager. Returns: The DatabaseService instance. 
@@ -151,25 +149,34 @@ def get_db_service() -> "DatabaseService": """ from langflow.services.database.factory import DatabaseServiceFactory - return get_service(ServiceType.DATABASE_SERVICE, DatabaseServiceFactory()) # type: ignore + return get_service(ServiceType.DATABASE_SERVICE, DatabaseServiceFactory()) -def get_session() -> Generator["Session", None, None]: - """ - Retrieves a session from the database service. +def get_session() -> Generator[Session, None, None]: + """Retrieves a session from the database service. Yields: Session: A session object. """ - db_service = get_db_service() - yield from db_service.get_session() + with get_db_service().with_session() as session: + yield session -@contextmanager -def session_scope(): +async def get_async_session() -> AsyncGenerator[AsyncSession, None]: + """Retrieves an async session from the database service. + + Yields: + Session: An async session object. + """ - Context manager for managing a session scope. + async with get_db_service().with_async_session() as session: + yield session + + +@contextmanager +def session_scope() -> Generator[Session, None, None]: + """Context manager for managing a session scope. This context manager is used to manage a session scope for database operations. It ensures that the session is properly committed if no exceptions occur, @@ -182,56 +189,78 @@ def session_scope(): Exception: If an error occurs during the session scope. """ - session = next(get_session()) - try: - yield session - session.commit() - except: - session.rollback() - raise - finally: - session.close() + db_service = get_db_service() + with db_service.with_session() as session: + try: + yield session + session.commit() + except Exception: + logger.exception("An error occurred during the session scope.") + session.rollback() + raise + + +@asynccontextmanager +async def async_session_scope() -> AsyncGenerator[AsyncSession, None]: + """Context manager for managing an async session scope. + + This context manager is used to manage an async session scope for database operations. + It ensures that the session is properly committed if no exceptions occur, + and rolled back if an exception is raised. + + Yields: + session: The async session object. + Raises: + Exception: If an error occurs during the session scope. -def get_cache_service() -> "CacheService": """ - Retrieves the cache service from the service manager. + db_service = get_db_service() + async with db_service.with_async_session() as session: + try: + yield session + await session.commit() + except Exception: + logger.exception("An error occurred during the session scope.") + await session.rollback() + raise + + +def get_cache_service() -> CacheService | AsyncBaseCacheService: + """Retrieves the cache service from the service manager. Returns: The cache service instance. """ from langflow.services.cache.factory import CacheServiceFactory - return get_service(ServiceType.CACHE_SERVICE, CacheServiceFactory()) # type: ignore + return get_service(ServiceType.CACHE_SERVICE, CacheServiceFactory()) -def get_session_service() -> "SessionService": - """ - Retrieves the session service from the service manager. +def get_shared_component_cache_service() -> CacheService: + """Retrieves the cache service from the service manager. Returns: - The session service instance. + The cache service instance. 
""" - from langflow.services.session.factory import SessionServiceFactory + from langflow.services.shared_component_cache.factory import SharedComponentCacheServiceFactory - return get_service(ServiceType.SESSION_SERVICE, SessionServiceFactory()) # type: ignore + return get_service(ServiceType.SHARED_COMPONENT_CACHE_SERVICE, SharedComponentCacheServiceFactory()) -def get_monitor_service() -> "MonitorService": - """ - Retrieves the MonitorService instance from the service manager. +def get_session_service() -> SessionService: + """Retrieves the session service from the service manager. Returns: - MonitorService: The MonitorService instance. + The session service instance. """ - from langflow.services.monitor.factory import MonitorServiceFactory + from langflow.services.session.factory import SessionServiceFactory - return get_service(ServiceType.MONITOR_SERVICE, MonitorServiceFactory()) # type: ignore + return get_service(ServiceType.SESSION_SERVICE, SessionServiceFactory()) -def get_task_service() -> "TaskService": - """ - Retrieves the TaskService instance from the service manager. +def get_task_service() -> TaskService: + """Retrieves the TaskService instance from the service manager. Returns: The TaskService instance. @@ -239,24 +268,22 @@ def get_task_service() -> "TaskService": """ from langflow.services.task.factory import TaskServiceFactory - return get_service(ServiceType.TASK_SERVICE, TaskServiceFactory()) # type: ignore + return get_service(ServiceType.TASK_SERVICE, TaskServiceFactory()) -def get_chat_service() -> "ChatService": - """ - Get the chat service instance. +def get_chat_service() -> ChatService: + """Get the chat service instance. Returns: ChatService: The chat service instance. """ - return get_service(ServiceType.CHAT_SERVICE) # type: ignore + return get_service(ServiceType.CHAT_SERVICE) -def get_store_service() -> "StoreService": - """ - Retrieves the StoreService instance from the service manager. +def get_store_service() -> StoreService: + """Retrieves the StoreService instance from the service manager. Returns: StoreService: The StoreService instance. 
""" - return get_service(ServiceType.STORE_SERVICE) # type: ignore + return get_service(ServiceType.STORE_SERVICE) diff --git a/src/backend/base/langflow/services/factory.py b/src/backend/base/langflow/services/factory.py index 49bea7db2475..40146fd5e2bb 100644 --- a/src/backend/base/langflow/services/factory.py +++ b/src/backend/base/langflow/services/factory.py @@ -1,6 +1,6 @@ import importlib import inspect -from typing import TYPE_CHECKING, Type, get_type_hints +from typing import TYPE_CHECKING, get_type_hints from cachetools import LRUCache, cached from loguru import logger @@ -15,7 +15,7 @@ class ServiceFactory: def __init__( self, service_class, - ): + ) -> None: self.service_class = service_class self.dependencies = infer_service_types(self, import_all_services_into_a_dict()) @@ -23,7 +23,7 @@ def create(self, *args, **kwargs) -> "Service": raise self.service_class(*args, **kwargs) -def hash_factory(factory: Type[ServiceFactory]) -> str: +def hash_factory(factory: ServiceFactory) -> str: return factory.service_class.__name__ @@ -31,15 +31,15 @@ def hash_dict(d: dict) -> str: return str(d) -def hash_infer_service_types_args(factory_class: Type[ServiceFactory], available_services=None) -> str: - factory_hash = hash_factory(factory_class) +def hash_infer_service_types_args(factory: ServiceFactory, available_services=None) -> str: + factory_hash = hash_factory(factory) services_hash = hash_dict(available_services) return f"{factory_hash}_{services_hash}" @cached(cache=LRUCache(maxsize=10), key=hash_infer_service_types_args) -def infer_service_types(factory_class: Type[ServiceFactory], available_services=None) -> list["ServiceType"]: - create_method = factory_class.create +def infer_service_types(factory: ServiceFactory, available_services=None) -> list["ServiceType"]: + create_method = factory.create type_hints = get_type_hints(create_method, globalns=available_services) service_types = [] for param_name, param_type in type_hints.items(): @@ -54,8 +54,9 @@ def infer_service_types(factory_class: Type[ServiceFactory], available_services= # Attempt to find a matching enum value service_type = ServiceType[type_name] service_types.append(service_type) - except KeyError: - raise ValueError(f"No matching ServiceType for parameter type: {param_type.__name__}") + except KeyError as e: + msg = f"No matching ServiceType for parameter type: {param_type.__name__}" + raise ValueError(msg) from e return service_types @@ -73,11 +74,15 @@ def import_all_services_into_a_dict(): service_name = ServiceType(service_type).value.replace("_service", "") module_name = f"langflow.services.{service_name}.service" module = importlib.import_module(module_name) - for name, obj in inspect.getmembers(module, inspect.isclass): - if issubclass(obj, Service) and obj is not Service: - services[name] = obj - break + services.update( + { + name: obj + for name, obj in inspect.getmembers(module, inspect.isclass) + if issubclass(obj, Service) and obj is not Service + } + ) except Exception as exc: logger.exception(exc) - raise RuntimeError("Could not initialize services. Please check your settings.") from exc + msg = "Could not initialize services. Please check your settings." 
+ raise RuntimeError(msg) from exc return services diff --git a/src/backend/base/langflow/services/manager.py b/src/backend/base/langflow/services/manager.py index b534f13311d8..8c66f1ae853e 100644 --- a/src/backend/base/langflow/services/manager.py +++ b/src/backend/base/langflow/services/manager.py @@ -1,7 +1,8 @@ -import asyncio +from __future__ import annotations + import importlib import inspect -from typing import TYPE_CHECKING, Dict, Optional +from typing import TYPE_CHECKING from loguru import logger @@ -18,50 +19,39 @@ class NoFactoryRegisteredError(Exception): class ServiceManager: - """ - Manages the creation of different services. - """ + """Manages the creation of different services.""" - def __init__(self): - self.services: Dict[str, "Service"] = {} - self.factories = {} + def __init__(self) -> None: + self.services: dict[str, Service] = {} + self.factories: dict[str, ServiceFactory] = {} self.register_factories() self.keyed_lock = KeyedMemoryLockManager() - def register_factories(self): + def register_factories(self) -> None: for factory in self.get_factories(): try: self.register_factory(factory) - except Exception as exc: - logger.exception(exc) - logger.error(f"Error initializing {factory}: {exc}") + except Exception: # noqa: BLE001 + logger.exception(f"Error initializing {factory}") def register_factory( self, - service_factory: "ServiceFactory", - ): - """ - Registers a new factory with dependencies. - """ - + service_factory: ServiceFactory, + ) -> None: + """Registers a new factory with dependencies.""" service_name = service_factory.service_class.name self.factories[service_name] = service_factory - def get(self, service_name: "ServiceType", default: Optional["ServiceFactory"] = None) -> "Service": - """ - Get (or create) a service by its name. - """ - + def get(self, service_name: ServiceType, default: ServiceFactory | None = None) -> Service: + """Get (or create) a service by its name.""" with self.keyed_lock.lock(service_name): if service_name not in self.services: self._create_service(service_name, default) return self.services[service_name] - def _create_service(self, service_name: "ServiceType", default: Optional["ServiceFactory"] = None): - """ - Create a new service given its name, handling dependencies. - """ + def _create_service(self, service_name: ServiceType, default: ServiceFactory | None = None) -> None: + """Create a new service given its name, handling dependencies.""" logger.debug(f"Create service {service_name}") self._validate_service_creation(service_name, default) @@ -70,6 +60,9 @@ def _create_service(self, service_name: "ServiceType", default: Optional["Servic if factory is None and default is not None: self.register_factory(default) factory = default + if factory is None: + msg = f"No factory registered for {service_name}" + raise NoFactoryRegisteredError(msg) for dependency in factory.dependencies: if dependency not in self.services: self._create_service(dependency) @@ -81,35 +74,28 @@ def _create_service(self, service_name: "ServiceType", default: Optional["Servic self.services[service_name] = self.factories[service_name].create(**dependent_services) self.services[service_name].set_ready() - def _validate_service_creation(self, service_name: "ServiceType", default: Optional["ServiceFactory"] = None): - """ - Validate whether the service can be created. 
- """ + def _validate_service_creation(self, service_name: ServiceType, default: ServiceFactory | None = None) -> None: + """Validate whether the service can be created.""" if service_name not in self.factories and default is None: - raise NoFactoryRegisteredError(f"No factory registered for the service class '{service_name.name}'") + msg = f"No factory registered for the service class '{service_name.name}'" + raise NoFactoryRegisteredError(msg) - def update(self, service_name: "ServiceType"): - """ - Update a service by its name. - """ + def update(self, service_name: ServiceType) -> None: + """Update a service by its name.""" if service_name in self.services: logger.debug(f"Update service {service_name}") self.services.pop(service_name, None) self.get(service_name) - async def teardown(self): - """ - Teardown all the services. - """ + async def teardown(self) -> None: + """Teardown all the services.""" for service in self.services.values(): if service is None: continue logger.debug(f"Teardown service {service.name}") try: - result = service.teardown() - if asyncio.iscoroutine(result): - await result - except Exception as exc: + await service.teardown() + except Exception as exc: # noqa: BLE001 logger.exception(exc) self.services = {} self.factories = {} @@ -129,16 +115,15 @@ def get_factories(): module = importlib.import_module(module_name) # Find all classes in the module that are subclasses of ServiceFactory - for name, obj in inspect.getmembers(module, inspect.isclass): + for _, obj in inspect.getmembers(module, inspect.isclass): if issubclass(obj, ServiceFactory) and obj is not ServiceFactory: factories.append(obj()) break except Exception as exc: logger.exception(exc) - raise RuntimeError( - f"Could not initialize services. Please check your settings. Error in {name}." - ) from exc + msg = f"Could not initialize services. Please check your settings. Error in {name}." + raise RuntimeError(msg) from exc return factories @@ -146,21 +131,17 @@ def get_factories(): service_manager = ServiceManager() -def initialize_settings_service(): - """ - Initialize the settings manager. - """ +def initialize_settings_service() -> None: + """Initialize the settings manager.""" from langflow.services.settings import factory as settings_factory service_manager.register_factory(settings_factory.SettingsServiceFactory()) -def initialize_session_service(): - """ - Initialize the session manager. 
- """ +def initialize_session_service() -> None: + """Initialize the session manager.""" from langflow.services.cache import factory as cache_factory - from langflow.services.session import factory as session_service_factory # type: ignore + from langflow.services.session import factory as session_service_factory initialize_settings_service() diff --git a/src/backend/base/langflow/services/monitor/factory.py b/src/backend/base/langflow/services/monitor/factory.py deleted file mode 100644 index 054e6bb3f499..000000000000 --- a/src/backend/base/langflow/services/monitor/factory.py +++ /dev/null @@ -1,13 +0,0 @@ -from langflow.services.factory import ServiceFactory -from langflow.services.monitor.service import MonitorService -from langflow.services.settings.service import SettingsService - - -class MonitorServiceFactory(ServiceFactory): - name = "monitor_service" - - def __init__(self): - super().__init__(MonitorService) - - def create(self, settings_service: SettingsService): - return self.service_class(settings_service) diff --git a/src/backend/base/langflow/services/monitor/schema.py b/src/backend/base/langflow/services/monitor/schema.py deleted file mode 100644 index 003e704c7771..000000000000 --- a/src/backend/base/langflow/services/monitor/schema.py +++ /dev/null @@ -1,258 +0,0 @@ -import json -from datetime import datetime, timezone -from typing import Any -from uuid import UUID - -from pydantic import BaseModel, Field, field_serializer, field_validator - -from langflow.schema.message import Message - - -class DefaultModel(BaseModel): - class Config: - from_attributes = True - populate_by_name = True - json_encoders = { - datetime: lambda v: v.isoformat(), - } - - def json(self, **kwargs): - # Usa a função de serialização personalizada - return super().model_dump_json(**kwargs, encoder=self.custom_encoder) - - @staticmethod - def custom_encoder(obj): - if isinstance(obj, datetime): - return obj.isoformat() - raise TypeError(f"Object of type {obj.__class__.__name__} is not JSON serializable") - - -class TransactionModel(DefaultModel): - index: int | None = Field(default=None) - timestamp: datetime | None = Field(default_factory=datetime.now, alias="timestamp") - vertex_id: str - target_id: str | None = None - inputs: dict - outputs: dict | None = None - status: str - error: str | None = None - flow_id: str | None = Field(default=None, alias="flow_id") - - # validate target_args in case it is a JSON - @field_validator("outputs", "inputs", mode="before") - def validate_target_args(cls, v): - if isinstance(v, str): - return json.loads(v) - return v - - @field_serializer("outputs", "inputs") - def serialize_target_args(v): - if isinstance(v, dict): - return json.dumps(v) - return v - - -class TransactionModelResponse(DefaultModel): - index: int | None = Field(default=None) - timestamp: datetime | None = Field(default_factory=datetime.now, alias="timestamp") - vertex_id: str - inputs: dict - outputs: dict | None = None - status: str - error: str | None = None - flow_id: str | None = Field(default=None, alias="flow_id") - source: str | None = None - target: str | None = None - - # validate target_args in case it is a JSON - @field_validator("outputs", "inputs", mode="before") - def validate_target_args(cls, v): - if isinstance(v, str): - return json.loads(v) - return v - - @field_validator("index", mode="before") - def validate_id(cls, v): - if isinstance(v, float): - try: - return int(v) - except ValueError: - return None - return v - - -class DuckDbMessageModel(DefaultModel): - index: int 
| None = Field(default=None, alias="index") - flow_id: str | None = Field(default=None, alias="flow_id") - timestamp: datetime = Field(default_factory=lambda: datetime.now(timezone.utc)) - sender: str - sender_name: str - session_id: str - text: str - files: list[str] = [] - - @field_validator("files", mode="before") - @classmethod - def validate_files(cls, v): - if isinstance(v, str): - v = json.loads(v) - return v - - @field_serializer("timestamp") - @classmethod - def serialize_timestamp(cls, v): - v = v.replace(microsecond=0) - return v.strftime("%Y-%m-%d %H:%M:%S") - - @field_serializer("files") - @classmethod - def serialize_files(cls, v): - if isinstance(v, list): - return json.dumps(v) - return v - - @classmethod - def from_message(cls, message: Message, flow_id: str | None = None): - # first check if the record has all the required fields - if message.text is None or not message.sender or not message.sender_name: - raise ValueError("The message does not have the required fields (text, sender, sender_name).") - return cls( - sender=message.sender, - sender_name=message.sender_name, - text=message.text, - session_id=message.session_id, - files=message.files or [], - timestamp=message.timestamp, - flow_id=flow_id, - ) - - -class MessageModel(DefaultModel): - id: str | UUID | None = Field(default=None) - flow_id: UUID | None = Field(default=None) - timestamp: datetime = Field(default_factory=lambda: datetime.now(timezone.utc)) - sender: str - sender_name: str - session_id: str - text: str - files: list[str] = [] - - @field_validator("files", mode="before") - @classmethod - def validate_files(cls, v): - if isinstance(v, str): - v = json.loads(v) - return v - - @field_serializer("timestamp") - @classmethod - def serialize_timestamp(cls, v): - v = v.replace(microsecond=0) - return v.strftime("%Y-%m-%d %H:%M:%S") - - @field_serializer("files") - @classmethod - def serialize_files(cls, v): - if isinstance(v, list): - return json.dumps(v) - return v - - @classmethod - def from_message(cls, message: Message, flow_id: str | None = None): - # first check if the record has all the required fields - if message.text is None or not message.sender or not message.sender_name: - raise ValueError("The message does not have the required fields (text, sender, sender_name).") - return cls( - sender=message.sender, - sender_name=message.sender_name, - text=message.text, - session_id=message.session_id, - files=message.files or [], - timestamp=message.timestamp, - flow_id=flow_id, - ) - - -class MessageModelResponse(MessageModel): - pass - - -class MessageModelRequest(MessageModel): - text: str = Field(default="") - sender: str = Field(default="") - sender_name: str = Field(default="") - session_id: str = Field(default="") - - -class VertexBuildModel(DefaultModel): - index: int | None = Field(default=None, alias="index", exclude=True) - id: str | None = Field(default=None, alias="id") - flow_id: str - valid: bool - params: Any - data: dict - artifacts: dict - timestamp: datetime = Field(default_factory=datetime.now) - - @field_serializer("data", "artifacts") - def serialize_dict(v): - if isinstance(v, dict): - # check if the value of each key is a BaseModel or a list of BaseModels - for key, value in v.items(): - if isinstance(value, BaseModel): - v[key] = value.model_dump() - elif isinstance(value, list) and all(isinstance(i, BaseModel) for i in value): - v[key] = [i.model_dump() for i in value] - return json.dumps(v, default=str) - elif isinstance(v, BaseModel): - return v.model_dump_json() - return v - 
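The deleted monitor models repeat one Pydantic v2 pattern throughout: a `mode="before"` validator accepts either a JSON string or a dict on the way in, and a `field_serializer` flattens back to a JSON string on the way out, so dicts can live in plain TEXT columns. Since the originals are going away, here is the round-trip in miniature (a generic sketch, not code Langflow still ships):

```python
import json

from pydantic import BaseModel, field_serializer, field_validator


class Record(BaseModel):
    inputs: dict

    @field_validator("inputs", mode="before")
    @classmethod
    def parse_json_text(cls, v):
        # Rows read back from a TEXT column arrive as JSON strings.
        return json.loads(v) if isinstance(v, str) else v

    @field_serializer("inputs")
    def dump_json_text(self, v: dict) -> str:
        # Flatten to a string again when dumping.
        return json.dumps(v)


record = Record(inputs='{"a": 1}')
assert record.inputs == {"a": 1}
assert record.model_dump()["inputs"] == '{"a": 1}'
```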
- @field_validator("params", mode="before") - def validate_params(cls, v): - if isinstance(v, str): - try: - return json.loads(v) - except json.JSONDecodeError: - return v - return v - - @field_serializer("params") - def serialize_params(v): - if isinstance(v, list) and all(isinstance(i, BaseModel) for i in v): - return json.dumps([i.model_dump() for i in v]) - return v - - @field_validator("data", mode="before") - def validate_data(cls, v): - if isinstance(v, str): - return json.loads(v) - return v - - @field_validator("artifacts", mode="before") - def validate_artifacts(cls, v): - if isinstance(v, str): - return json.loads(v) - elif isinstance(v, BaseModel): - return v.model_dump() - return v - - -class VertexBuildResponseModel(VertexBuildModel): - @field_serializer("data", "artifacts") - def serialize_dict(v): - return v - - -class VertexBuildMapModel(BaseModel): - vertex_builds: dict[str, list[VertexBuildResponseModel]] - - @classmethod - def from_list_of_dicts(cls, vertex_build_dicts): - vertex_build_map = {} - for vertex_build_dict in vertex_build_dicts: - vertex_build = VertexBuildResponseModel(**vertex_build_dict) - if vertex_build.id not in vertex_build_map: - vertex_build_map[vertex_build.id] = [] - vertex_build_map[vertex_build.id].append(vertex_build) - return cls(vertex_builds=vertex_build_map) diff --git a/src/backend/base/langflow/services/monitor/service.py b/src/backend/base/langflow/services/monitor/service.py deleted file mode 100644 index 2f1b6ef4ee21..000000000000 --- a/src/backend/base/langflow/services/monitor/service.py +++ /dev/null @@ -1,91 +0,0 @@ -from pathlib import Path -from typing import TYPE_CHECKING, List - -from platformdirs import user_cache_dir - -from langflow.services.base import Service -from langflow.services.monitor.utils import ( - new_duckdb_locked_connection, -) - -if TYPE_CHECKING: - from langflow.services.settings.service import SettingsService - - -class MonitorService(Service): - """ - Deprecated. Still connecting to duckdb to migrate old installations. 
- """ - - name = "monitor_service" - - def __init__(self, settings_service: "SettingsService"): - self.settings_service = settings_service - self.base_cache_dir = Path(user_cache_dir("langflow"), ensure_exists=True) - self.db_path = self.base_cache_dir / "monitor.duckdb" - - def exec_query(self, query: str, read_only: bool = False): - with new_duckdb_locked_connection(self.db_path, read_only=read_only) as conn: - return conn.execute(query).df() - - def get_messages( - self, - flow_id: str | None = None, - sender: str | None = None, - sender_name: str | None = None, - session_id: str | None = None, - order_by: str | None = "timestamp", - order: str | None = "DESC", - limit: int | None = None, - ): - query = "SELECT index, flow_id, sender_name, sender, session_id, text, files, timestamp FROM messages" - conditions = [] - if sender: - conditions.append(f"sender = '{sender}'") - if sender_name: - conditions.append(f"sender_name = '{sender_name}'") - if session_id: - conditions.append(f"session_id = '{session_id}'") - if flow_id: - conditions.append(f"flow_id = '{flow_id}'") - - if conditions: - query += " WHERE " + " AND ".join(conditions) - - if order_by and order: - # Make sure the order is from newest to oldest - query += f" ORDER BY {order_by} {order.upper()}" - - if limit is not None: - query += f" LIMIT {limit}" - - with new_duckdb_locked_connection(self.db_path, read_only=True) as conn: - df = conn.execute(query).df() - - return df - - def delete_messages(self, message_ids: list[int] | str): - if isinstance(message_ids, list): - # If message_ids is a list, join the string representations of the integers - ids_str = ",".join(map(str, message_ids)) - elif isinstance(message_ids, str): - # If message_ids is already a string, use it directly - ids_str = message_ids - else: - raise ValueError("message_ids must be a list of integers or a string") - - query = f"DELETE FROM messages WHERE index IN ({ids_str})" - - return self.exec_query(query, read_only=False) - - def get_transactions(self, limit: int = 100): - query = f"SELECT index,flow_id, status, error, timestamp, vertex_id, inputs, outputs, target_id FROM transactions LIMIT {str(limit)}" - with new_duckdb_locked_connection(self.db_path, read_only=True) as conn: - df = conn.execute(query).df() - - return df.to_dict(orient="records") - - def delete_transactions(self, ids: List[int]) -> None: - with new_duckdb_locked_connection(self.db_path, read_only=False) as conn: - conn.execute(f"DELETE FROM transactions WHERE index in ({','.join(map(str, ids))})") - conn.commit() diff --git a/src/backend/base/langflow/services/monitor/utils.py b/src/backend/base/langflow/services/monitor/utils.py deleted file mode 100644 index 60c17dbe9964..000000000000 --- a/src/backend/base/langflow/services/monitor/utils.py +++ /dev/null @@ -1,125 +0,0 @@ -from contextlib import contextmanager -from pathlib import Path -from typing import TYPE_CHECKING, Any, Dict, Type, Union - -import duckdb -from loguru import logger -from pydantic import BaseModel - -from langflow.utils.concurrency import KeyedWorkerLockManager - -if TYPE_CHECKING: - pass - - -INDEX_KEY = "index" -worker_lock_manager = KeyedWorkerLockManager() - - -def get_table_schema_as_dict(conn: duckdb.DuckDBPyConnection, table_name: str) -> dict: - result = conn.execute(f"PRAGMA table_info('{table_name}')").fetchall() - schema = {row[1]: row[2].upper() for row in result} - return schema - - -def model_to_sql_column_definitions(model: Type[BaseModel]) -> dict: - columns = {} - for field_name, field_type in 
model.model_fields.items(): - if hasattr(field_type.annotation, "__args__") and field_type.annotation is not None: - field_args = field_type.annotation.__args__ - else: - field_args = [] - field_info = field_args[0] if field_args else field_type.annotation - if field_info.__name__ == "int": - sql_type = "INTEGER" - elif field_info.__name__ == "str": - sql_type = "VARCHAR" - elif field_info.__name__ == "datetime": - sql_type = "TIMESTAMP" - elif field_info.__name__ == "bool": - sql_type = "BOOLEAN" - elif field_info.__name__ == "dict": - sql_type = "VARCHAR" - elif field_info.__name__ == "Any": - sql_type = "VARCHAR" - else: - continue # Skip types we don't handle - columns[field_name] = sql_type - return columns - - -def drop_and_create_table_if_schema_mismatch(db_path: str, table_name: str, model: Type[BaseModel]): - with new_duckdb_locked_connection(db_path) as conn: - # Get the current schema from the database - try: - current_schema = get_table_schema_as_dict(conn, table_name) - except duckdb.CatalogException: - current_schema = {} - # Get the desired schema from the model - desired_schema = model_to_sql_column_definitions(model) - - # Compare the current and desired schemas - - if current_schema != desired_schema: - # If they don't match, drop the existing table and create a new one - logger.warning(f"Schema mismatch for duckdb table {table_name}. Dropping and recreating table.") - logger.debug(f"Current schema: {str(current_schema)}") - logger.debug(f"Desired schema: {str(desired_schema)}") - conn.execute(f"DROP TABLE IF EXISTS {table_name}") - if INDEX_KEY in desired_schema.keys(): - # Create a sequence for the id column - try: - conn.execute(f"CREATE SEQUENCE seq_{table_name} START 1;") - except duckdb.CatalogException: - pass - desired_schema[INDEX_KEY] = f"INTEGER PRIMARY KEY DEFAULT NEXTVAL('seq_{table_name}')" - columns_sql = ", ".join(f"{name} {data_type}" for name, data_type in desired_schema.items()) - create_table_sql = f"CREATE TABLE {table_name} ({columns_sql})" - conn.execute(create_table_sql) - - -@contextmanager -def new_duckdb_locked_connection(db_path: Union[str, Path], read_only=False): - with worker_lock_manager.lock("duckdb"): - with duckdb.connect(str(db_path), read_only=read_only) as conn: - yield conn - - -def add_row_to_table( - conn: duckdb.DuckDBPyConnection, - table_name: str, - model: Type, - monitor_data: Union[Dict[str, Any], BaseModel], -): - # Validate the data with the Pydantic model - if isinstance(monitor_data, model): - validated_data = monitor_data - else: - validated_data = model(**monitor_data) - - # Extract data for the insert statement - validated_dict = validated_data.model_dump() - keys = [key for key in validated_dict.keys() if key != INDEX_KEY] - columns = ", ".join(keys) - - values_placeholders = ", ".join(["?" 
for _ in keys]) - values = [validated_dict[key] for key in keys] - - # Create the insert statement - insert_sql = f"INSERT INTO {table_name} ({columns}) VALUES ({values_placeholders})" - - # Execute the insert statement - try: - conn.execute(insert_sql, values) - except Exception as e: - # Log values types - column_error_message = "" - for key, value in validated_dict.items(): - logger.error(f"{key}: {type(value)}") - if str(value) in str(e): - column_error_message = f"Column: {key} Value: {value} Error: {e}" - - if column_error_message: - logger.error(f"Error adding row to {table_name}: {column_error_message}") - else: - logger.error(f"Error adding row to {table_name}: {e}") diff --git a/src/backend/base/langflow/services/plugins/base.py b/src/backend/base/langflow/services/plugins/base.py index a5ab14e5460b..5f20db136c10 100644 --- a/src/backend/base/langflow/services/plugins/base.py +++ b/src/backend/base/langflow/services/plugins/base.py @@ -2,10 +2,10 @@ class BasePlugin: - def initialize(self): + def initialize(self) -> None: pass - def teardown(self): + def teardown(self) -> None: pass def get(self) -> Any: diff --git a/src/backend/base/langflow/services/plugins/factory.py b/src/backend/base/langflow/services/plugins/factory.py index 221d43500162..e6048d5508eb 100644 --- a/src/backend/base/langflow/services/plugins/factory.py +++ b/src/backend/base/langflow/services/plugins/factory.py @@ -1,16 +1,12 @@ -from typing import TYPE_CHECKING +from __future__ import annotations from langflow.services.factory import ServiceFactory from langflow.services.plugins.service import PluginService -if TYPE_CHECKING: - from langflow.services.settings.service import SettingsService - class PluginServiceFactory(ServiceFactory): - def __init__(self): + def __init__(self) -> None: super().__init__(PluginService) - def create(self, settings_service: "SettingsService"): - service = PluginService(settings_service) - return service + def create(self): + return PluginService() diff --git a/src/backend/base/langflow/services/plugins/langfuse_plugin.py b/src/backend/base/langflow/services/plugins/langfuse_plugin.py index b65c27733568..fe7389ac167b 100644 --- a/src/backend/base/langflow/services/plugins/langfuse_plugin.py +++ b/src/backend/base/langflow/services/plugins/langfuse_plugin.py @@ -1,4 +1,6 @@ -from typing import TYPE_CHECKING, Optional +from __future__ import annotations + +from typing import TYPE_CHECKING from loguru import logger @@ -6,11 +8,11 @@ from langflow.services.plugins.base import CallbackPlugin if TYPE_CHECKING: - from langfuse import Langfuse # type: ignore + from langfuse import Langfuse class LangfuseInstance: - _instance: Optional["Langfuse"] = None + _instance: Langfuse | None = None @classmethod def get(cls): @@ -20,10 +22,10 @@ def get(cls): return cls._instance @classmethod - def create(cls): + def create(cls) -> None: try: logger.debug("Creating Langfuse instance") - from langfuse import Langfuse # type: ignore + from langfuse import Langfuse settings_manager = get_settings_service() @@ -42,13 +44,13 @@ def create(cls): cls._instance = None @classmethod - def update(cls): + def update(cls) -> None: logger.debug("Updating Langfuse instance") cls._instance = None cls.create() @classmethod - def teardown(cls): + def teardown(cls) -> None: logger.debug("Tearing down Langfuse instance") if cls._instance is not None: cls._instance.flush() @@ -56,16 +58,16 @@ def teardown(cls): class LangfusePlugin(CallbackPlugin): - def initialize(self): + def initialize(self) -> None: 
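Before moving past the deleted monitor code entirely, one caution it carried: `MonitorService.get_messages` assembled its `WHERE` clause with f-string interpolation, which invites SQL injection, while `add_row_to_table` already used `?` placeholders. DuckDB's Python API accepts parameter lists, so if comparable ad-hoc queries ever return, the parameterized shape below is the safer sketch (table and column names are illustrative):

```python
import duckdb


def get_messages(conn: duckdb.DuckDBPyConnection, sender: str | None = None, limit: int = 100):
    query = "SELECT * FROM messages"
    params: list[object] = []
    if sender is not None:
        query += " WHERE sender = ?"  # placeholder instead of f-string interpolation
        params.append(sender)
    query += " LIMIT ?"
    params.append(limit)
    return conn.execute(query, params).fetchall()
```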
LangfuseInstance.create() - def teardown(self): + def teardown(self) -> None: LangfuseInstance.teardown() def get(self): return LangfuseInstance.get() - def get_callback(self, _id: Optional[str] = None): + def get_callback(self, _id: str | None = None): if _id is None: _id = "default" @@ -78,7 +80,7 @@ def get_callback(self, _id: Optional[str] = None): if trace: return trace.getNewHandler() - except Exception as exc: - logger.error(f"Error initializing langfuse callback: {exc}") + except Exception: # noqa: BLE001 + logger.exception("Error initializing langfuse callback") return None diff --git a/src/backend/base/langflow/services/plugins/service.py b/src/backend/base/langflow/services/plugins/service.py index 4b08eebd5dd8..c623cfdd6d78 100644 --- a/src/backend/base/langflow/services/plugins/service.py +++ b/src/backend/base/langflow/services/plugins/service.py @@ -1,32 +1,29 @@ +from __future__ import annotations + import importlib import inspect -import os -from typing import TYPE_CHECKING, Union +from pathlib import Path from loguru import logger from langflow.services.base import Service from langflow.services.plugins.base import BasePlugin, CallbackPlugin -if TYPE_CHECKING: - from langflow.services.settings.service import SettingsService - class PluginService(Service): name = "plugin_service" - def __init__(self, settings_service: "SettingsService"): + def __init__(self) -> None: self.plugins: dict[str, BasePlugin] = {} - # plugin_dir = settings_service.settings.PLUGIN_DIR - self.plugin_dir = os.path.dirname(__file__) + self.plugin_dir = Path(__file__).parent self.plugins_base_module = "langflow.services.plugins" self.load_plugins() - def load_plugins(self): + def load_plugins(self) -> None: base_files = ["base.py", "service.py", "factory.py", "__init__.py"] - for module in os.listdir(self.plugin_dir): - if module.endswith(".py") and module not in base_files: - plugin_name = module[:-3] + for module in self.plugin_dir.iterdir(): + if module.suffix == ".py" and module.name not in base_files: + plugin_name = module.stem module_path = f"{self.plugins_base_module}.{plugin_name}" try: mod = importlib.import_module(module_path) @@ -35,17 +32,17 @@ def load_plugins(self): if ( inspect.isclass(attr) and issubclass(attr, BasePlugin) - and attr not in [CallbackPlugin, BasePlugin] + and attr not in {CallbackPlugin, BasePlugin} ): self.register_plugin(plugin_name, attr()) - except Exception as exc: - logger.error(f"Error loading plugin {plugin_name}: {exc}") + except Exception: # noqa: BLE001 + logger.exception(f"Error loading plugin {plugin_name}") - def register_plugin(self, plugin_name, plugin_instance): + def register_plugin(self, plugin_name, plugin_instance) -> None: self.plugins[plugin_name] = plugin_instance plugin_instance.initialize() - def get_plugin(self, plugin_name) -> Union[BasePlugin, None]: + def get_plugin(self, plugin_name) -> BasePlugin | None: return self.plugins.get(plugin_name) def get(self, plugin_name): @@ -53,7 +50,7 @@ def get(self, plugin_name): return plugin.get() return None - async def teardown(self): + async def teardown(self) -> None: for plugin in self.plugins.values(): plugin.teardown() diff --git a/src/backend/base/langflow/services/schema.py b/src/backend/base/langflow/services/schema.py index eafc0c08590e..482e3a7dcd3e 100644 --- a/src/backend/base/langflow/services/schema.py +++ b/src/backend/base/langflow/services/schema.py @@ -2,13 +2,11 @@ class ServiceType(str, Enum): - """ - Enum for the different types of services that can be - registered with the 
service manager. - """ + """Enum for the different types of services that can be registered with the service manager.""" AUTH_SERVICE = "auth_service" CACHE_SERVICE = "cache_service" + SHARED_COMPONENT_CACHE_SERVICE = "shared_component_cache_service" SETTINGS_SERVICE = "settings_service" DATABASE_SERVICE = "database_service" CHAT_SERVICE = "chat_service" @@ -18,7 +16,6 @@ class ServiceType(str, Enum): STORE_SERVICE = "store_service" VARIABLE_SERVICE = "variable_service" STORAGE_SERVICE = "storage_service" - MONITOR_SERVICE = "monitor_service" # SOCKETIO_SERVICE = "socket_service" STATE_SERVICE = "state_service" TRACING_SERVICE = "tracing_service" diff --git a/src/backend/base/langflow/services/session/factory.py b/src/backend/base/langflow/services/session/factory.py index d55bd5b46850..806067529fe6 100644 --- a/src/backend/base/langflow/services/session/factory.py +++ b/src/backend/base/langflow/services/session/factory.py @@ -8,7 +8,7 @@ class SessionServiceFactory(ServiceFactory): - def __init__(self): + def __init__(self) -> None: super().__init__(SessionService) def create(self, cache_service: "CacheService"): diff --git a/src/backend/base/langflow/services/session/service.py b/src/backend/base/langflow/services/session/service.py index e0feea4e0899..420ef9778f5f 100644 --- a/src/backend/base/langflow/services/session/service.py +++ b/src/backend/base/langflow/services/session/service.py @@ -1,28 +1,34 @@ -from typing import Coroutine, Optional +import asyncio +from typing import TYPE_CHECKING from langflow.services.base import Service -from langflow.services.cache.base import CacheService +from langflow.services.cache.base import AsyncBaseCacheService +from langflow.services.cache.utils import CacheMiss from langflow.services.session.utils import compute_dict_hash, session_id_generator +if TYPE_CHECKING: + from langflow.services.cache.base import CacheService + class SessionService(Service): name = "session_service" - def __init__(self, cache_service): - self.cache_service: "CacheService" = cache_service + def __init__(self, cache_service) -> None: + self.cache_service: CacheService | AsyncBaseCacheService = cache_service - async def load_session(self, key, flow_id: str, data_graph: Optional[dict] = None): + async def load_session(self, key, flow_id: str, data_graph: dict | None = None): # Check if the data is cached - if key in self.cache_service: - result = self.cache_service.get(key) - if isinstance(result, Coroutine): - result = await result - return result + if isinstance(self.cache_service, AsyncBaseCacheService): + value = await self.cache_service.get(key) + else: + value = await asyncio.to_thread(self.cache_service.get, key) + if not isinstance(value, CacheMiss): + return value if key is None: key = self.generate_key(session_id=None, data_graph=data_graph) if data_graph is None: - return (None, None) + return None, None # If not cached, build the graph and cache it from langflow.graph.graph.base import Graph @@ -32,7 +38,7 @@ async def load_session(self, key, flow_id: str, data_graph: Optional[dict] = Non return graph, artifacts - def build_key(self, session_id, data_graph): + def build_key(self, session_id, data_graph) -> str: json_hash = compute_dict_hash(data_graph) return f"{session_id}{':' if session_id else ''}{json_hash}" @@ -43,14 +49,14 @@ def generate_key(self, session_id, data_graph): session_id = session_id_generator() return self.build_key(session_id, data_graph=data_graph) - async def update_session(self, session_id, value): - result = 
self.cache_service.set(session_id, value) - # if it is a coroutine, await it - if isinstance(result, Coroutine): - await result - - async def clear_session(self, session_id): - result = self.cache_service.delete(session_id) - # if it is a coroutine, await it - if isinstance(result, Coroutine): - await result + async def update_session(self, session_id, value) -> None: + if isinstance(self.cache_service, AsyncBaseCacheService): + await self.cache_service.set(session_id, value) + else: + await asyncio.to_thread(self.cache_service.set, session_id, value) + + async def clear_session(self, session_id) -> None: + if isinstance(self.cache_service, AsyncBaseCacheService): + await self.cache_service.delete(session_id) + else: + await asyncio.to_thread(self.cache_service.delete, session_id) diff --git a/src/backend/base/langflow/services/settings/auth.py b/src/backend/base/langflow/services/settings/auth.py index b88ffe83200c..feceaacb689f 100644 --- a/src/backend/base/langflow/services/settings/auth.py +++ b/src/backend/base/langflow/services/settings/auth.py @@ -2,13 +2,14 @@ from pathlib import Path from typing import Literal -from langflow.services.settings.constants import DEFAULT_SUPERUSER, DEFAULT_SUPERUSER_PASSWORD -from langflow.services.settings.utils import read_secret_from_file, write_secret_to_file from loguru import logger from passlib.context import CryptContext from pydantic import Field, SecretStr, field_validator from pydantic_settings import BaseSettings +from langflow.services.settings.constants import DEFAULT_SUPERUSER, DEFAULT_SUPERUSER_PASSWORD +from langflow.services.settings.utils import read_secret_from_file, write_secret_to_file + class AuthSettings(BaseSettings): # Login settings @@ -56,7 +57,7 @@ class Config: extra = "ignore" env_prefix = "LANGFLOW_" - def reset_credentials(self): + def reset_credentials(self) -> None: self.SUPERUSER = DEFAULT_SUPERUSER self.SUPERUSER_PASSWORD = DEFAULT_SUPERUSER_PASSWORD @@ -109,4 +110,4 @@ def get_secret_key(cls, value, info): write_secret_to_file(secret_key_path, value) logger.debug("Saved secret key") - return value if isinstance(value, SecretStr) else SecretStr(value) + return value if isinstance(value, SecretStr) else SecretStr(value).get_secret_value() diff --git a/src/backend/base/langflow/services/settings/base.py b/src/backend/base/langflow/services/settings/base.py index 2c953b5d14a4..073f12c677dc 100644 --- a/src/backend/base/langflow/services/settings/base.py +++ b/src/backend/base/langflow/services/settings/base.py @@ -3,7 +3,7 @@ import os from pathlib import Path from shutil import copy2 -from typing import Any, List, Literal, Optional, Tuple, Type +from typing import Any, Literal import orjson import yaml @@ -11,6 +11,7 @@ from pydantic import field_validator from pydantic.fields import FieldInfo from pydantic_settings import BaseSettings, EnvSettingsSource, PydanticBaseSettingsSource, SettingsConfigDict +from typing_extensions import override from langflow.services.settings.constants import VARIABLES_TO_GET_FROM_ENVIRONMENT @@ -19,8 +20,7 @@ def is_list_of_any(field: FieldInfo) -> bool: - """ - Check if the given field is a list or an optional list of any type. + """Check if the given field is a list or an optional list of any type. Args: field (FieldInfo): The field to be checked. 
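The session service changes above dispatch on the cache's type instead of calling and then probing the result with `iscoroutine`: an `AsyncBaseCacheService` is awaited directly, while a synchronous cache is pushed onto a worker thread with `asyncio.to_thread` so it cannot block the event loop. The dispatch pattern in isolation (both cache classes below are stand-ins, not Langflow's):

```python
import asyncio


class SyncCache:
    def __init__(self) -> None:
        self._data: dict = {}

    def set(self, key, value) -> None:
        self._data[key] = value


class AsyncCache:
    def __init__(self) -> None:
        self._data: dict = {}

    async def set(self, key, value) -> None:
        self._data[key] = value


async def cache_set(cache: SyncCache | AsyncCache, key, value) -> None:
    if isinstance(cache, AsyncCache):
        await cache.set(key, value)  # native coroutine: await it directly
    else:
        # Synchronous call runs on a worker thread, keeping the loop free.
        await asyncio.to_thread(cache.set, key, value)


asyncio.run(cache_set(SyncCache(), "session-1", {"graph": None}))
```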
@@ -31,20 +31,18 @@ def is_list_of_any(field: FieldInfo) -> bool: if field.annotation is None: return False try: - if hasattr(field.annotation, "__args__"): - union_args = field.annotation.__args__ - else: - union_args = [] + union_args = field.annotation.__args__ if hasattr(field.annotation, "__args__") else [] - return field.annotation.__origin__ == list or any( - arg.__origin__ == list for arg in union_args if hasattr(arg, "__origin__") + return field.annotation.__origin__ is list or any( + arg.__origin__ is list for arg in union_args if hasattr(arg, "__origin__") ) except AttributeError: return False class MyCustomSource(EnvSettingsSource): - def prepare_field_value(self, field_name: str, field: FieldInfo, value: Any, value_is_complex: bool) -> Any: + @override + def prepare_field_value(self, field_name: str, field: FieldInfo, value: Any, value_is_complex: bool) -> Any: # type: ignore[misc] # allow comma-separated list parsing # fieldInfo contains the annotation of the field @@ -59,15 +57,16 @@ def prepare_field_value(self, field_name: str, field: FieldInfo, value: Any, val class Settings(BaseSettings): # Define the default LANGFLOW_DIR - config_dir: Optional[str] = None + config_dir: str | None = None # Define if langflow db should be saved in config dir or # in the langflow directory save_db_in_config_dir: bool = False - """Define if langflow database should be saved in LANGFLOW_CONFIG_DIR or in the langflow directory (i.e. in the package directory).""" + """Define if langflow database should be saved in LANGFLOW_CONFIG_DIR or in the langflow directory + (i.e. in the package directory).""" dev: bool = False """If True, Langflow will run in development mode.""" - database_url: Optional[str] = None + database_url: str | None = None """Database URL for Langflow. If not provided, Langflow will use a SQLite database.""" pool_size: int = 10 """The number of connections to keep open in the connection pool. If not provided, the default is 10.""" @@ -75,10 +74,11 @@ class Settings(BaseSettings): """The number of connections to allow that can be opened beyond the pool size. If not provided, the default is 20.""" db_connect_timeout: int = 20 - """The number of seconds to wait before giving up on a lock to released or establishing a connection to the database.""" + """The number of seconds to wait before giving up on a lock to be released or establishing a connection to the + database.""" # sqlite configuration - sqlite_pragmas: Optional[dict] = {"synchronous": "NORMAL", "journal_mode": "WAL"} + sqlite_pragmas: dict | None = {"synchronous": "NORMAL", "journal_mode": "WAL"} """SQLite pragmas to use when connecting to the database.""" # cache configuration @@ -95,34 +95,26 @@ class Settings(BaseSettings): """The port on which Langflow will expose Prometheus metrics.
9090 is the default port.""" remove_api_keys: bool = False - components_path: List[str] = [] + components_path: list[str] = [] langchain_cache: str = "InMemoryCache" - load_flows_path: Optional[str] = None + load_flows_path: str | None = None # Redis redis_host: str = "localhost" redis_port: int = 6379 redis_db: int = 0 - redis_url: Optional[str] = None + redis_url: str | None = None redis_cache_expire: int = 3600 # Sentry - sentry_dsn: Optional[str] = None - sentry_traces_sample_rate: Optional[float] = 1.0 - sentry_profiles_sample_rate: Optional[float] = 1.0 - - # PLUGIN_DIR: Optional[str] = None + sentry_dsn: str | None = None + sentry_traces_sample_rate: float | None = 1.0 + sentry_profiles_sample_rate: float | None = 1.0 - langfuse_secret_key: Optional[str] = None - langfuse_public_key: Optional[str] = None - langfuse_host: Optional[str] = None - - store: Optional[bool] = True - store_url: Optional[str] = "https://api.langflow.store" - download_webhook_url: Optional[str] = ( - "https://api.langflow.store/flows/trigger/ec611a61-8460-4438-b187-a4f65e5559d4" - ) - like_webhook_url: Optional[str] = "https://api.langflow.store/flows/trigger/64275852-ec00-45c1-984e-3bff814732da" + store: bool | None = True + store_url: str | None = "https://api.langflow.store" + download_webhook_url: str | None = "https://api.langflow.store/flows/trigger/ec611a61-8460-4438-b187-a4f65e5559d4" + like_webhook_url: str | None = "https://api.langflow.store/flows/trigger/64275852-ec00-45c1-984e-3bff814732da" storage_type: str = "local" @@ -155,12 +147,32 @@ class Settings(BaseSettings): """If set to True, Langflow will keep track of each vertex's builds (outputs) in the UI for any flow.""" # Config + host: str = "127.0.0.1" + """The host on which Langflow will run.""" + port: int = 7860 + """The port on which Langflow will run.""" + workers: int = 1 + """The number of workers to run.""" + log_level: str = "critical" + """The log level for Langflow.""" + log_file: str | None = "logs/langflow.log" + """The path to the log file for Langflow.""" + alembic_log_file: str = "alembic/alembic.log" + """The path to the log file for Alembic, SQLAlchemy's migration tool.""" + frontend_path: str | None = None + """The path to the frontend directory containing build files.
This is for development purposes only.""" + open_browser: bool = False + """If set to True, Langflow will open the browser on startup.""" auto_saving: bool = True """If set to True, Langflow will auto save flows.""" - auto_saving_interval: int = 300 + auto_saving_interval: int = 1000 """The interval in ms at which Langflow will auto save flows.""" health_check_max_retries: int = 5 """The maximum number of retries for the health check.""" + max_file_size_upload: int = 100 + """The maximum file size for uploads, in MB.""" + deactivate_tracing: bool = False + """If set to True, tracing will be deactivated.""" @field_validator("dev") @classmethod @@ -181,6 +193,20 @@ def set_user_agent(cls, value): logger.debug(f"Setting user agent to {value}") return value + @field_validator("variables_to_get_from_environment", mode="before") + @classmethod + def set_variables_to_get_from_environment(cls, value): + if isinstance(value, str): + value = value.split(",") + return list(set(VARIABLES_TO_GET_FROM_ENVIRONMENT + value)) + + @field_validator("log_file", mode="before") + @classmethod + def set_log_file(cls, value): + if isinstance(value, Path): + value = str(value) + return value + @field_validator("config_dir", mode="before") @classmethod def set_langflow_dir(cls, value): @@ -219,14 +245,14 @@ def set_database_url(cls, value, info): # so we need to migrate to the new format # if there is a database in that location if not info.data["config_dir"]: - raise ValueError("config_dir not set, please set it or provide a database_url") - try: - from langflow.version import is_pre_release # type: ignore - except ImportError: - from importlib import metadata + msg = "config_dir not set, please set it or provide a database_url" + raise ValueError(msg) + + from langflow.utils.version import get_version_info + from langflow.utils.version import is_pre_release as langflow_is_pre_release - version = metadata.version("langflow-base") - is_pre_release = "a" in version or "b" in version or "rc" in version + version = get_version_info()["version"] + is_pre_release = langflow_is_pre_release(version) if info.data["save_db_in_config_dir"]: database_dir = info.data["config_dir"] @@ -255,32 +281,29 @@ def set_database_url(cls, value, info): else: logger.debug(f"Creating new database at {new_pre_path}") final_path = new_pre_path + elif Path(new_path).exists(): + logger.debug(f"Database already exists at {new_path}, using it") + final_path = new_path + elif Path(f"./{db_file_name}").exists(): + try: + logger.debug("Copying existing database to new location") + copy2(f"./{db_file_name}", new_path) + logger.debug(f"Copied existing database to {new_path}") + except Exception: # noqa: BLE001 + logger.exception("Failed to copy database, using default path") + new_path = f"./{db_file_name}" else: - if Path(new_path).exists(): - logger.debug(f"Database already exists at {new_path}, using it") - final_path = new_path - elif Path("./{db_file_name}").exists(): - try: - logger.debug("Copying existing database to new location") - copy2("./{db_file_name}", new_path) - logger.debug(f"Copied existing database to {new_path}") - except Exception: - logger.error("Failed to copy database, using default path") - new_path = "./{db_file_name}" - else: - final_path = new_path + final_path = new_path if final_path is None: - if is_pre_release: - final_path = new_pre_path - else: - final_path = new_path + final_path = new_pre_path if is_pre_release else new_path value = f"sqlite:///{final_path}" return value @field_validator("components_path",
mode="before") + @classmethod def set_components_path(cls, value): if os.getenv("LANGFLOW_COMPONENTS_PATH"): logger.debug("Adding LANGFLOW_COMPONENTS_PATH to components_path") @@ -307,12 +330,12 @@ def set_components_path(cls, value): model_config = SettingsConfigDict(validate_assignment=True, extra="ignore", env_prefix="LANGFLOW_") - def update_from_yaml(self, file_path: str, dev: bool = False): + def update_from_yaml(self, file_path: str, *, dev: bool = False) -> None: new_settings = load_settings_from_yaml(file_path) self.components_path = new_settings.components_path or [] self.dev = dev - def update_settings(self, **kwargs): + def update_settings(self, **kwargs) -> None: logger.debug("Updating settings") for key, value in kwargs.items(): # value may contain sensitive information, so we don't want to log it @@ -322,20 +345,19 @@ def update_settings(self, **kwargs): logger.debug(f"Updating {key}") if isinstance(getattr(self, key), list): # value might be a '[something]' string + _value = value with contextlib.suppress(json.decoder.JSONDecodeError): - value = orjson.loads(str(value)) - if isinstance(value, list): - for item in value: - if isinstance(item, Path): - item = str(item) - if item not in getattr(self, key): - getattr(self, key).append(item) + _value = orjson.loads(str(value)) + if isinstance(_value, list): + for item in _value: + _item = str(item) if isinstance(item, Path) else item + if _item not in getattr(self, key): + getattr(self, key).append(_item) logger.debug(f"Extended {key}") else: - if isinstance(value, Path): - value = str(value) - if value not in getattr(self, key): - getattr(self, key).append(value) + _value = str(_value) if isinstance(_value, Path) else _value + if _value not in getattr(self, key): + getattr(self, key).append(_value) logger.debug(f"Appended {key}") else: @@ -344,19 +366,20 @@ def update_settings(self, **kwargs): logger.debug(f"{key}: {getattr(self, key)}") @classmethod - def settings_customise_sources( + @override + def settings_customise_sources( # type: ignore[misc] cls, - settings_cls: Type[BaseSettings], + settings_cls: type[BaseSettings], init_settings: PydanticBaseSettingsSource, env_settings: PydanticBaseSettingsSource, dotenv_settings: PydanticBaseSettingsSource, file_secret_settings: PydanticBaseSettingsSource, - ) -> Tuple[PydanticBaseSettingsSource, ...]: + ) -> tuple[PydanticBaseSettingsSource, ...]: return (MyCustomSource(settings_cls),) -def save_settings_to_yaml(settings: Settings, file_path: str): - with open(file_path, "w") as f: +def save_settings_to_yaml(settings: Settings, file_path: str) -> None: + with Path(file_path).open("w", encoding="utf-8") as f: settings_dict = settings.model_dump() yaml.dump(settings_dict, f) @@ -365,17 +388,19 @@ def load_settings_from_yaml(file_path: str) -> Settings: # Check if a string is a valid path or a file name if "/" not in file_path: # Get current path - current_path = os.path.dirname(os.path.abspath(__file__)) - - file_path = os.path.join(current_path, file_path) + current_path = Path(__file__).resolve().parent + _file_path = Path(current_path) / file_path + else: + _file_path = Path(file_path) - with open(file_path, "r") as f: + with _file_path.open(encoding="utf-8") as f: settings_dict = yaml.safe_load(f) settings_dict = {k.upper(): v for k, v in settings_dict.items()} for key in settings_dict: - if key not in Settings.model_fields.keys(): - raise KeyError(f"Key {key} not found in settings") + if key not in Settings.model_fields: + msg = f"Key {key} not found in settings" + raise 
KeyError(msg) logger.debug(f"Loading {len(settings_dict[key])} {key} from {file_path}") return Settings(**settings_dict) diff --git a/src/backend/base/langflow/services/settings/constants.py b/src/backend/base/langflow/services/settings/constants.py index 8f7a97cbe65f..dd24f90a33d0 100644 --- a/src/backend/base/langflow/services/settings/constants.py +++ b/src/backend/base/langflow/services/settings/constants.py @@ -1,5 +1,5 @@ DEFAULT_SUPERUSER = "langflow" -DEFAULT_SUPERUSER_PASSWORD = "langflow" +DEFAULT_SUPERUSER_PASSWORD = "langflow" # noqa: S105 VARIABLES_TO_GET_FROM_ENVIRONMENT = [ "OPENAI_API_KEY", "ANTHROPIC_API_KEY", diff --git a/src/backend/base/langflow/services/settings/factory.py b/src/backend/base/langflow/services/settings/factory.py index 008982f2b1e9..30f3ca9c3b02 100644 --- a/src/backend/base/langflow/services/settings/factory.py +++ b/src/backend/base/langflow/services/settings/factory.py @@ -10,7 +10,7 @@ def __new__(cls): cls._instance = super().__new__(cls) return cls._instance - def __init__(self): + def __init__(self) -> None: super().__init__(SettingsService) def create(self): diff --git a/src/backend/base/langflow/services/settings/feature_flags.py b/src/backend/base/langflow/services/settings/feature_flags.py new file mode 100644 index 000000000000..a234710d41a0 --- /dev/null +++ b/src/backend/base/langflow/services/settings/feature_flags.py @@ -0,0 +1,11 @@ +from pydantic_settings import BaseSettings + + +class FeatureFlags(BaseSettings): + mvp_components: bool = False + + class Config: + env_prefix = "LANGFLOW_FEATURE_" + + +FEATURE_FLAGS = FeatureFlags() diff --git a/src/backend/base/langflow/services/settings/manager.py b/src/backend/base/langflow/services/settings/manager.py index d7d2184f3451..c5b0ea5ff048 100644 --- a/src/backend/base/langflow/services/settings/manager.py +++ b/src/backend/base/langflow/services/settings/manager.py @@ -1,4 +1,6 @@ -import os +from __future__ import annotations + +from pathlib import Path import yaml from loguru import logger @@ -17,26 +19,29 @@ def __init__(self, settings: Settings, auth_settings: AuthSettings): self.auth_settings = auth_settings @classmethod - def load_settings_from_yaml(cls, file_path: str) -> "SettingsService": + def load_settings_from_yaml(cls, file_path: str) -> SettingsService: # Check if a string is a valid path or a file name if "/" not in file_path: # Get current path - current_path = os.path.dirname(os.path.abspath(__file__)) - - file_path = os.path.join(current_path, file_path) + current_path = Path(__file__).resolve().parent + _file_path = Path(current_path) / file_path + else: + _file_path = Path(file_path) - with open(file_path, "r") as f: + with _file_path.open(encoding="utf-8") as f: settings_dict = yaml.safe_load(f) settings_dict = {k.upper(): v for k, v in settings_dict.items()} for key in settings_dict: - if key not in Settings.model_fields.keys(): - raise KeyError(f"Key {key} not found in settings") + if key not in Settings.model_fields: + msg = f"Key {key} not found in settings" + raise KeyError(msg) logger.debug(f"Loading {len(settings_dict[key])} {key} from {file_path}") settings = Settings(**settings_dict) if not settings.config_dir: - raise ValueError("CONFIG_DIR must be set in settings") + msg = "CONFIG_DIR must be set in settings" + raise ValueError(msg) auth_settings = AuthSettings( CONFIG_DIR=settings.config_dir, diff --git a/src/backend/base/langflow/services/settings/service.py b/src/backend/base/langflow/services/settings/service.py index b83b47387a3e..a633de6f6246 100644 
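The new `feature_flags.py` above is a two-line `pydantic-settings` class, which means a flag is flipped purely through the environment using the `LANGFLOW_FEATURE_` prefix. Roughly how it behaves (same shape as the new module; the assertion is illustrative):

```python
import os

from pydantic_settings import BaseSettings


class FeatureFlags(BaseSettings):
    mvp_components: bool = False

    class Config:
        env_prefix = "LANGFLOW_FEATURE_"


os.environ["LANGFLOW_FEATURE_MVP_COMPONENTS"] = "true"
assert FeatureFlags().mvp_components is True  # env is read at instantiation
```

Since the module builds its `FEATURE_FLAGS` singleton at import time, the variable has to be exported before Langflow is imported for the flag to take effect.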
--- a/src/backend/base/langflow/services/settings/service.py +++ b/src/backend/base/langflow/services/settings/service.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from langflow.services.base import Service from langflow.services.settings.auth import AuthSettings from langflow.services.settings.base import Settings @@ -12,12 +14,13 @@ def __init__(self, settings: Settings, auth_settings: AuthSettings): self.auth_settings: AuthSettings = auth_settings @classmethod - def initialize(cls) -> "SettingsService": + def initialize(cls) -> SettingsService: # Check if a string is a valid path or a file name settings = Settings() if not settings.config_dir: - raise ValueError("CONFIG_DIR must be set in settings") + msg = "CONFIG_DIR must be set in settings" + raise ValueError(msg) auth_settings = AuthSettings( CONFIG_DIR=settings.config_dir, diff --git a/src/backend/base/langflow/services/settings/utils.py b/src/backend/base/langflow/services/settings/utils.py index 1fd308e724b5..b280444df70b 100644 --- a/src/backend/base/langflow/services/settings/utils.py +++ b/src/backend/base/langflow/services/settings/utils.py @@ -1,20 +1,19 @@ -import os -from pathlib import Path import platform +from pathlib import Path from loguru import logger -def set_secure_permissions(file_path): - if platform.system() in ["Linux", "Darwin"]: # Unix/Linux/Mac - os.chmod(file_path, 0o600) +def set_secure_permissions(file_path: Path) -> None: + if platform.system() in {"Linux", "Darwin"}: # Unix/Linux/Mac + file_path.chmod(0o600) elif platform.system() == "Windows": import win32api import win32con import win32security - user, domain, _ = win32security.LookupAccountName("", win32api.GetUserName()) - sd = win32security.GetFileSecurity(file_path, win32security.DACL_SECURITY_INFORMATION) + user, _, _ = win32security.LookupAccountName("", win32api.GetUserName()) + sd = win32security.GetFileSecurity(str(file_path), win32security.DACL_SECURITY_INFORMATION) dacl = win32security.ACL() # Set the new DACL for the file: read and write access for the owner, no access for everyone else @@ -24,20 +23,18 @@ def set_secure_permissions(file_path): user, ) sd.SetSecurityDescriptorDacl(1, dacl, 0) - win32security.SetFileSecurity(file_path, win32security.DACL_SECURITY_INFORMATION, sd) + win32security.SetFileSecurity(str(file_path), win32security.DACL_SECURITY_INFORMATION, sd) else: - print("Unsupported OS") + logger.error("Unsupported OS") def write_secret_to_file(path: Path, value: str) -> None: - with path.open("wb") as f: - f.write(value.encode("utf-8")) + path.write_text(value, encoding="utf-8") try: set_secure_permissions(path) - except Exception: - logger.error("Failed to set secure permissions on secret key") + except Exception: # noqa: BLE001 + logger.exception("Failed to set secure permissions on secret key") def read_secret_from_file(path: Path) -> str: - with path.open("r") as f: - return f.read() + return path.read_text(encoding="utf-8") diff --git a/src/backend/base/langflow/services/shared_component_cache/__init__.py b/src/backend/base/langflow/services/shared_component_cache/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/src/backend/base/langflow/services/shared_component_cache/factory.py b/src/backend/base/langflow/services/shared_component_cache/factory.py new file mode 100644 index 000000000000..c9c464967352 --- /dev/null +++ b/src/backend/base/langflow/services/shared_component_cache/factory.py @@ -0,0 +1,15 @@ +from typing import TYPE_CHECKING + +from langflow.services.factory 
import ServiceFactory +from langflow.services.shared_component_cache.service import SharedComponentCacheService + +if TYPE_CHECKING: + from langflow.services.settings.service import SettingsService + + +class SharedComponentCacheServiceFactory(ServiceFactory): + def __init__(self) -> None: + super().__init__(SharedComponentCacheService) + + def create(self, settings_service: "SettingsService"): + return SharedComponentCacheService(expiration_time=settings_service.settings.cache_expire) diff --git a/src/backend/base/langflow/services/shared_component_cache/service.py b/src/backend/base/langflow/services/shared_component_cache/service.py new file mode 100644 index 000000000000..3f401cac32b5 --- /dev/null +++ b/src/backend/base/langflow/services/shared_component_cache/service.py @@ -0,0 +1,7 @@ +from langflow.services.cache import ThreadingInMemoryCache + + +class SharedComponentCacheService(ThreadingInMemoryCache): + """A caching service shared across components.""" + + name = "shared_component_cache_service" diff --git a/src/backend/base/langflow/services/socket/factory.py b/src/backend/base/langflow/services/socket/factory.py index 3ea6bb0ba0cd..68fe8337c8f5 100644 --- a/src/backend/base/langflow/services/socket/factory.py +++ b/src/backend/base/langflow/services/socket/factory.py @@ -8,7 +8,7 @@ class SocketIOFactory(ServiceFactory): - def __init__(self): + def __init__(self) -> None: super().__init__( service_class=SocketIOService, ) diff --git a/src/backend/base/langflow/services/socket/service.py b/src/backend/base/langflow/services/socket/service.py index 45cfc5fbcb16..8f6e44000858 100644 --- a/src/backend/base/langflow/services/socket/service.py +++ b/src/backend/base/langflow/services/socket/service.py @@ -1,23 +1,21 @@ -from typing import TYPE_CHECKING, Any +from typing import Any -import socketio # type: ignore +import socketio from loguru import logger from langflow.services.base import Service +from langflow.services.cache.base import AsyncBaseCacheService, CacheService from langflow.services.deps import get_chat_service from langflow.services.socket.utils import build_vertex, get_vertices -if TYPE_CHECKING: - from langflow.services.cache.service import CacheService - class SocketIOService(Service): name = "socket_service" - def __init__(self, cache_service: "CacheService"): + def __init__(self, cache_service: CacheService | AsyncBaseCacheService): self.cache_service = cache_service - def init(self, sio: socketio.AsyncServer): + def init(self, sio: socketio.AsyncServer) -> None: # Registering event handlers self.sio = sio if self.sio: @@ -28,53 +26,50 @@ def init(self, sio: socketio.AsyncServer): self.sio.on("build_vertex")(self.on_build_vertex) self.sessions = {} # type: dict[str, dict] - async def emit_error(self, sid, error): + async def emit_error(self, sid, error) -> None: await self.sio.emit("error", to=sid, data=error) - async def connect(self, sid, environ): + async def connect(self, sid, environ) -> None: logger.info(f"Socket connected: {sid}") self.sessions[sid] = environ - async def disconnect(self, sid): + async def disconnect(self, sid) -> None: logger.info(f"Socket disconnected: {sid}") self.sessions.pop(sid, None) - async def message(self, sid, data=None): + async def message(self, sid, data=None) -> None: # Logic for handling messages await self.emit_message(to=sid, data=data or {"foo": "bar", "baz": [1, 2, 3]}) - async def emit_message(self, to, data): + async def emit_message(self, to, data) -> None: # Abstracting sio.emit await self.sio.emit("message", to=to, 
data=data) - async def emit_token(self, to, data): + async def emit_token(self, to, data) -> None: await self.sio.emit("token", to=to, data=data) - async def on_get_vertices(self, sid, flow_id): + async def on_get_vertices(self, sid, flow_id) -> None: await get_vertices(self.sio, sid, flow_id, get_chat_service()) - async def on_build_vertex(self, sid, flow_id, vertex_id, tweaks, inputs): + async def on_build_vertex(self, sid, flow_id, vertex_id) -> None: await build_vertex( sio=self.sio, sid=sid, flow_id=flow_id, vertex_id=vertex_id, - tweaks=tweaks, - inputs=inputs, get_cache=self.get_cache, set_cache=self.set_cache, ) - def get_cache(self, sid: str) -> Any: - """ - Get the cache for a client. - """ - return self.cache_service.get(sid) + async def get_cache(self, sid: str) -> Any: + """Get the cache for a client.""" + value = self.cache_service.get(sid) + if isinstance(self.cache_service, AsyncBaseCacheService): + return await value + return value - def set_cache(self, sid: str, build_result: Any) -> bool: - """ - Set the cache for a client. - """ + async def set_cache(self, sid: str, build_result: Any) -> bool: + """Set the cache for a client.""" # client_id is the flow id but that already exists in the cache # so we need to change it to something else @@ -82,5 +77,8 @@ def set_cache(self, sid: str, build_result: Any) -> bool: "result": build_result, "type": type(build_result), } - self.cache_service.upsert(sid, result_dict) + result = self.cache_service.upsert(sid, result_dict) + if isinstance(self.cache_service, AsyncBaseCacheService): + await result + return await self.cache_service.contains(sid) return sid in self.cache_service diff --git a/src/backend/base/langflow/services/socket/utils.py b/src/backend/base/langflow/services/socket/utils.py index fb9533de9f04..723acd48dd5f 100644 --- a/src/backend/base/langflow/services/socket/utils.py +++ b/src/backend/base/langflow/services/socket/utils.py @@ -1,7 +1,8 @@ import time -from typing import Callable +from collections.abc import Callable -import socketio # type: ignore +import socketio +from loguru import logger from sqlmodel import select from langflow.api.utils import format_elapsed_time @@ -13,16 +14,16 @@ from langflow.services.deps import get_session -def set_socketio_server(socketio_server): +def set_socketio_server(socketio_server) -> None: from langflow.services.deps import get_socket_service socket_service = get_socket_service() socket_service.init(socketio_server) -async def get_vertices(sio, sid, flow_id, chat_service): +async def get_vertices(sio, sid, flow_id, chat_service) -> None: try: - session = get_session() + session = next(get_session()) flow: Flow = session.exec(select(Flow).where(Flow.id == flow_id)).first() if not flow or not flow.data: await sio.emit("error", data="Invalid flow ID", to=sid) @@ -30,12 +31,13 @@ async def get_vertices(sio, sid, flow_id, chat_service): graph = Graph.from_payload(flow.data) chat_service.set_cache(flow_id, graph) - vertices = graph.layered_topological_sort() + vertices = graph.layered_topological_sort(graph.vertices) # Emit the vertices to the client await sio.emit("vertices_order", data=vertices, to=sid) - except Exception as exc: + except Exception as exc: # noqa: BLE001 + logger.opt(exception=True).debug("Error getting vertices") await sio.emit("error", data=str(exc), to=sid) @@ -46,11 +48,9 @@ async def build_vertex( vertex_id: str, get_cache: Callable, set_cache: Callable, - tweaks=None, - inputs=None, -): +) -> None: try: - cache = get_cache(flow_id) + cache = await 
get_cache(flow_id) graph = cache.get("result") if not isinstance(graph, Graph): @@ -63,9 +63,9 @@ async def build_vertex( return start_time = time.perf_counter() try: - if isinstance(vertex, Vertex) or not vertex._built: + if isinstance(vertex, Vertex) or not vertex.built: await vertex.build(user_id=None, session_id=sid) - params = vertex._built_object_repr() + params = vertex.built_object_repr() valid = True result_dict = vertex.get_built_result() # We need to set the artifacts to pass information @@ -80,12 +80,13 @@ async def build_vertex( duration=duration, timedelta=timedelta, ) - except Exception as exc: + except Exception as exc: # noqa: BLE001 + logger.opt(exception=True).debug("Error building vertex") params = str(exc) valid = False result_dict = ResultDataResponse(results={}) artifacts = {} - set_cache(flow_id, graph) + await set_cache(flow_id, graph) log_vertex_build( flow_id=flow_id, vertex_id=vertex_id, @@ -99,5 +100,6 @@ async def build_vertex( response = VertexBuildResponse(valid=valid, params=params, id=vertex.id, data=result_dict) await sio.emit("vertex_build", data=response.model_dump(), to=sid) - except Exception as exc: + except Exception as exc: # noqa: BLE001 + logger.opt(exception=True).debug("Error building vertex") await sio.emit("error", data=str(exc), to=sid) diff --git a/src/backend/base/langflow/services/state/factory.py b/src/backend/base/langflow/services/state/factory.py index e6c5ee740e08..350d7bdcdaf9 100644 --- a/src/backend/base/langflow/services/state/factory.py +++ b/src/backend/base/langflow/services/state/factory.py @@ -4,7 +4,7 @@ class StateServiceFactory(ServiceFactory): - def __init__(self): + def __init__(self) -> None: super().__init__(InMemoryStateService) def create(self, settings_service: SettingsService): diff --git a/src/backend/base/langflow/services/state/service.py b/src/backend/base/langflow/services/state/service.py index b56f95148ef0..100b5442da7b 100644 --- a/src/backend/base/langflow/services/state/service.py +++ b/src/backend/base/langflow/services/state/service.py @@ -1,6 +1,6 @@ from collections import defaultdict +from collections.abc import Callable from threading import Lock -from typing import Callable from loguru import logger @@ -11,19 +11,19 @@ class StateService(Service): name = "state_service" - def append_state(self, key, new_state, run_id: str): + def append_state(self, key, new_state, run_id: str) -> None: raise NotImplementedError - def update_state(self, key, new_state, run_id: str): + def update_state(self, key, new_state, run_id: str) -> None: raise NotImplementedError def get_state(self, key, run_id: str): raise NotImplementedError - def subscribe(self, key, observer: Callable): + def subscribe(self, key, observer: Callable) -> None: raise NotImplementedError - def notify_observers(self, key, new_state): + def notify_observers(self, key, new_state) -> None: raise NotImplementedError @@ -34,7 +34,7 @@ def __init__(self, settings_service: SettingsService): self.observers: dict = defaultdict(list) self.lock = Lock() - def append_state(self, key, new_state, run_id: str): + def append_state(self, key, new_state, run_id: str) -> None: with self.lock: if run_id not in self.states: self.states[run_id] = {} @@ -45,7 +45,7 @@ def append_state(self, key, new_state, run_id: str): self.states[run_id][key].append(new_state) self.notify_append_observers(key, new_state) - def update_state(self, key, new_state, run_id: str): + def update_state(self, key, new_state, run_id: str) -> None: with self.lock: if run_id not in 
self.states: self.states[run_id] = {} @@ -56,19 +56,19 @@ def get_state(self, key, run_id: str): with self.lock: return self.states.get(run_id, {}).get(key, "") - def subscribe(self, key, observer: Callable): + def subscribe(self, key, observer: Callable) -> None: with self.lock: if observer not in self.observers[key]: self.observers[key].append(observer) - def notify_observers(self, key, new_state): + def notify_observers(self, key, new_state) -> None: for callback in self.observers[key]: callback(key, new_state, append=False) - def notify_append_observers(self, key, new_state): + def notify_append_observers(self, key, new_state) -> None: for callback in self.observers[key]: try: callback(key, new_state, append=True) - except Exception as e: - logger.error(f"Error in observer {callback} for key {key}: {e}") + except Exception: # noqa: BLE001 + logger.exception(f"Error in observer {callback} for key {key}") logger.warning("Callbacks not implemented yet") diff --git a/src/backend/base/langflow/services/storage/factory.py b/src/backend/base/langflow/services/storage/factory.py index ae4783f1ea05..f42a84ece07c 100644 --- a/src/backend/base/langflow/services/storage/factory.py +++ b/src/backend/base/langflow/services/storage/factory.py @@ -7,7 +7,7 @@ class StorageServiceFactory(ServiceFactory): - def __init__(self): + def __init__(self) -> None: super().__init__( StorageService, ) @@ -18,12 +18,11 @@ def create(self, session_service: SessionService, settings_service: SettingsServ from .local import LocalStorageService return LocalStorageService(session_service, settings_service) - elif storage_type.lower() == "s3": + if storage_type.lower() == "s3": from .s3 import S3StorageService return S3StorageService(session_service, settings_service) - else: - logger.warning(f"Storage type {storage_type} not supported. Using local storage.") - from .local import LocalStorageService + logger.warning(f"Storage type {storage_type} not supported. Using local storage.") + from .local import LocalStorageService - return LocalStorageService(session_service, settings_service) + return LocalStorageService(session_service, settings_service) diff --git a/src/backend/base/langflow/services/storage/local.py b/src/backend/base/langflow/services/storage/local.py index 1c3a67daf635..5a112294c692 100644 --- a/src/backend/base/langflow/services/storage/local.py +++ b/src/backend/base/langflow/services/storage/local.py @@ -1,3 +1,4 @@ +import asyncio from pathlib import Path from loguru import logger @@ -8,7 +9,7 @@ class LocalStorageService(StorageService): """A service class for handling local storage operations without aiofiles.""" - def __init__(self, session_service, settings_service): + def __init__(self, session_service, settings_service) -> None: """Initialize the local storage service with session and settings services.""" super().__init__(session_service, settings_service) self.data_dir = Path(settings_service.settings.config_dir) @@ -18,9 +19,8 @@ def build_full_path(self, flow_id: str, file_name: str) -> str: """Build the full path of a file in the local storage.""" return str(self.data_dir / flow_id / file_name) - async def save_file(self, flow_id: str, file_name: str, data: bytes): - """ - Save a file in the local storage. + async def save_file(self, flow_id: str, file_name: str, data: bytes) -> None: + """Save a file in the local storage. :param flow_id: The identifier for the flow. :param file_name: The name of the file to be saved. 
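The `save_file` and `get_file` hunks just below swap blocking `open()` calls for `asyncio.to_thread`, so disk IO no longer stalls the event loop while a flow is running. A minimal standalone sketch of the same pattern (the path and helper names here are illustrative, not Langflow's):

```python
import asyncio
from pathlib import Path


async def save_bytes(path: Path, data: bytes) -> None:
    # Path.write_bytes blocks; to_thread ships it to a worker thread so the
    # event loop stays free to run other coroutines in the meantime.
    await asyncio.to_thread(path.write_bytes, data)


async def main() -> None:
    target = Path("/tmp/demo.bin")
    await save_bytes(target, b"hello")
    # Reads take the same shape: hand the bound method to a thread.
    content = await asyncio.to_thread(target.read_bytes)
    assert content == b"hello"


asyncio.run(main())
```

The inner `write_file`/`read_file` helpers in the patch serve the same role; `asyncio.to_thread` only needs a synchronous callable plus its arguments.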
@@ -33,17 +33,18 @@ async def save_file(self, flow_id: str, file_name: str, data: bytes): folder_path.mkdir(parents=True, exist_ok=True) file_path = folder_path / file_name + def write_file(file_path: Path, data: bytes) -> None: + file_path.write_bytes(data) + try: - with open(file_path, "wb") as f: - f.write(data) + await asyncio.to_thread(write_file, file_path, data) logger.info(f"File {file_name} saved successfully in flow {flow_id}.") - except Exception as e: - logger.error(f"Error saving file {file_name} in flow {flow_id}: {e}") - raise e + except Exception: + logger.exception(f"Error saving file {file_name} in flow {flow_id}") + raise async def get_file(self, flow_id: str, file_name: str) -> bytes: - """ - Retrieve a file from the local storage. + """Retrieve a file from the local storage. :param flow_id: The identifier for the flow. :param file_name: The name of the file to be retrieved. @@ -53,15 +54,18 @@ async def get_file(self, flow_id: str, file_name: str) -> bytes: file_path = self.data_dir / flow_id / file_name if not file_path.exists(): logger.warning(f"File {file_name} not found in flow {flow_id}.") - raise FileNotFoundError(f"File {file_name} not found in flow {flow_id}") + msg = f"File {file_name} not found in flow {flow_id}" + raise FileNotFoundError(msg) - with open(file_path, "rb") as f: - logger.debug(f"File {file_name} retrieved successfully from flow {flow_id}.") - return f.read() + def read_file(file_path: Path) -> bytes: + return file_path.read_bytes() + + content = await asyncio.to_thread(read_file, file_path) + logger.debug(f"File {file_name} retrieved successfully from flow {flow_id}.") + return content async def list_files(self, flow_id: str): - """ - List all files in a specified flow. + """List all files in a specified flow. :param flow_id: The identifier for the flow. :return: A list of file names. @@ -70,15 +74,15 @@ async def list_files(self, flow_id: str): folder_path = self.data_dir / flow_id if not folder_path.exists() or not folder_path.is_dir(): logger.warning(f"Flow {flow_id} directory does not exist.") - raise FileNotFoundError(f"Flow {flow_id} directory does not exist.") + msg = f"Flow {flow_id} directory does not exist." + raise FileNotFoundError(msg) files = [file.name for file in folder_path.iterdir() if file.is_file()] logger.info(f"Listed {len(files)} files in flow {flow_id}.") return files - async def delete_file(self, flow_id: str, file_name: str): - """ - Delete a file from the local storage. + async def delete_file(self, flow_id: str, file_name: str) -> None: + """Delete a file from the local storage. :param flow_id: The identifier for the flow. :param file_name: The name of the file to be deleted. 
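Throughout these storage hunks (and in the store service further down), f-strings move out of `raise` statements and into a `msg` variable first. This matches ruff's flake8-errmsg rules (EM101/EM102), which flag string literals and f-strings inline in `raise` because the traceback then repeats the whole expression right above the rendered message. The shape in isolation (the function and message here are made up for illustration):

```python
from pathlib import Path


def require_file(path: Path) -> Path:
    if not path.exists():
        # Binding the message first keeps the `raise` line short in
        # tracebacks and gives the text a single definition point.
        msg = f"File {path} not found"
        raise FileNotFoundError(msg)
    return path
```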
@@ -90,6 +94,6 @@ async def delete_file(self, flow_id: str, file_name: str): else: logger.warning(f"Attempted to delete non-existent file {file_name} in flow {flow_id}.") - async def teardown(self): + async def teardown(self) -> None: """Perform any cleanup operations when the service is being torn down.""" - pass # No specific teardown actions required for local + # No specific teardown actions required for local diff --git a/src/backend/base/langflow/services/storage/s3.py b/src/backend/base/langflow/services/storage/s3.py index 4426f377c74a..46ce643c3f1f 100644 --- a/src/backend/base/langflow/services/storage/s3.py +++ b/src/backend/base/langflow/services/storage/s3.py @@ -1,5 +1,5 @@ -import boto3 # type: ignore -from botocore.exceptions import ClientError, NoCredentialsError # type: ignore +import boto3 +from botocore.exceptions import ClientError, NoCredentialsError from loguru import logger from .service import StorageService @@ -8,16 +8,15 @@ class S3StorageService(StorageService): """A service class for handling operations with AWS S3 storage.""" - async def __init__(self, session_service, settings_service): + def __init__(self, session_service, settings_service) -> None: """Initialize the S3 storage service with session and settings services.""" super().__init__(session_service, settings_service) self.bucket = "langflow" self.s3_client = boto3.client("s3") self.set_ready() - async def save_file(self, folder: str, file_name: str, data): - """ - Save a file to the S3 bucket. + async def save_file(self, folder: str, file_name: str, data) -> None: + """Save a file to the S3 bucket. :param folder: The folder in the bucket to save the file. :param file_name: The name of the file to be saved. @@ -28,15 +27,14 @@ async def save_file(self, folder: str, file_name: str, data): self.s3_client.put_object(Bucket=self.bucket, Key=f"{folder}/{file_name}", Body=data) logger.info(f"File {file_name} saved successfully in folder {folder}.") except NoCredentialsError: - logger.error("Credentials not available for AWS S3.") + logger.exception("Credentials not available for AWS S3.") raise - except ClientError as e: - logger.error(f"Error saving file {file_name} in folder {folder}: {e}") + except ClientError: + logger.exception(f"Error saving file {file_name} in folder {folder}") raise async def get_file(self, folder: str, file_name: str): - """ - Retrieve a file from the S3 bucket. + """Retrieve a file from the S3 bucket. :param folder: The folder in the bucket where the file is stored. :param file_name: The name of the file to be retrieved. @@ -47,13 +45,12 @@ async def get_file(self, folder: str, file_name: str): response = self.s3_client.get_object(Bucket=self.bucket, Key=f"{folder}/{file_name}") logger.info(f"File {file_name} retrieved successfully from folder {folder}.") return response["Body"].read() - except ClientError as e: - logger.error(f"Error retrieving file {file_name} from folder {folder}: {e}") + except ClientError: + logger.exception(f"Error retrieving file {file_name} from folder {folder}") raise async def list_files(self, folder: str): - """ - List all files in a specified folder of the S3 bucket. + """List all files in a specified folder of the S3 bucket. :param folder: The folder in the bucket to list files from. :return: A list of file names. 
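The most substantive fix in `s3.py` above is `async def __init__` becoming `def __init__`: `__init__` must return `None`, so an `async` version returns a coroutine and every instantiation fails with `TypeError: __init__() should return None, not 'coroutine'`. When construction genuinely needs to await something, the usual alternative is an async factory classmethod; a sketch under that assumption (class and method names are illustrative):

```python
import asyncio


class BucketStorage:
    def __init__(self, bucket: str) -> None:
        # Keep __init__ synchronous: only cheap attribute assignment here.
        self.bucket = bucket
        self.ready = False

    @classmethod
    async def create(cls, bucket: str) -> "BucketStorage":
        # Async factory: construct synchronously, then await the real setup.
        self = cls(bucket)
        await asyncio.sleep(0)  # stand-in for an awaited setup call
        self.ready = True
        return self


storage = asyncio.run(BucketStorage.create("langflow"))
assert storage.ready
```

Since `boto3.client("s3")` is itself synchronous, the plain `def __init__` in the patch is sufficient here.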
@@ -61,16 +58,16 @@ async def list_files(self, folder: str): """ try: response = self.s3_client.list_objects_v2(Bucket=self.bucket, Prefix=folder) - files = [item["Key"] for item in response.get("Contents", []) if "/" not in item["Key"][len(folder) :]] - logger.info(f"{len(files)} files listed in folder {folder}.") - return files - except ClientError as e: - logger.error(f"Error listing files in folder {folder}: {e}") + except ClientError: + logger.exception(f"Error listing files in folder {folder}") raise - async def delete_file(self, folder: str, file_name: str): - """ - Delete a file from the S3 bucket. + files = [item["Key"] for item in response.get("Contents", []) if "/" not in item["Key"][len(folder) :]] + logger.info(f"{len(files)} files listed in folder {folder}.") + return files + + async def delete_file(self, folder: str, file_name: str) -> None: + """Delete a file from the S3 bucket. :param folder: The folder in the bucket where the file is stored. :param file_name: The name of the file to be deleted. @@ -79,11 +76,10 @@ async def delete_file(self, folder: str, file_name: str): try: self.s3_client.delete_object(Bucket=self.bucket, Key=f"{folder}/{file_name}") logger.info(f"File {file_name} deleted successfully from folder {folder}.") - except ClientError as e: - logger.error(f"Error deleting file {file_name} from folder {folder}: {e}") + except ClientError: + logger.exception(f"Error deleting file {file_name} from folder {folder}") raise - async def teardown(self): + async def teardown(self) -> None: """Perform any cleanup operations when the service is being torn down.""" # No specific teardown actions required for S3 storage at the moment. - pass diff --git a/src/backend/base/langflow/services/storage/service.py b/src/backend/base/langflow/services/storage/service.py index 15f51d65c850..e139b73c4753 100644 --- a/src/backend/base/langflow/services/storage/service.py +++ b/src/backend/base/langflow/services/storage/service.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from abc import abstractmethod from typing import TYPE_CHECKING @@ -11,7 +13,7 @@ class StorageService(Service): name = "storage_service" - def __init__(self, session_service: "SessionService", settings_service: "SettingsService"): + def __init__(self, session_service: SessionService, settings_service: SettingsService): self.settings_service = settings_service self.session_service = session_service self.set_ready() @@ -19,7 +21,7 @@ def __init__(self, session_service: "SessionService", settings_service: "Setting def build_full_path(self, flow_id: str, file_name: str) -> str: raise NotImplementedError - def set_ready(self): + def set_ready(self) -> None: self.ready = True @abstractmethod @@ -35,8 +37,8 @@ async def list_files(self, flow_id: str) -> list[str]: raise NotImplementedError @abstractmethod - async def delete_file(self, flow_id: str, file_name: str) -> bool: + async def delete_file(self, flow_id: str, file_name: str) -> None: raise NotImplementedError - async def teardown(self): + async def teardown(self) -> None: raise NotImplementedError diff --git a/src/backend/base/langflow/services/store/exceptions.py b/src/backend/base/langflow/services/store/exceptions.py index df86d59bc64f..1c00c5a50cc1 100644 --- a/src/backend/base/langflow/services/store/exceptions.py +++ b/src/backend/base/langflow/services/store/exceptions.py @@ -1,25 +1,25 @@ -class CustomException(Exception): - def __init__(self, detail, status_code): +class CustomError(Exception): + def __init__(self, detail: str, status_code: 
int): super().__init__(detail) self.status_code = status_code # Define custom exceptions with status codes -class UnauthorizedError(CustomException): - def __init__(self, detail="Unauthorized access"): +class UnauthorizedError(CustomError): + def __init__(self, detail: str = "Unauthorized access"): super().__init__(detail, 401) -class ForbiddenError(CustomException): - def __init__(self, detail="Forbidden"): +class ForbiddenError(CustomError): + def __init__(self, detail: str = "Forbidden"): super().__init__(detail, 403) -class APIKeyError(CustomException): - def __init__(self, detail="API key error"): - super().__init__(detail, 400) #! Should be 401 +class APIKeyError(CustomError): + def __init__(self, detail: str = "API key error"): + super().__init__(detail, 400) # ! Should be 401 -class FilterError(CustomException): - def __init__(self, detail="Filter error"): +class FilterError(CustomError): + def __init__(self, detail: str = "Filter error"): super().__init__(detail, 400) diff --git a/src/backend/base/langflow/services/store/factory.py b/src/backend/base/langflow/services/store/factory.py index 2bdc918bde8f..0a4f18c4a2d5 100644 --- a/src/backend/base/langflow/services/store/factory.py +++ b/src/backend/base/langflow/services/store/factory.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import TYPE_CHECKING from langflow.services.factory import ServiceFactory @@ -8,8 +10,8 @@ class StoreServiceFactory(ServiceFactory): - def __init__(self): + def __init__(self) -> None: super().__init__(StoreService) - def create(self, settings_service: "SettingsService"): + def create(self, settings_service: SettingsService): return StoreService(settings_service) diff --git a/src/backend/base/langflow/services/store/schema.py b/src/backend/base/langflow/services/store/schema.py index 0c37e1166fbe..3a2bc8fc989a 100644 --- a/src/backend/base/langflow/services/store/schema.py +++ b/src/backend/base/langflow/services/store/schema.py @@ -1,4 +1,3 @@ -from typing import List, Optional from uuid import UUID from pydantic import BaseModel, field_validator @@ -6,12 +5,12 @@ class TagResponse(BaseModel): id: UUID - name: Optional[str] + name: str | None class UsersLikesResponse(BaseModel): - likes_count: Optional[int] - liked_by_user: Optional[bool] + likes_count: int | None + liked_by_user: bool | None class CreateComponentResponse(BaseModel): @@ -19,22 +18,22 @@ class CreateComponentResponse(BaseModel): class TagsIdResponse(BaseModel): - tags_id: Optional[TagResponse] + tags_id: TagResponse | None class ListComponentResponse(BaseModel): - id: Optional[UUID] = None - name: Optional[str] = None - description: Optional[str] = None - liked_by_count: Optional[int] = None - liked_by_user: Optional[bool] = None - is_component: Optional[bool] = None - metadata: Optional[dict] = {} - user_created: Optional[dict] = {} - tags: Optional[List[TagResponse]] = None - downloads_count: Optional[int] = None - last_tested_version: Optional[str] = None - private: Optional[bool] = None + id: UUID | None = None + name: str | None = None + description: str | None = None + liked_by_count: int | None = None + liked_by_user: bool | None = None + is_component: bool | None = None + metadata: dict | None = {} + user_created: dict | None = {} + tags: list[TagResponse] | None = None + downloads_count: int | None = None + last_tested_version: str | None = None + private: bool | None = None # tags comes as a TagsIdResponse but we want to return a list of TagResponse @field_validator("tags", mode="before") @@ -44,33 +43,32 
@@ def tags_to_list(cls, v): # if so, return v else transform to TagResponse if not v: return v - if all(["id" in tag and "name" in tag for tag in v]): + if all("id" in tag and "name" in tag for tag in v): return v - else: - return [TagResponse(**tag.get("tags_id")) for tag in v if tag.get("tags_id")] + return [TagResponse(**tag.get("tags_id")) for tag in v if tag.get("tags_id")] class ListComponentResponseModel(BaseModel): - count: Optional[int] = 0 + count: int | None = 0 authorized: bool - results: Optional[List[ListComponentResponse]] + results: list[ListComponentResponse] | None class DownloadComponentResponse(BaseModel): id: UUID - name: Optional[str] - description: Optional[str] - data: Optional[dict] - is_component: Optional[bool] - metadata: Optional[dict] = {} + name: str | None + description: str | None + data: dict | None + is_component: bool | None + metadata: dict | None = {} class StoreComponentCreate(BaseModel): name: str - description: Optional[str] + description: str | None data: dict - tags: Optional[List[str]] - parent: Optional[UUID] = None - is_component: Optional[bool] - last_tested_version: Optional[str] = None - private: Optional[bool] = True + tags: list[str] | None + parent: UUID | None = None + is_component: bool | None + last_tested_version: str | None = None + private: bool | None = True diff --git a/src/backend/base/langflow/services/store/service.py b/src/backend/base/langflow/services/store/service.py index e0d3649d0ff1..90447effa876 100644 --- a/src/backend/base/langflow/services/store/service.py +++ b/src/backend/base/langflow/services/store/service.py @@ -1,5 +1,7 @@ +from __future__ import annotations + import json -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple +from typing import TYPE_CHECKING, Any from uuid import UUID import httpx @@ -27,21 +29,22 @@ from contextlib import asynccontextmanager from contextvars import ContextVar -user_data_var: ContextVar[Optional[Dict[str, Any]]] = ContextVar("user_data", default=None) +user_data_var: ContextVar[dict[str, Any] | None] = ContextVar("user_data", default=None) @asynccontextmanager -async def user_data_context(store_service: "StoreService", api_key: Optional[str] = None): +async def user_data_context(store_service: StoreService, api_key: str | None = None): # Fetch and set user data to the context variable if api_key: try: - user_data, _ = await store_service._get( + user_data, _ = await store_service.get( f"{store_service.base_url}/users/me", api_key, params={"fields": "id"} ) user_data_var.set(user_data[0]) except HTTPStatusError as exc: - if exc.response.status_code == 403: - raise ValueError("Invalid API key") + if exc.response.status_code == httpx.codes.FORBIDDEN: + msg = "Invalid API key" + raise ValueError(msg) from exc try: yield finally: @@ -49,9 +52,8 @@ async def user_data_context(store_service: "StoreService", api_key: Optional[str user_data_var.set(None) -def get_id_from_search_string(search_string: str) -> Optional[str]: - """ - Extracts the ID from a search string. +def get_id_from_search_string(search_string: str) -> str | None: + """Extracts the ID from a search string. Args: search_string (str): The search string to extract the ID from. @@ -59,7 +61,7 @@ def get_id_from_search_string(search_string: str) -> Optional[str]: Returns: Optional[str]: The extracted ID, or None if no ID is found. 
""" - possible_id: Optional[str] = search_string + possible_id: str | None = search_string if "www.langflow.store/store/" in search_string: possible_id = search_string.split("/")[-1] @@ -71,13 +73,14 @@ def get_id_from_search_string(search_string: str) -> Optional[str]: class StoreService(Service): - """This is a service that integrates langflow with the store which - is a Directus instance. It allows to search, get and post components to - the store.""" + """This is a service that integrates langflow with the store which is a Directus instance. + + It allows to search, get and post components to the store. + """ name = "store_service" - def __init__(self, settings_service: "SettingsService"): + def __init__(self, settings_service: SettingsService): self.settings_service = settings_service self.base_url = self.settings_service.settings.store_url self.download_webhook_url = self.settings_service.settings.download_webhook_url @@ -109,33 +112,32 @@ async def check_api_key(self, api_key: str): # If it is, return True # If it is not, return False try: - user_data, _ = await self._get(f"{self.base_url}/users/me", api_key, params={"fields": "id"}) + user_data, _ = await self.get(f"{self.base_url}/users/me", api_key, params={"fields": "id"}) return "id" in user_data[0] except HTTPStatusError as exc: - if exc.response.status_code in [403, 401]: + if exc.response.status_code in {403, 401}: return False - else: - raise ValueError(f"Unexpected status code: {exc.response.status_code}") + msg = f"Unexpected status code: {exc.response.status_code}" + raise ValueError(msg) from exc except Exception as exc: - raise ValueError(f"Unexpected error: {exc}") + msg = f"Unexpected error: {exc}" + raise ValueError(msg) from exc - async def _get( - self, url: str, api_key: Optional[str] = None, params: Optional[Dict[str, Any]] = None - ) -> Tuple[List[Dict[str, Any]], Dict[str, Any]]: + async def get( + self, url: str, api_key: str | None = None, params: dict[str, Any] | None = None + ) -> tuple[list[dict[str, Any]], dict[str, Any]]: """Utility method to perform GET requests.""" - if api_key: - headers = {"Authorization": f"Bearer {api_key}"} - else: - headers = {} + headers = {"Authorization": f"Bearer {api_key}"} if api_key else {} async with httpx.AsyncClient() as client: try: response = await client.get(url, headers=headers, params=params, timeout=self.timeout) response.raise_for_status() - except HTTPError as exc: - raise exc + except HTTPError: + raise except Exception as exc: - raise ValueError(f"GET failed: {exc}") + msg = f"GET failed: {exc}" + raise ValueError(msg) from exc json_response = response.json() result = json_response["data"] metadata = {} @@ -157,22 +159,23 @@ async def call_webhook(self, api_key: str, webhook_url: str, component_id: UUID) ) response.raise_for_status() return response.json() - except HTTPError as exc: - raise exc - except Exception as exc: - logger.debug(f"Webhook failed: {exc}") + except HTTPError: + raise + except Exception: # noqa: BLE001 + logger.opt(exception=True).debug("Webhook failed") - def build_tags_filter(self, tags: List[str]): - tags_filter: Dict[str, Any] = {"tags": {"_and": []}} + def build_tags_filter(self, tags: list[str]): + tags_filter: dict[str, Any] = {"tags": {"_and": []}} for tag in tags: tags_filter["tags"]["_and"].append({"_some": {"tags_id": {"name": {"_eq": tag}}}}) return tags_filter async def count_components( self, - filter_conditions: List[Dict[str, Any]], - api_key: Optional[str] = None, - use_api_key: Optional[bool] = False, + 
filter_conditions: list[dict[str, Any]], + *, + api_key: str | None = None, + use_api_key: bool | None = False, ) -> int: params = {"aggregate": json.dumps({"count": "*"})} if filter_conditions: @@ -180,14 +183,14 @@ async def count_components( api_key = api_key if use_api_key else None - results, _ = await self._get(self.components_url, api_key, params) + results, _ = await self.get(self.components_url, api_key, params) return int(results[0].get("count", 0)) @staticmethod def build_search_filter_conditions(query: str): # instead of build the param ?search=query, we will build the filter # that will use _icontains (case insensitive) - conditions: Dict[str, Any] = {"_or": []} + conditions: dict[str, Any] = {"_or": []} conditions["_or"].append({"name": {"_icontains": query}}) conditions["_or"].append({"description": {"_icontains": query}}) conditions["_or"].append({"tags": {"tags_id": {"name": {"_icontains": query}}}}) @@ -196,14 +199,15 @@ def build_search_filter_conditions(query: str): def build_filter_conditions( self, - component_id: Optional[str] = None, - search: Optional[str] = None, - private: Optional[bool] = None, - tags: Optional[List[str]] = None, - is_component: Optional[bool] = None, - filter_by_user: Optional[bool] = False, - liked: Optional[bool] = False, - store_api_key: Optional[str] = None, + *, + component_id: str | None = None, + search: str | None = None, + private: bool | None = None, + tags: list[str] | None = None, + is_component: bool | None = None, + filter_by_user: bool | None = False, + liked: bool | None = False, + store_api_key: str | None = None, ): filter_conditions = [] @@ -228,15 +232,18 @@ def build_filter_conditions( liked_filter = self.build_liked_filter() filter_conditions.append(liked_filter) elif liked and not store_api_key: - raise APIKeyError("You must provide an API key to filter by likes") + msg = "You must provide an API key to filter by likes" + raise APIKeyError(msg) if filter_by_user and store_api_key: user_data = user_data_var.get() if not user_data: - raise ValueError("No user data") + msg = "No user data" + raise ValueError(msg) filter_conditions.append({"user_created": {"_eq": user_data["id"]}}) elif filter_by_user and not store_api_key: - raise APIKeyError("You must provide an API key to filter your components") + msg = "You must provide an API key to filter your components" + raise APIKeyError(msg) else: filter_conditions.append({"private": {"_eq": False}}) @@ -246,20 +253,22 @@ def build_liked_filter(self): user_data = user_data_var.get() # params["filter"] = json.dumps({"user_created": {"_eq": user_data["id"]}}) if not user_data: - raise ValueError("No user data") + msg = "No user data" + raise ValueError(msg) return {"liked_by": {"directus_users_id": {"_eq": user_data["id"]}}} async def query_components( self, - api_key: Optional[str] = None, - sort: Optional[List[str]] = None, + *, + api_key: str | None = None, + sort: list[str] | None = None, page: int = 1, limit: int = 15, - fields: Optional[List[str]] = None, - filter_conditions: Optional[List[Dict[str, Any]]] = None, - use_api_key: Optional[bool] = False, - ) -> Tuple[List[ListComponentResponse], Dict[str, Any]]: - params: Dict[str, Any] = { + fields: list[str] | None = None, + filter_conditions: list[dict[str, Any]] | None = None, + use_api_key: bool | None = False, + ) -> tuple[list[ListComponentResponse], dict[str, Any]]: + params: dict[str, Any] = { "page": page, "limit": limit, "fields": ",".join(fields) if fields is not None else ",".join(self.default_fields), @@ -280,7 
+289,7 @@ async def query_components( # so we don't need to risk passing an invalid api_key # and getting 401 api_key = api_key if use_api_key else None - results, metadata = await self._get(self.components_url, api_key, params) + results, metadata = await self.get(self.components_url, api_key, params) if isinstance(results, dict): results = [results] @@ -288,13 +297,14 @@ async def query_components( return results_objects, metadata - async def get_liked_by_user_components(self, component_ids: List[str], api_key: str) -> List[str]: + async def get_liked_by_user_components(self, component_ids: list[str], api_key: str) -> list[str]: # Get fields id # filter should be "id is in component_ids AND liked_by directus_users_id token is api_key" # return the ids user_data = user_data_var.get() if not user_data: - raise ValueError("No user data") + msg = "No user data" + raise ValueError(msg) params = { "fields": "id", "filter": json.dumps( @@ -306,14 +316,15 @@ async def get_liked_by_user_components(self, component_ids: List[str], api_key: } ), } - results, _ = await self._get(self.components_url, api_key, params) + results, _ = await self.get(self.components_url, api_key, params) return [result["id"] for result in results] # Which of the components is parent of the user's components - async def get_components_in_users_collection(self, component_ids: List[str], api_key: str): + async def get_components_in_users_collection(self, component_ids: list[str], api_key: str): user_data = user_data_var.get() if not user_data: - raise ValueError("No user data") + msg = "No user data" + raise ValueError(msg) params = { "fields": "id", "filter": json.dumps( @@ -325,18 +336,20 @@ async def get_components_in_users_collection(self, component_ids: List[str], api } ), } - results, _ = await self._get(self.components_url, api_key, params) + results, _ = await self.get(self.components_url, api_key, params) return [result["id"] for result in results] async def download(self, api_key: str, component_id: UUID) -> DownloadComponentResponse: url = f"{self.components_url}/{component_id}" - params = {"fields": ",".join(["id", "name", "description", "data", "is_component", "metadata"])} + params = {"fields": "id,name,description,data,is_component,metadata"} if not self.download_webhook_url: - raise ValueError("DOWNLOAD_WEBHOOK_URL is not set") - component, _ = await self._get(url, api_key, params) + msg = "DOWNLOAD_WEBHOOK_URL is not set" + raise ValueError(msg) + component, _ = await self.get(url, api_key, params) await self.call_webhook(api_key, self.download_webhook_url, component_id) if len(component) > 1: - raise ValueError("Something went wrong while downloading the component") + msg = "Something went wrong while downloading the component" + raise ValueError(msg) component_dict = component[0] download_component = DownloadComponentResponse(**component_dict) @@ -345,8 +358,9 @@ async def download(self, api_key: str, component_id: UUID) -> DownloadComponentR # If it is, we need to build the metadata try: download_component.metadata = process_component_data(download_component.data.get("nodes", [])) - except KeyError: - raise ValueError("Invalid component data. No nodes found") + except KeyError as e: + msg = "Invalid component data. 
No nodes found" + raise ValueError(msg) from e return download_component async def upload(self, api_key: str, component_data: StoreComponentCreate) -> CreateComponentResponse: @@ -380,7 +394,8 @@ async def upload(self, api_key: str, component_data: StoreComponentCreate) -> Cr raise FilterError(message) except UnboundLocalError: pass - raise ValueError(f"Upload failed: {exc}") + msg = f"Upload failed: {exc}" + raise ValueError(msg) from exc async def update( self, api_key: str, component_id: UUID, component_data: StoreComponentCreate @@ -416,45 +431,49 @@ async def update( raise FilterError(message) except UnboundLocalError: pass - raise ValueError(f"Upload failed: {exc}") + msg = f"Upload failed: {exc}" + raise ValueError(msg) from exc - async def get_tags(self) -> List[Dict[str, Any]]: + async def get_tags(self) -> list[dict[str, Any]]: url = f"{self.base_url}/items/tags" - params = {"fields": ",".join(["id", "name"])} - tags, _ = await self._get(url, api_key=None, params=params) + params = {"fields": "id,name"} + tags, _ = await self.get(url, api_key=None, params=params) return tags - async def get_user_likes(self, api_key: str) -> List[Dict[str, Any]]: + async def get_user_likes(self, api_key: str) -> list[dict[str, Any]]: url = f"{self.base_url}/users/me" params = { - "fields": ",".join(["id", "likes"]), + "fields": "id,likes", } - likes, _ = await self._get(url, api_key, params) + likes, _ = await self.get(url, api_key, params) return likes - async def get_component_likes_count(self, component_id: str, api_key: Optional[str] = None) -> int: + async def get_component_likes_count(self, component_id: str, api_key: str | None = None) -> int: url = f"{self.components_url}/{component_id}" params = { - "fields": ",".join(["id", "count(liked_by)"]), + "fields": "id,count(liked_by)", } - result, _ = await self._get(url, api_key=api_key, params=params) + result, _ = await self.get(url, api_key=api_key, params=params) if len(result) == 0: - raise ValueError("Component not found") + msg = "Component not found" + raise ValueError(msg) likes = result[0]["liked_by_count"] # likes_by_count is a string # try to convert it to int try: likes = int(likes) - except ValueError: - raise ValueError(f"Unexpected value for likes count: {likes}") + except ValueError as e: + msg = f"Unexpected value for likes count: {likes}" + raise ValueError(msg) from e return likes async def like_component(self, api_key: str, component_id: str) -> bool: # if it returns a list with one id, it means the like was successful # if it returns an int, it means the like was removed if not self.like_webhook_url: - raise ValueError("LIKE_WEBHOOK_URL is not set") + msg = "LIKE_WEBHOOK_URL is not set" + raise ValueError(msg) headers = {"Authorization": f"Bearer {api_key}"} # response = httpx.post( # self.like_webhook_url, @@ -471,35 +490,36 @@ async def like_component(self, api_key: str, component_id: str) -> bool: timeout=self.timeout, ) response.raise_for_status() - if response.status_code == 200: + if response.status_code == httpx.codes.OK: result = response.json() if isinstance(result, list): return True - elif isinstance(result, int): + if isinstance(result, int): return False - else: - raise ValueError(f"Unexpected result: {result}") - else: - raise ValueError(f"Unexpected status code: {response.status_code}") + msg = f"Unexpected result: {result}" + raise ValueError(msg) + msg = f"Unexpected status code: {response.status_code}" + raise ValueError(msg) async def get_list_component_response_model( self, - component_id: Optional[str] 
= None, - search: Optional[str] = None, - private: Optional[bool] = None, - tags: Optional[List[str]] = None, - is_component: Optional[bool] = None, - fields: Optional[List[str]] = None, + *, + component_id: str | None = None, + search: str | None = None, + private: bool | None = None, + tags: list[str] | None = None, + is_component: bool | None = None, + fields: list[str] | None = None, filter_by_user: bool = False, liked: bool = False, - store_api_key: Optional[str] = None, - sort: Optional[List[str]] = None, + store_api_key: str | None = None, + sort: list[str] | None = None, page: int = 1, limit: int = 15, ): async with user_data_context(api_key=store_api_key, store_service=self): - filter_conditions: List[Dict[str, Any]] = self.build_filter_conditions( + filter_conditions: list[dict[str, Any]] = self.build_filter_conditions( component_id=component_id, search=search, private=private, @@ -510,9 +530,9 @@ async def get_list_component_response_model( store_api_key=store_api_key, ) - result: List[ListComponentResponse] = [] + result: list[ListComponentResponse] = [] authorized = False - metadata: Dict = {} + metadata: dict = {} comp_count = 0 try: result, metadata = await self.query_components( @@ -527,14 +547,15 @@ async def get_list_component_response_model( if metadata: comp_count = metadata.get("filter_count", 0) except HTTPStatusError as exc: - if exc.response.status_code == 403: - raise ForbiddenError("You are not authorized to access this public resource") from exc - elif exc.response.status_code == 401: - raise APIKeyError( - "You are not authorized to access this resource. Please check your API key." - ) from exc + if exc.response.status_code == httpx.codes.FORBIDDEN: + msg = "You are not authorized to access this public resource" + raise ForbiddenError(msg) from exc + if exc.response.status_code == httpx.codes.UNAUTHORIZED: + msg = "You are not authorized to access this resource. Please check your API key." + raise APIKeyError(msg) from exc except Exception as exc: - raise ValueError(f"Unexpected error: {exc}") from exc + msg = f"Unexpected error: {exc}" + raise ValueError(msg) from exc try: if result and not metadata: if len(result) >= limit: @@ -548,10 +569,12 @@ async def get_list_component_response_model( elif not metadata: comp_count = 0 except HTTPStatusError as exc: - if exc.response.status_code == 403: - raise ForbiddenError("You are not authorized to access this public resource") - elif exc.response.status_code == 401: - raise APIKeyError("You are not authorized to access this resource. Please check your API key.") + if exc.response.status_code == httpx.codes.FORBIDDEN: + msg = "You are not authorized to access this public resource" + raise ForbiddenError(msg) from exc + if exc.response.status_code == httpx.codes.UNAUTHORIZED: + msg = "You are not authorized to access this resource. Please check your API key." 
+ raise APIKeyError(msg) from exc if store_api_key: # Now, from the result, we need to get the components @@ -568,7 +591,8 @@ async def get_list_component_response_model( ) authorized = True result = updated_result - except Exception: + except Exception: # noqa: BLE001 + logger.opt(exception=True).debug("Error updating components with user data") # If we get an error here, it means the user is not authorized authorized = False return ListComponentResponseModel(results=result, authorized=authorized, count=comp_count) diff --git a/src/backend/base/langflow/services/store/utils.py b/src/backend/base/langflow/services/store/utils.py index 9d8beb63215b..c761122a231a 100644 --- a/src/backend/base/langflow/services/store/utils.py +++ b/src/backend/base/langflow/services/store/utils.py @@ -1,6 +1,7 @@ -from typing import TYPE_CHECKING, List +from typing import TYPE_CHECKING import httpx +from loguru import logger if TYPE_CHECKING: from langflow.services.store.schema import ListComponentResponse @@ -15,14 +16,13 @@ def process_tags_for_post(component_dict): async def update_components_with_user_data( - components: List["ListComponentResponse"], + components: list["ListComponentResponse"], store_service: "StoreService", store_api_key: str, + *, liked: bool, ): - """ - Updates the components with the user data (liked_by_user and in_users_collection) - """ + """Updates the components with the user data (liked_by_user and in_users_collection).""" component_ids = [str(component.id) for component in components] if liked: # If liked is True, this means all we got were liked_by_user components @@ -44,10 +44,11 @@ async def update_components_with_user_data( def get_lf_version_from_pypi(): try: response = httpx.get("https://pypi.org/pypi/langflow/json") - if response.status_code != 200: + if response.status_code != httpx.codes.OK: return None return response.json()["info"]["version"] - except Exception: + except Exception: # noqa: BLE001 + logger.opt(exception=True).debug("Error getting the latest version of langflow from PyPI") return None diff --git a/src/backend/base/langflow/services/task/backends/anyio.py b/src/backend/base/langflow/services/task/backends/anyio.py index 04309b8dfc31..8f167283fb12 100644 --- a/src/backend/base/langflow/services/task/backends/anyio.py +++ b/src/backend/base/langflow/services/task/backends/anyio.py @@ -1,5 +1,6 @@ import traceback -from typing import Any, Callable, Optional, Tuple +from collections.abc import Callable +from typing import Any import anyio from loguru import logger @@ -8,11 +9,11 @@ class AnyIOTaskResult: - def __init__(self, scope): + def __init__(self, scope) -> None: self._scope = scope self._status = "PENDING" self._result = None - self._exception = None + self._exception: Exception | None = None @property def status(self) -> str: @@ -33,10 +34,10 @@ def result(self) -> Any: def ready(self) -> bool: return self._status == "DONE" - async def run(self, func, *args, **kwargs): + async def run(self, func, *args, **kwargs) -> None: try: self._result = await func(*args, **kwargs) - except Exception as e: + except Exception as e: # noqa: BLE001 self._exception = e self._traceback = e.__traceback__ finally: @@ -46,14 +47,13 @@ async def run(self, func, *args, **kwargs): class AnyIOBackend(TaskBackend): name = "anyio" - def __init__(self): - self.tasks = {} + def __init__(self) -> None: + self.tasks: dict[str, AnyIOTaskResult] = {} async def launch_task( self, task_func: Callable[..., Any], *args: Any, **kwargs: Any - ) -> Tuple[Optional[str], 
Optional[AnyIOTaskResult]]: - """ - Launch a new task in an asynchronous manner. + ) -> tuple[str | None, AnyIOTaskResult | None]: + """Launch a new task in an asynchronous manner. Parameters: task_func: The asynchronous function to run. @@ -67,13 +67,14 @@ async def launch_task( try: task_result = AnyIOTaskResult(tg) tg.start_soon(task_result.run, task_func, *args, **kwargs) - task_id = str(id(task_result)) - self.tasks[task_id] = task_result - logger.info(f"Task {task_id} started.") - return task_id, task_result - except Exception as e: - logger.error(f"An error occurred while launching the task: {e}") + except Exception: # noqa: BLE001 + logger.exception("An error occurred while launching the task") return None, None + task_id = str(id(task_result)) + self.tasks[task_id] = task_result + logger.info(f"Task {task_id} started.") + return task_id, task_result + def get_task(self, task_id: str) -> Any: return self.tasks.get(task_id) diff --git a/src/backend/base/langflow/services/task/backends/base.py b/src/backend/base/langflow/services/task/backends/base.py index 93fbfd858d41..524d92ada493 100644 --- a/src/backend/base/langflow/services/task/backends/base.py +++ b/src/backend/base/langflow/services/task/backends/base.py @@ -1,5 +1,6 @@ from abc import ABC, abstractmethod -from typing import Any, Callable +from collections.abc import Callable +from typing import Any class TaskBackend(ABC): diff --git a/src/backend/base/langflow/services/task/backends/celery.py b/src/backend/base/langflow/services/task/backends/celery.py index cfb17ae3b700..d26725e7676e 100644 --- a/src/backend/base/langflow/services/task/backends/celery.py +++ b/src/backend/base/langflow/services/task/backends/celery.py @@ -1,24 +1,27 @@ -from typing import Any, Callable - -from celery.result import AsyncResult # type: ignore +from collections.abc import Callable +from typing import TYPE_CHECKING, Any +from celery.result import AsyncResult from langflow.services.task.backends.base import TaskBackend from langflow.worker import celery_app +if TYPE_CHECKING: + from celery import Task + class CeleryBackend(TaskBackend): name = "celery" - def __init__(self): + def __init__(self) -> None: self.celery_app = celery_app def launch_task(self, task_func: Callable[..., Any], *args: Any, **kwargs: Any) -> tuple[str, AsyncResult]: # I need to type the delay method to make it easier - from celery import Task # type: ignore if not hasattr(task_func, "delay"): - raise ValueError(f"Task function {task_func} does not have a delay method") + msg = f"Task function {task_func} does not have a delay method" + raise ValueError(msg) task: Task = task_func.delay(*args, **kwargs) return task.id, AsyncResult(task.id, app=self.celery_app) diff --git a/src/backend/base/langflow/services/task/factory.py b/src/backend/base/langflow/services/task/factory.py index 937f390ae079..c030776de5ea 100644 --- a/src/backend/base/langflow/services/task/factory.py +++ b/src/backend/base/langflow/services/task/factory.py @@ -3,7 +3,7 @@ class TaskServiceFactory(ServiceFactory): - def __init__(self): + def __init__(self) -> None: super().__init__(TaskService) def create(self): diff --git a/src/backend/base/langflow/services/task/service.py b/src/backend/base/langflow/services/task/service.py index cca1645b84cb..b113cdc5dd0c 100644 --- a/src/backend/base/langflow/services/task/service.py +++ b/src/backend/base/langflow/services/task/service.py @@ -1,14 +1,17 @@ -from typing import TYPE_CHECKING, Any, Callable, Coroutine +from __future__ import annotations + +from 
collections.abc import Callable, Coroutine +from typing import TYPE_CHECKING, Any from loguru import logger from langflow.services.base import Service from langflow.services.task.backends.anyio import AnyIOBackend -from langflow.services.task.backends.base import TaskBackend from langflow.services.task.utils import get_celery_worker_status if TYPE_CHECKING: from langflow.services.settings.service import SettingsService + from langflow.services.task.backends.base import TaskBackend def check_celery_availability(): @@ -17,8 +20,8 @@ def check_celery_availability(): status = get_celery_worker_status(celery_app) logger.debug(f"Celery status: {status}") - except Exception as exc: - logger.debug(f"Celery not available: {exc}") + except Exception: # noqa: BLE001 + logger.opt(exception=True).debug("Celery not available") status = {"availability": None} return status @@ -26,20 +29,19 @@ def check_celery_availability(): class TaskService(Service): name = "task_service" - def __init__(self, settings_service: "SettingsService"): + def __init__(self, settings_service: SettingsService): self.settings_service = settings_service try: if self.settings_service.settings.celery_enabled: - USE_CELERY = True status = check_celery_availability() - USE_CELERY = status.get("availability") is not None + use_celery = status.get("availability") is not None else: - USE_CELERY = False + use_celery = False except ImportError: - USE_CELERY = False + use_celery = False - self.use_celery = USE_CELERY + self.use_celery = use_celery self.backend = self.get_backend() @property @@ -65,7 +67,8 @@ async def launch_and_await_task( if not self.use_celery: return None, await task_func(*args, **kwargs) if not hasattr(task_func, "apply"): - raise ValueError(f"Task function {task_func} does not have an apply method") + msg = f"Task function {task_func} does not have an apply method" + raise ValueError(msg) task = task_func.apply(args=args, kwargs=kwargs) result = task.get() diff --git a/src/backend/base/langflow/services/task/utils.py b/src/backend/base/langflow/services/task/utils.py index 5dfb03b83a81..fb155e70f036 100644 --- a/src/backend/base/langflow/services/task/utils.py +++ b/src/backend/base/langflow/services/task/utils.py @@ -1,9 +1,10 @@ -import contextlib from typing import TYPE_CHECKING if TYPE_CHECKING: + import contextlib + with contextlib.suppress(ImportError): - from celery import Celery # type: ignore + from celery import Celery def get_celery_worker_status(app: "Celery"): diff --git a/src/backend/base/langflow/services/telemetry/factory.py b/src/backend/base/langflow/services/telemetry/factory.py index 86f048112181..1fb087de7c39 100644 --- a/src/backend/base/langflow/services/telemetry/factory.py +++ b/src/backend/base/langflow/services/telemetry/factory.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import TYPE_CHECKING from langflow.services.factory import ServiceFactory @@ -8,8 +10,8 @@ class TelemetryServiceFactory(ServiceFactory): - def __init__(self): + def __init__(self) -> None: super().__init__(TelemetryService) - def create(self, settings_service: "SettingsService"): + def create(self, settings_service: SettingsService): return TelemetryService(settings_service) diff --git a/src/backend/base/langflow/services/telemetry/opentelemetry.py b/src/backend/base/langflow/services/telemetry/opentelemetry.py index 81d0d873c36f..9daf19a8a1d0 100644 --- a/src/backend/base/langflow/services/telemetry/opentelemetry.py +++ b/src/backend/base/langflow/services/telemetry/opentelemetry.py @@ -1,7 +1,7 
@@ import threading -import warnings +from collections.abc import Mapping from enum import Enum -from typing import Any, Dict, Mapping, Tuple, Union +from typing import Any from weakref import WeakValueDictionary from opentelemetry import metrics @@ -11,7 +11,7 @@ from opentelemetry.sdk.metrics import MeterProvider from opentelemetry.sdk.resources import Resource -# a default OpenTelelmetry meter name +# a default OpenTelemetry meter name langflow_meter_name = "langflow" """ @@ -36,25 +36,25 @@ class MetricType(Enum): class ObservableGaugeWrapper: - """ - Wrapper class for ObservableGauge + """Wrapper class for ObservableGauge. + Since OpenTelemetry does not provide a way to set the value of an ObservableGauge, and instead reads the value through a callback function, we need to create a wrapper class. """ def __init__(self, name: str, description: str, unit: str): - self._values: Dict[Tuple[Tuple[str, str], ...], float] = {} + self._values: dict[tuple[tuple[str, str], ...], float] = {} self._meter = metrics.get_meter(langflow_meter_name) self._gauge = self._meter.create_observable_gauge( name=name, description=description, unit=unit, callbacks=[self._callback] ) - def _callback(self, options: CallbackOptions): + def _callback(self, _options: CallbackOptions): return [Observation(value, attributes=dict(labels)) for labels, value in self._values.items()] # return [Observation(self._value)] - def set_value(self, value: float, labels: Mapping[str, str]): + def set_value(self, value: float, labels: Mapping[str, str]) -> None: self._values[tuple(sorted(labels.items()))] = value @@ -63,37 +63,35 @@ def __init__( self, name: str, description: str, - type: MetricType, - labels: Dict[str, bool], + metric_type: MetricType, + labels: dict[str, bool], unit: str = "", ): self.name = name self.description = description - self.type = type + self.type = metric_type self.unit = unit self.labels = labels self.mandatory_labels = [label for label, required in labels.items() if required] - self.allowed_labels = [label for label in labels.keys()] + self.allowed_labels = list(labels.keys()) - def validate_labels(self, labels: Mapping[str, str]): - """ - Validate if the labels provided are valid - """ + def validate_labels(self, labels: Mapping[str, str]) -> None: + """Validate that the provided labels are valid.""" if labels is None or len(labels) == 0: - raise ValueError("Labels must be provided for the metric") + msg = "Labels must be provided for the metric" + raise ValueError(msg) missing_labels = set(self.mandatory_labels) - set(labels.keys()) if missing_labels: - raise ValueError(f"Missing required labels: {missing_labels}") + msg = f"Missing required labels: {missing_labels}" + raise ValueError(msg) - def __repr__(self): + def __repr__(self) -> str: return f"Metric(name='{self.name}', description='{self.description}', type={self.type}, unit='{self.unit}')" class ThreadSafeSingletonMetaUsingWeakref(type): - """ - Thread-safe Singleton metaclass using WeakValueDictionary - """ + """Thread-safe Singleton metaclass using WeakValueDictionary.""" _instances: WeakValueDictionary[Any, Any] = WeakValueDictionary() _lock: threading.Lock = threading.Lock() @@ -102,24 +100,31 @@ def __call__(cls, *args, **kwargs): if cls not in cls._instances: with cls._lock: if cls not in cls._instances: - instance = super(ThreadSafeSingletonMetaUsingWeakref, cls).__call__(*args, **kwargs) + instance = super().__call__(*args, **kwargs) cls._instances[cls] = instance return cls._instances[cls] class
OpenTelemetry(metaclass=ThreadSafeSingletonMetaUsingWeakref): - _metrics_registry: Dict[str, Metric] = dict() - - def _add_metric(self, name: str, description: str, unit: str, metric_type: MetricType, labels: Dict[str, bool]): - metric = Metric(name=name, description=description, type=metric_type, unit=unit, labels=labels) + _metrics_registry: dict[str, Metric] = {} + _metrics: dict[str, Counter | ObservableGaugeWrapper | Histogram | UpDownCounter] = {} + _meter_provider: MeterProvider | None = None + _initialized: bool = False # Add initialization flag + prometheus_enabled: bool = True + + def _add_metric( + self, name: str, description: str, unit: str, metric_type: MetricType, labels: dict[str, bool] + ) -> None: + metric = Metric(name=name, description=description, metric_type=metric_type, unit=unit, labels=labels) self._metrics_registry[name] = metric if labels is None or len(labels) == 0: - raise ValueError("Labels must be provided for the metric upon registration") + msg = "Labels must be provided for the metric upon registration" + raise ValueError(msg) - def _register_metric(self): - """ - Define any custom metrics here - A thread safe singleton class to manage metrics + def _register_metric(self) -> None: + """Define any custom metrics here. + + A thread safe singleton class to manage metrics. """ self._add_metric( name="file_uploads", @@ -136,93 +141,113 @@ def _register_metric(self): labels={"flow_id": mandatory_label}, ) - _metrics: Dict[str, Union[Counter, ObservableGaugeWrapper, Histogram, UpDownCounter]] = {} + def __init__(self, *, prometheus_enabled: bool = True): + # Only initialize once + self.prometheus_enabled = prometheus_enabled + if OpenTelemetry._initialized: + return + + if not self._metrics_registry: + self._register_metric() - def __init__(self, prometheus_enabled: bool = True): - self._register_metric() + if self._meter_provider is None: + # Get existing meter provider if any + existing_provider = metrics.get_meter_provider() - resource = Resource.create({"service.name": "langflow"}) - metric_readers = [] + # Check if FastAPI instrumentation is already set up + if hasattr(existing_provider, "get_meter") and existing_provider.get_meter("http.server"): + self._meter_provider = existing_provider + else: + resource = Resource.create({"service.name": "langflow"}) + metric_readers = [] + if self.prometheus_enabled: + metric_readers.append(PrometheusMetricReader()) - # configure prometheus exporter - self.prometheus_enabled = prometheus_enabled - if prometheus_enabled: - metric_readers.append(PrometheusMetricReader()) + self._meter_provider = MeterProvider(resource=resource, metric_readers=metric_readers) + metrics.set_meter_provider(self._meter_provider) - meter_provider = MeterProvider(resource=resource, metric_readers=metric_readers) - metrics.set_meter_provider(meter_provider) - self.meter = meter_provider.get_meter(langflow_meter_name) + self.meter = self._meter_provider.get_meter(langflow_meter_name) for name, metric in self._metrics_registry.items(): if name != metric.name: - raise ValueError(f"Key '{name}' does not match metric name '{metric.name}'") - with warnings.catch_warnings(): - warnings.simplefilter("ignore") - + msg = f"Key '{name}' does not match metric name '{metric.name}'" + raise ValueError(msg) + if name not in self._metrics: self._metrics[metric.name] = self._create_metric(metric) + OpenTelemetry._initialized = True + def _create_metric(self, metric): + # Remove _created_instruments check + if metric.name in self._metrics: + return 
self._metrics[metric.name] + if metric.type == MetricType.COUNTER: return self.meter.create_counter( name=metric.name, unit=metric.unit, description=metric.description, ) - elif metric.type == MetricType.OBSERVABLE_GAUGE: + if metric.type == MetricType.OBSERVABLE_GAUGE: return ObservableGaugeWrapper( name=metric.name, description=metric.description, unit=metric.unit, ) - elif metric.type == MetricType.UP_DOWN_COUNTER: + if metric.type == MetricType.UP_DOWN_COUNTER: return self.meter.create_up_down_counter( name=metric.name, unit=metric.unit, description=metric.description, ) - elif metric.type == MetricType.HISTOGRAM: + if metric.type == MetricType.HISTOGRAM: return self.meter.create_histogram( name=metric.name, unit=metric.unit, description=metric.description, ) - else: - raise ValueError(f"Unknown metric type: {metric.type}") + msg = f"Unknown metric type: {metric.type}" + raise ValueError(msg) - def validate_labels(self, metric_name: str, labels: Mapping[str, str]): + def validate_labels(self, metric_name: str, labels: Mapping[str, str]) -> None: reg = self._metrics_registry.get(metric_name) if reg is None: - raise ValueError(f"Metric '{metric_name}' is not registered") + msg = f"Metric '{metric_name}' is not registered" + raise ValueError(msg) reg.validate_labels(labels) - def increment_counter(self, metric_name: str, labels: Mapping[str, str], value: float = 1.0): + def increment_counter(self, metric_name: str, labels: Mapping[str, str], value: float = 1.0) -> None: self.validate_labels(metric_name, labels) counter = self._metrics.get(metric_name) if isinstance(counter, Counter): counter.add(value, labels) else: - raise ValueError(f"Metric '{metric_name}' is not a counter") + msg = f"Metric '{metric_name}' is not a counter" + raise TypeError(msg) - def up_down_counter(self, metric_name: str, value: float, labels: Mapping[str, str]): + def up_down_counter(self, metric_name: str, value: float, labels: Mapping[str, str]) -> None: self.validate_labels(metric_name, labels) up_down_counter = self._metrics.get(metric_name) if isinstance(up_down_counter, UpDownCounter): up_down_counter.add(value, labels) else: - raise ValueError(f"Metric '{metric_name}' is not an up down counter") + msg = f"Metric '{metric_name}' is not an up down counter" + raise TypeError(msg) - def update_gauge(self, metric_name: str, value: float, labels: Mapping[str, str]): + def update_gauge(self, metric_name: str, value: float, labels: Mapping[str, str]) -> None: self.validate_labels(metric_name, labels) gauge = self._metrics.get(metric_name) if isinstance(gauge, ObservableGaugeWrapper): gauge.set_value(value, labels) else: - raise ValueError(f"Metric '{metric_name}' is not a gauge") + msg = f"Metric '{metric_name}' is not a gauge" + raise TypeError(msg) - def observe_histogram(self, metric_name: str, value: float, labels: Mapping[str, str]): + def observe_histogram(self, metric_name: str, value: float, labels: Mapping[str, str]) -> None: self.validate_labels(metric_name, labels) histogram = self._metrics.get(metric_name) if isinstance(histogram, Histogram): histogram.record(value, labels) else: - raise ValueError(f"Metric '{metric_name}' is not a histogram") + msg = f"Metric '{metric_name}' is not a histogram" + raise TypeError(msg)
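The gauge wrapper in the hunk above exists because OpenTelemetry observable gauges are pull-based: the SDK polls a callback rather than accepting pushed values, so the wrapper caches the most recent value per label set and replays the cache as `Observation`s on every poll. A minimal standalone sketch of the same pattern (the `GaugeSketch` class and the metric name are illustrative assumptions, not part of this diff):

```python
# Minimal sketch of the ObservableGauge wrapper pattern used above.
from opentelemetry import metrics
from opentelemetry.metrics import CallbackOptions, Observation


class GaugeSketch:
    """Caches the latest value per label set; the SDK pulls it via callback."""

    def __init__(self, name: str):
        self._values: dict[tuple[tuple[str, str], ...], float] = {}
        meter = metrics.get_meter("langflow")
        self._gauge = meter.create_observable_gauge(name=name, callbacks=[self._callback])

    def _callback(self, _options: CallbackOptions):
        # One Observation per distinct label combination, replayed on every poll.
        return [Observation(value, attributes=dict(labels)) for labels, value in self._values.items()]

    def set_value(self, value: float, labels: dict[str, str]) -> None:
        # Sorting the items makes the label dict order-insensitive as a cache key.
        self._values[tuple(sorted(labels.items()))] = value


gauge = GaugeSketch("num_files_uploaded")
gauge.set_value(256.0, {"flow_id": "example-flow"})  # read on the next metric collection
```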
diff --git a/src/backend/base/langflow/services/telemetry/schema.py b/src/backend/base/langflow/services/telemetry/schema.py index a78629d5cb0d..15b753db98b0 100644 --- a/src/backend/base/langflow/services/telemetry/schema.py +++ b/src/backend/base/langflow/services/telemetry/schema.py @@ -1,15 +1,15 @@ -from pydantic import BaseModel +from pydantic import BaseModel, Field class RunPayload(BaseModel): - runIsWebhook: bool = False - runSeconds: int - runSuccess: bool - runErrorMessage: str = "" + run_is_webhook: bool = Field(default=False, serialization_alias="runIsWebhook") + run_seconds: int = Field(serialization_alias="runSeconds") + run_success: bool = Field(serialization_alias="runSuccess") + run_error_message: str = Field("", serialization_alias="runErrorMessage") class ShutdownPayload(BaseModel): - timeRunning: int + time_running: int = Field(serialization_alias="timeRunning") class VersionPayload(BaseModel): @@ -18,20 +18,20 @@ class VersionPayload(BaseModel): platform: str python: str arch: str - autoLogin: bool - cacheType: str - backendOnly: bool + auto_login: bool = Field(serialization_alias="autoLogin") + cache_type: str = Field(serialization_alias="cacheType") + backend_only: bool = Field(serialization_alias="backendOnly") class PlaygroundPayload(BaseModel): - playgroundSeconds: int - playgroundComponentCount: int | None = None - playgroundSuccess: bool - playgroundErrorMessage: str = "" + playground_seconds: int = Field(serialization_alias="playgroundSeconds") + playground_component_count: int | None = Field(None, serialization_alias="playgroundComponentCount") + playground_success: bool = Field(serialization_alias="playgroundSuccess") + playground_error_message: str = Field("", serialization_alias="playgroundErrorMessage") class ComponentPayload(BaseModel): - componentName: str - componentSeconds: int - componentSuccess: bool - componentErrorMessage: str | None = None + component_name: str = Field(serialization_alias="componentName") + component_seconds: int = Field(serialization_alias="componentSeconds") + component_success: bool = Field(serialization_alias="componentSuccess") + component_error_message: str | None = Field(None, serialization_alias="componentErrorMessage")
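The schema hunk above renames every payload field to snake_case while keeping the wire format camelCase through `serialization_alias`; the aliases only take effect when dumping with `by_alias=True`, which is what `send_telemetry_data` in the next file now passes to `model_dump`. A short sketch of the behavior (assumes Pydantic v2; `RunPayload` is trimmed to two fields here):

```python
from pydantic import BaseModel, Field


class RunPayload(BaseModel):
    run_is_webhook: bool = Field(default=False, serialization_alias="runIsWebhook")
    run_seconds: int = Field(serialization_alias="runSeconds")


payload = RunPayload(run_seconds=12)
print(payload.model_dump())               # {'run_is_webhook': False, 'run_seconds': 12}
print(payload.model_dump(by_alias=True))  # {'runIsWebhook': False, 'runSeconds': 12}
```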
diff --git a/src/backend/base/langflow/services/telemetry/service.py b/src/backend/base/langflow/services/telemetry/service.py index 59b95ec6fcc7..a41267c3f1d6 100644 --- a/src/backend/base/langflow/services/telemetry/service.py +++ b/src/backend/base/langflow/services/telemetry/service.py @@ -1,13 +1,14 @@ +from __future__ import annotations + import asyncio -import contextlib import os import platform +import sys from datetime import datetime, timezone from typing import TYPE_CHECKING import httpx from loguru import logger -from pydantic import BaseModel from langflow.services.base import Service from langflow.services.telemetry.opentelemetry import OpenTelemetry @@ -21,13 +22,15 @@ from langflow.utils.version import get_version_info if TYPE_CHECKING: + from pydantic import BaseModel + from langflow.services.settings.service import SettingsService class TelemetryService(Service): name = "telemetry_service" - def __init__(self, settings_service: "SettingsService"): + def __init__(self, settings_service: SettingsService): super().__init__() self.settings_service = settings_service self.base_url = settings_service.settings.telemetry_base_url @@ -37,23 +40,24 @@ def __init__(self, settings_service: "SettingsService"): self._stopping = False self.ot = OpenTelemetry(prometheus_enabled=settings_service.settings.prometheus_enabled) + self.architecture: str | None = None # Check for do-not-track settings self.do_not_track = ( os.getenv("DO_NOT_TRACK", "False").lower() == "true" or settings_service.settings.do_not_track ) - async def telemetry_worker(self): + async def telemetry_worker(self) -> None: while self.running: func, payload, path = await self.telemetry_queue.get() try: await func(payload, path) - except Exception as e: - logger.error(f"Error sending telemetry data: {e}") + except Exception:  # noqa: BLE001 + logger.exception("Error sending telemetry data") finally: self.telemetry_queue.task_done() - async def send_telemetry_data(self, payload: BaseModel, path: str | None = None): + async def send_telemetry_data(self, payload: BaseModel, path: str | None = None) -> None: if self.do_not_track: logger.debug("Telemetry tracking is disabled.") return @@ -62,73 +66,86 @@ async def send_telemetry_data(self, payload: BaseModel, path: str | None = None) if path: url = f"{url}/{path}" try: - payload_dict = payload.model_dump(exclude_none=True, exclude_unset=True) + payload_dict = payload.model_dump(by_alias=True, exclude_none=True, exclude_unset=True) response = await self.client.get(url, params=payload_dict) - if response.status_code != 200: + if response.status_code != httpx.codes.OK: logger.error(f"Failed to send telemetry data: {response.status_code} {response.text}") else: logger.debug("Telemetry data sent successfully.") - except httpx.HTTPStatusError as e: - logger.error(f"HTTP error occurred: {e}") - except httpx.RequestError as e: - logger.error(f"Request error occurred: {e}") - except Exception as e: - logger.error(f"Unexpected error occurred: {e}") - - async def log_package_run(self, payload: RunPayload): + except httpx.HTTPStatusError: + logger.exception("HTTP error occurred") + except httpx.RequestError: + logger.exception("Request error occurred") + except Exception:  # noqa: BLE001 + logger.exception("Unexpected error occurred") + + async def log_package_run(self, payload: RunPayload) -> None: await self._queue_event((self.send_telemetry_data, payload, "run")) - async def log_package_shutdown(self): - payload = ShutdownPayload(timeRunning=(datetime.now(timezone.utc) - self._start_time).seconds) + async def log_package_shutdown(self) -> None: + payload = ShutdownPayload(time_running=(datetime.now(timezone.utc) - self._start_time).seconds) await self._queue_event(payload) - async def _queue_event(self, payload): + async def _queue_event(self, payload) -> None: if self.do_not_track or self._stopping: return await self.telemetry_queue.put(payload) - async def log_package_version(self): + async def log_package_version(self) -> None: python_version = ".".join(platform.python_version().split(".")[:2]) version_info = get_version_info() - architecture = platform.architecture()[0] + if self.architecture is None: + self.architecture = (await asyncio.to_thread(platform.architecture))[0] payload = VersionPayload( package=version_info["package"].lower(), version=version_info["version"], platform=platform.platform(), python=python_version, - cacheType=self.settings_service.settings.cache_type, - backendOnly=self.settings_service.settings.backend_only, - arch=architecture, - autoLogin=self.settings_service.auth_settings.AUTO_LOGIN, + cache_type=self.settings_service.settings.cache_type, + backend_only=self.settings_service.settings.backend_only, + arch=self.architecture, + auto_login=self.settings_service.auth_settings.AUTO_LOGIN, ) await self._queue_event((self.send_telemetry_data, payload, None)) - async def log_package_playground(self, payload: PlaygroundPayload): + async def log_package_playground(self, payload: PlaygroundPayload) -> None: await self._queue_event((self.send_telemetry_data, payload, "playground")) - async def log_package_component(self, payload: ComponentPayload): + 
async def log_package_component(self, payload: ComponentPayload) -> None: await self._queue_event((self.send_telemetry_data, payload, "component")) - async def start(self): + def start(self) -> None: if self.running or self.do_not_track: return try: self.running = True self._start_time = datetime.now(timezone.utc) self.worker_task = asyncio.create_task(self.telemetry_worker()) - asyncio.create_task(self.log_package_version()) - except Exception as e: - logger.error(f"Error starting telemetry service: {e}") + self.log_package_version_task = asyncio.create_task(self.log_package_version()) + except Exception:  # noqa: BLE001 + logger.exception("Error starting telemetry service") - async def flush(self): + async def flush(self) -> None: if self.do_not_track: return try: await self.telemetry_queue.join() - except Exception as e: - logger.error(f"Error flushing logs: {e}") + except Exception:  # noqa: BLE001 + logger.exception("Error flushing logs") - async def stop(self): + async def _cancel_task(self, task: asyncio.Task, cancel_msg: str) -> None: + task.cancel(cancel_msg) + try: + await task + except asyncio.CancelledError: + current_task = asyncio.current_task() + if sys.version_info >= (3, 11): + if current_task and current_task.cancelling() > 0: + raise + elif current_task and hasattr(current_task, "_must_cancel") and current_task._must_cancel: + raise + + async def stop(self) -> None: if self.do_not_track or self._stopping: return try: @@ -137,12 +154,12 @@ async def stop(self): await self.flush() self.running = False if self.worker_task: - self.worker_task.cancel() - with contextlib.suppress(asyncio.CancelledError): - await self.worker_task + await self._cancel_task(self.worker_task, "Cancel telemetry worker task") + if self.log_package_version_task: + await self._cancel_task(self.log_package_version_task, "Cancel telemetry log package version task") await self.client.aclose() - except Exception as e: - logger.error(f"Error stopping tracing service: {e}") + except Exception:  # noqa: BLE001 + logger.exception("Error stopping telemetry service") - async def teardown(self): + async def teardown(self) -> None: await self.stop()
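The `_cancel_task` helper above replaces the old `contextlib.suppress(asyncio.CancelledError)`: it swallows the `CancelledError` it provoked in the child task, but re-raises when the awaiting task is itself being cancelled, using `Task.cancelling()` on Python 3.11+ and the private `_must_cancel` attribute as a best-effort fallback on 3.10. A standalone sketch of the 3.11+ branch (names here are illustrative, not the service's code):

```python
import asyncio


async def cancel_child(task: asyncio.Task) -> None:
    task.cancel("shutting down")
    try:
        await task  # raises CancelledError once the child unwinds
    except asyncio.CancelledError:
        current = asyncio.current_task()
        # Only swallow the error if it is the cancellation we requested;
        # if *this* task is being cancelled too, it must keep propagating.
        if current is not None and current.cancelling() > 0:
            raise


async def main() -> None:
    child = asyncio.create_task(asyncio.sleep(3600))
    await cancel_child(child)
    print("child cancelled, caller still running")


asyncio.run(main())
```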
diff --git a/src/backend/base/langflow/services/tracing/base.py b/src/backend/base/langflow/services/tracing/base.py index d2a0dd76a602..9b51c6e38d91 100644 --- a/src/backend/base/langflow/services/tracing/base.py +++ b/src/backend/base/langflow/services/tracing/base.py @@ -1,19 +1,26 @@ -from abc import ABC, abstractmethod -from typing import TYPE_CHECKING, Any, Dict, Optional -from uuid import UUID +from __future__ import annotations -from langflow.services.tracing.schema import Log +from abc import ABC, abstractmethod +from typing import TYPE_CHECKING, Any if TYPE_CHECKING: - from langflow.graph.vertex.base import Vertex + from collections.abc import Sequence + from uuid import UUID + from langchain.callbacks.base import BaseCallbackHandler + from langflow.graph.vertex.base import Vertex + from langflow.services.tracing.schema import Log + class BaseTracer(ABC): + trace_id: UUID + @abstractmethod def __init__(self, trace_name: str, trace_type: str, project_name: str, trace_id: UUID): raise NotImplementedError + @property @abstractmethod def ready(self) -> bool: raise NotImplementedError @@ -24,10 +31,10 @@ def add_trace( self, trace_id: str, trace_name: str, trace_type: str, - inputs: Dict[str, Any], - metadata: Dict[str, Any] | None = None, - vertex: Optional["Vertex"] = None, - ): + inputs: dict[str, Any], + metadata: dict[str, Any] | None = None, + vertex: Vertex | None = None, + ) -> None: raise NotImplementedError @abstractmethod @@ -35,22 +42,22 @@ def end_trace( self, trace_id: str, trace_name: str, - outputs: Dict[str, Any] | None = None, + outputs: dict[str, Any] | None = None, error: Exception | None = None, - logs: list[Log | dict] = [], - ): + logs: Sequence[Log | dict] = (), + ) -> None: raise NotImplementedError @abstractmethod def end( self, inputs: dict[str, Any], - outputs: Dict[str, Any], + outputs: dict[str, Any], error: Exception | None = None, metadata: dict[str, Any] | None = None, - ): + ) -> None: raise NotImplementedError @abstractmethod - def get_langchain_callback(self) -> Optional["BaseCallbackHandler"]: + def get_langchain_callback(self) -> BaseCallbackHandler | None: raise NotImplementedError diff --git a/src/backend/base/langflow/services/tracing/factory.py b/src/backend/base/langflow/services/tracing/factory.py index 01a561d6ab52..f1971622e7c8 100644 --- a/src/backend/base/langflow/services/tracing/factory.py +++ b/src/backend/base/langflow/services/tracing/factory.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import TYPE_CHECKING from langflow.services.factory import ServiceFactory @@ -5,12 +7,11 @@ if TYPE_CHECKING: from langflow.services.settings.service import SettingsService - from langflow.services.monitor.service import MonitorService class TracingServiceFactory(ServiceFactory): - def __init__(self): + def __init__(self) -> None: super().__init__(TracingService) - def create(self, settings_service: "SettingsService", monitor_service: "MonitorService"): - return TracingService(settings_service, monitor_service) + def create(self, settings_service: SettingsService): + return TracingService(settings_service) diff --git a/src/backend/base/langflow/services/tracing/langfuse.py b/src/backend/base/langflow/services/tracing/langfuse.py index 1a01cbf6a313..0bcb192d5b9f 100644 --- a/src/backend/base/langflow/services/tracing/langfuse.py +++ b/src/backend/base/langflow/services/tracing/langfuse.py @@ -1,16 +1,23 @@ +from __future__ import annotations + import os -from typing import TYPE_CHECKING, Any, Dict, Optional -from uuid import UUID -from datetime import datetime +from datetime import datetime, timezone +from typing import TYPE_CHECKING, Any from loguru import logger +from typing_extensions import override from langflow.services.tracing.base import BaseTracer -from langflow.services.tracing.schema import Log if TYPE_CHECKING: - from langflow.graph.vertex.base import Vertex + from collections.abc import Sequence + from uuid import UUID + from langchain.callbacks.base import BaseCallbackHandler + from langfuse.client import StatefulSpanClient + + from langflow.graph.vertex.base import Vertex + from langflow.services.tracing.schema import Log class LangFuseTracer(BaseTracer): @@ -22,23 +29,21 @@ def __init__(self, trace_name: str, trace_type: str, project_name: str, trace_id self.trace_type = trace_type self.trace_id = trace_id self.flow_id = trace_name.split(" - ")[-1] - self.last_span = None + self.last_span: StatefulSpanClient | None = None self.spans: dict = {} - self._ready: bool = self.setup_langfuse() + + config = self._get_config() + self._ready: bool = self.setup_langfuse(config) if config else False @property def ready(self): return self._ready - def setup_langfuse(self) -> bool: + def setup_langfuse(self, config) -> bool: try: from langfuse import Langfuse from langfuse.callback.langchain import LangchainCallbackHandler - config = self._get_config() - if not all(config.values()): - raise 
ValueError("Missing Langfuse configuration") - self._client = Langfuse(**config) self.trace = self._client.trace(id=str(self.trace_id), name=self.flow_id) @@ -50,31 +55,32 @@ def setup_langfuse(self) -> bool: self._callback = LangchainCallbackHandler(**config) except ImportError: - logger.error("Could not import langfuse. Please install it with `pip install langfuse`.") + logger.exception("Could not import langfuse. Please install it with `pip install langfuse`.") return False - except Exception as e: - logger.debug(f"Error setting up LangSmith tracer: {e}") + except Exception:  # noqa: BLE001 + logger.opt(exception=True).debug("Error setting up LangFuse tracer") return False return True + @override def add_trace( self, trace_id: str, trace_name: str, trace_type: str, - inputs: Dict[str, Any], - metadata: Dict[str, Any] | None = None, - vertex: Optional["Vertex"] = None, - ): - start_time = datetime.utcnow() + inputs: dict[str, Any], + metadata: dict[str, Any] | None = None, + vertex: Vertex | None = None, + ) -> None: + start_time = datetime.now(tz=timezone.utc) if not self._ready: return _metadata: dict = {} _metadata |= {"trace_type": trace_type} if trace_type else {} - _metadata |= metadata if metadata else {} + _metadata |= metadata or {} _name = trace_name.removesuffix(f" ({trace_id})") content_span = { @@ -84,54 +90,55 @@ def add_trace( "start_time": start_time, } - if self.last_span: - span = self.last_span.span(**content_span) - else: - span = self.trace.span(**content_span) + span = self.last_span.span(**content_span) if self.last_span else self.trace.span(**content_span) self.last_span = span self.spans[trace_id] = span + @override def end_trace( self, trace_id: str, trace_name: str, - outputs: Dict[str, Any] | None = None, + outputs: dict[str, Any] | None = None, error: Exception | None = None, - logs: list[Log | dict] = [], - ): - end_time = datetime.utcnow() + logs: Sequence[Log | dict] = (), + ) -> None: + end_time = datetime.now(tz=timezone.utc) if not self._ready: return span = self.spans.get(trace_id, None) if span: _output: dict = {} - _output |= outputs if outputs else {} + _output |= outputs or {} _output |= {"error": str(error)} if error else {} - _output |= {"logs": logs} if logs else {} + _output |= {"logs": list(logs)} if logs else {} content = {"output": _output, "end_time": end_time} span.update(**content) + @override def end( self, inputs: dict[str, Any], - outputs: Dict[str, Any], + outputs: dict[str, Any], error: Exception | None = None, metadata: dict[str, Any] | None = None, - ): + ) -> None: if not self._ready: return self._client.flush() - def get_langchain_callback(self) -> Optional["BaseCallbackHandler"]: + def get_langchain_callback(self) -> BaseCallbackHandler | None: if not self._ready: return None return None # self._callback - def _get_config(self): + def _get_config(self) -> dict: secret_key = os.getenv("LANGFUSE_SECRET_KEY", None) public_key = os.getenv("LANGFUSE_PUBLIC_KEY", None) host = os.getenv("LANGFUSE_HOST", None) - return {"secret_key": secret_key, "public_key": public_key, "host": host} + if secret_key and public_key and host: + return {"secret_key": secret_key, "public_key": public_key, "host": host} + return {} diff --git a/src/backend/base/langflow/services/tracing/langsmith.py b/src/backend/base/langflow/services/tracing/langsmith.py index 2e6bd795493c..1137c2ed15bb 100644 --- a/src/backend/base/langflow/services/tracing/langsmith.py +++ b/src/backend/base/langflow/services/tracing/langsmith.py @@ -1,20 +1,25 @@ +from __future__ 
import annotations + import os import traceback import types from datetime import datetime, timezone -from typing import TYPE_CHECKING, Any, Dict, Optional -from uuid import UUID +from typing import TYPE_CHECKING, Any from loguru import logger from langflow.schema.data import Data from langflow.services.tracing.base import BaseTracer -from langflow.services.tracing.schema import Log if TYPE_CHECKING: - from langflow.graph.vertex.base import Vertex + from collections.abc import Sequence + from uuid import UUID + from langchain.callbacks.base import BaseCallbackHandler + from langflow.graph.vertex.base import Vertex + from langflow.services.tracing.schema import Log + class LangSmithTracer(BaseTracer): def __init__(self, trace_name: str, trace_type: str, project_name: str, trace_id: UUID): @@ -36,15 +41,15 @@ def __init__(self, trace_name: str, trace_type: str, project_name: str, trace_id ) self._run_tree.add_event({"name": "Start", "time": datetime.now(timezone.utc).isoformat()}) self._children: dict[str, RunTree] = {} - except Exception as e: - logger.debug(f"Error setting up LangSmith tracer: {e}") + except Exception: # noqa: BLE001 + logger.opt(exception=True).debug("Error setting up LangSmith tracer") self._ready = False @property def ready(self): return self._ready - def setup_langsmith(self): + def setup_langsmith(self) -> bool: if os.getenv("LANGCHAIN_API_KEY") is None: return False try: @@ -52,21 +57,21 @@ def setup_langsmith(self): self._client = Client() except ImportError: - logger.error("Could not import langsmith. Please install it with `pip install langsmith`.") + logger.exception("Could not import langsmith. Please install it with `pip install langsmith`.") return False os.environ["LANGCHAIN_TRACING_V2"] = "true" return True def add_trace( self, - trace_id: str, + trace_id: str, # noqa: ARG002 trace_name: str, trace_type: str, - inputs: Dict[str, Any], - metadata: Dict[str, Any] | None = None, - vertex: Optional["Vertex"] = None, - ): - if not self._ready: + inputs: dict[str, Any], + metadata: dict[str, Any] | None = None, + vertex: Vertex | None = None, # noqa: ARG002 + ) -> None: + if not self._ready or not self._run_tree: return processed_inputs = {} if inputs: @@ -81,7 +86,7 @@ def add_trace( self._children[trace_name] = child self._child_link: dict[str, str] = {} - def _convert_to_langchain_types(self, io_dict: Dict[str, Any]): + def _convert_to_langchain_types(self, io_dict: dict[str, Any]): converted = {} for key, value in io_dict.items(): converted[key] = self._convert_to_langchain_type(value) @@ -112,13 +117,13 @@ def _convert_to_langchain_type(self, value): def end_trace( self, - trace_id: str, + trace_id: str, # noqa: ARG002 trace_name: str, - outputs: Dict[str, Any] | None = None, + outputs: dict[str, Any] | None = None, error: Exception | None = None, - logs: list[Log | dict] = [], + logs: Sequence[Log | dict] = (), ): - if not self._ready: + if not self._ready or trace_name not in self._children: return child = self._children[trace_name] raw_outputs = {} @@ -137,7 +142,7 @@ def end_trace( child.post() self._child_link[trace_name] = child.get_url() - def _error_to_string(self, error: Optional[Exception]): + def _error_to_string(self, error: Exception | None): error_message = None if error: string_stacktrace = traceback.format_exception(error) @@ -147,11 +152,11 @@ def _error_to_string(self, error: Optional[Exception]): def end( self, inputs: dict[str, Any], - outputs: Dict[str, Any], + outputs: dict[str, Any], error: Exception | None = None, metadata: dict[str, 
Any] | None = None, - ): - if not self._ready: + ) -> None: + if not self._ready or not self._run_tree: return self._run_tree.add_metadata({"inputs": inputs}) if metadata: @@ -160,5 +165,5 @@ def end( self._run_tree.post() self._run_link = self._run_tree.get_url() - def get_langchain_callback(self) -> Optional["BaseCallbackHandler"]: + def get_langchain_callback(self) -> BaseCallbackHandler | None: return None diff --git a/src/backend/base/langflow/services/tracing/langwatch.py b/src/backend/base/langflow/services/tracing/langwatch.py index 5783532fbe22..247da54b7e3d 100644 --- a/src/backend/base/langflow/services/tracing/langwatch.py +++ b/src/backend/base/langflow/services/tracing/langwatch.py @@ -1,18 +1,23 @@ -from typing import TYPE_CHECKING, Any, Dict, Optional, cast -from uuid import UUID +from __future__ import annotations -import nanoid # type: ignore +from typing import TYPE_CHECKING, Any, cast + +import nanoid from loguru import logger +from typing_extensions import override from langflow.schema.data import Data from langflow.services.tracing.base import BaseTracer -from langflow.services.tracing.schema import Log if TYPE_CHECKING: + from collections.abc import Sequence + from uuid import UUID + + from langchain.callbacks.base import BaseCallbackHandler from langwatch.tracer import ContextSpan from langflow.graph.vertex.base import Vertex - from langchain.callbacks.base import BaseCallbackHandler + from langflow.services.tracing.schema import Log class LangWatchTracer(BaseTracer): @@ -33,41 +38,43 @@ def __init__(self, trace_name: str, trace_type: str, project_name: str, trace_id self.trace = self._client.trace( trace_id=str(self.trace_id), ) - self.spans: dict[str, "ContextSpan"] = {} + self.spans: dict[str, ContextSpan] = {} name_without_id = " - ".join(trace_name.split(" - ")[0:-1]) self.trace.root_span.update( - span_id=f"{self.flow_id}-{nanoid.generate(size=6)}", # nanoid to make the span_id globally unique, which is required for LangWatch for now + # nanoid to make the span_id globally unique, which is required for LangWatch for now + span_id=f"{self.flow_id}-{nanoid.generate(size=6)}", name=name_without_id, type="workflow", ) - except Exception as e: - logger.debug(f"Error setting up LangWatch tracer: {e}") + except Exception: # noqa: BLE001 + logger.opt(exception=True).debug("Error setting up LangWatch tracer") self._ready = False @property def ready(self): return self._ready - def setup_langwatch(self): + def setup_langwatch(self) -> bool: try: import langwatch self._client = langwatch except ImportError: - logger.error("Could not import langwatch. Please install it with `pip install langwatch`.") + logger.exception("Could not import langwatch. 
Please install it with `pip install langwatch`.") return False return True + @override def add_trace( self, trace_id: str, trace_name: str, trace_type: str, - inputs: Dict[str, Any], - metadata: Dict[str, Any] | None = None, - vertex: Optional["Vertex"] = None, - ): + inputs: dict[str, Any], + metadata: dict[str, Any] | None = None, + vertex: Vertex | None = None, + ) -> None: if not self._ready: return # If user is not using session_id, then it becomes the same as flow_id, but @@ -84,7 +91,8 @@ def add_trace( ) span = self.trace.span( - span_id=f"{trace_id}-{nanoid.generate(size=6)}", # Add a nanoid to make the span_id globally unique, which is required for LangWatch for now + # Add a nanoid to make the span_id globally unique, which is required for LangWatch for now + span_id=f"{trace_id}-{nanoid.generate(size=6)}", name=name_without_id, type="component", parent=(previous_nodes[-1] if len(previous_nodes) > 0 else self.trace.root_span), @@ -93,14 +101,15 @@ def add_trace( self.trace.set_current_span(span) self.spans[trace_id] = span + @override def end_trace( self, trace_id: str, trace_name: str, - outputs: Dict[str, Any] | None = None, + outputs: dict[str, Any] | None = None, error: Exception | None = None, - logs: list[Log | dict] = [], - ): + logs: Sequence[Log | dict] = (), + ) -> None: if not self._ready: return if self.spans.get(trace_id): @@ -109,10 +118,10 @@ def end_trace( def end( self, inputs: dict[str, Any], - outputs: Dict[str, Any], + outputs: dict[str, Any], error: Exception | None = None, metadata: dict[str, Any] | None = None, - ): + ) -> None: if not self._ready: return self.trace.root_span.end( @@ -127,7 +136,7 @@ def end( if self.trace.api_key or self._client.api_key: self.trace.deferred_send_spans() - def _convert_to_langwatch_types(self, io_dict: Optional[Dict[str, Any]]): + def _convert_to_langwatch_types(self, io_dict: dict[str, Any] | None): from langwatch.utils import autoconvert_typed_values if io_dict is None: @@ -163,7 +172,7 @@ def _convert_to_langwatch_type(self, value): value = cast(dict, value.to_lc_document()) return value - def get_langchain_callback(self) -> Optional["BaseCallbackHandler"]: + def get_langchain_callback(self) -> BaseCallbackHandler | None: if self.trace is None: return None diff --git a/src/backend/base/langflow/services/tracing/service.py b/src/backend/base/langflow/services/tracing/service.py index 3e8a6ad13080..6d95c1a6beff 100644 --- a/src/backend/base/langflow/services/tracing/service.py +++ b/src/backend/base/langflow/services/tracing/service.py @@ -1,23 +1,25 @@ +from __future__ import annotations + import asyncio import os from collections import defaultdict from contextlib import asynccontextmanager -from typing import TYPE_CHECKING, Any, Dict, List, Optional -from uuid import UUID +from typing import TYPE_CHECKING, Any from loguru import logger from langflow.services.base import Service -from langflow.services.tracing.base import BaseTracer -from langflow.services.tracing.schema import Log if TYPE_CHECKING: + from uuid import UUID + from langchain.callbacks.base import BaseCallbackHandler from langflow.custom.custom_component.component import Component from langflow.graph.vertex.base import Vertex - from langflow.services.monitor.service import MonitorService from langflow.services.settings.service import SettingsService + from langflow.services.tracing.base import BaseTracer + from langflow.services.tracing.schema import Log def _get_langsmith_tracer(): @@ -41,48 +43,49 @@ def _get_langfuse_tracer(): class 
TracingService(Service): name = "tracing_service" - def __init__(self, settings_service: "SettingsService", monitor_service: "MonitorService"): + def __init__(self, settings_service: SettingsService): self.settings_service = settings_service - self.monitor_service = monitor_service self.inputs: dict[str, dict] = defaultdict(dict) self.inputs_metadata: dict[str, dict] = defaultdict(dict) self.outputs: dict[str, dict] = defaultdict(dict) self.outputs_metadata: dict[str, dict] = defaultdict(dict) self.run_name: str | None = None self.run_id: UUID | None = None - self.project_name = None + self.project_name: str | None = None self._tracers: dict[str, BaseTracer] = {} self._logs: dict[str, list[Log | dict[Any, Any]]] = defaultdict(list) self.logs_queue: asyncio.Queue = asyncio.Queue() self.running = False - self.worker_task = None + self.worker_task: asyncio.Task | None = None + self.end_trace_tasks: set[asyncio.Task] = set() + self.deactivated = self.settings_service.settings.deactivate_tracing - async def log_worker(self): + async def log_worker(self) -> None: while self.running or not self.logs_queue.empty(): log_func, args = await self.logs_queue.get() try: await log_func(*args) - except Exception as e: - logger.error(f"Error processing log: {e}") + except Exception: # noqa: BLE001 + logger.exception("Error processing log") finally: self.logs_queue.task_done() - async def start(self): - if self.running: + async def start(self) -> None: + if self.running or self.deactivated: return try: self.running = True self.worker_task = asyncio.create_task(self.log_worker()) - except Exception as e: - logger.error(f"Error starting tracing service: {e}") + except Exception: # noqa: BLE001 + logger.exception("Error starting tracing service") - async def flush(self): + async def flush(self) -> None: try: await self.logs_queue.join() - except Exception as e: - logger.error(f"Error flushing logs: {e}") + except Exception: # noqa: BLE001 + logger.exception("Error flushing logs") - async def stop(self): + async def stop(self) -> None: try: self.running = False await self.flush() @@ -93,25 +96,25 @@ async def stop(self): self.worker_task.cancel() self.worker_task = None - except Exception as e: - logger.error(f"Error stopping tracing service: {e}") + except Exception: # noqa: BLE001 + logger.exception("Error stopping tracing service") - def _reset_io(self): + def _reset_io(self) -> None: self.inputs = defaultdict(dict) self.inputs_metadata = defaultdict(dict) self.outputs = defaultdict(dict) self.outputs_metadata = defaultdict(dict) - async def initialize_tracers(self): + async def initialize_tracers(self) -> None: try: await self.start() - self._initialize_langsmith_tracer() - self._initialize_langwatch_tracer() - self._initialize_langfuse_tracer() - except Exception as e: - logger.debug(f"Error initializing tracers: {e}") + await asyncio.to_thread(self._initialize_langsmith_tracer) + await asyncio.to_thread(self._initialize_langwatch_tracer) + await asyncio.to_thread(self._initialize_langfuse_tracer) + except Exception: # noqa: BLE001 + logger.opt(exception=True).debug("Error initializing tracers") - def _initialize_langsmith_tracer(self): + def _initialize_langsmith_tracer(self) -> None: project_name = os.getenv("LANGCHAIN_PROJECT", "Langflow") self.project_name = project_name langsmith_tracer = _get_langsmith_tracer() @@ -122,10 +125,8 @@ def _initialize_langsmith_tracer(self): trace_id=self.run_id, ) - def _initialize_langwatch_tracer(self): - if ( - "langwatch" not in self._tracers or 
self._tracers["langwatch"].trace_id != self.run_id # type: ignore - ): + def _initialize_langwatch_tracer(self) -> None: + if "langwatch" not in self._tracers or self._tracers["langwatch"].trace_id != self.run_id: langwatch_tracer = _get_langwatch_tracer() self._tracers["langwatch"] = langwatch_tracer( trace_name=self.run_name, @@ -134,7 +135,7 @@ def _initialize_langwatch_tracer(self): trace_id=self.run_id, ) - def _initialize_langfuse_tracer(self): + def _initialize_langfuse_tracer(self) -> None: self.project_name = os.getenv("LANGCHAIN_PROJECT", "Langflow") langfuse_tracer = _get_langfuse_tracer() self._tracers["langfuse"] = langfuse_tracer( @@ -144,10 +145,10 @@ def _initialize_langfuse_tracer(self): trace_id=self.run_id, ) - def set_run_name(self, name: str): + def set_run_name(self, name: str) -> None: self.run_name = name - def set_run_id(self, run_id: UUID): + def set_run_id(self, run_id: UUID) -> None: self.run_id = run_id def _start_traces( @@ -155,61 +156,62 @@ def _start_traces( trace_id: str, trace_name: str, trace_type: str, - inputs: Dict[str, Any], - metadata: Optional[Dict[str, Any]] = None, - vertex: Optional["Vertex"] = None, - ): + inputs: dict[str, Any], + metadata: dict[str, Any] | None = None, + vertex: Vertex | None = None, + ) -> None: inputs = self._cleanup_inputs(inputs) self.inputs[trace_name] = inputs self.inputs_metadata[trace_name] = metadata or {} for tracer in self._tracers.values(): - if not tracer.ready: # type: ignore + if not tracer.ready: continue try: tracer.add_trace(trace_id, trace_name, trace_type, inputs, metadata, vertex) - except Exception as e: - logger.error(f"Error starting trace {trace_name}: {e}") + except Exception: # noqa: BLE001 + logger.exception(f"Error starting trace {trace_name}") - def _end_traces(self, trace_id: str, trace_name: str, error: Exception | None = None): + def _end_traces(self, trace_id: str, trace_name: str, error: Exception | None = None) -> None: for tracer in self._tracers.values(): - if not tracer.ready: # type: ignore - continue - try: - tracer.end_trace( - trace_id=trace_id, - trace_name=trace_name, - outputs=self.outputs[trace_name], - error=error, - logs=self._logs[trace_name], - ) - except Exception as e: - logger.error(f"Error ending trace {trace_name}: {e}") - - def _end_all_traces(self, outputs: dict, error: Exception | None = None): + if tracer.ready: + try: + tracer.end_trace( + trace_id=trace_id, + trace_name=trace_name, + outputs=self.outputs[trace_name], + error=error, + logs=self._logs[trace_name], + ) + except Exception: # noqa: BLE001 + logger.exception(f"Error ending trace {trace_name}") + + def _end_all_traces(self, outputs: dict, error: Exception | None = None) -> None: for tracer in self._tracers.values(): - if not tracer.ready: # type: ignore - continue - try: - tracer.end(self.inputs, outputs=self.outputs, error=error, metadata=outputs) - except Exception as e: - logger.error(f"Error ending all traces: {e}") - - async def end(self, outputs: dict, error: Exception | None = None): - self._end_all_traces(outputs, error) + if tracer.ready: + try: + tracer.end(self.inputs, outputs=self.outputs, error=error, metadata=outputs) + except Exception: # noqa: BLE001 + logger.exception("Error ending all traces") + + async def end(self, outputs: dict, error: Exception | None = None) -> None: + await asyncio.to_thread(self._end_all_traces, outputs, error) self._reset_io() await self.stop() - def add_log(self, trace_name: str, log: Log): + def add_log(self, trace_name: str, log: Log) -> None: 
self._logs[trace_name].append(log) @asynccontextmanager async def trace_context( self, - component: "Component", + component: Component, trace_name: str, - inputs: Dict[str, Any], - metadata: Optional[Dict[str, Any]] = None, + inputs: dict[str, Any], + metadata: dict[str, Any] | None = None, ): + if self.deactivated: + yield self + return trace_id = trace_name if component._vertex: trace_id = component._vertex.id @@ -225,35 +227,39 @@ async def trace_context( try: yield self except Exception as e: - self._end_traces(trace_id, trace_name, e) - raise e - finally: - asyncio.create_task(await asyncio.to_thread(self._end_and_reset, trace_id, trace_name, None)) - - async def _end_and_reset(self, trace_id: str, trace_name: str, error: Exception | None = None): - self._end_traces(trace_id, trace_name, error) + self._end_and_reset(trace_id, trace_name, e) + raise + else: + self._end_and_reset(trace_id, trace_name) + + def _end_and_reset(self, trace_id: str, trace_name: str, error: Exception | None = None) -> None: + task = asyncio.create_task(asyncio.to_thread(self._end_traces, trace_id, trace_name, error)) + self.end_trace_tasks.add(task) + task.add_done_callback(self.end_trace_tasks.discard) self._reset_io() def set_outputs( self, trace_name: str, - outputs: Dict[str, Any], - output_metadata: Dict[str, Any] | None = None, - ): + outputs: dict[str, Any], + output_metadata: dict[str, Any] | None = None, + ) -> None: self.outputs[trace_name] |= outputs or {} self.outputs_metadata[trace_name] |= output_metadata or {} - def _cleanup_inputs(self, inputs: Dict[str, Any]): + def _cleanup_inputs(self, inputs: dict[str, Any]): inputs = inputs.copy() - for key in inputs.keys(): + for key in inputs: if "api_key" in key: inputs[key] = "*****" # avoid logging api_keys for security reasons return inputs - def get_langchain_callbacks(self) -> List["BaseCallbackHandler"]: + def get_langchain_callbacks(self) -> list[BaseCallbackHandler]: + if self.deactivated: + return [] callbacks = [] for tracer in self._tracers.values(): - if not tracer.ready: # type: ignore + if not tracer.ready: # type: ignore[truthy-function] continue langchain_callback = tracer.get_langchain_callback() if langchain_callback: diff --git a/src/backend/base/langflow/services/tracing/utils.py b/src/backend/base/langflow/services/tracing/utils.py index a90c94c9954d..b7df4c99c059 100644 --- a/src/backend/base/langflow/services/tracing/utils.py +++ b/src/backend/base/langflow/services/tracing/utils.py @@ -1,4 +1,4 @@ -from typing import Any, Dict +from typing import Any from langflow.schema.data import Data @@ -20,14 +20,11 @@ def convert_to_langchain_type(value): else: value = value.to_lc_document() elif isinstance(value, Data): - if "text" in value.data: - value = value.to_lc_document() - else: - value = value.data + value = value.to_lc_document() if "text" in value.data else value.data return value -def convert_to_langchain_types(io_dict: Dict[str, Any]): +def convert_to_langchain_types(io_dict: dict[str, Any]): converted = {} for key, value in io_dict.items(): converted[key] = convert_to_langchain_type(value) diff --git a/src/backend/base/langflow/services/utils.py b/src/backend/base/langflow/services/utils.py index 558eaf0280e7..a017e79349e4 100644 --- a/src/backend/base/langflow/services/utils.py +++ b/src/backend/base/langflow/services/utils.py @@ -1,3 +1,5 @@ +import asyncio + from loguru import logger from sqlmodel import Session, select @@ -22,28 +24,27 @@ def get_or_create_super_user(session: Session, username, password, is_default): if 
user.is_superuser: if verify_password(password, user.password): return None - else: - # Superuser exists but password is incorrect - # which means that the user has changed the - # base superuser credentials. - # This means that the user has already created - # a superuser and changed the password in the UI - # so we don't need to do anything. - logger.debug( - "Superuser exists but password is incorrect. " - "This means that the user has changed the " - "base superuser credentials." - ) - return None - else: - logger.debug("User with superuser credentials exists but is not a superuser.") + # Superuser exists but password is incorrect + # which means that the user has changed the + # base superuser credentials. + # This means that the user has already created + # a superuser and changed the password in the UI + # so we don't need to do anything. + logger.debug( + "Superuser exists but password is incorrect. " + "This means that the user has changed the " + "base superuser credentials." + ) return None + logger.debug("User with superuser credentials exists but is not a superuser.") + return None if user: if verify_password(password, user.password): - raise ValueError("User with superuser credentials exists but is not a superuser.") - else: - raise ValueError("Incorrect superuser credentials") + msg = "User with superuser credentials exists but is not a superuser." + raise ValueError(msg) + msg = "Incorrect superuser credentials" + raise ValueError(msg) if is_default: logger.debug("Creating default superuser.") @@ -51,16 +52,17 @@ def get_or_create_super_user(session: Session, username, password, is_default): logger.debug("Creating superuser.") try: return create_super_user(username, password, db=session) - except Exception as exc: + except Exception as exc: # noqa: BLE001 if "UNIQUE constraint failed: user.username" in str(exc): # This is to deal with workers running this # at startup and trying to create the superuser # at the same time. - logger.debug("Superuser already exists.") + logger.opt(exception=True).debug("Superuser already exists.") return None + logger.opt(exception=True).debug("Error creating superuser.") -def setup_superuser(settings_service, session: Session): +def setup_superuser(settings_service, session: Session) -> None: if settings_service.auth_settings.AUTO_LOGIN: logger.debug("AUTO_LOGIN is set to True. Creating default superuser.") else: @@ -78,15 +80,14 @@ def setup_superuser(settings_service, session: Session): logger.debug("Superuser created successfully.") except Exception as exc: logger.exception(exc) - raise RuntimeError("Could not create superuser. Please create a superuser manually.") from exc + msg = "Could not create superuser. Please create a superuser manually." + raise RuntimeError(msg) from exc finally: settings_service.auth_settings.reset_credentials() -def teardown_superuser(settings_service, session): - """ - Teardown the superuser. - """ +def teardown_superuser(settings_service, session) -> None: + """Teardown the superuser.""" # If AUTO_LOGIN is True, we will remove the default superuser # from the database. @@ -106,40 +107,41 @@ def teardown_superuser(settings_service, session): except Exception as exc: logger.exception(exc) - raise RuntimeError("Could not remove default superuser.") from exc + session.rollback() + msg = "Could not remove default superuser." 
+ raise RuntimeError(msg) from exc + + +def _teardown_superuser(): + with get_db_service().with_session() as session: + teardown_superuser(get_settings_service(), session) -async def teardown_services(): - """ - Teardown all the services. - """ +async def teardown_services() -> None: + """Teardown all the services.""" try: - teardown_superuser(get_settings_service(), next(get_session())) - except Exception as exc: + await asyncio.to_thread(_teardown_superuser) + except Exception as exc: # noqa: BLE001 logger.exception(exc) try: from langflow.services.manager import service_manager await service_manager.teardown() - except Exception as exc: + except Exception as exc: # noqa: BLE001 logger.exception(exc) -def initialize_settings_service(): - """ - Initialize the settings manager. - """ +def initialize_settings_service() -> None: + """Initialize the settings manager.""" from langflow.services.settings import factory as settings_factory get_service(ServiceType.SETTINGS_SERVICE, settings_factory.SettingsServiceFactory()) -def initialize_session_service(): - """ - Initialize the session manager. - """ +def initialize_session_service() -> None: + """Initialize the session manager.""" from langflow.services.cache import factory as cache_factory - from langflow.services.session import factory as session_service_factory # type: ignore + from langflow.services.session import factory as session_service_factory initialize_settings_service() @@ -154,20 +156,16 @@ def initialize_session_service(): ) -def initialize_services(fix_migration: bool = False, socketio_server=None): - """ - Initialize all the services needed. - """ +def initialize_services(*, fix_migration: bool = False) -> None: + """Initialize all the services needed.""" # Test cache connection get_service(ServiceType.CACHE_SERVICE, default=CacheServiceFactory()) # Setup the superuser - try: - initialize_database(fix_migration=fix_migration) - except Exception as exc: - raise exc + initialize_database(fix_migration=fix_migration) setup_superuser(get_service(ServiceType.SETTINGS_SERVICE), next(get_session())) try: get_db_service().migrate_flows_if_auto_login() except Exception as exc: - logger.error(f"Error migrating flows: {exc}") - raise RuntimeError("Error migrating flows") from exc + msg = "Error migrating flows" + logger.exception(msg) + raise RuntimeError(msg) from exc diff --git a/src/backend/base/langflow/services/variable/base.py b/src/backend/base/langflow/services/variable/base.py index 632b03af5ada..7cbb91637ed9 100644 --- a/src/backend/base/langflow/services/variable/base.py +++ b/src/backend/base/langflow/services/variable/base.py @@ -1,5 +1,4 @@ import abc -from typing import Optional, Union from uuid import UUID from sqlmodel import Session @@ -9,16 +8,13 @@ class VariableService(Service): - """ - Abstract base class for a variable service. - """ + """Abstract base class for a variable service.""" name = "variable_service" @abc.abstractmethod - def initialize_user_variables(self, user_id: Union[UUID, str], session: Session) -> None: - """ - Initialize user variables. + def initialize_user_variables(self, user_id: UUID | str, session: Session) -> None: + """Initialize user variables. Args: user_id: The user ID. @@ -26,9 +22,8 @@ def initialize_user_variables(self, user_id: Union[UUID, str], session: Session) """ @abc.abstractmethod - def get_variable(self, user_id: Union[UUID, str], name: str, field: str, session: Session) -> str: - """ - Get a variable value. 
+ def get_variable(self, user_id: UUID | str, name: str, field: str, session: Session) -> str: + """Get a variable value. Args: user_id: The user ID. @@ -41,9 +36,8 @@ def get_variable(self, user_id: Union[UUID, str], name: str, field: str, session """ @abc.abstractmethod - def list_variables(self, user_id: Union[UUID, str], session: Session) -> list[Optional[str]]: - """ - List all variables. + def list_variables(self, user_id: UUID | str, session: Session) -> list[str | None]: + """List all variables. Args: user_id: The user ID. @@ -54,9 +48,8 @@ def list_variables(self, user_id: Union[UUID, str], session: Session) -> list[Op """ @abc.abstractmethod - def update_variable(self, user_id: Union[UUID, str], name: str, value: str, session: Session) -> Variable: - """ - Update a variable. + def update_variable(self, user_id: UUID | str, name: str, value: str, session: Session) -> Variable: + """Update a variable. Args: user_id: The user ID. @@ -69,9 +62,8 @@ def update_variable(self, user_id: Union[UUID, str], name: str, value: str, sess """ @abc.abstractmethod - def delete_variable(self, user_id: Union[UUID, str], name: str, session: Session) -> None: - """ - Delete a variable. + def delete_variable(self, user_id: UUID | str, name: str, session: Session) -> None: + """Delete a variable. Args: user_id: The user ID. @@ -83,9 +75,8 @@ def delete_variable(self, user_id: Union[UUID, str], name: str, session: Session """ @abc.abstractmethod - def delete_variable_by_id(self, user_id: Union[UUID, str], variable_id: UUID, session: Session) -> None: - """ - Delete a variable by ID. + def delete_variable_by_id(self, user_id: UUID | str, variable_id: UUID, session: Session) -> None: + """Delete a variable by ID. Args: user_id: The user ID. @@ -96,15 +87,15 @@ def delete_variable_by_id(self, user_id: Union[UUID, str], variable_id: UUID, se @abc.abstractmethod def create_variable( self, - user_id: Union[UUID, str], + user_id: UUID | str, name: str, value: str, + *, default_fields: list[str], _type: str, session: Session, ) -> Variable: - """ - Create a variable. + """Create a variable. Args: user_id: The user ID. 
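One detail worth illustrating in the variable-service changes above: `create_variable` gains a bare `*`, which makes `default_fields`, `_type`, and the session keyword-only, so call sites can no longer scramble the trailing arguments positionally. A standalone sketch of the effect (the function body is a hypothetical stand-in, not the real service method):

```python
def create_variable(user_id: str, name: str, value: str, *, default_fields: list[str], _type: str) -> dict:
    # Stand-in body for the demo; the real method encrypts and persists the value.
    return {"user_id": user_id, "name": name, "type": _type, "fields": default_fields}


# Trailing arguments must now be named, which resists reordering bugs:
create_variable("user-1", "OPENAI_API_KEY", "sk-...", default_fields=[], _type="Credential")

# create_variable("user-1", "OPENAI_API_KEY", "sk-...", [], "Credential")
# -> TypeError: create_variable() takes 3 positional arguments but 5 were given
```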
diff --git a/src/backend/base/langflow/services/variable/constants.py b/src/backend/base/langflow/services/variable/constants.py new file mode 100644 index 000000000000..3318e7a54541 --- /dev/null +++ b/src/backend/base/langflow/services/variable/constants.py @@ -0,0 +1,2 @@ +CREDENTIAL_TYPE = "Credential" +GENERIC_TYPE = "Generic" diff --git a/src/backend/base/langflow/services/variable/factory.py b/src/backend/base/langflow/services/variable/factory.py index 18d63bc0c139..160bb92f6eb4 100644 --- a/src/backend/base/langflow/services/variable/factory.py +++ b/src/backend/base/langflow/services/variable/factory.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import TYPE_CHECKING from langflow.services.factory import ServiceFactory @@ -8,10 +10,10 @@ class VariableServiceFactory(ServiceFactory): - def __init__(self): + def __init__(self) -> None: super().__init__(VariableService) - def create(self, settings_service: "SettingsService"): + def create(self, settings_service: SettingsService): # here you would have logic to create and configure a VariableService # based on the settings_service @@ -20,5 +22,4 @@ def create(self, settings_service: "SettingsService"): from langflow.services.variable.kubernetes import KubernetesSecretService return KubernetesSecretService(settings_service) - else: - return DatabaseVariableService(settings_service) + return DatabaseVariableService(settings_service) diff --git a/src/backend/base/langflow/services/variable/kubernetes.py b/src/backend/base/langflow/services/variable/kubernetes.py index 62a044d5385d..b5206f2333fc 100644 --- a/src/backend/base/langflow/services/variable/kubernetes.py +++ b/src/backend/base/langflow/services/variable/kubernetes.py @@ -1,26 +1,34 @@ +from __future__ import annotations + import os -from typing import Optional, Tuple, Union -from uuid import UUID +from typing import TYPE_CHECKING from loguru import logger -from sqlmodel import Session +from typing_extensions import override from langflow.services.auth import utils as auth_utils from langflow.services.base import Service from langflow.services.database.models.variable.model import Variable, VariableCreate -from langflow.services.settings.service import SettingsService from langflow.services.variable.base import VariableService +from langflow.services.variable.constants import CREDENTIAL_TYPE, GENERIC_TYPE from langflow.services.variable.kubernetes_secrets import KubernetesSecretManager, encode_user_id -from langflow.services.variable.service import CREDENTIAL_TYPE, GENERIC_TYPE + +if TYPE_CHECKING: + from uuid import UUID + + from sqlmodel import Session + + from langflow.services.settings.service import SettingsService class KubernetesSecretService(VariableService, Service): - def __init__(self, settings_service: "SettingsService"): + def __init__(self, settings_service: SettingsService): self.settings_service = settings_service # TODO: settings_service to set kubernetes namespace self.kubernetes_secrets = KubernetesSecretManager() - def initialize_user_variables(self, user_id: Union[UUID, str], session: Session): + @override + def initialize_user_variables(self, user_id: UUID | str, session: Session) -> None: # Check for environment variables that should be stored in the database should_or_should_not = "Should" if self.settings_service.settings.store_environment_variables else "Should not" logger.info(f"{should_or_should_not} store environment variables in the kubernetes.") @@ -41,8 +49,8 @@ def initialize_user_variables(self, user_id: Union[UUID, str], 
session: Session) name=secret_name, data=variables, ) - except Exception as e: - logger.error(f"Error creating {var} variable: {e}") + except Exception: # noqa: BLE001 + logger.exception(f"Error creating {var} variable") else: logger.info("Skipping environment variable storage.") @@ -51,49 +59,50 @@ def initialize_user_variables(self, user_id: Union[UUID, str], session: Session) def resolve_variable( self, secret_name: str, - user_id: Union[UUID, str], + user_id: UUID | str, name: str, - ) -> Tuple[str, str]: + ) -> tuple[str, str]: variables = self.kubernetes_secrets.get_secret(name=secret_name) if not variables: - raise ValueError(f"user_id {user_id} variable not found.") + msg = f"user_id {user_id} variable not found." + raise ValueError(msg) if name in variables: return name, variables[name] - else: - credential_name = CREDENTIAL_TYPE + "_" + name - if credential_name in variables: - return credential_name, variables[credential_name] - else: - raise ValueError(f"user_id {user_id} variable name {name} not found.") + credential_name = CREDENTIAL_TYPE + "_" + name + if credential_name in variables: + return credential_name, variables[credential_name] + msg = f"user_id {user_id} variable name {name} not found." + raise ValueError(msg) def get_variable( self, - user_id: Union[UUID, str], + user_id: UUID | str, name: str, field: str, _session: Session, ) -> str: secret_name = encode_user_id(user_id) key, value = self.resolve_variable(secret_name, user_id, name) - if key.startswith(CREDENTIAL_TYPE + "_") and field == "session_id": # type: ignore - raise TypeError( + if key.startswith(CREDENTIAL_TYPE + "_") and field == "session_id": + msg = ( f"variable {name} of type 'Credential' cannot be used in a Session ID field " "because its purpose is to prevent the exposure of values." 
) + raise TypeError(msg) return value def list_variables( self, - user_id: Union[UUID, str], + user_id: UUID | str, _session: Session, - ) -> list[Optional[str]]: + ) -> list[str | None]: variables = self.kubernetes_secrets.get_secret(name=encode_user_id(user_id)) if not variables: return [] names = [] - for key in variables.keys(): + for key in variables: if key.startswith(CREDENTIAL_TYPE + "_"): names.append(key[len(CREDENTIAL_TYPE) + 1 :]) else: @@ -102,7 +111,7 @@ def list_variables( def update_variable( self, - user_id: Union[UUID, str], + user_id: UUID | str, name: str, value: str, _session: Session, @@ -111,24 +120,25 @@ def update_variable( secret_key, _ = self.resolve_variable(secret_name, user_id, name) return self.kubernetes_secrets.update_secret(name=secret_name, data={secret_key: value}) - def delete_variable(self, user_id: Union[UUID, str], name: str, _session: Session) -> None: + def delete_variable(self, user_id: UUID | str, name: str, _session: Session) -> None: secret_name = encode_user_id(user_id) secret_key, _ = self.resolve_variable(secret_name, user_id, name) self.kubernetes_secrets.delete_secret_key(name=secret_name, key=secret_key) - return - def delete_variable_by_id(self, user_id: Union[UUID, str], variable_id: UUID | str, _session: Session) -> None: + def delete_variable_by_id(self, user_id: UUID | str, variable_id: UUID | str, _session: Session) -> None: self.delete_variable(user_id, str(variable_id), _session) + @override def create_variable( self, - user_id: Union[UUID, str], + user_id: UUID | str, name: str, value: str, + *, default_fields: list[str], _type: str, - _session: Session, + session: Session, ) -> Variable: secret_name = encode_user_id(user_id) secret_key = name @@ -145,5 +155,4 @@ def create_variable( value=auth_utils.encrypt_api_key(value, settings_service=self.settings_service), default_fields=default_fields, ) - variable = Variable.model_validate(variable_base, from_attributes=True, update={"user_id": user_id}) - return variable + return Variable.model_validate(variable_base, from_attributes=True, update={"user_id": user_id}) diff --git a/src/backend/base/langflow/services/variable/kubernetes_secrets.py b/src/backend/base/langflow/services/variable/kubernetes_secrets.py index a72fbd37a335..6764037a17c3 100644 --- a/src/backend/base/langflow/services/variable/kubernetes_secrets.py +++ b/src/backend/base/langflow/services/variable/kubernetes_secrets.py @@ -1,20 +1,17 @@ -from kubernetes import client, config # type: ignore -from kubernetes.client.rest import ApiException # type: ignore -from base64 import b64encode, b64decode +from base64 import b64decode, b64encode +from http import HTTPStatus +from uuid import UUID +from kubernetes import client, config +from kubernetes.client.rest import ApiException from loguru import logger -from typing import Union -from uuid import UUID class KubernetesSecretManager: - """ - A class for managing Kubernetes secrets. - """ + """A class for managing Kubernetes secrets.""" def __init__(self, namespace: str = "langflow"): - """ - Initialize the KubernetesSecretManager class. + """Initialize the KubernetesSecretManager class. Args: namespace (str): The namespace in which to perform secret operations. @@ -25,9 +22,13 @@ def __init__(self, namespace: str = "langflow"): # initialize the Kubernetes API client self.core_api = client.CoreV1Api() - def create_secret(self, name: str, data: dict, secret_type: str = "Opaque"): - """ - Create a new secret in the specified namespace. 
+ def create_secret( + self, + name: str, + data: dict, + secret_type: str = "Opaque", # noqa: S107 + ): + """Create a new secret in the specified namespace. Args: name (str): The name of the secret to create. @@ -46,9 +47,9 @@ def create_secret(self, name: str, data: dict, secret_type: str = "Opaque"): return self.core_api.create_namespaced_secret(self.namespace, secret) - def upsert_secret(self, secret_name: str, data: dict, secret_type: str = "Opaque"): - """ - Upsert a secret in the specified namespace. + def upsert_secret(self, secret_name: str, data: dict): + """Upsert a secret in the specified namespace. + If the secret doesn't exist, it will be created. If it exists, it will be updated with new data while preserving existing keys. @@ -72,16 +73,14 @@ def upsert_secret(self, secret_name: str, data: dict, secret_type: str = "Opaque return self.core_api.replace_namespaced_secret(secret_name, self.namespace, existing_secret) except ApiException as e: - if e.status == 404: + if e.status == HTTPStatus.NOT_FOUND: # Secret doesn't exist, create a new one return self.create_secret(secret_name, data) - else: - logger.error(f"Error upserting secret {secret_name}: {e}") - raise + logger.exception(f"Error upserting secret {secret_name}") + raise def get_secret(self, name: str) -> dict | None: - """ - Read a secret from the specified namespace. + """Read a secret from the specified namespace. Args: name (str): The name of the secret to read. @@ -93,13 +92,12 @@ def get_secret(self, name: str) -> dict | None: secret = self.core_api.read_namespaced_secret(name, self.namespace) return {k: b64decode(v).decode() for k, v in secret.data.items()} except ApiException as e: - if e.status == 404: + if e.status == HTTPStatus.NOT_FOUND: return None raise def update_secret(self, name: str, data: dict): - """ - Update an existing secret in the specified namespace. + """Update an existing secret in the specified namespace. Args: name (str): The name of the secret to update. @@ -121,8 +119,7 @@ def update_secret(self, name: str, data: dict): return self.core_api.replace_namespaced_secret(name, self.namespace, secret) def delete_secret_key(self, name: str, key: str): - """ - Delete a key from the specified secret in the namespace. + """Delete a key from the specified secret in the namespace. Args: name (str): The name of the secret. @@ -146,8 +143,7 @@ def delete_secret_key(self, name: str, key: str): return self.core_api.replace_namespaced_secret(name, self.namespace, secret) def delete_secret(self, name: str): - """ - Delete a secret from the specified namespace. + """Delete a secret from the specified namespace. Args: name (str): The name of the secret to delete. @@ -160,36 +156,37 @@ def delete_secret(self, name: str): # utility function to encode user_id to base64 lower case and numbers only # this is required by kubernetes secret name restrictions -def encode_user_id(user_id: Union[UUID | str]) -> str: +def encode_user_id(user_id: UUID | str) -> str: # Handle UUID if isinstance(user_id, UUID): return f"uuid-{str(user_id).lower()}"[:253] # Convert string to lowercase - id = str(user_id).lower() + _user_id = str(user_id).lower() # If the user_id looks like an email, replace @ and . with allowed characters - if "@" in id or "." in id: - id = id.replace("@", "-at-").replace(".", "-dot-") + if "@" in _user_id or "." 
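
# Aside: the upsert-on-404 idiom the hunk above switches to HTTPStatus for —
# replace first, create only when the server says NOT_FOUND, re-raise anything
# else. (Merging new data into the existing secret is elided here.)
from http import HTTPStatus

from kubernetes.client.rest import ApiException

def upsert_secret(core_api, namespace: str, name: str, secret):
    try:
        return core_api.replace_namespaced_secret(name, namespace, secret)
    except ApiException as e:
        if e.status == HTTPStatus.NOT_FOUND:
            return core_api.create_namespaced_secret(namespace, secret)
        raise
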
in _user_id: + _user_id = _user_id.replace("@", "-at-").replace(".", "-dot-") # Encode the user_id to base64 # encoded = base64.b64encode(user_id.encode("utf-8")).decode("utf-8") # Replace characters not allowed in Kubernetes names - id = id.replace("+", "-").replace("/", "_").rstrip("=") + _user_id = _user_id.replace("+", "-").replace("/", "_").rstrip("=") # Ensure the name starts with an alphanumeric character - if not id[0].isalnum(): - id = "a-" + id + if not _user_id[0].isalnum(): + _user_id = "a-" + _user_id # Truncate to 253 characters (Kubernetes name length limit) - id = id[:253] + _user_id = _user_id[:253] - if not all(c.isalnum() or c in "-_" for c in id): - raise ValueError(f"Invalid user_id: {id}") + if not all(c.isalnum() or c in "-_" for c in _user_id): + msg = f"Invalid user_id: {_user_id}" + raise ValueError(msg) # Ensure the name ends with an alphanumeric character - while not id[-1].isalnum(): - id = id[:-1] + while not _user_id[-1].isalnum(): + _user_id = _user_id[:-1] - return id + return _user_id diff --git a/src/backend/base/langflow/services/variable/service.py b/src/backend/base/langflow/services/variable/service.py index 740ff4f1ab5d..11eb086218d5 100644 --- a/src/backend/base/langflow/services/variable/service.py +++ b/src/backend/base/langflow/services/variable/service.py @@ -1,112 +1,99 @@ +from __future__ import annotations + import os from datetime import datetime, timezone -from typing import TYPE_CHECKING, Optional, Union -from uuid import UUID +from typing import TYPE_CHECKING -from fastapi import Depends from loguru import logger from sqlmodel import Session, select from langflow.services.auth import utils as auth_utils from langflow.services.base import Service from langflow.services.database.models.variable.model import Variable, VariableCreate, VariableUpdate -from langflow.services.deps import get_session from langflow.services.variable.base import VariableService +from langflow.services.variable.constants import CREDENTIAL_TYPE, GENERIC_TYPE if TYPE_CHECKING: - from langflow.services.settings.service import SettingsService + from collections.abc import Sequence + from uuid import UUID -CREDENTIAL_TYPE = "Credential" -GENERIC_TYPE = "Generic" + from langflow.services.settings.service import SettingsService class DatabaseVariableService(VariableService, Service): - def __init__(self, settings_service: "SettingsService"): + def __init__(self, settings_service: SettingsService): self.settings_service = settings_service - def initialize_user_variables(self, user_id: Union[UUID, str], session: Session = Depends(get_session)): - # Check for environment variables that should be stored in the database - should_or_should_not = "Should" if self.settings_service.settings.store_environment_variables else "Should not" - logger.info(f"{should_or_should_not} store environment variables in the database.") - if self.settings_service.settings.store_environment_variables: - for var in self.settings_service.settings.variables_to_get_from_environment: - if var in os.environ: - logger.debug(f"Creating {var} variable from environment.") - - if found_variable := session.exec( - select(Variable).where(Variable.user_id == user_id, Variable.name == var) - ).first(): - # Update it - value = os.environ[var] - if isinstance(value, str): - value = value.strip() - # If the secret_key changes the stored value could be invalid - # so we need to re-encrypt it - encrypted = auth_utils.encrypt_api_key(value, settings_service=self.settings_service) - found_variable.value = encrypted - 
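
# Aside: spot-checks for the encoding rules above (import path taken from this
# diff); the expected strings follow directly from the substitutions shown.
from uuid import UUID

from langflow.services.variable.kubernetes_secrets import encode_user_id

assert encode_user_id(UUID("123e4567-e89b-12d3-a456-426614174000")) == (
    "uuid-123e4567-e89b-12d3-a456-426614174000"
)
assert encode_user_id("John.Doe@Example.com") == "john-dot-doe-at-example-dot-com"
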
session.add(found_variable) - session.commit() - else: - # Create it - try: - value = os.environ[var] - if isinstance(value, str): - value = value.strip() - self.create_variable( - user_id=user_id, - name=var, - value=value, - default_fields=[], - _type=CREDENTIAL_TYPE, - session=session, - ) - except Exception as e: - logger.error(f"Error creating {var} variable: {e}") - - else: + def initialize_user_variables(self, user_id: UUID | str, session: Session) -> None: + if not self.settings_service.settings.store_environment_variables: logger.info("Skipping environment variable storage.") + return + + logger.info("Storing environment variables in the database.") + for var_name in self.settings_service.settings.variables_to_get_from_environment: + if var_name in os.environ and os.environ[var_name].strip(): + value = os.environ[var_name].strip() + query = select(Variable).where(Variable.user_id == user_id, Variable.name == var_name) + existing = session.exec(query).first() + try: + if existing: + self.update_variable(user_id, var_name, value, session) + else: + self.create_variable( + user_id=user_id, + name=var_name, + value=value, + default_fields=[], + _type=CREDENTIAL_TYPE, + session=session, + ) + logger.info(f"Processed {var_name} variable from environment.") + except Exception as e: # noqa: BLE001 + logger.exception(f"Error processing {var_name} variable: {e!s}") def get_variable( self, - user_id: Union[UUID, str], + user_id: UUID | str, name: str, field: str, - session: Session = Depends(get_session), + session: Session, ) -> str: # we get the credential from the database # credential = session.query(Variable).filter(Variable.user_id == user_id, Variable.name == name).first() variable = session.exec(select(Variable).where(Variable.user_id == user_id, Variable.name == name)).first() if not variable or not variable.value: - raise ValueError(f"{name} variable not found.") + msg = f"{name} variable not found." + raise ValueError(msg) - if variable.type == CREDENTIAL_TYPE and field == "session_id": # type: ignore - raise TypeError( + if variable.type == CREDENTIAL_TYPE and field == "session_id": + msg = ( f"variable {name} of type 'Credential' cannot be used in a Session ID field " "because its purpose is to prevent the exposure of values." ) + raise TypeError(msg) # we decrypt the value - decrypted = auth_utils.decrypt_api_key(variable.value, settings_service=self.settings_service) - return decrypted + return auth_utils.decrypt_api_key(variable.value, settings_service=self.settings_service) - def get_all(self, user_id: Union[UUID, str], session: Session = Depends(get_session)) -> list[Optional[Variable]]: + def get_all(self, user_id: UUID | str, session: Session) -> list[Variable | None]: return list(session.exec(select(Variable).where(Variable.user_id == user_id)).all()) - def list_variables(self, user_id: Union[UUID, str], session: Session = Depends(get_session)) -> list[Optional[str]]: + def list_variables(self, user_id: UUID | str, session: Session) -> list[str | None]: variables = self.get_all(user_id=user_id, session=session) return [variable.name for variable in variables if variable] def update_variable( self, - user_id: Union[UUID, str], + user_id: UUID | str, name: str, value: str, - session: Session = Depends(get_session), + session: Session, ): variable = session.exec(select(Variable).where(Variable.user_id == user_id, Variable.name == name)).first() if not variable: - raise ValueError(f"{name} variable not found.") + msg = f"{name} variable not found." 
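
# Aside: the generic shape of the select-then-create-or-update flow in
# initialize_user_variables above; EnvVariable is a hypothetical stand-in
# model (the real service also encrypts the value before storing it).
from sqlmodel import Field, Session, SQLModel, create_engine, select

class EnvVariable(SQLModel, table=True):
    id: int | None = Field(default=None, primary_key=True)
    user_id: str
    name: str
    value: str

engine = create_engine("sqlite://")
SQLModel.metadata.create_all(engine)

def upsert(session: Session, user_id: str, name: str, value: str) -> None:
    query = select(EnvVariable).where(EnvVariable.user_id == user_id, EnvVariable.name == name)
    existing = session.exec(query).first()
    if existing:
        existing.value = value
        session.add(existing)
    else:
        session.add(EnvVariable(user_id=user_id, name=name, value=value))
    session.commit()

with Session(engine) as session:
    upsert(session, "u1", "OPENAI_API_KEY", "sk-test")
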
+ raise ValueError(msg) encrypted = auth_utils.encrypt_api_key(value, settings_service=self.settings_service) variable.value = encrypted session.add(variable) @@ -116,20 +103,22 @@ def update_variable( def update_variable_fields( self, - user_id: Union[UUID, str], - variable_id: Union[UUID, str], + user_id: UUID | str, + variable_id: UUID | str, variable: VariableUpdate, - session: Session = Depends(get_session), + session: Session, ): query = select(Variable).where(Variable.id == variable_id, Variable.user_id == user_id) db_variable = session.exec(query).one() + db_variable.updated_at = datetime.now(timezone.utc) + + variable.value = variable.value or "" + encrypted = auth_utils.encrypt_api_key(variable.value, settings_service=self.settings_service) + variable.value = encrypted variable_data = variable.model_dump(exclude_unset=True) for key, value in variable_data.items(): setattr(db_variable, key, value) - db_variable.updated_at = datetime.now(timezone.utc) - encrypted = auth_utils.encrypt_api_key(db_variable.value, settings_service=self.settings_service) - variable.value = encrypted session.add(db_variable) session.commit() @@ -138,38 +127,41 @@ def update_variable_fields( def delete_variable( self, - user_id: Union[UUID, str], + user_id: UUID | str, name: str, - session: Session = Depends(get_session), - ): + session: Session, + ) -> None: stmt = select(Variable).where(Variable.user_id == user_id).where(Variable.name == name) variable = session.exec(stmt).first() if not variable: - raise ValueError(f"{name} variable not found.") + msg = f"{name} variable not found." + raise ValueError(msg) session.delete(variable) session.commit() - def delete_variable_by_id(self, user_id: Union[UUID, str], variable_id: UUID, session: Session): + def delete_variable_by_id(self, user_id: UUID | str, variable_id: UUID, session: Session) -> None: variable = session.exec(select(Variable).where(Variable.user_id == user_id, Variable.id == variable_id)).first() if not variable: - raise ValueError(f"{variable_id} variable not found.") + msg = f"{variable_id} variable not found." 
+ raise ValueError(msg) session.delete(variable) session.commit() def create_variable( self, - user_id: Union[UUID, str], + user_id: UUID | str, name: str, value: str, - default_fields: list[str] = [], + *, + default_fields: Sequence[str] = (), _type: str = GENERIC_TYPE, - session: Session = Depends(get_session), + session: Session, ): variable_base = VariableCreate( name=name, type=_type, value=auth_utils.encrypt_api_key(value, settings_service=self.settings_service), - default_fields=default_fields, + default_fields=list(default_fields), ) variable = Variable.model_validate(variable_base, from_attributes=True, update={"user_id": user_id}) session.add(variable) diff --git a/src/backend/base/langflow/settings.py b/src/backend/base/langflow/settings.py index 9a1d985c359b..14488e84469a 100644 --- a/src/backend/base/langflow/settings.py +++ b/src/backend/base/langflow/settings.py @@ -1,10 +1,10 @@ DEV = False -def _set_dev(value): - global DEV +def _set_dev(value) -> None: + global DEV # noqa: PLW0603 DEV = value -def set_dev(value): +def set_dev(value) -> None: _set_dev(value) diff --git a/src/backend/base/langflow/template/__init__.py b/src/backend/base/langflow/template/__init__.py index 6518b96894b4..f53fb279f94d 100644 --- a/src/backend/base/langflow/template/__init__.py +++ b/src/backend/base/langflow/template/__init__.py @@ -2,10 +2,9 @@ from langflow.template.frontend_node.base import FrontendNode from langflow.template.template.base import Template - __all__ = [ + "FrontendNode", "Input", "Output", - "FrontendNode", "Template", ] diff --git a/src/backend/base/langflow/template/field/base.py b/src/backend/base/langflow/template/field/base.py index 4a09c656befa..47c7d02c3862 100644 --- a/src/backend/base/langflow/template/field/base.py +++ b/src/backend/base/langflow/template/field/base.py @@ -1,8 +1,11 @@ +from collections.abc import Callable from enum import Enum -from typing import GenericAlias # type: ignore -from typing import _GenericAlias # type: ignore -from typing import _UnionGenericAlias # type: ignore -from typing import Any, Callable, Optional, Union +from typing import ( # type: ignore[attr-defined] + Any, + GenericAlias, # type: ignore[attr-defined] + _GenericAlias, # type: ignore[attr-defined] + _UnionGenericAlias, # type: ignore[attr-defined] +) from pydantic import BaseModel, ConfigDict, Field, field_serializer, field_validator, model_serializer, model_validator @@ -46,42 +49,42 @@ class Input(BaseModel): file_types: list[str] = Field(default=[], serialization_alias="fileTypes") """List of file types associated with the field. Default is an empty list.""" - file_path: Optional[str] = "" + file_path: str | None = "" """The file path of the field if it is a file. Defaults to None.""" - password: bool = False - """Specifies if the field is a password. Defaults to False.""" + password: bool | None = None + """Specifies if the field is a password. Defaults to None.""" - options: Optional[Union[list[str], Callable]] = None + options: list[str] | Callable | None = None """List of options for the field. Only used when is_list=True. Default is an empty list.""" - name: Optional[str] = None + name: str | None = None """Name of the field. Default is an empty string.""" - display_name: Optional[str] = None + display_name: str | None = None """Display name of the field. Defaults to None.""" advanced: bool = False """Specifies if the field will be an advanced parameter (hidden).
Defaults to False.""" - input_types: Optional[list[str]] = None + input_types: list[str] | None = None """List of input types for the handle when the field has more than one type. Default is an empty list.""" dynamic: bool = False """Specifies if the field is dynamic. Defaults to False.""" - info: Optional[str] = "" + info: str | None = "" """Additional information about the field to be shown in the tooltip. Defaults to an empty string.""" - real_time_refresh: Optional[bool] = None + real_time_refresh: bool | None = None """Specifies if the field should have real time refresh. `refresh_button` must be False. Defaults to None.""" - refresh_button: Optional[bool] = None + refresh_button: bool | None = None """Specifies if the field should have a refresh button. Defaults to False.""" - refresh_button_text: Optional[str] = None + refresh_button_text: str | None = None """Specifies the text for the refresh button. Defaults to None.""" - range_spec: Optional[RangeSpec] = Field(default=None, serialization_alias="rangeSpec") + range_spec: RangeSpec | None = Field(default=None, serialization_alias="rangeSpec") """Range specification for the field. Defaults to None.""" load_from_db: bool = False @@ -96,9 +99,8 @@ def to_dict(self): def serialize_model(self, handler): result = handler(self) # If the field is str, we add the Text input type - if self.field_type in ["str", "Text"]: - if "input_types" not in result: - result["input_types"] = ["Text"] + if self.field_type in {"str", "Text"} and "input_types" not in result: + result["input_types"] = ["Text"] if self.field_type == Text: result["type"] = "str" else: @@ -118,7 +120,7 @@ def serialize_file_path(self, value): @field_serializer("field_type") def serialize_field_type(self, value, _info): - if value == float and self.range_spec is None: + if value is float and self.range_spec is None: self.range_spec = RangeSpec() return value @@ -135,9 +137,11 @@ def serialize_display_name(self, value, _info): return value @field_validator("file_types") + @classmethod def validate_file_types(cls, value): if not isinstance(value, list): - raise ValueError("file_types must be a list") + msg = "file_types must be a list" + raise ValueError(msg) # noqa: TRY004 return [ (f".{file_type}" if isinstance(file_type, str) and not file_type.startswith(".") else file_type) for file_type in value @@ -149,11 +153,12 @@ def validate_type(cls, v): # If the user passes CustomComponent as a type instead of "CustomComponent" we need to convert it to a string # this should be done for all types # How to check if v is a type?
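
# Aside: a minimal pydantic-v2 sketch of the wrap-mode serializer pattern used
# by Input.serialize_model above — call the default handler, then patch the dict.
from pydantic import BaseModel, model_serializer

class InputSketch(BaseModel):
    field_type: str = "str"

    @model_serializer(mode="wrap")
    def serialize_model(self, handler):
        result = handler(self)
        if self.field_type in {"str", "Text"} and "input_types" not in result:
            result["input_types"] = ["Text"]
        return result

assert InputSketch().model_dump() == {"field_type": "str", "input_types": ["Text"]}
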
- if isinstance(v, (type, _GenericAlias, GenericAlias, _UnionGenericAlias)): + if isinstance(v, type | _GenericAlias | GenericAlias | _UnionGenericAlias): v = post_process_type(v)[0] v = format_type(v) elif not isinstance(v, str): - raise ValueError(f"type must be a string or a type, not {type(v)}") + msg = f"type must be a string or a type, not {type(v)}" + raise ValueError(msg) # noqa: TRY004 return v @@ -161,34 +166,38 @@ class Output(BaseModel): types: list[str] = Field(default=[]) """List of output types for the field.""" - selected: Optional[str] = Field(default=None) + selected: str | None = Field(default=None) """The selected output type for the field.""" name: str = Field(description="The name of the field.") """The name of the field.""" - hidden: Optional[bool] = Field(default=None) + hidden: bool | None = Field(default=None) """Dictates if the field is hidden.""" - display_name: Optional[str] = Field(default=None) + display_name: str | None = Field(default=None) """The display name of the field.""" - method: Optional[str] = Field(default=None) + method: str | None = Field(default=None) """The method to use for the output.""" - value: Optional[Any] = Field(default=UNDEFINED) + value: Any | None = Field(default=UNDEFINED) + """The result of the Output. Dynamically updated as execution occurs.""" cache: bool = Field(default=True) + required_inputs: list[str] | None = Field(default=None) + """List of required inputs for this output.""" + def to_dict(self): return self.model_dump(by_alias=True, exclude_none=True) - def add_types(self, _type: list[Any]): + def add_types(self, _type: list[Any]) -> None: if self.types is None: self.types = [] self.types.extend([t for t in _type if t not in self.types]) - def set_selected(self): + def set_selected(self) -> None: if not self.selected and self.types: self.selected = self.types[0] @@ -205,7 +214,8 @@ def validate_model(self): if self.value == UNDEFINED.value: self.value = UNDEFINED if self.name is None: - raise ValueError("name must be set") + msg = "name must be set" + raise ValueError(msg) if self.display_name is None: self.display_name = self.name return self diff --git a/src/backend/base/langflow/template/field/prompt.py b/src/backend/base/langflow/template/field/prompt.py index aed416cbca04..b04329cf622c 100644 --- a/src/backend/base/langflow/template/field/prompt.py +++ b/src/backend/base/langflow/template/field/prompt.py @@ -1,3 +1,2 @@ # This file is for backwards compatibility -from langflow.inputs.inputs import DEFAULT_PROMPT_INTUT_TYPES # noqa -from langflow.inputs.inputs import DefaultPromptField # noqa +from langflow.inputs.inputs import DEFAULT_PROMPT_INTUT_TYPES, DefaultPromptField # noqa: F401 diff --git a/src/backend/base/langflow/template/frontend_node/base.py b/src/backend/base/langflow/template/frontend_node/base.py index 199233ee9b60..43a3e4ce65b1 100644 --- a/src/backend/base/langflow/template/frontend_node/base.py +++ b/src/backend/base/langflow/template/frontend_node/base.py @@ -1,5 +1,4 @@ from collections import defaultdict -from typing import Dict, List, Optional, Union from pydantic import BaseModel, field_serializer, model_serializer @@ -11,65 +10,68 @@ class FrontendNode(BaseModel): _format_template: bool = True template: Template """Template for the frontend node.""" - description: Optional[str] = None + description: str | None = None """Description of the frontend node.""" - icon: Optional[str] = None + icon: str | None = None """Icon of the frontend node.""" - is_input: Optional[bool] = None + is_input: bool 
| None = None """Whether the frontend node is used as an input when processing the Graph. If True, there should be a field named 'input_value'.""" - is_output: Optional[bool] = None + is_output: bool | None = None """Whether the frontend node is used as an output when processing the Graph. If True, there should be a field named 'input_value'.""" - is_composition: Optional[bool] = None + is_composition: bool | None = None """Whether the frontend node is used for composition.""" - base_classes: List[str] + base_classes: list[str] """List of base classes for the frontend node.""" name: str = "" """Name of the frontend node.""" - display_name: Optional[str] = "" + display_name: str | None = "" """Display name of the frontend node.""" documentation: str = "" """Documentation of the frontend node.""" - custom_fields: Optional[Dict] = defaultdict(list) + custom_fields: dict | None = defaultdict(list) """Custom fields of the frontend node.""" - output_types: List[str] = [] + output_types: list[str] = [] """List of output types for the frontend node.""" - full_path: Optional[str] = None + full_path: str | None = None """Full path of the frontend node.""" pinned: bool = False """Whether the frontend node is pinned.""" - conditional_paths: List[str] = [] + conditional_paths: list[str] = [] """List of conditional paths for the frontend node.""" frozen: bool = False """Whether the frontend node is frozen.""" - outputs: List[Output] = [] + outputs: list[Output] = [] """List of output fields for the frontend node.""" field_order: list[str] = [] """Order of the fields in the frontend node.""" beta: bool = False """Whether the frontend node is in beta.""" - error: Optional[str] = None + legacy: bool = False + """Whether the frontend node is legacy.""" + error: str | None = None """Error message for the frontend node.""" edited: bool = False """Whether the frontend node has been edited.""" + metadata: dict = {} + """Metadata for the component node.""" + tool_mode: bool = False + """Whether the frontend node is in tool mode.""" def set_documentation(self, documentation: str) -> None: """Sets the documentation of the frontend node.""" self.documentation = documentation @field_serializer("base_classes") - def process_base_classes(self, base_classes: List[str]) -> List[str]: + def process_base_classes(self, base_classes: list[str]) -> list[str]: """Removes unwanted base classes from the list of base classes.""" - - sorted_base_classes = sorted(list(set(base_classes)), key=lambda x: x.lower()) - return sorted_base_classes + return sorted(set(base_classes), key=lambda x: x.lower()) @field_serializer("display_name") def process_display_name(self, display_name: str) -> str: """Sets the display name of the frontend node.""" - return display_name or self.name @model_serializer(mode="wrap") @@ -83,7 +85,10 @@ def serialize_model(self, handler): if "output_types" in result and not result.get("outputs"): for base_class in result["output_types"]: output = Output( - display_name=base_class, name=base_class.lower(), types=[base_class], selected=base_class + display_name=base_class, + name=base_class.lower(), + types=[base_class], + selected=base_class, ) result["outputs"].append(output.model_dump()) @@ -96,7 +101,7 @@ def from_dict(cls, data: dict) -> "FrontendNode": return cls(**data) # For backwards compatibility - def to_dict(self, keep_name=True) -> dict: + def to_dict(self, *, keep_name=True) -> dict: """Returns a dict representation of the frontend node.""" dump = self.model_dump(by_alias=True, exclude_none=True) if 
not keep_name: @@ -109,7 +114,7 @@ def add_extra_fields(self) -> None: def add_extra_base_classes(self) -> None: pass - def set_base_classes_from_outputs(self): + def set_base_classes_from_outputs(self) -> None: self.base_classes = [output_type for output in self.outputs for output_type in output.types] def validate_component(self) -> None: @@ -122,14 +127,13 @@ def validate_name_overlap(self) -> None: input_names = [input_.name for input_ in self.template.fields] overlap = set(output_names).intersection(input_names) if overlap: - overlap_str = ", ".join(map(lambda x: f"'{x}'", overlap)) - raise ValueError( - f"There should be no overlap between input and output names. Names {overlap_str} are duplicated." - ) + overlap_str = ", ".join(f"'{x}'" for x in overlap) + msg = f"There should be no overlap between input and output names. Names {overlap_str} are duplicated." + raise ValueError(msg) def validate_attributes(self) -> None: - # None of inputs, outputs, _artifacts, _results, logs, status, vertex, graph, display_name, description, documentation, icon - # should be present in outputs or input names + # None of inputs, outputs, _artifacts, _results, logs, status, vertex, graph, display_name, description, + # documentation, icon should be present in outputs or input names output_names = [output.name for output in self.outputs] input_names = [input_.name for input_ in self.template.fields] attributes = [ @@ -150,20 +154,20 @@ def validate_attributes(self) -> None: input_overlap = set(input_names).intersection(attributes) error_message = "" if output_overlap: - output_overlap_str = ", ".join(map(lambda x: f"'{x}'", output_overlap)) + output_overlap_str = ", ".join(f"'{x}'" for x in output_overlap) error_message += f"Output names {output_overlap_str} are reserved attributes.\n" if input_overlap: - input_overlap_str = ", ".join(map(lambda x: f"'{x}'", input_overlap)) + input_overlap_str = ", ".join(f"'{x}'" for x in input_overlap) error_message += f"Input names {input_overlap_str} are reserved attributes." - def add_base_class(self, base_class: Union[str, List[str]]) -> None: + def add_base_class(self, base_class: str | list[str]) -> None: """Adds a base class to the frontend node.""" if isinstance(base_class, str): self.base_classes.append(base_class) elif isinstance(base_class, list): self.base_classes.extend(base_class) - def add_output_type(self, output_type: Union[str, List[str]]) -> None: + def add_output_type(self, output_type: str | list[str]) -> None: """Adds an output type to the frontend node.""" if isinstance(output_type, str): self.output_types.append(output_type) @@ -174,14 +178,23 @@ def add_output_type(self, output_type: Union[str, List[str]]) -> None: def from_inputs(cls, **kwargs): """Create a frontend node from inputs.""" if "inputs" not in kwargs: - raise ValueError("Missing 'inputs' argument.") + msg = "Missing 'inputs' argument." 
+ raise ValueError(msg) + if "_outputs_map" in kwargs: + kwargs["outputs"] = kwargs.pop("_outputs_map") inputs = kwargs.pop("inputs") template = Template(type_name="Component", fields=inputs) kwargs["template"] = template return cls(**kwargs) - def set_field_value_in_template(self, field_name, value): + def set_field_value_in_template(self, field_name, value) -> None: for field in self.template.fields: if field.name == field_name: field.value = value break + + def set_field_load_from_db_in_template(self, field_name, value) -> None: + for field in self.template.fields: + if field.name == field_name and hasattr(field, "load_from_db"): + field.load_from_db = value + break diff --git a/src/backend/base/langflow/template/frontend_node/custom_components.py b/src/backend/base/langflow/template/frontend_node/custom_components.py index 2fd1e9cdf792..b9a820070e75 100644 --- a/src/backend/base/langflow/template/frontend_node/custom_components.py +++ b/src/backend/base/langflow/template/frontend_node/custom_components.py @@ -1,5 +1,3 @@ -from typing import Optional - from langflow.template.field.base import Input from langflow.template.frontend_node.base import FrontendNode from langflow.template.template.base import Template @@ -47,8 +45,9 @@ def build(self, param: Data) -> Data: class CustomComponentFrontendNode(FrontendNode): _format_template: bool = False name: str = "CustomComponent" - display_name: Optional[str] = "CustomComponent" + display_name: str | None = "CustomComponent" beta: bool = False + legacy: bool = False template: Template = Template( type_name="CustomComponent", fields=[ @@ -65,15 +64,16 @@ class CustomComponentFrontendNode(FrontendNode): ) ], ) - description: Optional[str] = None + description: str | None = None base_classes: list[str] = [] class ComponentFrontendNode(FrontendNode): _format_template: bool = False name: str = "Component" - display_name: Optional[str] = "Component" + display_name: str | None = "Component" beta: bool = False + legacy: bool = False template: Template = Template( type_name="Component", fields=[ @@ -90,5 +90,5 @@ class ComponentFrontendNode(FrontendNode): ) ], ) - description: Optional[str] = None + description: str | None = None base_classes: list[str] = [] diff --git a/src/backend/base/langflow/template/template/base.py b/src/backend/base/langflow/template/template/base.py index a76e2d6de9b0..f526177e25a1 100644 --- a/src/backend/base/langflow/template/template/base.py +++ b/src/backend/base/langflow/template/template/base.py @@ -1,8 +1,10 @@ -from typing import Callable, Union, cast +from collections.abc import Callable +from typing import cast from pydantic import BaseModel, Field, model_serializer -from langflow.inputs.inputs import InputTypes, instantiate_input +from langflow.inputs.inputs import InputTypes +from langflow.inputs.utils import instantiate_input from langflow.template.field.base import Input from langflow.utils.constants import DIRECT_TYPES @@ -13,16 +15,16 @@ class Template(BaseModel): def process_fields( self, - format_field_func: Union[Callable, None] = None, - ): + format_field_func: Callable | None = None, + ) -> None: if format_field_func: for field in self.fields: format_field_func(field, self.type_name) - def sort_fields(self): + def sort_fields(self) -> None: # first sort alphabetically # then sort fields so that fields that have .field_type in DIRECT_TYPES are first - self.fields.sort(key=lambda x: x.name) + self.fields.sort(key=lambda x: x.name or "") self.fields.sort( key=lambda x: x.field_type in DIRECT_TYPES if 
hasattr(x, "field_type") else False, reverse=False ) @@ -50,11 +52,17 @@ def from_dict(cls, data: dict) -> "Template": try: _input = instantiate_input(input_type, value) except Exception as e: - raise ValueError(f"Error instantiating input {input_type}: {e}") + msg = f"Error instantiating input {input_type}: {e}" + raise ValueError(msg) from e else: _input = Input(**value) data["fields"].append(_input) + + # Necessary for components with no inputs(?) + if "fields" not in data: + data["fields"] = [] + return cls(**data) # For backwards compatibility @@ -70,7 +78,8 @@ def get_field(self, field_name: str) -> Input: """Returns the field with the given name.""" field = next((field for field in self.fields if field.name == field_name), None) if field is None: - raise ValueError(f"Field {field_name} not found in template {self.type_name}") + msg = f"Field {field_name} not found in template {self.type_name}" + raise ValueError(msg) return cast(Input, field) def update_field(self, field_name: str, field: Input) -> None: @@ -79,7 +88,8 @@ def update_field(self, field_name: str, field: Input) -> None: if template_field.name == field_name: self.fields[idx] = field return - raise ValueError(f"Field {field_name} not found in template {self.type_name}") + msg = f"Field {field_name} not found in template {self.type_name}" + raise ValueError(msg) def upsert_field(self, field_name: str, field: Input) -> None: """Updates the field with the given name or adds it if it doesn't exist.""" diff --git a/src/backend/base/langflow/template/utils.py b/src/backend/base/langflow/template/utils.py index b9aef4d3de52..8f92df5ab59a 100644 --- a/src/backend/base/langflow/template/utils.py +++ b/src/backend/base/langflow/template/utils.py @@ -27,7 +27,7 @@ def get_file_path_value(file_path): return file_path -def update_template_field(new_template, key, previous_value_dict): +def update_template_field(new_template, key, previous_value_dict) -> None: """Updates a specific field in the frontend template.""" template_field = new_template.get(key) if not template_field or template_field.get("type") != previous_value_dict.get("type"): @@ -41,7 +41,7 @@ def update_template_field(new_template, key, previous_value_dict): template_field["load_from_db"] = previous_value_dict.get("load_from_db", False) template_field["value"] = previous_value_dict["value"] - if "file_path" in previous_value_dict and previous_value_dict["file_path"]: + if previous_value_dict.get("file_path"): file_path_value = get_file_path_value(previous_value_dict["file_path"]) if not file_path_value: # If the file does not exist, remove the value from the template_field["value"] @@ -51,11 +51,10 @@ def update_template_field(new_template, key, previous_value_dict): def is_valid_data(frontend_node, raw_frontend_data): """Check if the data is valid for processing.""" - return frontend_node and "template" in frontend_node and raw_frontend_data_is_valid(raw_frontend_data) -def update_template_values(new_template, previous_template): +def update_template_values(new_template, previous_template) -> None: """Updates the frontend template with values from the raw template.""" for key, previous_value_dict in previous_template.items(): if key == "code" or not isinstance(previous_value_dict, dict): @@ -65,8 +64,7 @@ def update_template_values(new_template, previous_template): def update_frontend_node_with_template_values(frontend_node, raw_frontend_node): - """ - Updates the given frontend node with values from the raw template data. 
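
# Aside: why sort_fields sorts twice — list.sort is stable, so an alphabetical
# pass followed by a membership-key pass groups fields by the second key while
# keeping each group alphabetical (with reverse=False, non-members sort first).
# Names here are made up.
fields = ["prompt_b", "code_a", "other_c", "code_b"]
direct_types = {"code_a", "code_b", "prompt_b"}

fields.sort()                                       # alphabetical
fields.sort(key=lambda name: name in direct_types)  # False (0) before True (1)
assert fields == ["other_c", "code_a", "code_b", "prompt_b"]
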
+ """Updates the given frontend node with values from the raw template data. :param frontend_node: A dict representing a built frontend node. :param raw_template_data: A dict representing raw template data. diff --git a/src/backend/base/langflow/type_extraction/type_extraction.py b/src/backend/base/langflow/type_extraction/type_extraction.py index 6474977a3760..9a2725c03afe 100644 --- a/src/backend/base/langflow/type_extraction/type_extraction.py +++ b/src/backend/base/langflow/type_extraction/type_extraction.py @@ -2,31 +2,25 @@ from collections.abc import Sequence as SequenceABC from itertools import chain from types import GenericAlias -from typing import Any, List, Union +from typing import Any, Union def extract_inner_type_from_generic_alias(return_type: GenericAlias) -> Any: - """ - Extracts the inner type from a type hint that is a list or a Optional. - """ - if return_type.__origin__ in [list, SequenceABC]: + """Extracts the inner type from a type hint that is a list or a Optional.""" + if return_type.__origin__ in {list, SequenceABC}: return list(return_type.__args__) return return_type def extract_inner_type(return_type: str) -> str: - """ - Extracts the inner type from a type hint that is a list. - """ + """Extracts the inner type from a type hint that is a list.""" if match := re.match(r"list\[(.*)\]", return_type, re.IGNORECASE): return match[1] return return_type def extract_union_types(return_type: str) -> list[str]: - """ - Extracts the inner type from a type hint that is a list. - """ + """Extracts the inner type from a type hint that is a list.""" # If the return type is a Union, then we need to parse it return_type = return_type.replace("Union", "").replace("[", "").replace("]", "") return_types = return_type.split(",") @@ -34,23 +28,20 @@ def extract_union_types(return_type: str) -> list[str]: def extract_uniont_types_from_generic_alias(return_type: GenericAlias) -> list: - """ - Extracts the inner type from a type hint that is a Union. - """ + """Extracts the inner type from a type hint that is a Union.""" if isinstance(return_type, list): return [ _inner_arg for _type in return_type for _inner_arg in _type.__args__ - if _inner_arg not in set((Any, type(None), type(Any))) + if _inner_arg not in {Any, type(None), type(Any)} ] return list(return_type.__args__) def post_process_type(_type): - """ - Process the return type of a function. + """Process the return type of a function. Args: _type (Any): The return type of the function. @@ -59,12 +50,14 @@ def post_process_type(_type): Union[List[Any], Any]: The processed return type. 
""" - if hasattr(_type, "__origin__") and _type.__origin__ in [list, List, SequenceABC]: + if hasattr(_type, "__origin__") and _type.__origin__ in {list, list, SequenceABC}: _type = extract_inner_type_from_generic_alias(_type) # If the return type is not a Union, then we just return it as a list inner_type = _type[0] if isinstance(_type, list) else _type - if not hasattr(inner_type, "__origin__") or inner_type.__origin__ != Union: + if (not hasattr(inner_type, "__origin__") or inner_type.__origin__ != Union) and ( + not hasattr(inner_type, "__class__") or inner_type.__class__.__name__ != "UnionType" + ): return _type if isinstance(_type, list) else [_type] # If the return type is a Union, then we need to parse it _type = extract_union_types_from_generic_alias(_type) @@ -73,15 +66,13 @@ def post_process_type(_type): def extract_union_types_from_generic_alias(return_type: GenericAlias) -> list: - """ - Extracts the inner type from a type hint that is a Union. - """ + """Extracts the inner type from a type hint that is a Union.""" if isinstance(return_type, list): return [ _inner_arg for _type in return_type for _inner_arg in _type.__args__ - if _inner_arg not in set((Any, type(None), type(Any))) + if _inner_arg not in {Any, type(None), type(Any)} ] return list(return_type.__args__) diff --git a/src/backend/base/langflow/utils/async_helpers.py b/src/backend/base/langflow/utils/async_helpers.py index 25ce544510af..04cfbacaef26 100644 --- a/src/backend/base/langflow/utils/async_helpers.py +++ b/src/backend/base/langflow/utils/async_helpers.py @@ -8,8 +8,7 @@ def run_until_complete(coro): if loop.is_running(): # Run the coroutine in a separate event loop in a new thread return run_in_thread(coro) - else: - return loop.run_until_complete(coro) + return loop.run_until_complete(coro) except RuntimeError: # If there's no event loop, create a new one and run the coroutine return asyncio.run(coro) @@ -19,11 +18,11 @@ def run_in_thread(coro): result = None exception = None - def target(): + def target() -> None: nonlocal result, exception try: result = asyncio.run(coro) - except Exception as e: + except Exception as e: # noqa: BLE001 exception = e thread = threading.Thread(target=target) diff --git a/src/backend/base/langflow/utils/concurrency.py b/src/backend/base/langflow/utils/concurrency.py index a0f810a989fa..4d47aa916cf6 100644 --- a/src/backend/base/langflow/utils/concurrency.py +++ b/src/backend/base/langflow/utils/concurrency.py @@ -2,18 +2,16 @@ import threading from contextlib import contextmanager from pathlib import Path -from filelock import FileLock +from filelock import FileLock from platformdirs import user_cache_dir class KeyedMemoryLockManager: - """ - A manager for acquiring and releasing memory locks based on a key - """ + """A manager for acquiring and releasing memory locks based on a key.""" - def __init__(self): - self.locks = {} + def __init__(self) -> None: + self.locks: dict[str, threading.Lock] = {} self.global_lock = threading.Lock() def _get_lock(self, key: str): @@ -33,16 +31,13 @@ def lock(self, key: str): class KeyedWorkerLockManager: - """ - A manager for acquiring locks between workers based on a key - """ + """A manager for acquiring locks between workers based on a key.""" - def __init__(self): + def __init__(self) -> None: self.locks_dir = Path(user_cache_dir("langflow"), ensure_exists=True) / "worker_locks" def _validate_key(self, key: str) -> bool: - """ - Validate that the string only contains alphanumeric characters and underscores. 
+ """Validate that the string only contains alphanumeric characters and underscores. Parameters: s (str): The string to validate. @@ -56,7 +51,8 @@ def _validate_key(self, key: str) -> bool: @contextmanager def lock(self, key: str): if not self._validate_key(key): - raise ValueError(f"Invalid key: {key}") + msg = f"Invalid key: {key}" + raise ValueError(msg) lock = FileLock(self.locks_dir / key) with lock: diff --git a/src/backend/base/langflow/utils/connection_string_parser.py b/src/backend/base/langflow/utils/connection_string_parser.py index a67d2059eba9..a334cef28e48 100644 --- a/src/backend/base/langflow/utils/connection_string_parser.py +++ b/src/backend/base/langflow/utils/connection_string_parser.py @@ -1,9 +1,8 @@ from urllib.parse import quote -def transform_connection_string(connection_string): +def transform_connection_string(connection_string) -> str: auth_part, db_url_name = connection_string.rsplit("@", 1) protocol_user, password_string = auth_part.rsplit(":", 1) encoded_password = quote(password_string) - transformed_connection_string = f"{protocol_user}:{encoded_password}@{db_url_name}" - return transformed_connection_string + return f"{protocol_user}:{encoded_password}@{db_url_name}" diff --git a/src/backend/base/langflow/utils/constants.py b/src/backend/base/langflow/utils/constants.py index 0e3777fd8265..1b720652adc7 100644 --- a/src/backend/base/langflow/utils/constants.py +++ b/src/backend/base/langflow/utils/constants.py @@ -1,4 +1,4 @@ -from typing import Any, Dict, List +from typing import Any OPENAI_MODELS = [ "text-davinci-003", @@ -62,10 +62,11 @@ def python_function(text: str) -> str: "prompt", "code", "NestedDict", + "table", ] -LOADERS_INFO: List[Dict[str, Any]] = [ +LOADERS_INFO: list[dict[str, Any]] = [ { "loader": "AirbyteJSONLoader", "name": "Airbyte JSON (.jsonl)", @@ -183,3 +184,5 @@ def python_function(text: str) -> str: MESSAGE_SENDER_USER = "User" MESSAGE_SENDER_NAME_AI = "AI" MESSAGE_SENDER_NAME_USER = "User" + +MAX_TEXT_LENGTH = 99999 diff --git a/src/backend/base/langflow/utils/image.py b/src/backend/base/langflow/utils/image.py new file mode 100644 index 000000000000..65add51c051f --- /dev/null +++ b/src/backend/base/langflow/utils/image.py @@ -0,0 +1,69 @@ +import base64 +import mimetypes +from pathlib import Path + + +def convert_image_to_base64(image_path: str | Path) -> str: + """Convert an image file to a base64 encoded string. + + Args: + image_path (str | Path): Path to the image file. + + Returns: + str: Base64 encoded string representation of the image. + + Raises: + FileNotFoundError: If the image file does not exist. + IOError: If there's an error reading the image file. + ValueError: If the image path is empty or invalid. + """ + if not image_path: + msg = "Image path cannot be empty" + raise ValueError(msg) + + image_path = Path(image_path) + + if not image_path.exists(): + msg = f"Image file not found: {image_path}" + raise FileNotFoundError(msg) + + if not image_path.is_file(): + msg = f"Path is not a file: {image_path}" + raise ValueError(msg) + + try: + with image_path.open("rb") as image_file: + return base64.b64encode(image_file.read()).decode("utf-8") + except OSError as e: + msg = f"Error reading image file: {e}" + raise OSError(msg) from e + + +def create_data_url(image_path: str | Path, mime_type: str | None = None) -> str: + """Create a data URL from an image file. + + Args: + image_path (str | Path): Path to the image file. + mime_type (Optional[str], optional): MIME type of the image. 
+ If None, it will be guessed from the file extension. + + Returns: + str: Data URL containing the base64 encoded image. + + Raises: + FileNotFoundError: If the image file does not exist. + IOError: If there's an error reading the image file. + ValueError: If the image path is empty or invalid. + """ + if not mime_type: + mime_type = mimetypes.guess_type(str(image_path))[0] + if not mime_type: + msg = f"Could not determine MIME type for: {image_path}" + raise ValueError(msg) + + try: + base64_data = convert_image_to_base64(image_path) + except (OSError, FileNotFoundError, ValueError) as e: + msg = f"Failed to create data URL: {e}" + raise type(e)(msg) from e + return f"data:{mime_type};base64,{base64_data}" diff --git a/src/backend/base/langflow/utils/lazy_load.py b/src/backend/base/langflow/utils/lazy_load.py index df0130acc5f5..ebacd3480c87 100644 --- a/src/backend/base/langflow/utils/lazy_load.py +++ b/src/backend/base/langflow/utils/lazy_load.py @@ -1,5 +1,5 @@ class LazyLoadDictBase: - def __init__(self): + def __init__(self) -> None: self._all_types_dict = None @property diff --git a/src/backend/base/langflow/utils/migration.py b/src/backend/base/langflow/utils/migration.py index b85522c5b153..47fe889571b5 100644 --- a/src/backend/base/langflow/utils/migration.py +++ b/src/backend/base/langflow/utils/migration.py @@ -2,8 +2,7 @@ def table_exists(name, conn): - """ - Check if a table exists. + """Check if a table exists. Parameters: name (str): The name of the table to check. @@ -17,8 +16,7 @@ def table_exists(name, conn): def column_exists(table_name, column_name, conn): - """ - Check if a column exists in a table. + """Check if a column exists in a table. Parameters: table_name (str): The name of the table to check. @@ -33,8 +31,7 @@ def column_exists(table_name, column_name, conn): def foreign_key_exists(table_name, fk_name, conn): - """ - Check if a foreign key exists in a table. + """Check if a foreign key exists in a table. Parameters: table_name (str): The name of the table to check. @@ -49,8 +46,7 @@ def foreign_key_exists(table_name, fk_name, conn): def constraint_exists(table_name, constraint_name, conn): - """ - Check if a constraint exists in a table. + """Check if a constraint exists in a table. Parameters: table_name (str): The name of the table to check. diff --git a/src/backend/base/langflow/utils/payload.py b/src/backend/base/langflow/utils/payload.py index 0e2f0fc7ae7f..f5f3e9c0d012 100644 --- a/src/backend/base/langflow/utils/payload.py +++ b/src/backend/base/langflow/utils/payload.py @@ -1,13 +1,9 @@ import contextlib import re -from typing import Dict def extract_input_variables(nodes): - """ - Extracts input variables from the template - and adds them to the input_variables field. - """ + """Extracts input variables from the template and adds them to the input_variables field.""" for node in nodes: with contextlib.suppress(Exception): if "input_variables" in node["data"]["node"]["template"]: @@ -29,9 +25,7 @@ def extract_input_variables(nodes): def get_root_vertex(graph): - """ - Returns the root node of the template. 
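
# Aside: usage sketch for the new image helpers above; "chart.png" is a
# hypothetical file that must exist on disk for these calls to succeed.
from langflow.utils.image import convert_image_to_base64, create_data_url

data_url = create_data_url("chart.png")         # MIME type guessed from the extension
assert data_url.startswith("data:image/png;base64,")
raw_b64 = convert_image_to_base64("chart.png")  # base64 only, no data: prefix
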
- """ + """Returns the root node of the template.""" incoming_edges = {edge.source_id for edge in graph.edges} if not incoming_edges and len(graph.vertices) == 1: @@ -40,7 +34,7 @@ def get_root_vertex(graph): return next((node for node in graph.vertices if node.id not in incoming_edges), None) -def build_json(root, graph) -> Dict: +def build_json(root, graph) -> dict: if "node" not in root.data: # If the root node has no "node" key, then it has only one child, # which is the target of the single outgoing edge @@ -58,10 +52,11 @@ def build_json(root, graph) -> Dict: template = root.data["node"]["template"] final_dict = template.copy() - for key, value in final_dict.items(): + for key in final_dict: if key == "_type": continue + value = final_dict[key] node_type = value["type"] if "value" in value and value["value"] is not None: @@ -78,10 +73,11 @@ def build_json(root, graph) -> Dict: children.extend(node_children) if value["required"] and not children: - raise ValueError(f"No child with type {node_type} found") + msg = f"No child with type {node_type} found" + raise ValueError(msg) values = [build_json(child, graph) for child in children] value = ( - list(values) if value["list"] else next(iter(values), None) # type: ignore + list(values) if value["list"] else next(iter(values), None) # type: ignore[arg-type] ) final_dict[key] = value diff --git a/src/backend/base/langflow/utils/schemas.py b/src/backend/base/langflow/utils/schemas.py index 2689aeb565dc..76b494af9baa 100644 --- a/src/backend/base/langflow/utils/schemas.py +++ b/src/backend/base/langflow/utils/schemas.py @@ -1,5 +1,4 @@ import enum -from typing import Dict, List, Optional, Union from langchain_core.messages import BaseMessage from pydantic import BaseModel, field_validator, model_validator @@ -20,16 +19,17 @@ class File(TypedDict): class ChatOutputResponse(BaseModel): """Chat output response schema.""" - message: Union[str, List[Union[str, Dict]]] - sender: Optional[str] = MESSAGE_SENDER_AI - sender_name: Optional[str] = MESSAGE_SENDER_NAME_AI - session_id: Optional[str] = None - stream_url: Optional[str] = None - component_id: Optional[str] = None - files: List[File] = [] + message: str | list[str | dict] + sender: str | None = MESSAGE_SENDER_AI + sender_name: str | None = MESSAGE_SENDER_NAME_AI + session_id: str | None = None + stream_url: str | None = None + component_id: str | None = None + files: list[File] = [] type: str @field_validator("files", mode="before") + @classmethod def validate_files(cls, files): """Validate files.""" if not files: @@ -37,14 +37,16 @@ def validate_files(cls, files): for file in files: if not isinstance(file, dict): - raise ValueError("Files must be a list of dictionaries.") + msg = "Files must be a list of dictionaries." + raise ValueError(msg) # noqa: TRY004 if not all(key in file for key in ["path", "name", "type"]): # If any of the keys are missing, we should extract the # values from the file path path = file.get("path") if not path: - raise ValueError("File path is required.") + msg = "File path is required." + raise ValueError(msg) name = file.get("name") if not name: @@ -63,7 +65,8 @@ def validate_files(cls, files): _type = file_type break if not _type: - raise ValueError("File type is required.") + msg = "File type is required." 
+ raise ValueError(msg) file["type"] = _type return files @@ -72,12 +75,12 @@ def validate_files(cls, files): def from_message( cls, message: BaseMessage, - sender: Optional[str] = MESSAGE_SENDER_AI, - sender_name: Optional[str] = MESSAGE_SENDER_NAME_AI, + sender: str | None = MESSAGE_SENDER_AI, + sender_name: str | None = MESSAGE_SENDER_NAME_AI, ): """Build chat output response from message.""" content = message.content - return cls(message=content, sender=sender, sender_name=sender_name) # type: ignore + return cls(message=content, sender=sender, sender_name=sender_name) @model_validator(mode="after") def validate_message(self): @@ -101,11 +104,11 @@ def validate_message(self): class DataOutputResponse(BaseModel): """Data output response schema.""" - data: List[Optional[Dict]] + data: list[dict | None] class ContainsEnumMeta(enum.EnumMeta): - def __contains__(cls, item): + def __contains__(cls, item) -> bool: try: cls(item) except ValueError: diff --git a/src/backend/base/langflow/utils/util.py b/src/backend/base/langflow/utils/util.py index aa0cea81210d..219025013050 100644 --- a/src/backend/base/langflow/utils/util.py +++ b/src/backend/base/langflow/utils/util.py @@ -5,7 +5,7 @@ import re from functools import wraps from pathlib import Path -from typing import Any, Dict, List, Optional, Union +from typing import Any from docstring_parser import parse @@ -25,12 +25,13 @@ def remove_ansi_escape_codes(text): return re.sub(r"\x1b\[[0-9;]*[a-zA-Z]", "", text) -def build_template_from_function(name: str, type_to_loader_dict: Dict, add_function: bool = False): +def build_template_from_function(name: str, type_to_loader_dict: dict, *, add_function: bool = False): classes = [item.__annotations__["return"].__name__ for item in type_to_loader_dict.values()] # Raise error if name is not in chains if name not in classes: - raise ValueError(f"{name} not found") + msg = f"{name} not found" + raise ValueError(msg) for _type, v in type_to_loader_dict.items(): if v.__annotations__["return"].__name__ == name: @@ -41,7 +42,7 @@ def build_template_from_function(name: str, type_to_loader_dict: Dict, add_funct variables = {"_type": _type} for class_field_items, value in _class.model_fields.items(): - if class_field_items in ["callback_manager"]: + if class_field_items == "callback_manager": continue variables[class_field_items] = {} for name_, value_ in value.__repr_args__(): @@ -50,14 +51,13 @@ def build_template_from_function(name: str, type_to_loader_dict: Dict, add_funct variables[class_field_items]["default"] = get_default_factory( module=_class.__base__.__module__, function=value_ ) - except Exception: + except Exception: # noqa: BLE001 + logger.opt(exception=True).debug(f"Error getting default factory for {value_}") variables[class_field_items]["default"] = None - elif name_ not in ["name"]: + elif name_ != "name": variables[class_field_items][name_] = value_ - variables[class_field_items]["placeholder"] = ( - docs.params[class_field_items] if class_field_items in docs.params else "" - ) + variables[class_field_items]["placeholder"] = docs.params.get(class_field_items, "") # Adding function to base classes to allow # the output to be a function base_classes = get_base_classes(_class) @@ -69,19 +69,22 @@ def build_template_from_function(name: str, type_to_loader_dict: Dict, add_funct "description": docs.short_description or "", "base_classes": base_classes, } + return None def build_template_from_method( class_name: str, method_name: str, - type_to_cls_dict: Dict, + type_to_cls_dict: dict, + *, 
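
# Aside: why @classmethod now sits under each @field_validator in this diff —
# pydantic v2 runs validators as class-level hooks; a minimal sketch of the
# before-mode pattern used by validate_files above.
from pydantic import BaseModel, field_validator

class FilesModel(BaseModel):
    files: list[dict] = []

    @field_validator("files", mode="before")
    @classmethod
    def validate_files(cls, files):
        return files or []

assert FilesModel(files=None).files == []
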
diff --git a/src/backend/base/langflow/utils/util.py b/src/backend/base/langflow/utils/util.py
index aa0cea81210d..219025013050 100644
--- a/src/backend/base/langflow/utils/util.py
+++ b/src/backend/base/langflow/utils/util.py
@@ -5,7 +5,7 @@
 import re
 from functools import wraps
 from pathlib import Path
-from typing import Any, Dict, List, Optional, Union
+from typing import Any

 from docstring_parser import parse

@@ -25,12 +25,13 @@ def remove_ansi_escape_codes(text):
     return re.sub(r"\x1b\[[0-9;]*[a-zA-Z]", "", text)


-def build_template_from_function(name: str, type_to_loader_dict: Dict, add_function: bool = False):
+def build_template_from_function(name: str, type_to_loader_dict: dict, *, add_function: bool = False):
     classes = [item.__annotations__["return"].__name__ for item in type_to_loader_dict.values()]

     # Raise error if name is not in chains
     if name not in classes:
-        raise ValueError(f"{name} not found")
+        msg = f"{name} not found"
+        raise ValueError(msg)

     for _type, v in type_to_loader_dict.items():
         if v.__annotations__["return"].__name__ == name:
@@ -41,7 +42,7 @@ def build_template_from_function(name: str, type_to_loader_dict: Dict, add_funct
             variables = {"_type": _type}
             for class_field_items, value in _class.model_fields.items():
-                if class_field_items in ["callback_manager"]:
+                if class_field_items == "callback_manager":
                     continue
                 variables[class_field_items] = {}
                 for name_, value_ in value.__repr_args__():
@@ -50,14 +51,13 @@ def build_template_from_function(name: str, type_to_loader_dict: Dict, add_funct
                             variables[class_field_items]["default"] = get_default_factory(
                                 module=_class.__base__.__module__, function=value_
                             )
-                    except Exception:
+                    except Exception:  # noqa: BLE001
+                        logger.opt(exception=True).debug(f"Error getting default factory for {value_}")
                         variables[class_field_items]["default"] = None
-                elif name_ not in ["name"]:
+                elif name_ != "name":
                     variables[class_field_items][name_] = value_

-            variables[class_field_items]["placeholder"] = (
-                docs.params[class_field_items] if class_field_items in docs.params else ""
-            )
+            variables[class_field_items]["placeholder"] = docs.params.get(class_field_items, "")
             # Adding function to base classes to allow
             # the output to be a function
             base_classes = get_base_classes(_class)
@@ -69,19 +69,22 @@ def build_template_from_function(name: str, type_to_loader_dict: Dict, add_funct
         "description": docs.short_description or "",
         "base_classes": base_classes,
     }
+    return None


 def build_template_from_method(
     class_name: str,
     method_name: str,
-    type_to_cls_dict: Dict,
+    type_to_cls_dict: dict,
+    *,
     add_function: bool = False,
 ):
     classes = [item.__name__ for item in type_to_cls_dict.values()]

     # Raise error if class_name is not in classes
     if class_name not in classes:
-        raise ValueError(f"{class_name} not found.")
+        msg = f"{class_name} not found."
+        raise ValueError(msg)

     for _type, v in type_to_cls_dict.items():
         if v.__name__ == class_name:
@@ -89,7 +92,8 @@ def build_template_from_method(
             # Check if the method exists in this class
             if not hasattr(_class, method_name):
-                raise ValueError(f"Method {method_name} not found in class {class_name}")
+                msg = f"Method {method_name} not found in class {class_name}"
+                raise ValueError(msg)

             # Get the method
             method = getattr(_class, method_name)
@@ -113,7 +117,7 @@ def build_template_from_method(
                         "required": param.default == param.empty,
                     }
                     for name, param in params.items()
-                    if name not in ["self", "kwargs", "args"]
+                    if name not in {"self", "kwargs", "args"}
                 },
             }

@@ -128,18 +132,19 @@
                 "description": docs.short_description or "",
                 "base_classes": base_classes,
             }
+    return None


 def get_base_classes(cls):
     """Get the base classes of a class.
+
     These are used to determine the output of the nodes.
     """
-
     if hasattr(cls, "__bases__") and cls.__bases__:
         bases = cls.__bases__
         result = []
         for base in bases:
-            if any(type in base.__module__ for type in ["pydantic", "abc"]):
+            if any(_type in base.__module__ for _type in ["pydantic", "abc"]):
                 continue
             result.append(base.__name__)
             base_classes = get_base_classes(base)
@@ -152,7 +157,7 @@ def get_base_classes(cls):
             result = [cls.__name__]
     if not result:
         result = [cls.__name__]
-    return list(set(result + [cls.__name__]))
+    return list({*result, cls.__name__})


 def get_default_factory(module: str, function: str):
@@ -164,9 +169,8 @@
     return None


-def update_verbose(d: dict, new_value: bool) -> dict:
-    """
-    Recursively updates the value of the 'verbose' key in a dictionary.
+def update_verbose(d: dict, *, new_value: bool) -> dict:
+    """Recursively updates the value of the 'verbose' key in a dictionary.

     Args:
         d: the dictionary to update
@@ -175,19 +179,16 @@ def update_verbose(d: dict, new_value: bool) -> dict:
     Returns:
         The updated dictionary.
     """
-
     for k, v in d.items():
         if isinstance(v, dict):
-            update_verbose(v, new_value)
+            update_verbose(v, new_value=new_value)
         elif k == "verbose":
             d[k] = new_value

     return d


 def sync_to_async(func):
-    """
-    Decorator to convert a sync function to an async function.
-    """
+    """Decorator to convert a sync function to an async function."""

     @wraps(func)
     async def async_wrapper(*args, **kwargs):
@@ -196,20 +197,17 @@ async def async_wrapper(*args, **kwargs):
     return async_wrapper


-def format_dict(dictionary: Dict[str, Any], class_name: Optional[str] = None) -> Dict[str, Any]:
-    """
-    Formats a dictionary by removing certain keys and modifying the
-    values of other keys.
+def format_dict(dictionary: dict[str, Any], class_name: str | None = None) -> dict[str, Any]:
+    """Formats a dictionary by removing certain keys and modifying the values of other keys.

     Returns:
         A new dictionary with the desired modifications applied.
     """
-
     for key, value in dictionary.items():
-        if key in ["_type"]:
+        if key == "_type":
             continue

-        _type: Union[str, type] = get_type(value)
+        _type: str | type = get_type(value)

         if "BaseModel" in str(_type):
             continue
@@ -246,9 +244,8 @@ def get_type_from_union_literal(union_literal: str) -> str:
     return union_literal


-def get_type(value: Any) -> Union[str, type]:
-    """
-    Retrieves the type value from the dictionary.
+def get_type(value: Any) -> str | type:
+    """Retrieves the type value from the dictionary.

     Returns:
         The type value.
@@ -259,9 +256,8 @@
     return _type if isinstance(_type, str) else _type.__name__


-def remove_optional_wrapper(_type: Union[str, type]) -> str:
-    """
-    Removes the 'Optional' wrapper from the type string.
+def remove_optional_wrapper(_type: str | type) -> str:
+    """Removes the 'Optional' wrapper from the type string.

     Returns:
         The type string with the 'Optional' wrapper removed.
@@ -274,9 +270,8 @@
     return _type


-def check_list_type(_type: str, value: Dict[str, Any]) -> str:
-    """
-    Checks if the type is a list type and modifies the value accordingly.
+def check_list_type(_type: str, value: dict[str, Any]) -> str:
+    """Checks if the type is a list type and modifies the value accordingly.

     Returns:
         The modified type string.
@@ -291,8 +286,7 @@

 def replace_mapping_with_dict(_type: str) -> str:
-    """
-    Replaces 'Mapping' with 'dict' in the type string.
+    """Replaces 'Mapping' with 'dict' in the type string.

     Returns:
         The modified type string.
@@ -304,8 +298,7 @@

 def get_formatted_type(key: str, _type: str) -> str:
-    """
-    Formats the type value based on the given key.
+    """Formats the type value based on the given key.

     Returns:
         The formatted type value.
@@ -313,15 +306,14 @@
     if key == "allowed_tools":
         return "Tool"

-    elif key == "max_value_length":
+    if key == "max_value_length":
         return "int"

     return _type


-def should_show_field(value: Dict[str, Any], key: str) -> bool:
-    """
-    Determines if the field should be shown or not.
+def should_show_field(value: dict[str, Any], key: str) -> bool:
+    """Determines if the field should be shown or not.

     Returns:
         True if the field should be shown, False otherwise.
@@ -334,8 +326,7 @@

 def is_password_field(key: str) -> bool:
-    """
-    Determines if the field is a password field.
+    """Determines if the field is a password field.

     Returns:
         True if the field is a password field, False otherwise.
@@ -344,8 +335,7 @@

 def is_multiline_field(key: str) -> bool:
-    """
-    Determines if the field is a multiline field.
+    """Determines if the field is a multiline field.

     Returns:
         True if the field is a multiline field, False otherwise.
- """ +def replace_default_value_with_actual(value: dict[str, Any]) -> None: + """Replaces the default value with the actual value.""" if "default" in value: value["value"] = value["default"] value.pop("default") -def set_headers_value(value: Dict[str, Any]) -> None: - """ - Sets the value for the 'headers' key. - """ +def set_headers_value(value: dict[str, Any]) -> None: + """Sets the value for the 'headers' key.""" value["value"] = """{"Authorization": "Bearer "}""" -def add_options_to_field(value: Dict[str, Any], class_name: Optional[str], key: str) -> None: - """ - Adds options to the field based on the class name and key. - """ +def add_options_to_field(value: dict[str, Any], class_name: str | None, key: str) -> None: + """Adds options to the field based on the class name and key.""" options_map = { "OpenAI": constants.OPENAI_MODELS, "ChatOpenAI": constants.CHAT_OPENAI_MODELS, @@ -402,9 +384,8 @@ def add_options_to_field(value: Dict[str, Any], class_name: Optional[str], key: value["value"] = options_map[class_name][0] -def build_loader_repr_from_data(data: List[Data]) -> str: - """ - Builds a string representation of the loader based on the given data. +def build_loader_repr_from_data(data: list[Data]) -> str: + """Builds a string representation of the loader based on the given data. Args: data (List[Data]): A list of data. @@ -422,16 +403,18 @@ def build_loader_repr_from_data(data: List[Data]) -> str: def update_settings( - config: Optional[str] = None, - cache: Optional[str] = None, + *, + config: str | None = None, + cache: str | None = None, dev: bool = False, remove_api_keys: bool = False, - components_path: Optional[Path] = None, + components_path: Path | None = None, store: bool = True, auto_saving: bool = True, - auto_saving_interval: int = 300, + auto_saving_interval: int = 1000, health_check_max_retries: int = 5, -): + max_file_size_upload: int = 100, +) -> None: """Update the settings from a config file.""" from langflow.services.utils import initialize_settings_service @@ -463,12 +446,13 @@ def update_settings( if health_check_max_retries is not None: logger.debug(f"Setting health_check_max_retries to {health_check_max_retries}") settings_service.settings.update_settings(health_check_max_retries=health_check_max_retries) + if max_file_size_upload is not None: + logger.debug(f"Setting max_file_size_upload to {max_file_size_upload}") + settings_service.settings.update_settings(max_file_size_upload=max_file_size_upload) def is_class_method(func, cls): - """ - Check if a function is a class method. - """ + """Check if a function is a class method.""" return inspect.ismethod(func) and func.__self__ is cls.__class__ @@ -477,9 +461,7 @@ def escape_json_dump(edge_dict): def find_closest_match(string: str, list_of_strings: list[str]) -> str | None: - """ - Find the closest match in a list of strings. 
- """ + """Find the closest match in a list of strings.""" closest_match = difflib.get_close_matches(string, list_of_strings, n=1, cutoff=0.2) if closest_match: return closest_match[0] diff --git a/src/backend/base/langflow/utils/util_strings.py b/src/backend/base/langflow/utils/util_strings.py new file mode 100644 index 000000000000..51802e85d8dd --- /dev/null +++ b/src/backend/base/langflow/utils/util_strings.py @@ -0,0 +1,30 @@ +from langflow.utils import constants + + +def truncate_long_strings(data, max_length=None): + """Recursively traverse the dictionary or list and truncate strings longer than max_length.""" + if max_length is None: + max_length = constants.MAX_TEXT_LENGTH + + if max_length < 0: + return data + + if not isinstance(data, dict | list): + if isinstance(data, str) and len(data) > max_length: + return data[:max_length] + "..." + return data + + if isinstance(data, dict): + for key, value in data.items(): + if isinstance(value, str) and len(value) > max_length: + data[key] = value[:max_length] + "..." + elif isinstance(value, (dict | list)): + truncate_long_strings(value, max_length) + elif isinstance(data, list): + for index, item in enumerate(data): + if isinstance(item, str) and len(item) > max_length: + data[index] = item[:max_length] + "..." + elif isinstance(item, (dict | list)): + truncate_long_strings(item, max_length) + + return data diff --git a/src/backend/base/langflow/utils/validate.py b/src/backend/base/langflow/utils/validate.py index 7a0eef7b4a50..40c029385c02 100644 --- a/src/backend/base/langflow/utils/validate.py +++ b/src/backend/base/langflow/utils/validate.py @@ -1,21 +1,24 @@ import ast import contextlib import importlib +import warnings from types import FunctionType -from typing import Dict, List, Optional, Union +from typing import Optional, Union +from langchain_core._api.deprecation import LangChainDeprecationWarning +from loguru import logger from pydantic import ValidationError from langflow.field_typing.constants import CUSTOM_COMPONENT_SUPPORTED_TYPES -def add_type_ignores(): +def add_type_ignores() -> None: if not hasattr(ast, "TypeIgnore"): class TypeIgnore(ast.AST): _fields = () - ast.TypeIgnore = TypeIgnore + ast.TypeIgnore = TypeIgnore # type: ignore[assignment, misc] def validate_code(code): @@ -25,7 +28,11 @@ def validate_code(code): # Parse the code string into an abstract syntax tree (AST) try: tree = ast.parse(code) - except Exception as e: + except Exception as e: # noqa: BLE001 + if hasattr(logger, "opt"): + logger.opt(exception=True).debug("Error parsing code") + else: + logger.debug("Error parsing code") errors["function"]["errors"].append(str(e)) return errors @@ -48,7 +55,8 @@ def validate_code(code): code_obj = compile(ast.Module(body=[node], type_ignores=[]), "", "exec") try: exec(code_obj) - except Exception as e: + except Exception as e: # noqa: BLE001 + logger.opt(exception=True).debug("Error executing function code") errors["function"]["errors"].append(str(e)) # Return the errors dictionary @@ -57,7 +65,7 @@ def validate_code(code): def eval_function(function_string: str): # Create an empty dictionary to serve as a separate namespace - namespace: Dict = {} + namespace: dict = {} # Execute the code string in the new namespace exec(function_string, namespace) @@ -70,7 +78,8 @@ def eval_function(function_string: str): None, ) if function_object is None: - raise ValueError("Function string does not contain a function") + msg = "Function string does not contain a function" + raise ValueError(msg) return function_object @@ 
diff --git a/src/backend/base/langflow/utils/validate.py b/src/backend/base/langflow/utils/validate.py
index 7a0eef7b4a50..40c029385c02 100644
--- a/src/backend/base/langflow/utils/validate.py
+++ b/src/backend/base/langflow/utils/validate.py
@@ -1,21 +1,24 @@
 import ast
 import contextlib
 import importlib
+import warnings
 from types import FunctionType
-from typing import Dict, List, Optional, Union
+from typing import Optional, Union

+from langchain_core._api.deprecation import LangChainDeprecationWarning
+from loguru import logger
 from pydantic import ValidationError

 from langflow.field_typing.constants import CUSTOM_COMPONENT_SUPPORTED_TYPES


-def add_type_ignores():
+def add_type_ignores() -> None:
     if not hasattr(ast, "TypeIgnore"):

         class TypeIgnore(ast.AST):
             _fields = ()

-        ast.TypeIgnore = TypeIgnore
+        ast.TypeIgnore = TypeIgnore  # type: ignore[assignment, misc]


 def validate_code(code):
@@ -25,7 +28,11 @@
     # Parse the code string into an abstract syntax tree (AST)
     try:
         tree = ast.parse(code)
-    except Exception as e:
+    except Exception as e:  # noqa: BLE001
+        if hasattr(logger, "opt"):
+            logger.opt(exception=True).debug("Error parsing code")
+        else:
+            logger.debug("Error parsing code")
         errors["function"]["errors"].append(str(e))
         return errors

@@ -48,7 +55,8 @@
             code_obj = compile(ast.Module(body=[node], type_ignores=[]), "<string>", "exec")
             try:
                 exec(code_obj)
-            except Exception as e:
+            except Exception as e:  # noqa: BLE001
+                logger.opt(exception=True).debug("Error executing function code")
                 errors["function"]["errors"].append(str(e))

     # Return the errors dictionary
@@ -57,7 +65,7 @@
 def eval_function(function_string: str):
     # Create an empty dictionary to serve as a separate namespace
-    namespace: Dict = {}
+    namespace: dict = {}

     # Execute the code string in the new namespace
     exec(function_string, namespace)
@@ -70,7 +78,8 @@
         None,
     )
     if function_object is None:
-        raise ValueError("Function string does not contain a function")
+        msg = "Function string does not contain a function"
+        raise ValueError(msg)

     return function_object

@@ -91,7 +100,8 @@ def execute_function(code, function_name, *args, **kwargs):
                 )
                 exec_globals[alias.asname or alias.name] = importlib.import_module(alias.name)
             except ModuleNotFoundError as e:
-                raise ModuleNotFoundError(f"Module {alias.name} not found. Please install it and try again.") from e
+                msg = f"Module {alias.name} not found. Please install it and try again."
+                raise ModuleNotFoundError(msg) from e

     function_code = next(
         node for node in module.body if isinstance(node, ast.FunctionDef) and node.name == function_name
@@ -101,7 +111,8 @@
     try:
         exec(code_obj, exec_globals, locals())
     except Exception as exc:
-        raise ValueError("Function string does not contain a function") from exc
+        msg = "Function string does not contain a function"
+        raise ValueError(msg) from exc

     # Add the function to the exec_globals dictionary
     exec_globals[function_name] = locals()[function_name]
@@ -121,12 +132,20 @@ class TypeIgnore(ast.AST):
     exec_globals = globals().copy()

     for node in module.body:
-        if isinstance(node, ast.Import):
+        if isinstance(node, ast.Import | ast.ImportFrom):
             for alias in node.names:
                 try:
-                    exec_globals[alias.asname or alias.name] = importlib.import_module(alias.name)
+                    if isinstance(node, ast.ImportFrom):
+                        module_name = node.module
+                        exec_globals[alias.asname or alias.name] = getattr(
+                            importlib.import_module(module_name), alias.name
+                        )
+                    else:
+                        module_name = alias.name
+                        exec_globals[alias.asname or alias.name] = importlib.import_module(module_name)
                 except ModuleNotFoundError as e:
-                    raise ModuleNotFoundError(f"Module {alias.name} not found. Please install it and try again.") from e
+                    msg = f"Module {alias.name} not found. Please install it and try again."
+                    raise ModuleNotFoundError(msg) from e

     function_code = next(
         node for node in module.body if isinstance(node, ast.FunctionDef) and node.name == function_name
@@ -149,8 +168,7 @@ def wrapped_function(*args, **kwargs):


 def create_class(code, class_name):
-    """
-    Dynamically create a class from a string of code and a specified class name.
+    """Dynamically create a class from a string of code and a specified class name.

     :param code: String containing the Python code defining the class
     :param class_name: Name of the class to be created
@@ -179,8 +197,7 @@

 def create_type_ignore_class():
-    """
-    Create a TypeIgnore class for AST module if it doesn't exist.
+    """Create a TypeIgnore class for AST module if it doesn't exist.

     :return: TypeIgnore class
     """
@@ -192,8 +209,7 @@ class TypeIgnore(ast.AST):


 def prepare_global_scope(code, module):
-    """
-    Prepares the global scope with necessary imports from the provided code module.
+    """Prepares the global scope with necessary imports from the provided code module.

     :param module: AST parsed module
     :return: Dictionary representing the global scope with imported modules
@@ -206,20 +222,33 @@
             try:
                 exec_globals[alias.asname or alias.name] = importlib.import_module(alias.name)
             except ModuleNotFoundError as e:
-                raise ModuleNotFoundError(f"Module {alias.name} not found. Please install it and try again.") from e
+                msg = f"Module {alias.name} not found. Please install it and try again."
+                raise ModuleNotFoundError(msg) from e
         elif isinstance(node, ast.ImportFrom) and node.module is not None:
             try:
-                imported_module = importlib.import_module(node.module)
-                for alias in node.names:
-                    exec_globals[alias.name] = getattr(imported_module, alias.name)
-            except ModuleNotFoundError:
-                raise ModuleNotFoundError(f"Module {node.module} not found. Please install it and try again")
+                with warnings.catch_warnings():
+                    warnings.simplefilter("ignore", LangChainDeprecationWarning)
+                    imported_module = importlib.import_module(node.module)
+                    for alias in node.names:
+                        exec_globals[alias.name] = getattr(imported_module, alias.name)
+            except ModuleNotFoundError as e:
+                msg = f"Module {node.module} not found. Please install it and try again"
+                raise ModuleNotFoundError(msg) from e
+        elif isinstance(node, ast.ClassDef):
+            # Compile and execute the class definition to properly create the class
+            class_code = compile(ast.Module(body=[node], type_ignores=[]), "<string>", "exec")
+            exec(class_code, exec_globals)
+        elif isinstance(node, ast.FunctionDef):
+            function_code = compile(ast.Module(body=[node], type_ignores=[]), "<string>", "exec")
+            exec(function_code, exec_globals)
+        elif isinstance(node, ast.Assign):
+            assign_code = compile(ast.Module(body=[node], type_ignores=[]), "<string>", "exec")
+            exec(assign_code, exec_globals)

     return exec_globals


 def extract_class_code(module, class_name):
-    """
-    Extracts the AST node for the specified class from the module.
+    """Extracts the AST node for the specified class from the module.

     :param module: AST parsed module
     :param class_name: Name of the class to extract
@@ -232,26 +261,22 @@

 def compile_class_code(class_code):
-    """
-    Compiles the AST node of a class into a code object.
+    """Compiles the AST node of a class into a code object.

     :param class_code: AST node of the class
     :return: Compiled code object of the class
     """
-    code_obj = compile(ast.Module(body=[class_code], type_ignores=[]), "<string>", "exec")
-    return code_obj
+    return compile(ast.Module(body=[class_code], type_ignores=[]), "<string>", "exec")


 def build_class_constructor(compiled_class, exec_globals, class_name):
-    """
-    Builds a constructor function for the dynamically created class.
+    """Builds a constructor function for the dynamically created class.

     :param compiled_class: Compiled code object of the class
     :param exec_globals: Global scope with necessary imports
     :param class_name: Name of the class
     :return: Constructor function for the class
     """
-
     exec(compiled_class, exec_globals, locals())
     exec_globals[class_name] = locals()[class_name]

@@ -270,14 +295,11 @@ def build_custom_class():


 def get_default_imports(code_string):
-    """
-    Returns a dictionary of default imports for the dynamic class constructor.
-    """
-
+    """Returns a dictionary of default imports for the dynamic class constructor."""
     default_imports = {
         "Optional": Optional,
-        "List": List,
-        "Dict": Dict,
+        "List": list,
+        "Dict": dict,
         "Union": Union,
     }
     langflow_imports = list(CUSTOM_COMPONENT_SUPPORTED_TYPES.keys())
@@ -289,15 +311,13 @@

 def find_names_in_code(code, names):
-    """
-    Finds if any of the specified names are present in the given code string.
+    """Finds if any of the specified names are present in the given code string.

     :param code: The source code as a string.
     :param names: A list of names to check for in the code.
     :return: A set of names that are found in the code.
     """
-    found_names = {name for name in names if name in code}
-    return found_names
+    return {name for name in names if name in code}


 def extract_function_name(code):
@@ -305,12 +325,36 @@
     for node in module.body:
         if isinstance(node, ast.FunctionDef):
             return node.name
-    raise ValueError("No function definition found in the code string")
+    msg = "No function definition found in the code string"
+    raise ValueError(msg)


-def extract_class_name(code):
-    module = ast.parse(code)
-    for node in module.body:
-        if isinstance(node, ast.ClassDef):
-            return node.name
-    raise ValueError("No class definition found in the code string")
+def extract_class_name(code: str) -> str:
+    """Extract the name of the first Component subclass found in the code.
+
+    Args:
+        code (str): The source code to parse
+
+    Returns:
+        str: Name of the first Component subclass found
+
+    Raises:
+        TypeError: If no Component subclass is found in the code
+        ValueError: If the code is not valid Python
+    """
+    try:
+        module = ast.parse(code)
+        for node in module.body:
+            if not isinstance(node, ast.ClassDef):
+                continue
+
+            # Check bases for Component inheritance
+            # TODO: Build a more robust check for Component inheritance
+            for base in node.bases:
+                if isinstance(base, ast.Name) and any(pattern in base.id for pattern in ["Component", "LC"]):
+                    return node.name
+
+        msg = f"No Component subclass found in the code string. Code snippet: {code[:100]}"
+        raise TypeError(msg)
+    except SyntaxError as e:
+        msg = f"Invalid Python code: {e!s}"
+        raise ValueError(msg) from e
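A sketch of the new `extract_class_name` contract, assuming `langflow` is importable (the component source is invented; the match keys off the base-class name containing `Component` or `LC`):

```python
from langflow.utils.validate import extract_class_name

code = """
from langflow.custom import Component

class HelloComponent(Component):
    display_name = "Hello"
"""
print(extract_class_name(code))  # HelloComponent

# A class whose base mentions neither Component nor LC raises TypeError:
# extract_class_name("class Foo(Bar): ...")
```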
""" - found_names = {name for name in names if name in code} - return found_names + return {name for name in names if name in code} def extract_function_name(code): @@ -305,12 +325,36 @@ def extract_function_name(code): for node in module.body: if isinstance(node, ast.FunctionDef): return node.name - raise ValueError("No function definition found in the code string") + msg = "No function definition found in the code string" + raise ValueError(msg) -def extract_class_name(code): - module = ast.parse(code) - for node in module.body: - if isinstance(node, ast.ClassDef): - return node.name - raise ValueError("No class definition found in the code string") +def extract_class_name(code: str) -> str: + """Extract the name of the first Component subclass found in the code. + + Args: + code (str): The source code to parse + + Returns: + str: Name of the first Component subclass found + + Raises: + ValueError: If no Component subclass is found in the code + """ + try: + module = ast.parse(code) + for node in module.body: + if not isinstance(node, ast.ClassDef): + continue + + # Check bases for Component inheritance + # TODO: Build a more robust check for Component inheritance + for base in node.bases: + if isinstance(base, ast.Name) and any(pattern in base.id for pattern in ["Component", "LC"]): + return node.name + + msg = f"No Component subclass found in the code string. Code snippet: {code[:100]}" + raise TypeError(msg) + except SyntaxError as e: + msg = f"Invalid Python code: {e!s}" + raise ValueError(msg) from e diff --git a/src/backend/base/langflow/utils/version.py b/src/backend/base/langflow/utils/version.py index 98936e347661..817247dda96f 100644 --- a/src/backend/base/langflow/utils/version.py +++ b/src/backend/base/langflow/utils/version.py @@ -1,3 +1,8 @@ +import httpx + +from langflow.logging.logger import logger + + def _compute_non_prerelease_version(prerelease_version: str) -> str: prerelease_keywords = ["a", "b", "rc", "dev", "post"] for keyword in prerelease_keywords: @@ -7,23 +12,82 @@ def _compute_non_prerelease_version(prerelease_version: str) -> str: def _get_version_info(): - try: - from langflow.version import __version__ # type: ignore + """Retrieves the version of the package from a possible list of package names. + + This accounts for after package names are updated for -nightly builds. - prerelease_version = __version__ - version = _compute_non_prerelease_version(prerelease_version) - package = "Langflow" - except ImportError: - from importlib import metadata + Returns: + str: The version of the package - prerelease_version = metadata.version("langflow-base") - version = _compute_non_prerelease_version(prerelease_version) - package = "Langflow Base" - return {"version": prerelease_version, "main_version": version, "package": package} + Raises: + ValueError: If the package is not found from the list of package names. 
+ """ + from importlib import metadata + + package_options = [ + ("langflow", "Langflow"), + ("langflow-base", "Langflow Base"), + ("langflow-nightly", "Langflow Nightly"), + ("langflow-base-nightly", "Langflow Base Nightly"), + ] + __version__ = None + for pkg_name, display_name in package_options: + try: + __version__ = metadata.version(pkg_name) + prerelease_version = __version__ + version = _compute_non_prerelease_version(prerelease_version) + except (ImportError, metadata.PackageNotFoundError): + pass + else: + return { + "version": prerelease_version, + "main_version": version, + "package": display_name, + } + + if __version__ is None: + msg = f"Package not found from options {package_options}" + raise ValueError(msg) + return None VERSION_INFO = _get_version_info() +def is_pre_release(v: str) -> bool: + """Whether the version is a pre-release version. + + Returns a boolean indicating whether the version is a pre-release version, + as per the definition of a pre-release segment from PEP 440. + """ + return any(label in v for label in ["a", "b", "rc"]) + + +def is_nightly(v: str) -> bool: + """Whether the version is a dev (nightly) version. + + Returns a boolean indicating whether the version is a dev (nightly) version, + as per the definition of a dev segment from PEP 440. + """ + return "dev" in v + + +def fetch_latest_version(package_name: str, *, include_prerelease: bool) -> str | None: + from packaging import version as pkg_version + + package_name = package_name.replace(" ", "-").lower() + try: + response = httpx.get(f"https://pypi.org/pypi/{package_name}/json") + versions = response.json()["releases"].keys() + valid_versions = [v for v in versions if include_prerelease or not is_pre_release(v)] + if not valid_versions: + return None # Handle case where no valid versions are found + return max(valid_versions, key=pkg_version.parse) + + except Exception: # noqa: BLE001 + logger.exception("Error fetching latest version") + return None + + def get_version_info(): return VERSION_INFO diff --git a/src/backend/base/langflow/worker.py b/src/backend/base/langflow/worker.py index a5d1cf956474..e79f7146ba99 100644 --- a/src/backend/base/langflow/worker.py +++ b/src/backend/base/langflow/worker.py @@ -1,7 +1,9 @@ -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union +from __future__ import annotations + +from typing import TYPE_CHECKING, Any from asgiref.sync import async_to_sync -from celery.exceptions import SoftTimeLimitExceeded # type: ignore +from celery.exceptions import SoftTimeLimitExceeded from langflow.core.celery_app import celery_app @@ -15,23 +17,17 @@ def test_celery(word: str) -> str: @celery_app.task(bind=True, soft_time_limit=30, max_retries=3) -def build_vertex(self, vertex: "Vertex") -> "Vertex": - """ - Build a vertex - """ +def build_vertex(self, vertex: Vertex) -> Vertex: + """Build a vertex.""" try: vertex.task_id = self.request.id async_to_sync(vertex.build)() - return vertex except SoftTimeLimitExceeded as e: raise self.retry(exc=SoftTimeLimitExceeded("Task took too long"), countdown=2) from e + return vertex @celery_app.task(acks_late=True) -def process_graph_cached_task( - data_graph: Dict[str, Any], - inputs: Optional[Union[dict, List[dict]]] = None, - clear_cache=False, - session_id=None, -) -> Dict[str, Any]: - raise NotImplementedError("This task is not implemented yet") +def process_graph_cached_task() -> dict[str, Any]: + msg = "This task is not implemented yet" + raise NotImplementedError(msg) diff --git a/src/backend/base/poetry.lock 
b/src/backend/base/poetry.lock deleted file mode 100644 index ad710ae11a86..000000000000 --- a/src/backend/base/poetry.lock +++ /dev/null @@ -1,7052 +0,0 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. - -[[package]] -name = "aiofiles" -version = "24.1.0" -description = "File support for asyncio." -optional = false -python-versions = ">=3.8" -files = [ - {file = "aiofiles-24.1.0-py3-none-any.whl", hash = "sha256:b4ec55f4195e3eb5d7abd1bf7e061763e864dd4954231fb8539a0ef8bb8260e5"}, - {file = "aiofiles-24.1.0.tar.gz", hash = "sha256:22a075c9e5a3810f0c2e48f3008c94d68c65d763b9b03857924c99e57355166c"}, -] - -[[package]] -name = "aiohappyeyeballs" -version = "2.3.5" -description = "Happy Eyeballs for asyncio" -optional = false -python-versions = ">=3.8" -files = [ - {file = "aiohappyeyeballs-2.3.5-py3-none-any.whl", hash = "sha256:4d6dea59215537dbc746e93e779caea8178c866856a721c9c660d7a5a7b8be03"}, - {file = "aiohappyeyeballs-2.3.5.tar.gz", hash = "sha256:6fa48b9f1317254f122a07a131a86b71ca6946ca989ce6326fff54a99a920105"}, -] - -[[package]] -name = "aiohttp" -version = "3.10.3" -description = "Async http client/server framework (asyncio)" -optional = false -python-versions = ">=3.8" -files = [ - {file = "aiohttp-3.10.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cc36cbdedf6f259371dbbbcaae5bb0e95b879bc501668ab6306af867577eb5db"}, - {file = "aiohttp-3.10.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:85466b5a695c2a7db13eb2c200af552d13e6a9313d7fa92e4ffe04a2c0ea74c1"}, - {file = "aiohttp-3.10.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:71bb1d97bfe7e6726267cea169fdf5df7658831bb68ec02c9c6b9f3511e108bb"}, - {file = "aiohttp-3.10.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:baec1eb274f78b2de54471fc4c69ecbea4275965eab4b556ef7a7698dee18bf2"}, - {file = "aiohttp-3.10.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:13031e7ec1188274bad243255c328cc3019e36a5a907978501256000d57a7201"}, - {file = "aiohttp-3.10.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2bbc55a964b8eecb341e492ae91c3bd0848324d313e1e71a27e3d96e6ee7e8e8"}, - {file = "aiohttp-3.10.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8cc0564b286b625e673a2615ede60a1704d0cbbf1b24604e28c31ed37dc62aa"}, - {file = "aiohttp-3.10.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f817a54059a4cfbc385a7f51696359c642088710e731e8df80d0607193ed2b73"}, - {file = "aiohttp-3.10.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:8542c9e5bcb2bd3115acdf5adc41cda394e7360916197805e7e32b93d821ef93"}, - {file = "aiohttp-3.10.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:671efce3a4a0281060edf9a07a2f7e6230dca3a1cbc61d110eee7753d28405f7"}, - {file = "aiohttp-3.10.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:0974f3b5b0132edcec92c3306f858ad4356a63d26b18021d859c9927616ebf27"}, - {file = "aiohttp-3.10.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:44bb159b55926b57812dca1b21c34528e800963ffe130d08b049b2d6b994ada7"}, - {file = "aiohttp-3.10.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:6ae9ae382d1c9617a91647575255ad55a48bfdde34cc2185dd558ce476bf16e9"}, - {file = "aiohttp-3.10.3-cp310-cp310-win32.whl", hash = "sha256:aed12a54d4e1ee647376fa541e1b7621505001f9f939debf51397b9329fd88b9"}, - {file = "aiohttp-3.10.3-cp310-cp310-win_amd64.whl", hash = 
"sha256:b51aef59370baf7444de1572f7830f59ddbabd04e5292fa4218d02f085f8d299"}, - {file = "aiohttp-3.10.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e021c4c778644e8cdc09487d65564265e6b149896a17d7c0f52e9a088cc44e1b"}, - {file = "aiohttp-3.10.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:24fade6dae446b183e2410a8628b80df9b7a42205c6bfc2eff783cbeedc224a2"}, - {file = "aiohttp-3.10.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bc8e9f15939dacb0e1f2d15f9c41b786051c10472c7a926f5771e99b49a5957f"}, - {file = "aiohttp-3.10.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5a9ec959b5381271c8ec9310aae1713b2aec29efa32e232e5ef7dcca0df0279"}, - {file = "aiohttp-3.10.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2a5d0ea8a6467b15d53b00c4e8ea8811e47c3cc1bdbc62b1aceb3076403d551f"}, - {file = "aiohttp-3.10.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c9ed607dbbdd0d4d39b597e5bf6b0d40d844dfb0ac6a123ed79042ef08c1f87e"}, - {file = "aiohttp-3.10.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3e66d5b506832e56add66af88c288c1d5ba0c38b535a1a59e436b300b57b23e"}, - {file = "aiohttp-3.10.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fda91ad797e4914cca0afa8b6cccd5d2b3569ccc88731be202f6adce39503189"}, - {file = "aiohttp-3.10.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:61ccb867b2f2f53df6598eb2a93329b5eee0b00646ee79ea67d68844747a418e"}, - {file = "aiohttp-3.10.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6d881353264e6156f215b3cb778c9ac3184f5465c2ece5e6fce82e68946868ef"}, - {file = "aiohttp-3.10.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:b031ce229114825f49cec4434fa844ccb5225e266c3e146cb4bdd025a6da52f1"}, - {file = "aiohttp-3.10.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:5337cc742a03f9e3213b097abff8781f79de7190bbfaa987bd2b7ceb5bb0bdec"}, - {file = "aiohttp-3.10.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ab3361159fd3dcd0e48bbe804006d5cfb074b382666e6c064112056eb234f1a9"}, - {file = "aiohttp-3.10.3-cp311-cp311-win32.whl", hash = "sha256:05d66203a530209cbe40f102ebaac0b2214aba2a33c075d0bf825987c36f1f0b"}, - {file = "aiohttp-3.10.3-cp311-cp311-win_amd64.whl", hash = "sha256:70b4a4984a70a2322b70e088d654528129783ac1ebbf7dd76627b3bd22db2f17"}, - {file = "aiohttp-3.10.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:166de65e2e4e63357cfa8417cf952a519ac42f1654cb2d43ed76899e2319b1ee"}, - {file = "aiohttp-3.10.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7084876352ba3833d5d214e02b32d794e3fd9cf21fdba99cff5acabeb90d9806"}, - {file = "aiohttp-3.10.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d98c604c93403288591d7d6d7d6cc8a63459168f8846aeffd5b3a7f3b3e5e09"}, - {file = "aiohttp-3.10.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d73b073a25a0bb8bf014345374fe2d0f63681ab5da4c22f9d2025ca3e3ea54fc"}, - {file = "aiohttp-3.10.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8da6b48c20ce78f5721068f383e0e113dde034e868f1b2f5ee7cb1e95f91db57"}, - {file = "aiohttp-3.10.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3a9dcdccf50284b1b0dc72bc57e5bbd3cc9bf019060dfa0668f63241ccc16aa7"}, - {file = "aiohttp-3.10.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56fb94bae2be58f68d000d046172d8b8e6b1b571eb02ceee5535e9633dcd559c"}, - {file = 
"aiohttp-3.10.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bf75716377aad2c718cdf66451c5cf02042085d84522aec1f9246d3e4b8641a6"}, - {file = "aiohttp-3.10.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6c51ed03e19c885c8e91f574e4bbe7381793f56f93229731597e4a499ffef2a5"}, - {file = "aiohttp-3.10.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b84857b66fa6510a163bb083c1199d1ee091a40163cfcbbd0642495fed096204"}, - {file = "aiohttp-3.10.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c124b9206b1befe0491f48185fd30a0dd51b0f4e0e7e43ac1236066215aff272"}, - {file = "aiohttp-3.10.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:3461d9294941937f07bbbaa6227ba799bc71cc3b22c40222568dc1cca5118f68"}, - {file = "aiohttp-3.10.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:08bd0754d257b2db27d6bab208c74601df6f21bfe4cb2ec7b258ba691aac64b3"}, - {file = "aiohttp-3.10.3-cp312-cp312-win32.whl", hash = "sha256:7f9159ae530297f61a00116771e57516f89a3de6ba33f314402e41560872b50a"}, - {file = "aiohttp-3.10.3-cp312-cp312-win_amd64.whl", hash = "sha256:e1128c5d3a466279cb23c4aa32a0f6cb0e7d2961e74e9e421f90e74f75ec1edf"}, - {file = "aiohttp-3.10.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d1100e68e70eb72eadba2b932b185ebf0f28fd2f0dbfe576cfa9d9894ef49752"}, - {file = "aiohttp-3.10.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a541414578ff47c0a9b0b8b77381ea86b0c8531ab37fc587572cb662ccd80b88"}, - {file = "aiohttp-3.10.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d5548444ef60bf4c7b19ace21f032fa42d822e516a6940d36579f7bfa8513f9c"}, - {file = "aiohttp-3.10.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ba2e838b5e6a8755ac8297275c9460e729dc1522b6454aee1766c6de6d56e5e"}, - {file = "aiohttp-3.10.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:48665433bb59144aaf502c324694bec25867eb6630fcd831f7a893ca473fcde4"}, - {file = "aiohttp-3.10.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bac352fceed158620ce2d701ad39d4c1c76d114255a7c530e057e2b9f55bdf9f"}, - {file = "aiohttp-3.10.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2b0f670502100cdc567188c49415bebba947eb3edaa2028e1a50dd81bd13363f"}, - {file = "aiohttp-3.10.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43b09f38a67679e32d380fe512189ccb0b25e15afc79b23fbd5b5e48e4fc8fd9"}, - {file = "aiohttp-3.10.3-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:cd788602e239ace64f257d1c9d39898ca65525583f0fbf0988bcba19418fe93f"}, - {file = "aiohttp-3.10.3-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:214277dcb07ab3875f17ee1c777d446dcce75bea85846849cc9d139ab8f5081f"}, - {file = "aiohttp-3.10.3-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:32007fdcaab789689c2ecaaf4b71f8e37bf012a15cd02c0a9db8c4d0e7989fa8"}, - {file = "aiohttp-3.10.3-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:123e5819bfe1b87204575515cf448ab3bf1489cdeb3b61012bde716cda5853e7"}, - {file = "aiohttp-3.10.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:812121a201f0c02491a5db335a737b4113151926a79ae9ed1a9f41ea225c0e3f"}, - {file = "aiohttp-3.10.3-cp38-cp38-win32.whl", hash = "sha256:b97dc9a17a59f350c0caa453a3cb35671a2ffa3a29a6ef3568b523b9113d84e5"}, - {file = "aiohttp-3.10.3-cp38-cp38-win_amd64.whl", hash = "sha256:3731a73ddc26969d65f90471c635abd4e1546a25299b687e654ea6d2fc052394"}, - {file = 
"aiohttp-3.10.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38d91b98b4320ffe66efa56cb0f614a05af53b675ce1b8607cdb2ac826a8d58e"}, - {file = "aiohttp-3.10.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9743fa34a10a36ddd448bba8a3adc2a66a1c575c3c2940301bacd6cc896c6bf1"}, - {file = "aiohttp-3.10.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7c126f532caf238031c19d169cfae3c6a59129452c990a6e84d6e7b198a001dc"}, - {file = "aiohttp-3.10.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:926e68438f05703e500b06fe7148ef3013dd6f276de65c68558fa9974eeb59ad"}, - {file = "aiohttp-3.10.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:434b3ab75833accd0b931d11874e206e816f6e6626fd69f643d6a8269cd9166a"}, - {file = "aiohttp-3.10.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d35235a44ec38109b811c3600d15d8383297a8fab8e3dec6147477ec8636712a"}, - {file = "aiohttp-3.10.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59c489661edbd863edb30a8bd69ecb044bd381d1818022bc698ba1b6f80e5dd1"}, - {file = "aiohttp-3.10.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50544fe498c81cb98912afabfc4e4d9d85e89f86238348e3712f7ca6a2f01dab"}, - {file = "aiohttp-3.10.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:09bc79275737d4dc066e0ae2951866bb36d9c6b460cb7564f111cc0427f14844"}, - {file = "aiohttp-3.10.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:af4dbec58e37f5afff4f91cdf235e8e4b0bd0127a2a4fd1040e2cad3369d2f06"}, - {file = "aiohttp-3.10.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:b22cae3c9dd55a6b4c48c63081d31c00fc11fa9db1a20c8a50ee38c1a29539d2"}, - {file = "aiohttp-3.10.3-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ba562736d3fbfe9241dad46c1a8994478d4a0e50796d80e29d50cabe8fbfcc3f"}, - {file = "aiohttp-3.10.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:f25d6c4e82d7489be84f2b1c8212fafc021b3731abdb61a563c90e37cced3a21"}, - {file = "aiohttp-3.10.3-cp39-cp39-win32.whl", hash = "sha256:b69d832e5f5fa15b1b6b2c8eb6a9fd2c0ec1fd7729cb4322ed27771afc9fc2ac"}, - {file = "aiohttp-3.10.3-cp39-cp39-win_amd64.whl", hash = "sha256:673bb6e3249dc8825df1105f6ef74e2eab779b7ff78e96c15cadb78b04a83752"}, - {file = "aiohttp-3.10.3.tar.gz", hash = "sha256:21650e7032cc2d31fc23d353d7123e771354f2a3d5b05a5647fc30fea214e696"}, -] - -[package.dependencies] -aiohappyeyeballs = ">=2.3.0" -aiosignal = ">=1.1.2" -async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""} -attrs = ">=17.3.0" -frozenlist = ">=1.1.1" -multidict = ">=4.5,<7.0" -yarl = ">=1.0,<2.0" - -[package.extras] -speedups = ["Brotli", "aiodns (>=3.2.0)", "brotlicffi"] - -[[package]] -name = "aiosignal" -version = "1.3.1" -description = "aiosignal: a list of registered asynchronous callbacks" -optional = false -python-versions = ">=3.7" -files = [ - {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, - {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, -] - -[package.dependencies] -frozenlist = ">=1.1.0" - -[[package]] -name = "alembic" -version = "1.13.2" -description = "A database migration tool for SQLAlchemy." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "alembic-1.13.2-py3-none-any.whl", hash = "sha256:6b8733129a6224a9a711e17c99b08462dbf7cc9670ba8f2e2ae9af860ceb1953"}, - {file = "alembic-1.13.2.tar.gz", hash = "sha256:1ff0ae32975f4fd96028c39ed9bb3c867fe3af956bd7bb37343b54c9fe7445ef"}, -] - -[package.dependencies] -Mako = "*" -SQLAlchemy = ">=1.3.0" -typing-extensions = ">=4" - -[package.extras] -tz = ["backports.zoneinfo"] - -[[package]] -name = "annotated-types" -version = "0.7.0" -description = "Reusable constraint types to use with typing.Annotated" -optional = false -python-versions = ">=3.8" -files = [ - {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, - {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, -] - -[[package]] -name = "anyio" -version = "4.4.0" -description = "High level compatibility layer for multiple asynchronous event loop implementations" -optional = false -python-versions = ">=3.8" -files = [ - {file = "anyio-4.4.0-py3-none-any.whl", hash = "sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7"}, - {file = "anyio-4.4.0.tar.gz", hash = "sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94"}, -] - -[package.dependencies] -exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} -idna = ">=2.8" -sniffio = ">=1.1" -typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} - -[package.extras] -doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] -trio = ["trio (>=0.23)"] - -[[package]] -name = "appdirs" -version = "1.4.4" -description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
-optional = false -python-versions = "*" -files = [ - {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, - {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, -] - -[[package]] -name = "appnope" -version = "0.1.4" -description = "Disable App Nap on macOS >= 10.9" -optional = false -python-versions = ">=3.6" -files = [ - {file = "appnope-0.1.4-py2.py3-none-any.whl", hash = "sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c"}, - {file = "appnope-0.1.4.tar.gz", hash = "sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee"}, -] - -[[package]] -name = "asgiref" -version = "3.8.1" -description = "ASGI specs, helper code, and adapters" -optional = false -python-versions = ">=3.8" -files = [ - {file = "asgiref-3.8.1-py3-none-any.whl", hash = "sha256:3e1e3ecc849832fe52ccf2cb6686b7a55f82bb1d6aee72a58826471390335e47"}, - {file = "asgiref-3.8.1.tar.gz", hash = "sha256:c343bd80a0bec947a9860adb4c432ffa7db769836c64238fc34bdc3fec84d590"}, -] - -[package.dependencies] -typing-extensions = {version = ">=4", markers = "python_version < \"3.11\""} - -[package.extras] -tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] - -[[package]] -name = "asttokens" -version = "2.4.1" -description = "Annotate AST trees with source code positions" -optional = false -python-versions = "*" -files = [ - {file = "asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24"}, - {file = "asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0"}, -] - -[package.dependencies] -six = ">=1.12.0" - -[package.extras] -astroid = ["astroid (>=1,<2)", "astroid (>=2,<4)"] -test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"] - -[[package]] -name = "async-timeout" -version = "4.0.3" -description = "Timeout context manager for asyncio programs" -optional = false -python-versions = ">=3.7" -files = [ - {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, - {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, -] - -[[package]] -name = "asyncer" -version = "0.0.5" -description = "Asyncer, async and await, focused on developer experience." 
-optional = false -python-versions = ">=3.8,<4.0" -files = [ - {file = "asyncer-0.0.5-py3-none-any.whl", hash = "sha256:ba06d6de3c750763868dffacf89b18d40b667605b0241d31c2ee43f188e2ab74"}, - {file = "asyncer-0.0.5.tar.gz", hash = "sha256:2979f3e04cbedfe5cfeb79027dcf7d004fcc4430a0ca0066ae20490f218ec06e"}, -] - -[package.dependencies] -anyio = ">=3.4.0,<5.0" - -[[package]] -name = "attrs" -version = "24.2.0" -description = "Classes Without Boilerplate" -optional = false -python-versions = ">=3.7" -files = [ - {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, - {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, -] - -[package.extras] -benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] -tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] - -[[package]] -name = "backoff" -version = "2.2.1" -description = "Function decoration for backoff and retry" -optional = false -python-versions = ">=3.7,<4.0" -files = [ - {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, - {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, -] - -[[package]] -name = "bcrypt" -version = "4.0.1" -description = "Modern password hashing for your software and your servers" -optional = false -python-versions = ">=3.6" -files = [ - {file = "bcrypt-4.0.1-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:b1023030aec778185a6c16cf70f359cbb6e0c289fd564a7cfa29e727a1c38f8f"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:08d2947c490093a11416df18043c27abe3921558d2c03e2076ccb28a116cb6d0"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0eaa47d4661c326bfc9d08d16debbc4edf78778e6aaba29c1bc7ce67214d4410"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae88eca3024bb34bb3430f964beab71226e761f51b912de5133470b649d82344"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:a522427293d77e1c29e303fc282e2d71864579527a04ddcfda6d4f8396c6c36a"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:fbdaec13c5105f0c4e5c52614d04f0bca5f5af007910daa8b6b12095edaa67b3"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:ca3204d00d3cb2dfed07f2d74a25f12fc12f73e606fcaa6975d1f7ae69cacbb2"}, - {file = "bcrypt-4.0.1-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:089098effa1bc35dc055366740a067a2fc76987e8ec75349eb9484061c54f535"}, - {file = "bcrypt-4.0.1-cp36-abi3-musllinux_1_1_x86_64.whl", hash = 
"sha256:e9a51bbfe7e9802b5f3508687758b564069ba937748ad7b9e890086290d2f79e"}, - {file = "bcrypt-4.0.1-cp36-abi3-win32.whl", hash = "sha256:2caffdae059e06ac23fce178d31b4a702f2a3264c20bfb5ff541b338194d8fab"}, - {file = "bcrypt-4.0.1-cp36-abi3-win_amd64.whl", hash = "sha256:8a68f4341daf7522fe8d73874de8906f3a339048ba406be6ddc1b3ccb16fc0d9"}, - {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf4fa8b2ca74381bb5442c089350f09a3f17797829d958fad058d6e44d9eb83c"}, - {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:67a97e1c405b24f19d08890e7ae0c4f7ce1e56a712a016746c8b2d7732d65d4b"}, - {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b3b85202d95dd568efcb35b53936c5e3b3600c7cdcc6115ba461df3a8e89f38d"}, - {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cbb03eec97496166b704ed663a53680ab57c5084b2fc98ef23291987b525cb7d"}, - {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:5ad4d32a28b80c5fa6671ccfb43676e8c1cc232887759d1cd7b6f56ea4355215"}, - {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b57adba8a1444faf784394de3436233728a1ecaeb6e07e8c22c8848f179b893c"}, - {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:705b2cea8a9ed3d55b4491887ceadb0106acf7c6387699fca771af56b1cdeeda"}, - {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:2b3ac11cf45161628f1f3733263e63194f22664bf4d0c0f3ab34099c02134665"}, - {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3100851841186c25f127731b9fa11909ab7b1df6fc4b9f8353f4f1fd952fbf71"}, - {file = "bcrypt-4.0.1.tar.gz", hash = "sha256:27d375903ac8261cfe4047f6709d16f7d18d39b1ec92aaf72af989552a650ebd"}, -] - -[package.extras] -tests = ["pytest (>=3.2.1,!=3.3.0)"] -typecheck = ["mypy"] - -[[package]] -name = "beautifulsoup4" -version = "4.12.3" -description = "Screen-scraping library" -optional = false -python-versions = ">=3.6.0" -files = [ - {file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"}, - {file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"}, -] - -[package.dependencies] -soupsieve = ">1.2" - -[package.extras] -cchardet = ["cchardet"] -chardet = ["chardet"] -charset-normalizer = ["charset-normalizer"] -html5lib = ["html5lib"] -lxml = ["lxml"] - -[[package]] -name = "boto3" -version = "1.34.161" -description = "The AWS SDK for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "boto3-1.34.161-py3-none-any.whl", hash = "sha256:4ef285334a0edc3047e27a04caf00f7742e32c0f03a361101e768014ac5709dd"}, - {file = "boto3-1.34.161.tar.gz", hash = "sha256:a872d8fdb3203c1eb0b12fa9e9d879e6f7fd02983a485f02189e6d5914ccd834"}, -] - -[package.dependencies] -botocore = ">=1.34.161,<1.35.0" -jmespath = ">=0.7.1,<2.0.0" -s3transfer = ">=0.10.0,<0.11.0" - -[package.extras] -crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] - -[[package]] -name = "botocore" -version = "1.34.161" -description = "Low-level, data-driven core of boto 3." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "botocore-1.34.161-py3-none-any.whl", hash = "sha256:6c606d2da6f62fde06880aff1190566af208875c29938b6b68741e607817975a"}, - {file = "botocore-1.34.161.tar.gz", hash = "sha256:16381bfb786142099abf170ce734b95a402a3a7f8e4016358712ac333c5568b2"}, -] - -[package.dependencies] -jmespath = ">=0.7.1,<2.0.0" -python-dateutil = ">=2.1,<3.0.0" -urllib3 = {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""} - -[package.extras] -crt = ["awscrt (==0.21.2)"] - -[[package]] -name = "build" -version = "1.2.1" -description = "A simple, correct Python build frontend" -optional = false -python-versions = ">=3.8" -files = [ - {file = "build-1.2.1-py3-none-any.whl", hash = "sha256:75e10f767a433d9a86e50d83f418e83efc18ede923ee5ff7df93b6cb0306c5d4"}, - {file = "build-1.2.1.tar.gz", hash = "sha256:526263f4870c26f26c433545579475377b2b7588b6f1eac76a001e873ae3e19d"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "os_name == \"nt\""} -importlib-metadata = {version = ">=4.6", markers = "python_full_version < \"3.10.2\""} -packaging = ">=19.1" -pyproject_hooks = "*" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} - -[package.extras] -docs = ["furo (>=2023.08.17)", "sphinx (>=7.0,<8.0)", "sphinx-argparse-cli (>=1.5)", "sphinx-autodoc-typehints (>=1.10)", "sphinx-issues (>=3.0.0)"] -test = ["build[uv,virtualenv]", "filelock (>=3)", "pytest (>=6.2.4)", "pytest-cov (>=2.12)", "pytest-mock (>=2)", "pytest-rerunfailures (>=9.1)", "pytest-xdist (>=1.34)", "setuptools (>=42.0.0)", "setuptools (>=56.0.0)", "setuptools (>=56.0.0)", "setuptools (>=67.8.0)", "wheel (>=0.36.0)"] -typing = ["build[uv]", "importlib-metadata (>=5.1)", "mypy (>=1.9.0,<1.10.0)", "tomli", "typing-extensions (>=3.7.4.3)"] -uv = ["uv (>=0.1.18)"] -virtualenv = ["virtualenv (>=20.0.35)"] - -[[package]] -name = "cachetools" -version = "5.4.0" -description = "Extensible memoizing collections and decorators" -optional = false -python-versions = ">=3.7" -files = [ - {file = "cachetools-5.4.0-py3-none-any.whl", hash = "sha256:3ae3b49a3d5e28a77a0be2b37dbcb89005058959cb2323858c2657c4a8cab474"}, - {file = "cachetools-5.4.0.tar.gz", hash = "sha256:b8adc2e7c07f105ced7bc56dbb6dfbe7c4a00acce20e2227b3f355be89bc6827"}, -] - -[[package]] -name = "certifi" -version = "2024.7.4" -description = "Python package for providing Mozilla's CA Bundle." -optional = false -python-versions = ">=3.6" -files = [ - {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, - {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, -] - -[[package]] -name = "cffi" -version = "1.17.0" -description = "Foreign Function Interface for Python calling C code." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "cffi-1.17.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f9338cc05451f1942d0d8203ec2c346c830f8e86469903d5126c1f0a13a2bcbb"}, - {file = "cffi-1.17.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0ce71725cacc9ebf839630772b07eeec220cbb5f03be1399e0457a1464f8e1a"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c815270206f983309915a6844fe994b2fa47e5d05c4c4cef267c3b30e34dbe42"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6bdcd415ba87846fd317bee0774e412e8792832e7805938987e4ede1d13046d"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a98748ed1a1df4ee1d6f927e151ed6c1a09d5ec21684de879c7ea6aa96f58f2"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0a048d4f6630113e54bb4b77e315e1ba32a5a31512c31a273807d0027a7e69ab"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24aa705a5f5bd3a8bcfa4d123f03413de5d86e497435693b638cbffb7d5d8a1b"}, - {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:856bf0924d24e7f93b8aee12a3a1095c34085600aa805693fb7f5d1962393206"}, - {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:4304d4416ff032ed50ad6bb87416d802e67139e31c0bde4628f36a47a3164bfa"}, - {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:331ad15c39c9fe9186ceaf87203a9ecf5ae0ba2538c9e898e3a6967e8ad3db6f"}, - {file = "cffi-1.17.0-cp310-cp310-win32.whl", hash = "sha256:669b29a9eca6146465cc574659058ed949748f0809a2582d1f1a324eb91054dc"}, - {file = "cffi-1.17.0-cp310-cp310-win_amd64.whl", hash = "sha256:48b389b1fd5144603d61d752afd7167dfd205973a43151ae5045b35793232aa2"}, - {file = "cffi-1.17.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c5d97162c196ce54af6700949ddf9409e9833ef1003b4741c2b39ef46f1d9720"}, - {file = "cffi-1.17.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5ba5c243f4004c750836f81606a9fcb7841f8874ad8f3bf204ff5e56332b72b9"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bb9333f58fc3a2296fb1d54576138d4cf5d496a2cc118422bd77835e6ae0b9cb"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:435a22d00ec7d7ea533db494da8581b05977f9c37338c80bc86314bec2619424"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1df34588123fcc88c872f5acb6f74ae59e9d182a2707097f9e28275ec26a12d"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df8bb0010fdd0a743b7542589223a2816bdde4d94bb5ad67884348fa2c1c67e8"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8b5b9712783415695663bd463990e2f00c6750562e6ad1d28e072a611c5f2a6"}, - {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ffef8fd58a36fb5f1196919638f73dd3ae0db1a878982b27a9a5a176ede4ba91"}, - {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e67d26532bfd8b7f7c05d5a766d6f437b362c1bf203a3a5ce3593a645e870b8"}, - {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:45f7cd36186db767d803b1473b3c659d57a23b5fa491ad83c6d40f2af58e4dbb"}, - {file = "cffi-1.17.0-cp311-cp311-win32.whl", hash 
= "sha256:a9015f5b8af1bb6837a3fcb0cdf3b874fe3385ff6274e8b7925d81ccaec3c5c9"}, - {file = "cffi-1.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:b50aaac7d05c2c26dfd50c3321199f019ba76bb650e346a6ef3616306eed67b0"}, - {file = "cffi-1.17.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aec510255ce690d240f7cb23d7114f6b351c733a74c279a84def763660a2c3bc"}, - {file = "cffi-1.17.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2770bb0d5e3cc0e31e7318db06efcbcdb7b31bcb1a70086d3177692a02256f59"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db9a30ec064129d605d0f1aedc93e00894b9334ec74ba9c6bdd08147434b33eb"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a47eef975d2b8b721775a0fa286f50eab535b9d56c70a6e62842134cf7841195"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f3e0992f23bbb0be00a921eae5363329253c3b86287db27092461c887b791e5e"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6107e445faf057c118d5050560695e46d272e5301feffda3c41849641222a828"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb862356ee9391dc5a0b3cbc00f416b48c1b9a52d252d898e5b7696a5f9fe150"}, - {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c1c13185b90bbd3f8b5963cd8ce7ad4ff441924c31e23c975cb150e27c2bf67a"}, - {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:17c6d6d3260c7f2d94f657e6872591fe8733872a86ed1345bda872cfc8c74885"}, - {file = "cffi-1.17.0-cp312-cp312-win32.whl", hash = "sha256:c3b8bd3133cd50f6b637bb4322822c94c5ce4bf0d724ed5ae70afce62187c492"}, - {file = "cffi-1.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:dca802c8db0720ce1c49cce1149ff7b06e91ba15fa84b1d59144fef1a1bc7ac2"}, - {file = "cffi-1.17.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6ce01337d23884b21c03869d2f68c5523d43174d4fc405490eb0091057943118"}, - {file = "cffi-1.17.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cab2eba3830bf4f6d91e2d6718e0e1c14a2f5ad1af68a89d24ace0c6b17cced7"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:14b9cbc8f7ac98a739558eb86fabc283d4d564dafed50216e7f7ee62d0d25377"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b00e7bcd71caa0282cbe3c90966f738e2db91e64092a877c3ff7f19a1628fdcb"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:41f4915e09218744d8bae14759f983e466ab69b178de38066f7579892ff2a555"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4760a68cab57bfaa628938e9c2971137e05ce48e762a9cb53b76c9b569f1204"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:011aff3524d578a9412c8b3cfaa50f2c0bd78e03eb7af7aa5e0df59b158efb2f"}, - {file = "cffi-1.17.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:a003ac9edc22d99ae1286b0875c460351f4e101f8c9d9d2576e78d7e048f64e0"}, - {file = "cffi-1.17.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ef9528915df81b8f4c7612b19b8628214c65c9b7f74db2e34a646a0a2a0da2d4"}, - {file = "cffi-1.17.0-cp313-cp313-win32.whl", hash = "sha256:70d2aa9fb00cf52034feac4b913181a6e10356019b18ef89bc7c12a283bf5f5a"}, - {file = "cffi-1.17.0-cp313-cp313-win_amd64.whl", 
hash = "sha256:b7b6ea9e36d32582cda3465f54c4b454f62f23cb083ebc7a94e2ca6ef011c3a7"}, - {file = "cffi-1.17.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:964823b2fc77b55355999ade496c54dde161c621cb1f6eac61dc30ed1b63cd4c"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:516a405f174fd3b88829eabfe4bb296ac602d6a0f68e0d64d5ac9456194a5b7e"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dec6b307ce928e8e112a6bb9921a1cb00a0e14979bf28b98e084a4b8a742bd9b"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4094c7b464cf0a858e75cd14b03509e84789abf7b79f8537e6a72152109c76e"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2404f3de742f47cb62d023f0ba7c5a916c9c653d5b368cc966382ae4e57da401"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3aa9d43b02a0c681f0bfbc12d476d47b2b2b6a3f9287f11ee42989a268a1833c"}, - {file = "cffi-1.17.0-cp38-cp38-win32.whl", hash = "sha256:0bb15e7acf8ab35ca8b24b90af52c8b391690ef5c4aec3d31f38f0d37d2cc499"}, - {file = "cffi-1.17.0-cp38-cp38-win_amd64.whl", hash = "sha256:93a7350f6706b31f457c1457d3a3259ff9071a66f312ae64dc024f049055f72c"}, - {file = "cffi-1.17.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1a2ddbac59dc3716bc79f27906c010406155031a1c801410f1bafff17ea304d2"}, - {file = "cffi-1.17.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6327b572f5770293fc062a7ec04160e89741e8552bf1c358d1a23eba68166759"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbc183e7bef690c9abe5ea67b7b60fdbca81aa8da43468287dae7b5c046107d4"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bdc0f1f610d067c70aa3737ed06e2726fd9d6f7bfee4a351f4c40b6831f4e82"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6d872186c1617d143969defeadac5a904e6e374183e07977eedef9c07c8953bf"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0d46ee4764b88b91f16661a8befc6bfb24806d885e27436fdc292ed7e6f6d058"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f76a90c345796c01d85e6332e81cab6d70de83b829cf1d9762d0a3da59c7932"}, - {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0e60821d312f99d3e1569202518dddf10ae547e799d75aef3bca3a2d9e8ee693"}, - {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:eb09b82377233b902d4c3fbeeb7ad731cdab579c6c6fda1f763cd779139e47c3"}, - {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:24658baf6224d8f280e827f0a50c46ad819ec8ba380a42448e24459daf809cf4"}, - {file = "cffi-1.17.0-cp39-cp39-win32.whl", hash = "sha256:0fdacad9e0d9fc23e519efd5ea24a70348305e8d7d85ecbb1a5fa66dc834e7fb"}, - {file = "cffi-1.17.0-cp39-cp39-win_amd64.whl", hash = "sha256:7cbc78dc018596315d4e7841c8c3a7ae31cc4d638c9b627f87d52e8abaaf2d29"}, - {file = "cffi-1.17.0.tar.gz", hash = "sha256:f3157624b7558b914cb039fd1af735e5e8049a87c817cc215109ad1c8779df76"}, -] - -[package.dependencies] -pycparser = "*" - -[[package]] -name = "cfgv" -version = "3.4.0" -description = "Validate configuration and produce human readable error messages." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, - {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, -] - -[[package]] -name = "chardet" -version = "5.2.0" -description = "Universal encoding detector for Python 3" -optional = false -python-versions = ">=3.7" -files = [ - {file = "chardet-5.2.0-py3-none-any.whl", hash = "sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970"}, - {file = "chardet-5.2.0.tar.gz", hash = "sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7"}, -] - -[[package]] -name = "charset-normalizer" -version = "3.3.2" -description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = 
"sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, 
- {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, - {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, -] - -[[package]] -name = "chroma-hnswlib" -version = "0.7.3" -description = "Chromas fork of hnswlib" -optional = false -python-versions = "*" -files = [ - {file = "chroma-hnswlib-0.7.3.tar.gz", hash = "sha256:b6137bedde49fffda6af93b0297fe00429fc61e5a072b1ed9377f909ed95a932"}, - {file = "chroma_hnswlib-0.7.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:59d6a7c6f863c67aeb23e79a64001d537060b6995c3eca9a06e349ff7b0998ca"}, - {file = "chroma_hnswlib-0.7.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d71a3f4f232f537b6152947006bd32bc1629a8686df22fd97777b70f416c127a"}, - {file = "chroma_hnswlib-0.7.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c92dc1ebe062188e53970ba13f6b07e0ae32e64c9770eb7f7ffa83f149d4210"}, - {file = "chroma_hnswlib-0.7.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49da700a6656fed8753f68d44b8cc8ae46efc99fc8a22a6d970dc1697f49b403"}, - {file = "chroma_hnswlib-0.7.3-cp310-cp310-win_amd64.whl", hash = "sha256:108bc4c293d819b56476d8f7865803cb03afd6ca128a2a04d678fffc139af029"}, - {file = "chroma_hnswlib-0.7.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:11e7ca93fb8192214ac2b9c0943641ac0daf8f9d4591bb7b73be808a83835667"}, - {file = "chroma_hnswlib-0.7.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6f552e4d23edc06cdeb553cdc757d2fe190cdeb10d43093d6a3319f8d4bf1c6b"}, - {file = "chroma_hnswlib-0.7.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f96f4d5699e486eb1fb95849fe35ab79ab0901265805be7e60f4eaa83ce263ec"}, - {file = "chroma_hnswlib-0.7.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:368e57fe9ebae05ee5844840fa588028a023d1182b0cfdb1d13f607c9ea05756"}, - {file = "chroma_hnswlib-0.7.3-cp311-cp311-win_amd64.whl", hash = "sha256:b7dca27b8896b494456db0fd705b689ac6b73af78e186eb6a42fea2de4f71c6f"}, - {file = 
"chroma_hnswlib-0.7.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:70f897dc6218afa1d99f43a9ad5eb82f392df31f57ff514ccf4eeadecd62f544"}, - {file = "chroma_hnswlib-0.7.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5aef10b4952708f5a1381c124a29aead0c356f8d7d6e0b520b778aaa62a356f4"}, - {file = "chroma_hnswlib-0.7.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ee2d8d1529fca3898d512079144ec3e28a81d9c17e15e0ea4665697a7923253"}, - {file = "chroma_hnswlib-0.7.3-cp37-cp37m-win_amd64.whl", hash = "sha256:a4021a70e898783cd6f26e00008b494c6249a7babe8774e90ce4766dd288c8ba"}, - {file = "chroma_hnswlib-0.7.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a8f61fa1d417fda848e3ba06c07671f14806a2585272b175ba47501b066fe6b1"}, - {file = "chroma_hnswlib-0.7.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d7563be58bc98e8f0866907368e22ae218d6060601b79c42f59af4eccbbd2e0a"}, - {file = "chroma_hnswlib-0.7.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:51b8d411486ee70d7b66ec08cc8b9b6620116b650df9c19076d2d8b6ce2ae914"}, - {file = "chroma_hnswlib-0.7.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d706782b628e4f43f1b8a81e9120ac486837fbd9bcb8ced70fe0d9b95c72d77"}, - {file = "chroma_hnswlib-0.7.3-cp38-cp38-win_amd64.whl", hash = "sha256:54f053dedc0e3ba657f05fec6e73dd541bc5db5b09aa8bc146466ffb734bdc86"}, - {file = "chroma_hnswlib-0.7.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e607c5a71c610a73167a517062d302c0827ccdd6e259af6e4869a5c1306ffb5d"}, - {file = "chroma_hnswlib-0.7.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c2358a795870156af6761890f9eb5ca8cade57eb10c5f046fe94dae1faa04b9e"}, - {file = "chroma_hnswlib-0.7.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7cea425df2e6b8a5e201fff0d922a1cc1d165b3cfe762b1408075723c8892218"}, - {file = "chroma_hnswlib-0.7.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:454df3dd3e97aa784fba7cf888ad191e0087eef0fd8c70daf28b753b3b591170"}, - {file = "chroma_hnswlib-0.7.3-cp39-cp39-win_amd64.whl", hash = "sha256:df587d15007ca701c6de0ee7d5585dd5e976b7edd2b30ac72bc376b3c3f85882"}, -] - -[package.dependencies] -numpy = "*" - -[[package]] -name = "chromadb" -version = "0.4.24" -description = "Chroma." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "chromadb-0.4.24-py3-none-any.whl", hash = "sha256:3a08e237a4ad28b5d176685bd22429a03717fe09d35022fb230d516108da01da"}, - {file = "chromadb-0.4.24.tar.gz", hash = "sha256:a5c80b4e4ad9b236ed2d4899a5b9e8002b489293f2881cb2cadab5b199ee1c72"}, -] - -[package.dependencies] -bcrypt = ">=4.0.1" -build = ">=1.0.3" -chroma-hnswlib = "0.7.3" -fastapi = ">=0.95.2" -grpcio = ">=1.58.0" -importlib-resources = "*" -kubernetes = ">=28.1.0" -mmh3 = ">=4.0.1" -numpy = ">=1.22.5" -onnxruntime = ">=1.14.1" -opentelemetry-api = ">=1.2.0" -opentelemetry-exporter-otlp-proto-grpc = ">=1.2.0" -opentelemetry-instrumentation-fastapi = ">=0.41b0" -opentelemetry-sdk = ">=1.2.0" -orjson = ">=3.9.12" -overrides = ">=7.3.1" -posthog = ">=2.4.0" -pulsar-client = ">=3.1.0" -pydantic = ">=1.9" -pypika = ">=0.48.9" -PyYAML = ">=6.0.0" -requests = ">=2.28" -tenacity = ">=8.2.3" -tokenizers = ">=0.13.2" -tqdm = ">=4.65.0" -typer = ">=0.9.0" -typing-extensions = ">=4.5.0" -uvicorn = {version = ">=0.18.3", extras = ["standard"]} - -[[package]] -name = "click" -version = "8.1.7" -description = "Composable command line interface toolkit" -optional = false -python-versions = ">=3.7" -files = [ - {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, - {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[[package]] -name = "cohere" -version = "5.8.0" -description = "" -optional = false -python-versions = "<4.0,>=3.8" -files = [ - {file = "cohere-5.8.0-py3-none-any.whl", hash = "sha256:f87f709be6dfe3dce57bef0dd5e90924e8828fb8d334c96fc27663b6a7298c6b"}, - {file = "cohere-5.8.0.tar.gz", hash = "sha256:c4e1ab064d66cc0170091f614b4ea22f55e079f2c7fe9e0de8752fd46f8d2a70"}, -] - -[package.dependencies] -boto3 = ">=1.34.0,<2.0.0" -fastavro = ">=1.9.4,<2.0.0" -httpx = ">=0.21.2" -httpx-sse = "0.4.0" -parameterized = ">=0.9.0,<0.10.0" -pydantic = ">=1.9.2" -pydantic-core = ">=2.18.2,<3.0.0" -requests = ">=2.0.0,<3.0.0" -tokenizers = ">=0.15,<1" -types-requests = ">=2.0.0,<3.0.0" -typing_extensions = ">=4.0.0" - -[[package]] -name = "colorama" -version = "0.4.6" -description = "Cross-platform colored terminal text." -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -files = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] - -[[package]] -name = "coloredlogs" -version = "15.0.1" -description = "Colored terminal output for Python's logging module" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "coloredlogs-15.0.1-py2.py3-none-any.whl", hash = "sha256:612ee75c546f53e92e70049c9dbfcc18c935a2b9a53b66085ce9ef6a6e5c0934"}, - {file = "coloredlogs-15.0.1.tar.gz", hash = "sha256:7c991aa71a4577af2f82600d8f8f3a89f936baeaf9b50a9c197da014e5bf16b0"}, -] - -[package.dependencies] -humanfriendly = ">=9.1" - -[package.extras] -cron = ["capturer (>=2.4)"] - -[[package]] -name = "comm" -version = "0.2.2" -description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "comm-0.2.2-py3-none-any.whl", hash = "sha256:e6fb86cb70ff661ee8c9c14e7d36d6de3b4066f1441be4063df9c5009f0a64d3"}, - {file = "comm-0.2.2.tar.gz", hash = "sha256:3fd7a84065306e07bea1773df6eb8282de51ba82f77c72f9c85716ab11fe980e"}, -] - -[package.dependencies] -traitlets = ">=4" - -[package.extras] -test = ["pytest"] - -[[package]] -name = "coverage" -version = "7.6.1" -description = "Code coverage measurement for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16"}, - {file = "coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36"}, - {file = "coverage-7.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02"}, - {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc"}, - {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23"}, - {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34"}, - {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c"}, - {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959"}, - {file = "coverage-7.6.1-cp310-cp310-win32.whl", hash = "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232"}, - {file = "coverage-7.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0"}, - {file = "coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93"}, - {file = "coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3"}, - {file = "coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff"}, - {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d"}, - {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6"}, - {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56"}, - {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234"}, - {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133"}, - {file = "coverage-7.6.1-cp311-cp311-win32.whl", hash = 
"sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c"}, - {file = "coverage-7.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6"}, - {file = "coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778"}, - {file = "coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391"}, - {file = "coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8"}, - {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d"}, - {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca"}, - {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163"}, - {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a"}, - {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d"}, - {file = "coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5"}, - {file = "coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb"}, - {file = "coverage-7.6.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106"}, - {file = "coverage-7.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9"}, - {file = "coverage-7.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c"}, - {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a"}, - {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060"}, - {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862"}, - {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388"}, - {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155"}, - {file = "coverage-7.6.1-cp313-cp313-win32.whl", hash = "sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a"}, - {file = "coverage-7.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129"}, - {file = "coverage-7.6.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e"}, - {file = 
"coverage-7.6.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962"}, - {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb"}, - {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704"}, - {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b"}, - {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f"}, - {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223"}, - {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3"}, - {file = "coverage-7.6.1-cp313-cp313t-win32.whl", hash = "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f"}, - {file = "coverage-7.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657"}, - {file = "coverage-7.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6db04803b6c7291985a761004e9060b2bca08da6d04f26a7f2294b8623a0c1a0"}, - {file = "coverage-7.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f1adfc8ac319e1a348af294106bc6a8458a0f1633cc62a1446aebc30c5fa186a"}, - {file = "coverage-7.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a95324a9de9650a729239daea117df21f4b9868ce32e63f8b650ebe6cef5595b"}, - {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b43c03669dc4618ec25270b06ecd3ee4fa94c7f9b3c14bae6571ca00ef98b0d3"}, - {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8929543a7192c13d177b770008bc4e8119f2e1f881d563fc6b6305d2d0ebe9de"}, - {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:a09ece4a69cf399510c8ab25e0950d9cf2b42f7b3cb0374f95d2e2ff594478a6"}, - {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9054a0754de38d9dbd01a46621636689124d666bad1936d76c0341f7d71bf569"}, - {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0dbde0f4aa9a16fa4d754356a8f2e36296ff4d83994b2c9d8398aa32f222f989"}, - {file = "coverage-7.6.1-cp38-cp38-win32.whl", hash = "sha256:da511e6ad4f7323ee5702e6633085fb76c2f893aaf8ce4c51a0ba4fc07580ea7"}, - {file = "coverage-7.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:3f1156e3e8f2872197af3840d8ad307a9dd18e615dc64d9ee41696f287c57ad8"}, - {file = "coverage-7.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abd5fd0db5f4dc9289408aaf34908072f805ff7792632250dcb36dc591d24255"}, - {file = "coverage-7.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:547f45fa1a93154bd82050a7f3cddbc1a7a4dd2a9bf5cb7d06f4ae29fe94eaf8"}, - {file = "coverage-7.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645786266c8f18a931b65bfcefdbf6952dd0dea98feee39bd188607a9d307ed2"}, - {file = 
"coverage-7.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e0b2df163b8ed01d515807af24f63de04bebcecbd6c3bfeff88385789fdf75a"}, - {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:609b06f178fe8e9f89ef676532760ec0b4deea15e9969bf754b37f7c40326dbc"}, - {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:702855feff378050ae4f741045e19a32d57d19f3e0676d589df0575008ea5004"}, - {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2bdb062ea438f22d99cba0d7829c2ef0af1d768d1e4a4f528087224c90b132cb"}, - {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9c56863d44bd1c4fe2abb8a4d6f5371d197f1ac0ebdee542f07f35895fc07f36"}, - {file = "coverage-7.6.1-cp39-cp39-win32.whl", hash = "sha256:6e2cd258d7d927d09493c8df1ce9174ad01b381d4729a9d8d4e38670ca24774c"}, - {file = "coverage-7.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:06a737c882bd26d0d6ee7269b20b12f14a8704807a01056c80bb881a4b2ce6ca"}, - {file = "coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df"}, - {file = "coverage-7.6.1.tar.gz", hash = "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d"}, -] - -[package.dependencies] -tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} - -[package.extras] -toml = ["tomli"] - -[[package]] -name = "crewai" -version = "0.36.1" -description = "Cutting-edge framework for orchestrating role-playing, autonomous AI agents. By fostering collaborative intelligence, CrewAI empowers agents to work together seamlessly, tackling complex tasks." -optional = false -python-versions = "<=3.13,>=3.10" -files = [ - {file = "crewai-0.36.1-py3-none-any.whl", hash = "sha256:dbaa50d102542ea0c790bd62511b35234b2f5fa8d2333a6598beb84f407f0e00"}, - {file = "crewai-0.36.1.tar.gz", hash = "sha256:ea50ec5d3ef2df85e1b520efd9331bebb49ed7143e6cd1feec645da49217d2b0"}, -] - -[package.dependencies] -appdirs = ">=1.4.4,<2.0.0" -click = ">=8.1.7,<9.0.0" -embedchain = ">=0.1.114,<0.2.0" -instructor = "1.3.3" -jsonref = ">=1.1.0,<2.0.0" -langchain = ">0.2,<=0.3" -openai = ">=1.13.3,<2.0.0" -opentelemetry-api = ">=1.22.0,<2.0.0" -opentelemetry-exporter-otlp-proto-http = ">=1.22.0,<2.0.0" -opentelemetry-sdk = ">=1.22.0,<2.0.0" -pydantic = ">=2.4.2,<3.0.0" -python-dotenv = ">=1.0.0,<2.0.0" -regex = ">=2023.12.25,<2024.0.0" - -[package.extras] -agentops = ["agentops (>=0.1.9,<0.2.0)"] -tools = ["crewai-tools (>=0.4.7,<0.5.0)"] - -[[package]] -name = "cryptography" -version = "42.0.8" -description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:81d8a521705787afe7a18d5bfb47ea9d9cc068206270aad0b96a725022e18d2e"}, - {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:961e61cefdcb06e0c6d7e3a1b22ebe8b996eb2bf50614e89384be54c48c6b63d"}, - {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3ec3672626e1b9e55afd0df6d774ff0e953452886e06e0f1eb7eb0c832e8902"}, - {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e599b53fd95357d92304510fb7bda8523ed1f79ca98dce2f43c115950aa78801"}, - {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5226d5d21ab681f432a9c1cf8b658c0cb02533eece706b155e5fbd8a0cdd3949"}, - {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:6b7c4f03ce01afd3b76cf69a5455caa9cfa3de8c8f493e0d3ab7d20611c8dae9"}, - {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:2346b911eb349ab547076f47f2e035fc8ff2c02380a7cbbf8d87114fa0f1c583"}, - {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ad803773e9df0b92e0a817d22fd8a3675493f690b96130a5e24f1b8fabbea9c7"}, - {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2f66d9cd9147ee495a8374a45ca445819f8929a3efcd2e3df6428e46c3cbb10b"}, - {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:d45b940883a03e19e944456a558b67a41160e367a719833c53de6911cabba2b7"}, - {file = "cryptography-42.0.8-cp37-abi3-win32.whl", hash = "sha256:a0c5b2b0585b6af82d7e385f55a8bc568abff8923af147ee3c07bd8b42cda8b2"}, - {file = "cryptography-42.0.8-cp37-abi3-win_amd64.whl", hash = "sha256:57080dee41209e556a9a4ce60d229244f7a66ef52750f813bfbe18959770cfba"}, - {file = "cryptography-42.0.8-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:dea567d1b0e8bc5764b9443858b673b734100c2871dc93163f58c46a97a83d28"}, - {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4783183f7cb757b73b2ae9aed6599b96338eb957233c58ca8f49a49cc32fd5e"}, - {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0608251135d0e03111152e41f0cc2392d1e74e35703960d4190b2e0f4ca9c70"}, - {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dc0fdf6787f37b1c6b08e6dfc892d9d068b5bdb671198c72072828b80bd5fe4c"}, - {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:9c0c1716c8447ee7dbf08d6db2e5c41c688544c61074b54fc4564196f55c25a7"}, - {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fff12c88a672ab9c9c1cf7b0c80e3ad9e2ebd9d828d955c126be4fd3e5578c9e"}, - {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:cafb92b2bc622cd1aa6a1dce4b93307792633f4c5fe1f46c6b97cf67073ec961"}, - {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:31f721658a29331f895a5a54e7e82075554ccfb8b163a18719d342f5ffe5ecb1"}, - {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b297f90c5723d04bcc8265fc2a0f86d4ea2e0f7ab4b6994459548d3a6b992a14"}, - {file = "cryptography-42.0.8-cp39-abi3-win32.whl", hash = "sha256:2f88d197e66c65be5e42cd72e5c18afbfae3f741742070e3019ac8f4ac57262c"}, - {file = "cryptography-42.0.8-cp39-abi3-win_amd64.whl", hash = 
"sha256:fa76fbb7596cc5839320000cdd5d0955313696d9511debab7ee7278fc8b5c84a"}, - {file = "cryptography-42.0.8-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ba4f0a211697362e89ad822e667d8d340b4d8d55fae72cdd619389fb5912eefe"}, - {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:81884c4d096c272f00aeb1f11cf62ccd39763581645b0812e99a91505fa48e0c"}, - {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c9bb2ae11bfbab395bdd072985abde58ea9860ed84e59dbc0463a5d0159f5b71"}, - {file = "cryptography-42.0.8-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7016f837e15b0a1c119d27ecd89b3515f01f90a8615ed5e9427e30d9cdbfed3d"}, - {file = "cryptography-42.0.8-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5a94eccb2a81a309806027e1670a358b99b8fe8bfe9f8d329f27d72c094dde8c"}, - {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dec9b018df185f08483f294cae6ccac29e7a6e0678996587363dc352dc65c842"}, - {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:343728aac38decfdeecf55ecab3264b015be68fc2816ca800db649607aeee648"}, - {file = "cryptography-42.0.8-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:013629ae70b40af70c9a7a5db40abe5d9054e6f4380e50ce769947b73bf3caad"}, - {file = "cryptography-42.0.8.tar.gz", hash = "sha256:8d09d05439ce7baa8e9e95b07ec5b6c886f548deb7e0f69ef25f64b3bce842f2"}, -] - -[package.dependencies] -cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} - -[package.extras] -docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] -docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] -nox = ["nox"] -pep8test = ["check-sdist", "click", "mypy", "ruff"] -sdist = ["build"] -ssh = ["bcrypt (>=3.1.5)"] -test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] -test-randomorder = ["pytest-randomly"] - -[[package]] -name = "dataclasses-json" -version = "0.6.7" -description = "Easily serialize dataclasses to and from JSON." 
-optional = false -python-versions = "<4.0,>=3.7" -files = [ - {file = "dataclasses_json-0.6.7-py3-none-any.whl", hash = "sha256:0dbf33f26c8d5305befd61b39d2b3414e8a407bedc2834dea9b8d642666fb40a"}, - {file = "dataclasses_json-0.6.7.tar.gz", hash = "sha256:b6b3e528266ea45b9535223bc53ca645f5208833c29229e847b3f26a1cc55fc0"}, -] - -[package.dependencies] -marshmallow = ">=3.18.0,<4.0.0" -typing-inspect = ">=0.4.0,<1" - -[[package]] -name = "debugpy" -version = "1.8.5" -description = "An implementation of the Debug Adapter Protocol for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "debugpy-1.8.5-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:7e4d594367d6407a120b76bdaa03886e9eb652c05ba7f87e37418426ad2079f7"}, - {file = "debugpy-1.8.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4413b7a3ede757dc33a273a17d685ea2b0c09dbd312cc03f5534a0fd4d40750a"}, - {file = "debugpy-1.8.5-cp310-cp310-win32.whl", hash = "sha256:dd3811bd63632bb25eda6bd73bea8e0521794cda02be41fa3160eb26fc29e7ed"}, - {file = "debugpy-1.8.5-cp310-cp310-win_amd64.whl", hash = "sha256:b78c1250441ce893cb5035dd6f5fc12db968cc07f91cc06996b2087f7cefdd8e"}, - {file = "debugpy-1.8.5-cp311-cp311-macosx_12_0_universal2.whl", hash = "sha256:606bccba19f7188b6ea9579c8a4f5a5364ecd0bf5a0659c8a5d0e10dcee3032a"}, - {file = "debugpy-1.8.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db9fb642938a7a609a6c865c32ecd0d795d56c1aaa7a7a5722d77855d5e77f2b"}, - {file = "debugpy-1.8.5-cp311-cp311-win32.whl", hash = "sha256:4fbb3b39ae1aa3e5ad578f37a48a7a303dad9a3d018d369bc9ec629c1cfa7408"}, - {file = "debugpy-1.8.5-cp311-cp311-win_amd64.whl", hash = "sha256:345d6a0206e81eb68b1493ce2fbffd57c3088e2ce4b46592077a943d2b968ca3"}, - {file = "debugpy-1.8.5-cp312-cp312-macosx_12_0_universal2.whl", hash = "sha256:5b5c770977c8ec6c40c60d6f58cacc7f7fe5a45960363d6974ddb9b62dbee156"}, - {file = "debugpy-1.8.5-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0a65b00b7cdd2ee0c2cf4c7335fef31e15f1b7056c7fdbce9e90193e1a8c8cb"}, - {file = "debugpy-1.8.5-cp312-cp312-win32.whl", hash = "sha256:c9f7c15ea1da18d2fcc2709e9f3d6de98b69a5b0fff1807fb80bc55f906691f7"}, - {file = "debugpy-1.8.5-cp312-cp312-win_amd64.whl", hash = "sha256:28ced650c974aaf179231668a293ecd5c63c0a671ae6d56b8795ecc5d2f48d3c"}, - {file = "debugpy-1.8.5-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:3df6692351172a42af7558daa5019651f898fc67450bf091335aa8a18fbf6f3a"}, - {file = "debugpy-1.8.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1cd04a73eb2769eb0bfe43f5bfde1215c5923d6924b9b90f94d15f207a402226"}, - {file = "debugpy-1.8.5-cp38-cp38-win32.whl", hash = "sha256:8f913ee8e9fcf9d38a751f56e6de12a297ae7832749d35de26d960f14280750a"}, - {file = "debugpy-1.8.5-cp38-cp38-win_amd64.whl", hash = "sha256:a697beca97dad3780b89a7fb525d5e79f33821a8bc0c06faf1f1289e549743cf"}, - {file = "debugpy-1.8.5-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:0a1029a2869d01cb777216af8c53cda0476875ef02a2b6ff8b2f2c9a4b04176c"}, - {file = "debugpy-1.8.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e84c276489e141ed0b93b0af648eef891546143d6a48f610945416453a8ad406"}, - {file = "debugpy-1.8.5-cp39-cp39-win32.whl", hash = "sha256:ad84b7cde7fd96cf6eea34ff6c4a1b7887e0fe2ea46e099e53234856f9d99a34"}, - {file = "debugpy-1.8.5-cp39-cp39-win_amd64.whl", hash = "sha256:7b0fe36ed9d26cb6836b0a51453653f8f2e347ba7348f2bbfe76bfeb670bfb1c"}, - {file = 
"debugpy-1.8.5-py2.py3-none-any.whl", hash = "sha256:55919dce65b471eff25901acf82d328bbd5b833526b6c1364bd5133754777a44"}, - {file = "debugpy-1.8.5.zip", hash = "sha256:b2112cfeb34b4507399d298fe7023a16656fc553ed5246536060ca7bd0e668d0"}, -] - -[[package]] -name = "decorator" -version = "5.1.1" -description = "Decorators for Humans" -optional = false -python-versions = ">=3.5" -files = [ - {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, - {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, -] - -[[package]] -name = "deprecated" -version = "1.2.14" -description = "Python @deprecated decorator to deprecate old python classes, functions or methods." -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, - {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, -] - -[package.dependencies] -wrapt = ">=1.10,<2" - -[package.extras] -dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] - -[[package]] -name = "devtools" -version = "0.12.2" -description = "Python's missing debug print command, and more." -optional = false -python-versions = ">=3.7" -files = [ - {file = "devtools-0.12.2-py3-none-any.whl", hash = "sha256:c366e3de1df4cdd635f1ad8cbcd3af01a384d7abda71900e68d43b04eb6aaca7"}, - {file = "devtools-0.12.2.tar.gz", hash = "sha256:efceab184cb35e3a11fa8e602cc4fadacaa2e859e920fc6f87bf130b69885507"}, -] - -[package.dependencies] -asttokens = ">=2.0.0,<3.0.0" -executing = ">=1.1.1" -pygments = ">=2.15.0" - -[[package]] -name = "dictdiffer" -version = "0.9.0" -description = "Dictdiffer is a library that helps you to diff and patch dictionaries." 
-optional = false -python-versions = "*" -files = [ - {file = "dictdiffer-0.9.0-py2.py3-none-any.whl", hash = "sha256:442bfc693cfcadaf46674575d2eba1c53b42f5e404218ca2c2ff549f2df56595"}, - {file = "dictdiffer-0.9.0.tar.gz", hash = "sha256:17bacf5fbfe613ccf1b6d512bd766e6b21fb798822a133aa86098b8ac9997578"}, -] - -[package.extras] -all = ["Sphinx (>=3)", "check-manifest (>=0.42)", "mock (>=1.3.0)", "numpy (>=1.13.0)", "numpy (>=1.15.0)", "numpy (>=1.18.0)", "numpy (>=1.20.0)", "pytest (==5.4.3)", "pytest (>=6)", "pytest-cov (>=2.10.1)", "pytest-isort (>=1.2.0)", "pytest-pycodestyle (>=2)", "pytest-pycodestyle (>=2.2.0)", "pytest-pydocstyle (>=2)", "pytest-pydocstyle (>=2.2.0)", "sphinx (>=3)", "sphinx-rtd-theme (>=0.2)", "tox (>=3.7.0)"] -docs = ["Sphinx (>=3)", "sphinx-rtd-theme (>=0.2)"] -numpy = ["numpy (>=1.13.0)", "numpy (>=1.15.0)", "numpy (>=1.18.0)", "numpy (>=1.20.0)"] -tests = ["check-manifest (>=0.42)", "mock (>=1.3.0)", "pytest (==5.4.3)", "pytest (>=6)", "pytest-cov (>=2.10.1)", "pytest-isort (>=1.2.0)", "pytest-pycodestyle (>=2)", "pytest-pycodestyle (>=2.2.0)", "pytest-pydocstyle (>=2)", "pytest-pydocstyle (>=2.2.0)", "sphinx (>=3)", "tox (>=3.7.0)"] - -[[package]] -name = "dill" -version = "0.3.8" -description = "serialize all of Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "dill-0.3.8-py3-none-any.whl", hash = "sha256:c36ca9ffb54365bdd2f8eb3eff7d2a21237f8452b57ace88b1ac615b7e815bd7"}, - {file = "dill-0.3.8.tar.gz", hash = "sha256:3ebe3c479ad625c4553aca177444d89b486b1d84982eeacded644afc0cf797ca"}, -] - -[package.extras] -graph = ["objgraph (>=1.7.2)"] -profile = ["gprof2dot (>=2022.7.29)"] - -[[package]] -name = "diskcache" -version = "5.6.3" -description = "Disk Cache -- Disk and file backed persistent cache." 
-optional = false -python-versions = ">=3" -files = [ - {file = "diskcache-5.6.3-py3-none-any.whl", hash = "sha256:5e31b2d5fbad117cc363ebaf6b689474db18a1f6438bc82358b024abd4c2ca19"}, - {file = "diskcache-5.6.3.tar.gz", hash = "sha256:2c3a3fa2743d8535d832ec61c2054a1641f41775aa7c556758a109941e33e4fc"}, -] - -[[package]] -name = "distlib" -version = "0.3.8" -description = "Distribution utilities" -optional = false -python-versions = "*" -files = [ - {file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"}, - {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"}, -] - -[[package]] -name = "distro" -version = "1.9.0" -description = "Distro - an OS platform information API" -optional = false -python-versions = ">=3.6" -files = [ - {file = "distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2"}, - {file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"}, -] - -[[package]] -name = "dnspython" -version = "2.6.1" -description = "DNS toolkit" -optional = false -python-versions = ">=3.8" -files = [ - {file = "dnspython-2.6.1-py3-none-any.whl", hash = "sha256:5ef3b9680161f6fa89daf8ad451b5f1a33b18ae8a1c6778cdf4b43f08c0a6e50"}, - {file = "dnspython-2.6.1.tar.gz", hash = "sha256:e8f0f9c23a7b7cb99ded64e6c3a6f3e701d78f50c55e002b839dea7225cff7cc"}, -] - -[package.extras] -dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "mypy (>=1.8)", "pylint (>=3)", "pytest (>=7.4)", "pytest-cov (>=4.1.0)", "sphinx (>=7.2.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"] -dnssec = ["cryptography (>=41)"] -doh = ["h2 (>=4.1.0)", "httpcore (>=1.0.0)", "httpx (>=0.26.0)"] -doq = ["aioquic (>=0.9.25)"] -idna = ["idna (>=3.6)"] -trio = ["trio (>=0.23)"] -wmi = ["wmi (>=1.5.1)"] - -[[package]] -name = "docstring-parser" -version = "0.16" -description = "Parse Python docstrings in reST, Google and Numpydoc format" -optional = false -python-versions = ">=3.6,<4.0" -files = [ - {file = "docstring_parser-0.16-py3-none-any.whl", hash = "sha256:bf0a1387354d3691d102edef7ec124f219ef639982d096e26e3b60aeffa90637"}, - {file = "docstring_parser-0.16.tar.gz", hash = "sha256:538beabd0af1e2db0146b6bd3caa526c35a34d61af9fd2887f3a8a27a739aa6e"}, -] - -[[package]] -name = "duckdb" -version = "1.0.0" -description = "DuckDB in-process database" -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "duckdb-1.0.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:4a8ce2d1f9e1c23b9bab3ae4ca7997e9822e21563ff8f646992663f66d050211"}, - {file = "duckdb-1.0.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:19797670f20f430196e48d25d082a264b66150c264c1e8eae8e22c64c2c5f3f5"}, - {file = "duckdb-1.0.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:b71c342090fe117b35d866a91ad6bffce61cd6ff3e0cff4003f93fc1506da0d8"}, - {file = "duckdb-1.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25dd69f44ad212c35ae2ea736b0e643ea2b70f204b8dff483af1491b0e2a4cec"}, - {file = "duckdb-1.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8da5f293ecb4f99daa9a9352c5fd1312a6ab02b464653a0c3a25ab7065c45d4d"}, - {file = "duckdb-1.0.0-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3207936da9967ddbb60644ec291eb934d5819b08169bc35d08b2dedbe7068c60"}, - {file = "duckdb-1.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = 
"sha256:1128d6c9c33e883b1f5df6b57c1eb46b7ab1baf2650912d77ee769aaa05111f9"}, - {file = "duckdb-1.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:02310d263474d0ac238646677feff47190ffb82544c018b2ff732a4cb462c6ef"}, - {file = "duckdb-1.0.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:75586791ab2702719c284157b65ecefe12d0cca9041da474391896ddd9aa71a4"}, - {file = "duckdb-1.0.0-cp311-cp311-macosx_12_0_universal2.whl", hash = "sha256:83bb415fc7994e641344f3489e40430ce083b78963cb1057bf714ac3a58da3ba"}, - {file = "duckdb-1.0.0-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:bee2e0b415074e84c5a2cefd91f6b5ebeb4283e7196ba4ef65175a7cef298b57"}, - {file = "duckdb-1.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fa5a4110d2a499312609544ad0be61e85a5cdad90e5b6d75ad16b300bf075b90"}, - {file = "duckdb-1.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fa389e6a382d4707b5f3d1bc2087895925ebb92b77e9fe3bfb23c9b98372fdc"}, - {file = "duckdb-1.0.0-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7ede6f5277dd851f1a4586b0c78dc93f6c26da45e12b23ee0e88c76519cbdbe0"}, - {file = "duckdb-1.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0b88cdbc0d5c3e3d7545a341784dc6cafd90fc035f17b2f04bf1e870c68456e5"}, - {file = "duckdb-1.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:fd1693cdd15375156f7fff4745debc14e5c54928589f67b87fb8eace9880c370"}, - {file = "duckdb-1.0.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:c65a7fe8a8ce21b985356ee3ec0c3d3b3b2234e288e64b4cfb03356dbe6e5583"}, - {file = "duckdb-1.0.0-cp312-cp312-macosx_12_0_universal2.whl", hash = "sha256:e5a8eda554379b3a43b07bad00968acc14dd3e518c9fbe8f128b484cf95e3d16"}, - {file = "duckdb-1.0.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:a1b6acdd54c4a7b43bd7cb584975a1b2ff88ea1a31607a2b734b17960e7d3088"}, - {file = "duckdb-1.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a677bb1b6a8e7cab4a19874249d8144296e6e39dae38fce66a80f26d15e670df"}, - {file = "duckdb-1.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:752e9d412b0a2871bf615a2ede54be494c6dc289d076974eefbf3af28129c759"}, - {file = "duckdb-1.0.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3aadb99d098c5e32d00dc09421bc63a47134a6a0de9d7cd6abf21780b678663c"}, - {file = "duckdb-1.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:83b7091d4da3e9301c4f9378833f5ffe934fb1ad2b387b439ee067b2c10c8bb0"}, - {file = "duckdb-1.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:6a8058d0148b544694cb5ea331db44f6c2a00a7b03776cc4dd1470735c3d5ff7"}, - {file = "duckdb-1.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e40cb20e5ee19d44bc66ec99969af791702a049079dc5f248c33b1c56af055f4"}, - {file = "duckdb-1.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7bce1bc0de9af9f47328e24e6e7e39da30093179b1c031897c042dd94a59c8e"}, - {file = "duckdb-1.0.0-cp37-cp37m-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8355507f7a04bc0a3666958f4414a58e06141d603e91c0fa5a7c50e49867fb6d"}, - {file = "duckdb-1.0.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:39f1a46f5a45ad2886dc9b02ce5b484f437f90de66c327f86606d9ba4479d475"}, - {file = "duckdb-1.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:a6d29ba477b27ae41676b62c8fae8d04ee7cbe458127a44f6049888231ca58fa"}, - {file = "duckdb-1.0.0-cp38-cp38-macosx_12_0_arm64.whl", hash = 
"sha256:1bea713c1925918714328da76e79a1f7651b2b503511498ccf5e007a7e67d49e"}, - {file = "duckdb-1.0.0-cp38-cp38-macosx_12_0_universal2.whl", hash = "sha256:bfe67f3bcf181edbf6f918b8c963eb060e6aa26697d86590da4edc5707205450"}, - {file = "duckdb-1.0.0-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:dbc6093a75242f002be1d96a6ace3fdf1d002c813e67baff52112e899de9292f"}, - {file = "duckdb-1.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba1881a2b11c507cee18f8fd9ef10100be066fddaa2c20fba1f9a664245cd6d8"}, - {file = "duckdb-1.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:445d0bb35087c522705c724a75f9f1c13f1eb017305b694d2686218d653c8142"}, - {file = "duckdb-1.0.0-cp38-cp38-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:224553432e84432ffb9684f33206572477049b371ce68cc313a01e214f2fbdda"}, - {file = "duckdb-1.0.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:d3914032e47c4e76636ad986d466b63fdea65e37be8a6dfc484ed3f462c4fde4"}, - {file = "duckdb-1.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:af9128a2eb7e1bb50cd2c2020d825fb2946fdad0a2558920cd5411d998999334"}, - {file = "duckdb-1.0.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:dd2659a5dbc0df0de68f617a605bf12fe4da85ba24f67c08730984a0892087e8"}, - {file = "duckdb-1.0.0-cp39-cp39-macosx_12_0_universal2.whl", hash = "sha256:ac5a4afb0bc20725e734e0b2c17e99a274de4801aff0d4e765d276b99dad6d90"}, - {file = "duckdb-1.0.0-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:2c5a53bee3668d6e84c0536164589d5127b23d298e4c443d83f55e4150fafe61"}, - {file = "duckdb-1.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b980713244d7708b25ee0a73de0c65f0e5521c47a0e907f5e1b933d79d972ef6"}, - {file = "duckdb-1.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21cbd4f9fe7b7a56eff96c3f4d6778770dd370469ca2212eddbae5dd63749db5"}, - {file = "duckdb-1.0.0-cp39-cp39-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ed228167c5d49888c5ef36f6f9cbf65011c2daf9dcb53ea8aa7a041ce567b3e4"}, - {file = "duckdb-1.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:46d8395fbcea7231fd5032a250b673cc99352fef349b718a23dea2c0dd2b8dec"}, - {file = "duckdb-1.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:6ad1fc1a4d57e7616944166a5f9417bdbca1ea65c490797e3786e3a42e162d8a"}, - {file = "duckdb-1.0.0.tar.gz", hash = "sha256:a2a059b77bc7d5b76ae9d88e267372deff19c291048d59450c431e166233d453"}, -] - -[[package]] -name = "ecdsa" -version = "0.19.0" -description = "ECDSA cryptographic signature library (pure python)" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.6" -files = [ - {file = "ecdsa-0.19.0-py2.py3-none-any.whl", hash = "sha256:2cea9b88407fdac7bbeca0833b189e4c9c53f2ef1e1eaa29f6224dbc809b707a"}, - {file = "ecdsa-0.19.0.tar.gz", hash = "sha256:60eaad1199659900dd0af521ed462b793bbdf867432b3948e87416ae4caf6bf8"}, -] - -[package.dependencies] -six = ">=1.9.0" - -[package.extras] -gmpy = ["gmpy"] -gmpy2 = ["gmpy2"] - -[[package]] -name = "email-validator" -version = "2.2.0" -description = "A robust email address syntax and deliverability validation library." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "email_validator-2.2.0-py3-none-any.whl", hash = "sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631"}, - {file = "email_validator-2.2.0.tar.gz", hash = "sha256:cb690f344c617a714f22e66ae771445a1ceb46821152df8e165c5f9a364582b7"}, -] - -[package.dependencies] -dnspython = ">=2.0.0" -idna = ">=2.0.0" - -[[package]] -name = "embedchain" -version = "0.1.120" -description = "Simplest open source retrieval (RAG) framework" -optional = false -python-versions = "<=3.13,>=3.9" -files = [ - {file = "embedchain-0.1.120-py3-none-any.whl", hash = "sha256:9eaa946f8a7b394080c56067849d7852a78361dd5e7b099ebf42989c07a1814d"}, - {file = "embedchain-0.1.120.tar.gz", hash = "sha256:6061c261a054d677e5b9c4062146d45e04e8572c67152120913d61aee4c22ae3"}, -] - -[package.dependencies] -alembic = ">=1.13.1,<2.0.0" -beautifulsoup4 = ">=4.12.2,<5.0.0" -chromadb = ">=0.4.24,<0.5.0" -cohere = ">=5.3,<6.0" -google-cloud-aiplatform = ">=1.26.1,<2.0.0" -gptcache = ">=0.1.43,<0.2.0" -langchain = ">0.2,<=0.3" -langchain-cohere = ">=0.1.4,<0.2.0" -langchain-community = ">=0.2.6,<0.3.0" -langchain-openai = ">=0.1.7,<0.2.0" -mem0ai = ">=0.0.9,<0.0.10" -openai = ">=1.1.1" -posthog = ">=3.0.2,<4.0.0" -pypdf = ">=4.0.1,<5.0.0" -pysbd = ">=0.3.4,<0.4.0" -python-dotenv = ">=1.0.0,<2.0.0" -rich = ">=13.7.0,<14.0.0" -schema = ">=0.7.5,<0.8.0" -sqlalchemy = ">=2.0.27,<3.0.0" -tiktoken = ">=0.7.0,<0.8.0" - -[package.extras] -aws = ["langchain-aws (>=0.1.10,<0.2.0)"] -elasticsearch = ["elasticsearch (>=8.9.0,<9.0.0)"] -gmail = ["google-api-core (>=2.15.0,<3.0.0)", "google-api-python-client (>=2.111.0,<3.0.0)", "google-auth (>=2.25.2,<3.0.0)", "google-auth-httplib2 (>=0.2.0,<0.3.0)", "google-auth-oauthlib (>=1.2.0,<2.0.0)", "requests (>=2.31.0,<3.0.0)"] -google = ["google-generativeai (>=0.3.0,<0.4.0)"] -googledrive = ["google-api-python-client (>=2.111.0,<3.0.0)", "google-auth-httplib2 (>=0.2.0,<0.3.0)", "google-auth-oauthlib (>=1.2.0,<2.0.0)"] -lancedb = ["lancedb (>=0.6.2,<0.7.0)"] -llama2 = ["replicate (>=0.15.4,<0.16.0)"] -milvus = ["pymilvus (==2.4.3)"] -mistralai = ["langchain-mistralai (>=0.1.9,<0.2.0)"] -mysql = ["mysql-connector-python (>=8.1.0,<9.0.0)"] -opensearch = ["opensearch-py (==2.3.1)"] -opensource = ["gpt4all (==2.0.2)", "sentence-transformers (>=2.2.2,<3.0.0)", "torch (==2.3.0)"] -postgres = ["psycopg (>=3.1.12,<4.0.0)", "psycopg-binary (>=3.1.12,<4.0.0)", "psycopg-pool (>=3.1.8,<4.0.0)"] -qdrant = ["qdrant-client (>=1.6.3,<2.0.0)"] -together = ["together (>=1.2.1,<2.0.0)"] -vertexai = ["langchain-google-vertexai (>=1.0.6,<2.0.0)"] -weaviate = ["weaviate-client (>=3.24.1,<4.0.0)"] - -[[package]] -name = "emoji" -version = "2.12.1" -description = "Emoji for Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "emoji-2.12.1-py3-none-any.whl", hash = "sha256:a00d62173bdadc2510967a381810101624a2f0986145b8da0cffa42e29430235"}, - {file = "emoji-2.12.1.tar.gz", hash = "sha256:4aa0488817691aa58d83764b6c209f8a27c0b3ab3f89d1b8dceca1a62e4973eb"}, -] - -[package.dependencies] -typing-extensions = ">=4.7.0" - -[package.extras] -dev = ["coverage", "pytest (>=7.4.4)"] - -[[package]] -name = "exceptiongroup" -version = "1.2.2" -description = "Backport of PEP 654 (exception groups)" -optional = false -python-versions = ">=3.7" -files = [ - {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, - {file = "exceptiongroup-1.2.2.tar.gz", hash = 
"sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, -] - -[package.extras] -test = ["pytest (>=6)"] - -[[package]] -name = "execnet" -version = "2.1.1" -description = "execnet: rapid multi-Python deployment" -optional = false -python-versions = ">=3.8" -files = [ - {file = "execnet-2.1.1-py3-none-any.whl", hash = "sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc"}, - {file = "execnet-2.1.1.tar.gz", hash = "sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3"}, -] - -[package.extras] -testing = ["hatch", "pre-commit", "pytest", "tox"] - -[[package]] -name = "executing" -version = "2.0.1" -description = "Get the currently executing AST node of a frame, and other information" -optional = false -python-versions = ">=3.5" -files = [ - {file = "executing-2.0.1-py2.py3-none-any.whl", hash = "sha256:eac49ca94516ccc753f9fb5ce82603156e590b27525a8bc32cce8ae302eb61bc"}, - {file = "executing-2.0.1.tar.gz", hash = "sha256:35afe2ce3affba8ee97f2d69927fa823b08b472b7b994e36a52a964b93d16147"}, -] - -[package.extras] -tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"] - -[[package]] -name = "fastapi" -version = "0.111.1" -description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" -optional = false -python-versions = ">=3.8" -files = [ - {file = "fastapi-0.111.1-py3-none-any.whl", hash = "sha256:4f51cfa25d72f9fbc3280832e84b32494cf186f50158d364a8765aabf22587bf"}, - {file = "fastapi-0.111.1.tar.gz", hash = "sha256:ddd1ac34cb1f76c2e2d7f8545a4bcb5463bce4834e81abf0b189e0c359ab2413"}, -] - -[package.dependencies] -email_validator = ">=2.0.0" -fastapi-cli = ">=0.0.2" -httpx = ">=0.23.0" -jinja2 = ">=2.11.2" -pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0" -python-multipart = ">=0.0.7" -starlette = ">=0.37.2,<0.38.0" -typing-extensions = ">=4.8.0" -uvicorn = {version = ">=0.12.0", extras = ["standard"]} - -[package.extras] -all = ["email_validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.7)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] - -[[package]] -name = "fastapi-cli" -version = "0.0.5" -description = "Run and manage FastAPI apps from the command line with FastAPI CLI. 
🚀" -optional = false -python-versions = ">=3.8" -files = [ - {file = "fastapi_cli-0.0.5-py3-none-any.whl", hash = "sha256:e94d847524648c748a5350673546bbf9bcaeb086b33c24f2e82e021436866a46"}, - {file = "fastapi_cli-0.0.5.tar.gz", hash = "sha256:d30e1239c6f46fcb95e606f02cdda59a1e2fa778a54b64686b3ff27f6211ff9f"}, -] - -[package.dependencies] -typer = ">=0.12.3" -uvicorn = {version = ">=0.15.0", extras = ["standard"]} - -[package.extras] -standard = ["uvicorn[standard] (>=0.15.0)"] - -[[package]] -name = "fastavro" -version = "1.9.5" -description = "Fast read/write of AVRO files" -optional = false -python-versions = ">=3.8" -files = [ - {file = "fastavro-1.9.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:61253148e95dd2b6457247b441b7555074a55de17aef85f5165bfd5facf600fc"}, - {file = "fastavro-1.9.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b604935d671ad47d888efc92a106f98e9440874108b444ac10e28d643109c937"}, - {file = "fastavro-1.9.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0adbf4956fd53bd74c41e7855bb45ccce953e0eb0e44f5836d8d54ad843f9944"}, - {file = "fastavro-1.9.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:53d838e31457db8bf44460c244543f75ed307935d5fc1d93bc631cc7caef2082"}, - {file = "fastavro-1.9.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:07b6288e8681eede16ff077632c47395d4925c2f51545cd7a60f194454db2211"}, - {file = "fastavro-1.9.5-cp310-cp310-win_amd64.whl", hash = "sha256:ef08cf247fdfd61286ac0c41854f7194f2ad05088066a756423d7299b688d975"}, - {file = "fastavro-1.9.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c52d7bb69f617c90935a3e56feb2c34d4276819a5c477c466c6c08c224a10409"}, - {file = "fastavro-1.9.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85e05969956003df8fa4491614bc62fe40cec59e94d06e8aaa8d8256ee3aab82"}, - {file = "fastavro-1.9.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06e6df8527493a9f0d9a8778df82bab8b1aa6d80d1b004e5aec0a31dc4dc501c"}, - {file = "fastavro-1.9.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:27820da3b17bc01cebb6d1687c9d7254b16d149ef458871aaa207ed8950f3ae6"}, - {file = "fastavro-1.9.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:195a5b8e33eb89a1a9b63fa9dce7a77d41b3b0cd785bac6044df619f120361a2"}, - {file = "fastavro-1.9.5-cp311-cp311-win_amd64.whl", hash = "sha256:be612c109efb727bfd36d4d7ed28eb8e0506617b7dbe746463ebbf81e85eaa6b"}, - {file = "fastavro-1.9.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b133456c8975ec7d2a99e16a7e68e896e45c821b852675eac4ee25364b999c14"}, - {file = "fastavro-1.9.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf586373c3d1748cac849395aad70c198ee39295f92e7c22c75757b5c0300fbe"}, - {file = "fastavro-1.9.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:724ef192bc9c55d5b4c7df007f56a46a21809463499856349d4580a55e2b914c"}, - {file = "fastavro-1.9.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bfd11fe355a8f9c0416803afac298960eb4c603a23b1c74ff9c1d3e673ea7185"}, - {file = "fastavro-1.9.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9827d1654d7bcb118ef5efd3e5b2c9ab2a48d44dac5e8c6a2327bc3ac3caa828"}, - {file = "fastavro-1.9.5-cp312-cp312-win_amd64.whl", hash = "sha256:d84b69dca296667e6137ae7c9a96d060123adbc0c00532cc47012b64d38b47e9"}, - {file = "fastavro-1.9.5-cp38-cp38-macosx_11_0_universal2.whl", hash = 
"sha256:fb744e9de40fb1dc75354098c8db7da7636cba50a40f7bef3b3fb20f8d189d88"}, - {file = "fastavro-1.9.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:240df8bacd13ff5487f2465604c007d686a566df5cbc01d0550684eaf8ff014a"}, - {file = "fastavro-1.9.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3bb35c25bbc3904e1c02333bc1ae0173e0a44aa37a8e95d07e681601246e1f1"}, - {file = "fastavro-1.9.5-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:b47a54a9700de3eabefd36dabfb237808acae47bc873cada6be6990ef6b165aa"}, - {file = "fastavro-1.9.5-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:48c7b5e6d2f3bf7917af301c275b05c5be3dd40bb04e80979c9e7a2ab31a00d1"}, - {file = "fastavro-1.9.5-cp38-cp38-win_amd64.whl", hash = "sha256:05d13f98d4e325be40387e27da9bd60239968862fe12769258225c62ec906f04"}, - {file = "fastavro-1.9.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5b47948eb196263f6111bf34e1cd08d55529d4ed46eb50c1bc8c7c30a8d18868"}, - {file = "fastavro-1.9.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85b7a66ad521298ad9373dfe1897a6ccfc38feab54a47b97922e213ae5ad8870"}, - {file = "fastavro-1.9.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44cb154f863ad80e41aea72a709b12e1533b8728c89b9b1348af91a6154ab2f5"}, - {file = "fastavro-1.9.5-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b5f7f2b1fe21231fd01f1a2a90e714ae267fe633cd7ce930c0aea33d1c9f4901"}, - {file = "fastavro-1.9.5-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:88fbbe16c61d90a89d78baeb5a34dc1c63a27b115adccdbd6b1fb6f787deacf2"}, - {file = "fastavro-1.9.5-cp39-cp39-win_amd64.whl", hash = "sha256:753f5eedeb5ca86004e23a9ce9b41c5f25eb64a876f95edcc33558090a7f3e4b"}, - {file = "fastavro-1.9.5.tar.gz", hash = "sha256:6419ebf45f88132a9945c51fe555d4f10bb97c236288ed01894f957c6f914553"}, -] - -[package.extras] -codecs = ["cramjam", "lz4", "zstandard"] -lz4 = ["lz4"] -snappy = ["cramjam"] -zstandard = ["zstandard"] - -[[package]] -name = "filelock" -version = "3.15.4" -description = "A platform independent file lock." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "filelock-3.15.4-py3-none-any.whl", hash = "sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7"}, - {file = "filelock-3.15.4.tar.gz", hash = "sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb"}, -] - -[package.extras] -docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)", "virtualenv (>=20.26.2)"] -typing = ["typing-extensions (>=4.8)"] - -[[package]] -name = "firecrawl-py" -version = "0.0.16" -description = "Python SDK for Firecrawl API" -optional = false -python-versions = ">=3.8" -files = [ - {file = "firecrawl_py-0.0.16-py3-none-any.whl", hash = "sha256:9024f483b501852a6b9c4e6cdfc9e8dde452d922afac357080bb278a0c9c2a26"}, - {file = "firecrawl_py-0.0.16.tar.gz", hash = "sha256:6c662fa0a549bc7f5c0acb704baba6731869ca0451094034264dfc1b4eb086e4"}, -] - -[package.dependencies] -requests = "*" - -[[package]] -name = "flatbuffers" -version = "24.3.25" -description = "The FlatBuffers serialization format for Python" -optional = false -python-versions = "*" -files = [ - {file = "flatbuffers-24.3.25-py2.py3-none-any.whl", hash = "sha256:8dbdec58f935f3765e4f7f3cf635ac3a77f83568138d6a2311f524ec96364812"}, - {file = "flatbuffers-24.3.25.tar.gz", hash = "sha256:de2ec5b203f21441716617f38443e0a8ebf3d25bf0d9c0bb0ce68fa00ad546a4"}, -] - -[[package]] -name = "frozenlist" -version = "1.4.1" -description = "A list-like structure which implements collections.abc.MutableSequence" -optional = false -python-versions = ">=3.8" -files = [ - {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"}, - {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868"}, - {file = "frozenlist-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74fb4bee6880b529a0c6560885fce4dc95936920f9f20f53d99a213f7bf66776"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:590344787a90ae57d62511dd7c736ed56b428f04cd8c161fcc5e7232c130c69a"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:068b63f23b17df8569b7fdca5517edef76171cf3897eb68beb01341131fbd2ad"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c849d495bf5154cd8da18a9eb15db127d4dba2968d88831aff6f0331ea9bd4c"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9750cc7fe1ae3b1611bb8cfc3f9ec11d532244235d75901fb6b8e42ce9229dfe"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9b2de4cf0cdd5bd2dee4c4f63a653c61d2408055ab77b151c1957f221cabf2a"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0633c8d5337cb5c77acbccc6357ac49a1770b8c487e5b3505c57b949b4b82e98"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:27657df69e8801be6c3638054e202a135c7f299267f1a55ed3a598934f6c0d75"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = 
"sha256:f9a3ea26252bd92f570600098783d1371354d89d5f6b7dfd87359d669f2109b5"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4f57dab5fe3407b6c0c1cc907ac98e8a189f9e418f3b6e54d65a718aaafe3950"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e02a0e11cf6597299b9f3bbd3f93d79217cb90cfd1411aec33848b13f5c656cc"}, - {file = "frozenlist-1.4.1-cp310-cp310-win32.whl", hash = "sha256:a828c57f00f729620a442881cc60e57cfcec6842ba38e1b19fd3e47ac0ff8dc1"}, - {file = "frozenlist-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:f56e2333dda1fe0f909e7cc59f021eba0d2307bc6f012a1ccf2beca6ba362439"}, - {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0"}, - {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49"}, - {file = "frozenlist-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2"}, - {file = "frozenlist-1.4.1-cp311-cp311-win32.whl", hash = "sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17"}, - {file = "frozenlist-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825"}, - {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1979bc0aeb89b33b588c51c54ab0161791149f2461ea7c7c946d95d5f93b56ae"}, - {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cc7b01b3754ea68a62bd77ce6020afaffb44a590c2289089289363472d13aedb"}, - {file = "frozenlist-1.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9c92be9fd329ac801cc420e08452b70e7aeab94ea4233a4804f0915c14eba9b"}, - {file = 
"frozenlist-1.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3894db91f5a489fc8fa6a9991820f368f0b3cbdb9cd8849547ccfab3392d86"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba60bb19387e13597fb059f32cd4d59445d7b18b69a745b8f8e5db0346f33480"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aefbba5f69d42246543407ed2461db31006b0f76c4e32dfd6f42215a2c41d09"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780d3a35680ced9ce682fbcf4cb9c2bad3136eeff760ab33707b71db84664e3a"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9acbb16f06fe7f52f441bb6f413ebae6c37baa6ef9edd49cdd567216da8600cd"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:23b701e65c7b36e4bf15546a89279bd4d8675faabc287d06bbcfac7d3c33e1e6"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3e0153a805a98f5ada7e09826255ba99fb4f7524bb81bf6b47fb702666484ae1"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:dd9b1baec094d91bf36ec729445f7769d0d0cf6b64d04d86e45baf89e2b9059b"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:1a4471094e146b6790f61b98616ab8e44f72661879cc63fa1049d13ef711e71e"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5667ed53d68d91920defdf4035d1cdaa3c3121dc0b113255124bcfada1cfa1b8"}, - {file = "frozenlist-1.4.1-cp312-cp312-win32.whl", hash = "sha256:beee944ae828747fd7cb216a70f120767fc9f4f00bacae8543c14a6831673f89"}, - {file = "frozenlist-1.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:64536573d0a2cb6e625cf309984e2d873979709f2cf22839bf2d61790b448ad5"}, - {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:20b51fa3f588ff2fe658663db52a41a4f7aa6c04f6201449c6c7c476bd255c0d"}, - {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:410478a0c562d1a5bcc2f7ea448359fcb050ed48b3c6f6f4f18c313a9bdb1826"}, - {file = "frozenlist-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6321c9efe29975232da3bd0af0ad216800a47e93d763ce64f291917a381b8eb"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48f6a4533887e189dae092f1cf981f2e3885175f7a0f33c91fb5b7b682b6bab6"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6eb73fa5426ea69ee0e012fb59cdc76a15b1283d6e32e4f8dc4482ec67d1194d"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbeb989b5cc29e8daf7f976b421c220f1b8c731cbf22b9130d8815418ea45887"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32453c1de775c889eb4e22f1197fe3bdfe457d16476ea407472b9442e6295f7a"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693945278a31f2086d9bf3df0fe8254bbeaef1fe71e1351c3bd730aa7d31c41b"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1d0ce09d36d53bbbe566fe296965b23b961764c0bcf3ce2fa45f463745c04701"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = 
"sha256:3a670dc61eb0d0eb7080890c13de3066790f9049b47b0de04007090807c776b0"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:dca69045298ce5c11fd539682cff879cc1e664c245d1c64da929813e54241d11"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a06339f38e9ed3a64e4c4e43aec7f59084033647f908e4259d279a52d3757d09"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b7f2f9f912dca3934c1baec2e4585a674ef16fe00218d833856408c48d5beee7"}, - {file = "frozenlist-1.4.1-cp38-cp38-win32.whl", hash = "sha256:e7004be74cbb7d9f34553a5ce5fb08be14fb33bc86f332fb71cbe5216362a497"}, - {file = "frozenlist-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:5a7d70357e7cee13f470c7883a063aae5fe209a493c57d86eb7f5a6f910fae09"}, - {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfa4a17e17ce9abf47a74ae02f32d014c5e9404b6d9ac7f729e01562bbee601e"}, - {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b7e3ed87d4138356775346e6845cccbe66cd9e207f3cd11d2f0b9fd13681359d"}, - {file = "frozenlist-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c99169d4ff810155ca50b4da3b075cbde79752443117d89429595c2e8e37fed8"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edb678da49d9f72c9f6c609fbe41a5dfb9a9282f9e6a2253d5a91e0fc382d7c0"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6db4667b187a6742b33afbbaf05a7bc551ffcf1ced0000a571aedbb4aa42fc7b"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55fdc093b5a3cb41d420884cdaf37a1e74c3c37a31f46e66286d9145d2063bd0"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82e8211d69a4f4bc360ea22cd6555f8e61a1bd211d1d5d39d3d228b48c83a897"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89aa2c2eeb20957be2d950b85974b30a01a762f3308cd02bb15e1ad632e22dc7"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d3e0c25a2350080e9319724dede4f31f43a6c9779be48021a7f4ebde8b2d742"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7268252af60904bf52c26173cbadc3a071cece75f873705419c8681f24d3edea"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:0c250a29735d4f15321007fb02865f0e6b6a41a6b88f1f523ca1596ab5f50bd5"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:96ec70beabbd3b10e8bfe52616a13561e58fe84c0101dd031dc78f250d5128b9"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:23b2d7679b73fe0e5a4560b672a39f98dfc6f60df63823b0a9970525325b95f6"}, - {file = "frozenlist-1.4.1-cp39-cp39-win32.whl", hash = "sha256:a7496bfe1da7fb1a4e1cc23bb67c58fab69311cc7d32b5a99c2007b4b2a0e932"}, - {file = "frozenlist-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e6a20a581f9ce92d389a8c7d7c3dd47c81fd5d6e655c8dddf341e14aa48659d0"}, - {file = "frozenlist-1.4.1-py3-none-any.whl", hash = "sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7"}, - {file = "frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"}, -] - -[[package]] -name = "fsspec" -version = "2024.6.1" -description = "File-system specification" -optional = false -python-versions = 
">=3.8" -files = [ - {file = "fsspec-2024.6.1-py3-none-any.whl", hash = "sha256:3cb443f8bcd2efb31295a5b9fdb02aee81d8452c80d28f97a6d0959e6cee101e"}, - {file = "fsspec-2024.6.1.tar.gz", hash = "sha256:fad7d7e209dd4c1208e3bbfda706620e0da5142bebbd9c384afb95b07e798e49"}, -] - -[package.extras] -abfs = ["adlfs"] -adl = ["adlfs"] -arrow = ["pyarrow (>=1)"] -dask = ["dask", "distributed"] -dev = ["pre-commit", "ruff"] -doc = ["numpydoc", "sphinx", "sphinx-design", "sphinx-rtd-theme", "yarl"] -dropbox = ["dropbox", "dropboxdrivefs", "requests"] -full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "dask", "distributed", "dropbox", "dropboxdrivefs", "fusepy", "gcsfs", "libarchive-c", "ocifs", "panel", "paramiko", "pyarrow (>=1)", "pygit2", "requests", "s3fs", "smbprotocol", "tqdm"] -fuse = ["fusepy"] -gcs = ["gcsfs"] -git = ["pygit2"] -github = ["requests"] -gs = ["gcsfs"] -gui = ["panel"] -hdfs = ["pyarrow (>=1)"] -http = ["aiohttp (!=4.0.0a0,!=4.0.0a1)"] -libarchive = ["libarchive-c"] -oci = ["ocifs"] -s3 = ["s3fs"] -sftp = ["paramiko"] -smb = ["smbprotocol"] -ssh = ["paramiko"] -test = ["aiohttp (!=4.0.0a0,!=4.0.0a1)", "numpy", "pytest", "pytest-asyncio (!=0.22.0)", "pytest-benchmark", "pytest-cov", "pytest-mock", "pytest-recording", "pytest-rerunfailures", "requests"] -test-downstream = ["aiobotocore (>=2.5.4,<3.0.0)", "dask-expr", "dask[dataframe,test]", "moto[server] (>4,<5)", "pytest-timeout", "xarray"] -test-full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "cloudpickle", "dask", "distributed", "dropbox", "dropboxdrivefs", "fastparquet", "fusepy", "gcsfs", "jinja2", "kerchunk", "libarchive-c", "lz4", "notebook", "numpy", "ocifs", "pandas", "panel", "paramiko", "pyarrow", "pyarrow (>=1)", "pyftpdlib", "pygit2", "pytest", "pytest-asyncio (!=0.22.0)", "pytest-benchmark", "pytest-cov", "pytest-mock", "pytest-recording", "pytest-rerunfailures", "python-snappy", "requests", "smbprotocol", "tqdm", "urllib3", "zarr", "zstandard"] -tqdm = ["tqdm"] - -[[package]] -name = "google-api-core" -version = "2.19.1" -description = "Google API client core library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google-api-core-2.19.1.tar.gz", hash = "sha256:f4695f1e3650b316a795108a76a1c416e6afb036199d1c1f1f110916df479ffd"}, - {file = "google_api_core-2.19.1-py3-none-any.whl", hash = "sha256:f12a9b8309b5e21d92483bbd47ce2c445861ec7d269ef6784ecc0ea8c1fa6125"}, -] - -[package.dependencies] -google-auth = ">=2.14.1,<3.0.dev0" -googleapis-common-protos = ">=1.56.2,<2.0.dev0" -grpcio = [ - {version = ">=1.33.2,<2.0dev", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, - {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, -] -grpcio-status = [ - {version = ">=1.33.2,<2.0.dev0", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, - {version = ">=1.49.1,<2.0.dev0", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, -] -proto-plus = ">=1.22.3,<2.0.0dev" -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0" -requests = ">=2.18.0,<3.0.0.dev0" - -[package.extras] -grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "grpcio-status (>=1.33.2,<2.0.dev0)", "grpcio-status (>=1.49.1,<2.0.dev0)"] -grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] -grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] - -[[package]] -name = "google-auth" 
-version = "2.33.0" -description = "Google Authentication Library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google_auth-2.33.0-py2.py3-none-any.whl", hash = "sha256:8eff47d0d4a34ab6265c50a106a3362de6a9975bb08998700e389f857e4d39df"}, - {file = "google_auth-2.33.0.tar.gz", hash = "sha256:d6a52342160d7290e334b4d47ba390767e4438ad0d45b7630774533e82655b95"}, -] - -[package.dependencies] -cachetools = ">=2.0.0,<6.0" -pyasn1-modules = ">=0.2.1" -rsa = ">=3.1.4,<5" - -[package.extras] -aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"] -enterprise-cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"] -pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] -reauth = ["pyu2f (>=0.1.5)"] -requests = ["requests (>=2.20.0,<3.0.0.dev0)"] - -[[package]] -name = "google-cloud-aiplatform" -version = "1.62.0" -description = "Vertex AI API client library" -optional = false -python-versions = ">=3.8" -files = [ - {file = "google-cloud-aiplatform-1.62.0.tar.gz", hash = "sha256:e15d5b2a99e30d4a16f4c51cfb8129962e6da41a9027d2ea696abe0e2f006fe8"}, - {file = "google_cloud_aiplatform-1.62.0-py2.py3-none-any.whl", hash = "sha256:d7738e0fd4494a54ae08a51755a2143d58937cba2db826189771f45566c9ee3c"}, -] - -[package.dependencies] -docstring-parser = "<1" -google-api-core = {version = ">=1.34.1,<2.0.dev0 || >=2.8.dev0,<3.0.0dev", extras = ["grpc"]} -google-auth = ">=2.14.1,<3.0.0dev" -google-cloud-bigquery = ">=1.15.0,<3.20.0 || >3.20.0,<4.0.0dev" -google-cloud-resource-manager = ">=1.3.3,<3.0.0dev" -google-cloud-storage = ">=1.32.0,<3.0.0dev" -packaging = ">=14.3" -proto-plus = ">=1.22.3,<2.0.0dev" -protobuf = ">=3.20.2,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0dev" -pydantic = "<3" -shapely = "<3.0.0dev" - -[package.extras] -autologging = ["mlflow (>=1.27.0,<=2.1.1)"] -cloud-profiler = ["tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "werkzeug (>=2.0.0,<2.1.0dev)"] -datasets = ["pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)"] -endpoint = ["requests (>=2.28.1)"] -full = ["cloudpickle (<3.0)", "docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<=0.109.1)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-cloud-logging (<4.0)", "google-vizier (>=0.1.6)", "httpx (>=0.23.0,<0.25.0)", "immutabledict", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.1.1)", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pyyaml (>=5.3.1,<7)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "requests (>=2.28.1)", "setuptools (<70.0.0)", "starlette (>=0.17.1)", "tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "tqdm (>=4.23.0)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)", "werkzeug (>=2.0.0,<2.1.0dev)"] -langchain = ["langchain (>=0.1.16,<0.3)", "langchain-core (<0.3)", "langchain-google-vertexai (<2)", "openinference-instrumentation-langchain (>=0.1.19,<0.2)", "orjson (<=3.10.6)", "tenacity (<=8.3)"] -langchain-testing = ["absl-py", "cloudpickle (>=3.0,<4.0)", "google-cloud-trace (<2)", "langchain (>=0.1.16,<0.3)", "langchain-core (<0.3)", "langchain-google-vertexai (<2)", "openinference-instrumentation-langchain 
(>=0.1.19,<0.2)", "opentelemetry-exporter-gcp-trace (<2)", "opentelemetry-sdk (<2)", "orjson (<=3.10.6)", "pydantic (>=2.6.3,<3)", "pytest-xdist", "tenacity (<=8.3)"] -lit = ["explainable-ai-sdk (>=1.0.0)", "lit-nlp (==0.4.0)", "pandas (>=1.0.0)", "tensorflow (>=2.3.0,<3.0.0dev)"] -metadata = ["numpy (>=1.15.0)", "pandas (>=1.0.0)"] -pipelines = ["pyyaml (>=5.3.1,<7)"] -prediction = ["docker (>=5.0.3)", "fastapi (>=0.71.0,<=0.109.1)", "httpx (>=0.23.0,<0.25.0)", "starlette (>=0.17.1)", "uvicorn[standard] (>=0.16.0)"] -preview = ["cloudpickle (<3.0)", "google-cloud-logging (<4.0)"] -private-endpoints = ["requests (>=2.28.1)", "urllib3 (>=1.21.1,<1.27)"] -rapid-evaluation = ["pandas (>=1.0.0,<2.2.0)", "tqdm (>=4.23.0)"] -ray = ["google-cloud-bigquery", "google-cloud-bigquery-storage", "immutabledict", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "setuptools (<70.0.0)"] -ray-testing = ["google-cloud-bigquery", "google-cloud-bigquery-storage", "immutabledict", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pytest-xdist", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "ray[train] (==2.9.3)", "scikit-learn", "setuptools (<70.0.0)", "tensorflow", "torch (>=2.0.0,<2.1.0)", "xgboost", "xgboost-ray"] -reasoningengine = ["cloudpickle (>=3.0,<4.0)", "google-cloud-trace (<2)", "opentelemetry-exporter-gcp-trace (<2)", "opentelemetry-sdk (<2)", "pydantic (>=2.6.3,<3)"] -tensorboard = ["tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "werkzeug (>=2.0.0,<2.1.0dev)"] -testing = ["bigframes", "cloudpickle (<3.0)", "docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<=0.109.1)", "google-api-core (>=2.11,<3.0.0)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-cloud-logging (<4.0)", "google-vizier (>=0.1.6)", "grpcio-testing", "httpx (>=0.23.0,<0.25.0)", "immutabledict", "ipython", "kfp (>=2.6.0,<3.0.0)", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.1.1)", "nltk", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pyfakefs", "pytest-asyncio", "pytest-xdist", "pyyaml (>=5.3.1,<7)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "requests (>=2.28.1)", "requests-toolbelt (<1.0.0)", "scikit-learn", "sentencepiece (>=0.2.0)", "setuptools (<70.0.0)", "starlette (>=0.17.1)", "tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (==2.13.0)", "tensorflow (==2.16.1)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "torch (>=2.0.0,<2.1.0)", "torch (>=2.2.0)", "tqdm (>=4.23.0)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)", "werkzeug (>=2.0.0,<2.1.0dev)", "xgboost"] -tokenization = ["sentencepiece (>=0.2.0)"] -vizier = ["google-vizier (>=0.1.6)"] -xai = ["tensorflow (>=2.3.0,<3.0.0dev)"] - -[[package]] -name = "google-cloud-bigquery" -version = "3.25.0" -description = "Google BigQuery API client library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google-cloud-bigquery-3.25.0.tar.gz", hash = "sha256:5b2aff3205a854481117436836ae1403f11f2594e6810a98886afd57eda28509"}, - {file = "google_cloud_bigquery-3.25.0-py2.py3-none-any.whl", hash = 
"sha256:7f0c371bc74d2a7fb74dacbc00ac0f90c8c2bec2289b51dd6685a275873b1ce9"}, -] - -[package.dependencies] -google-api-core = {version = ">=1.34.1,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} -google-auth = ">=2.14.1,<3.0.0dev" -google-cloud-core = ">=1.6.0,<3.0.0dev" -google-resumable-media = ">=0.6.0,<3.0dev" -packaging = ">=20.0.0" -python-dateutil = ">=2.7.2,<3.0dev" -requests = ">=2.21.0,<3.0.0dev" - -[package.extras] -all = ["Shapely (>=1.8.4,<3.0.0dev)", "db-dtypes (>=0.3.0,<2.0.0dev)", "geopandas (>=0.9.0,<1.0dev)", "google-cloud-bigquery-storage (>=2.6.0,<3.0.0dev)", "grpcio (>=1.47.0,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "importlib-metadata (>=1.0.0)", "ipykernel (>=6.0.0)", "ipython (>=7.23.1,!=8.1.0)", "ipywidgets (>=7.7.0)", "opentelemetry-api (>=1.1.0)", "opentelemetry-instrumentation (>=0.20b0)", "opentelemetry-sdk (>=1.1.0)", "pandas (>=1.1.0)", "proto-plus (>=1.15.0,<2.0.0dev)", "protobuf (>=3.19.5,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<5.0.0dev)", "pyarrow (>=3.0.0)", "tqdm (>=4.7.4,<5.0.0dev)"] -bigquery-v2 = ["proto-plus (>=1.15.0,<2.0.0dev)", "protobuf (>=3.19.5,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<5.0.0dev)"] -bqstorage = ["google-cloud-bigquery-storage (>=2.6.0,<3.0.0dev)", "grpcio (>=1.47.0,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "pyarrow (>=3.0.0)"] -geopandas = ["Shapely (>=1.8.4,<3.0.0dev)", "geopandas (>=0.9.0,<1.0dev)"] -ipython = ["ipykernel (>=6.0.0)", "ipython (>=7.23.1,!=8.1.0)"] -ipywidgets = ["ipykernel (>=6.0.0)", "ipywidgets (>=7.7.0)"] -opentelemetry = ["opentelemetry-api (>=1.1.0)", "opentelemetry-instrumentation (>=0.20b0)", "opentelemetry-sdk (>=1.1.0)"] -pandas = ["db-dtypes (>=0.3.0,<2.0.0dev)", "importlib-metadata (>=1.0.0)", "pandas (>=1.1.0)", "pyarrow (>=3.0.0)"] -tqdm = ["tqdm (>=4.7.4,<5.0.0dev)"] - -[[package]] -name = "google-cloud-core" -version = "2.4.1" -description = "Google Cloud API client core library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google-cloud-core-2.4.1.tar.gz", hash = "sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073"}, - {file = "google_cloud_core-2.4.1-py2.py3-none-any.whl", hash = "sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61"}, -] - -[package.dependencies] -google-api-core = ">=1.31.6,<2.0.dev0 || >2.3.0,<3.0.0dev" -google-auth = ">=1.25.0,<3.0dev" - -[package.extras] -grpc = ["grpcio (>=1.38.0,<2.0dev)", "grpcio-status (>=1.38.0,<2.0.dev0)"] - -[[package]] -name = "google-cloud-resource-manager" -version = "1.12.5" -description = "Google Cloud Resource Manager API client library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google_cloud_resource_manager-1.12.5-py2.py3-none-any.whl", hash = "sha256:2708a718b45c79464b7b21559c701b5c92e6b0b1ab2146d0a256277a623dc175"}, - {file = "google_cloud_resource_manager-1.12.5.tar.gz", hash = "sha256:b7af4254401ed4efa3aba3a929cb3ddb803fa6baf91a78485e45583597de5891"}, -] - -[package.dependencies] -google-api-core = {version = ">=1.34.1,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} -google-auth = ">=2.14.1,<2.24.0 || >2.24.0,<2.25.0 || >2.25.0,<3.0.0dev" -grpc-google-iam-v1 = ">=0.12.4,<1.0.0dev" -proto-plus = ">=1.22.3,<2.0.0dev" -protobuf = ">=3.20.2,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0dev" - -[[package]] -name = "google-cloud-storage" -version = "2.18.2" -description = "Google Cloud Storage API client library" 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "google_cloud_storage-2.18.2-py2.py3-none-any.whl", hash = "sha256:97a4d45c368b7d401ed48c4fdfe86e1e1cb96401c9e199e419d289e2c0370166"}, - {file = "google_cloud_storage-2.18.2.tar.gz", hash = "sha256:aaf7acd70cdad9f274d29332673fcab98708d0e1f4dceb5a5356aaef06af4d99"}, -] - -[package.dependencies] -google-api-core = ">=2.15.0,<3.0.0dev" -google-auth = ">=2.26.1,<3.0dev" -google-cloud-core = ">=2.3.0,<3.0dev" -google-crc32c = ">=1.0,<2.0dev" -google-resumable-media = ">=2.7.2" -requests = ">=2.18.0,<3.0.0dev" - -[package.extras] -protobuf = ["protobuf (<6.0.0dev)"] -tracing = ["opentelemetry-api (>=1.1.0)"] - -[[package]] -name = "google-crc32c" -version = "1.5.0" -description = "A python wrapper of the C library 'Google CRC32C'" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google-crc32c-1.5.0.tar.gz", hash = "sha256:89284716bc6a5a415d4eaa11b1726d2d60a0cd12aadf5439828353662ede9dd7"}, - {file = "google_crc32c-1.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:596d1f98fc70232fcb6590c439f43b350cb762fb5d61ce7b0e9db4539654cc13"}, - {file = "google_crc32c-1.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:be82c3c8cfb15b30f36768797a640e800513793d6ae1724aaaafe5bf86f8f346"}, - {file = "google_crc32c-1.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:461665ff58895f508e2866824a47bdee72497b091c730071f2b7575d5762ab65"}, - {file = "google_crc32c-1.5.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2096eddb4e7c7bdae4bd69ad364e55e07b8316653234a56552d9c988bd2d61b"}, - {file = "google_crc32c-1.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:116a7c3c616dd14a3de8c64a965828b197e5f2d121fedd2f8c5585c547e87b02"}, - {file = "google_crc32c-1.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5829b792bf5822fd0a6f6eb34c5f81dd074f01d570ed7f36aa101d6fc7a0a6e4"}, - {file = "google_crc32c-1.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:64e52e2b3970bd891309c113b54cf0e4384762c934d5ae56e283f9a0afcd953e"}, - {file = "google_crc32c-1.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:02ebb8bf46c13e36998aeaad1de9b48f4caf545e91d14041270d9dca767b780c"}, - {file = "google_crc32c-1.5.0-cp310-cp310-win32.whl", hash = "sha256:2e920d506ec85eb4ba50cd4228c2bec05642894d4c73c59b3a2fe20346bd00ee"}, - {file = "google_crc32c-1.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:07eb3c611ce363c51a933bf6bd7f8e3878a51d124acfc89452a75120bc436289"}, - {file = "google_crc32c-1.5.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:cae0274952c079886567f3f4f685bcaf5708f0a23a5f5216fdab71f81a6c0273"}, - {file = "google_crc32c-1.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1034d91442ead5a95b5aaef90dbfaca8633b0247d1e41621d1e9f9db88c36298"}, - {file = "google_crc32c-1.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c42c70cd1d362284289c6273adda4c6af8039a8ae12dc451dcd61cdabb8ab57"}, - {file = "google_crc32c-1.5.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8485b340a6a9e76c62a7dce3c98e5f102c9219f4cfbf896a00cf48caf078d438"}, - {file = "google_crc32c-1.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77e2fd3057c9d78e225fa0a2160f96b64a824de17840351b26825b0848022906"}, - {file = "google_crc32c-1.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183"}, - {file = 
"google_crc32c-1.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:a1fd716e7a01f8e717490fbe2e431d2905ab8aa598b9b12f8d10abebb36b04dd"}, - {file = "google_crc32c-1.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:72218785ce41b9cfd2fc1d6a017dc1ff7acfc4c17d01053265c41a2c0cc39b8c"}, - {file = "google_crc32c-1.5.0-cp311-cp311-win32.whl", hash = "sha256:66741ef4ee08ea0b2cc3c86916ab66b6aef03768525627fd6a1b34968b4e3709"}, - {file = "google_crc32c-1.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:ba1eb1843304b1e5537e1fca632fa894d6f6deca8d6389636ee5b4797affb968"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:98cb4d057f285bd80d8778ebc4fde6b4d509ac3f331758fb1528b733215443ae"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19e0a019d2c4dcc5e598cd4a4bc7b008546b0358bd322537c74ad47a5386884f"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02c65b9817512edc6a4ae7c7e987fea799d2e0ee40c53ec573a692bee24de876"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6ac08d24c1f16bd2bf5eca8eaf8304812f44af5cfe5062006ec676e7e1d50afc"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3359fc442a743e870f4588fcf5dcbc1bf929df1fad8fb9905cd94e5edb02e84c"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e986b206dae4476f41bcec1faa057851f3889503a70e1bdb2378d406223994a"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:de06adc872bcd8c2a4e0dc51250e9e65ef2ca91be023b9d13ebd67c2ba552e1e"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-win32.whl", hash = "sha256:d3515f198eaa2f0ed49f8819d5732d70698c3fa37384146079b3799b97667a94"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:67b741654b851abafb7bc625b6d1cdd520a379074e64b6a128e3b688c3c04740"}, - {file = "google_crc32c-1.5.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c02ec1c5856179f171e032a31d6f8bf84e5a75c45c33b2e20a3de353b266ebd8"}, - {file = "google_crc32c-1.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:edfedb64740750e1a3b16152620220f51d58ff1b4abceb339ca92e934775c27a"}, - {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84e6e8cd997930fc66d5bb4fde61e2b62ba19d62b7abd7a69920406f9ecca946"}, - {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a"}, - {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:998679bf62b7fb599d2878aa3ed06b9ce688b8974893e7223c60db155f26bd8d"}, - {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:83c681c526a3439b5cf94f7420471705bbf96262f49a6fe546a6db5f687a3d4a"}, - {file = "google_crc32c-1.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4c6fdd4fccbec90cc8a01fc00773fcd5fa28db683c116ee3cb35cd5da9ef6c37"}, - {file = "google_crc32c-1.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5ae44e10a8e3407dbe138984f21e536583f2bba1be9491239f942c2464ac0894"}, - {file = "google_crc32c-1.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:37933ec6e693e51a5b07505bd05de57eee12f3e8c32b07da7e73669398e6630a"}, - {file = "google_crc32c-1.5.0-cp38-cp38-win32.whl", hash = "sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4"}, - {file = "google_crc32c-1.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:74dea7751d98034887dbd821b7aae3e1d36eda111d6ca36c206c44478035709c"}, - {file = "google_crc32c-1.5.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c6c777a480337ac14f38564ac88ae82d4cd238bf293f0a22295b66eb89ffced7"}, - {file = "google_crc32c-1.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:759ce4851a4bb15ecabae28f4d2e18983c244eddd767f560165563bf9aefbc8d"}, - {file = "google_crc32c-1.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f13cae8cc389a440def0c8c52057f37359014ccbc9dc1f0827936bcd367c6100"}, - {file = "google_crc32c-1.5.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e560628513ed34759456a416bf86b54b2476c59144a9138165c9a1575801d0d9"}, - {file = "google_crc32c-1.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1674e4307fa3024fc897ca774e9c7562c957af85df55efe2988ed9056dc4e57"}, - {file = "google_crc32c-1.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:278d2ed7c16cfc075c91378c4f47924c0625f5fc84b2d50d921b18b7975bd210"}, - {file = "google_crc32c-1.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d5280312b9af0976231f9e317c20e4a61cd2f9629b7bfea6a693d1878a264ebd"}, - {file = "google_crc32c-1.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8b87e1a59c38f275c0e3676fc2ab6d59eccecfd460be267ac360cc31f7bcde96"}, - {file = "google_crc32c-1.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7c074fece789b5034b9b1404a1f8208fc2d4c6ce9decdd16e8220c5a793e6f61"}, - {file = "google_crc32c-1.5.0-cp39-cp39-win32.whl", hash = "sha256:7f57f14606cd1dd0f0de396e1e53824c371e9544a822648cd76c034d209b559c"}, - {file = "google_crc32c-1.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:a2355cba1f4ad8b6988a4ca3feed5bff33f6af2d7f134852cf279c2aebfde541"}, - {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f314013e7dcd5cf45ab1945d92e713eec788166262ae8deb2cfacd53def27325"}, - {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b747a674c20a67343cb61d43fdd9207ce5da6a99f629c6e2541aa0e89215bcd"}, - {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8f24ed114432de109aa9fd317278518a5af2d31ac2ea6b952b2f7782b43da091"}, - {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8667b48e7a7ef66afba2c81e1094ef526388d35b873966d8a9a447974ed9178"}, - {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:1c7abdac90433b09bad6c43a43af253e688c9cfc1c86d332aed13f9a7c7f65e2"}, - {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6f998db4e71b645350b9ac28a2167e6632c239963ca9da411523bb439c5c514d"}, - {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c99616c853bb585301df6de07ca2cadad344fd1ada6d62bb30aec05219c45d2"}, - {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ad40e31093a4af319dadf503b2467ccdc8f67c72e4bcba97f8c10cb078207b5"}, - {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:cd67cf24a553339d5062eff51013780a00d6f97a39ca062781d06b3a73b15462"}, - {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:398af5e3ba9cf768787eef45c803ff9614cc3e22a5b2f7d7ae116df8b11e3314"}, - {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b1f8133c9a275df5613a451e73f36c2aea4fe13c5c8997e22cf355ebd7bd0728"}, - {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ba053c5f50430a3fcfd36f75aff9caeba0440b2d076afdb79a318d6ca245f88"}, - {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:272d3892a1e1a2dbc39cc5cde96834c236d5327e2122d3aaa19f6614531bb6eb"}, - {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:635f5d4dd18758a1fbd1049a8e8d2fee4ffed124462d837d1a02a0e009c3ab31"}, - {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c672d99a345849301784604bfeaeba4db0c7aae50b95be04dd651fd2a7310b93"}, -] - -[package.extras] -testing = ["pytest"] - -[[package]] -name = "google-resumable-media" -version = "2.7.2" -description = "Utilities for Google Media Downloads and Resumable Uploads" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google_resumable_media-2.7.2-py2.py3-none-any.whl", hash = "sha256:3ce7551e9fe6d99e9a126101d2536612bb73486721951e9562fee0f90c6ababa"}, - {file = "google_resumable_media-2.7.2.tar.gz", hash = "sha256:5280aed4629f2b60b847b0d42f9857fd4935c11af266744df33d8074cae92fe0"}, -] - -[package.dependencies] -google-crc32c = ">=1.0,<2.0dev" - -[package.extras] -aiohttp = ["aiohttp (>=3.6.2,<4.0.0dev)", "google-auth (>=1.22.0,<2.0dev)"] -requests = ["requests (>=2.18.0,<3.0.0dev)"] - -[[package]] -name = "googleapis-common-protos" -version = "1.63.2" -description = "Common protobufs used in Google APIs" -optional = false -python-versions = ">=3.7" -files = [ - {file = "googleapis-common-protos-1.63.2.tar.gz", hash = "sha256:27c5abdffc4911f28101e635de1533fb4cfd2c37fbaa9174587c799fac90aa87"}, - {file = "googleapis_common_protos-1.63.2-py2.py3-none-any.whl", hash = "sha256:27a2499c7e8aff199665b22741997e485eccc8645aa9176c7c988e6fae507945"}, -] - -[package.dependencies] -grpcio = {version = ">=1.44.0,<2.0.0.dev0", optional = true, markers = "extra == \"grpc\""} -protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0" - -[package.extras] -grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] - -[[package]] -name = "gprof2dot" -version = "2024.6.6" -description = "Generate a dot graph from the output of several profilers." -optional = false -python-versions = ">=3.8" -files = [ - {file = "gprof2dot-2024.6.6-py2.py3-none-any.whl", hash = "sha256:45b14ad7ce64e299c8f526881007b9eb2c6b75505d5613e96e66ee4d5ab33696"}, - {file = "gprof2dot-2024.6.6.tar.gz", hash = "sha256:fa1420c60025a9eb7734f65225b4da02a10fc6dd741b37fa129bc6b41951e5ab"}, -] - -[[package]] -name = "gptcache" -version = "0.1.44" -description = "GPTCache, a powerful caching library that can be used to speed up and lower the cost of chat applications that rely on the LLM service. GPTCache works as a memcache for AIGC applications, similar to how Redis works for traditional applications." 
-optional = false -python-versions = ">=3.8.1" -files = [ - {file = "gptcache-0.1.44-py3-none-any.whl", hash = "sha256:11ddd63b173dc3822b8c2eb7588ea947c825845ed0737b043038a238286bfec4"}, - {file = "gptcache-0.1.44.tar.gz", hash = "sha256:d3d5e6a75c57594dc58212c2d6c53a7999c23ede30e0be66d213d885c0ad0be9"}, -] - -[package.dependencies] -cachetools = "*" -numpy = "*" -requests = "*" - -[[package]] -name = "grandalf" -version = "0.8" -description = "Graph and drawing algorithms framework" -optional = false -python-versions = "*" -files = [ - {file = "grandalf-0.8-py3-none-any.whl", hash = "sha256:793ca254442f4a79252ea9ff1ab998e852c1e071b863593e5383afee906b4185"}, - {file = "grandalf-0.8.tar.gz", hash = "sha256:2813f7aab87f0d20f334a3162ccfbcbf085977134a17a5b516940a93a77ea974"}, -] - -[package.dependencies] -pyparsing = "*" - -[package.extras] -full = ["numpy", "ply"] - -[[package]] -name = "greenlet" -version = "3.0.3" -description = "Lightweight in-process concurrent programming" -optional = false -python-versions = ">=3.7" -files = [ - {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, - {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, - {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, - {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, - {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, - {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", 
hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, - {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, - {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, - {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, - {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, - {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, - {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, - {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, - {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, - {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, - {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, - {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, - {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, - {file = 
"greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, - {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, - {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, - {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, - {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, - {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, - {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, - {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, - {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, - {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, - {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, -] - -[package.extras] -docs = ["Sphinx", "furo"] -test = ["objgraph", "psutil"] - -[[package]] -name = "grpc-google-iam-v1" -version = "0.13.1" -description = "IAM API client library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "grpc-google-iam-v1-0.13.1.tar.gz", hash = "sha256:3ff4b2fd9d990965e410965253c0da6f66205d5a8291c4c31c6ebecca18a9001"}, - {file = "grpc_google_iam_v1-0.13.1-py2.py3-none-any.whl", hash = 
"sha256:c3e86151a981811f30d5e7330f271cee53e73bb87755e88cc3b6f0c7b5fe374e"}, -] - -[package.dependencies] -googleapis-common-protos = {version = ">=1.56.0,<2.0.0dev", extras = ["grpc"]} -grpcio = ">=1.44.0,<2.0.0dev" -protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0dev" - -[[package]] -name = "grpcio" -version = "1.65.4" -description = "HTTP/2-based RPC framework" -optional = false -python-versions = ">=3.8" -files = [ - {file = "grpcio-1.65.4-cp310-cp310-linux_armv7l.whl", hash = "sha256:0e85c8766cf7f004ab01aff6a0393935a30d84388fa3c58d77849fcf27f3e98c"}, - {file = "grpcio-1.65.4-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:e4a795c02405c7dfa8affd98c14d980f4acea16ea3b539e7404c645329460e5a"}, - {file = "grpcio-1.65.4-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:d7b984a8dd975d949c2042b9b5ebcf297d6d5af57dcd47f946849ee15d3c2fb8"}, - {file = "grpcio-1.65.4-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:644a783ce604a7d7c91412bd51cf9418b942cf71896344b6dc8d55713c71ce82"}, - {file = "grpcio-1.65.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5764237d751d3031a36fafd57eb7d36fd2c10c658d2b4057c516ccf114849a3e"}, - {file = "grpcio-1.65.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ee40d058cf20e1dd4cacec9c39e9bce13fedd38ce32f9ba00f639464fcb757de"}, - {file = "grpcio-1.65.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4482a44ce7cf577a1f8082e807a5b909236bce35b3e3897f839f2fbd9ae6982d"}, - {file = "grpcio-1.65.4-cp310-cp310-win32.whl", hash = "sha256:66bb051881c84aa82e4f22d8ebc9d1704b2e35d7867757f0740c6ef7b902f9b1"}, - {file = "grpcio-1.65.4-cp310-cp310-win_amd64.whl", hash = "sha256:870370524eff3144304da4d1bbe901d39bdd24f858ce849b7197e530c8c8f2ec"}, - {file = "grpcio-1.65.4-cp311-cp311-linux_armv7l.whl", hash = "sha256:85e9c69378af02e483bc626fc19a218451b24a402bdf44c7531e4c9253fb49ef"}, - {file = "grpcio-1.65.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2bd672e005afab8bf0d6aad5ad659e72a06dd713020554182a66d7c0c8f47e18"}, - {file = "grpcio-1.65.4-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:abccc5d73f5988e8f512eb29341ed9ced923b586bb72e785f265131c160231d8"}, - {file = "grpcio-1.65.4-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:886b45b29f3793b0c2576201947258782d7e54a218fe15d4a0468d9a6e00ce17"}, - {file = "grpcio-1.65.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be952436571dacc93ccc7796db06b7daf37b3b56bb97e3420e6503dccfe2f1b4"}, - {file = "grpcio-1.65.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8dc9ddc4603ec43f6238a5c95400c9a901b6d079feb824e890623da7194ff11e"}, - {file = "grpcio-1.65.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ade1256c98cba5a333ef54636095f2c09e6882c35f76acb04412f3b1aa3c29a5"}, - {file = "grpcio-1.65.4-cp311-cp311-win32.whl", hash = "sha256:280e93356fba6058cbbfc6f91a18e958062ef1bdaf5b1caf46c615ba1ae71b5b"}, - {file = "grpcio-1.65.4-cp311-cp311-win_amd64.whl", hash = "sha256:d2b819f9ee27ed4e3e737a4f3920e337e00bc53f9e254377dd26fc7027c4d558"}, - {file = "grpcio-1.65.4-cp312-cp312-linux_armv7l.whl", hash = "sha256:926a0750a5e6fb002542e80f7fa6cab8b1a2ce5513a1c24641da33e088ca4c56"}, - {file = "grpcio-1.65.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:2a1d4c84d9e657f72bfbab8bedf31bdfc6bfc4a1efb10b8f2d28241efabfaaf2"}, - {file = "grpcio-1.65.4-cp312-cp312-manylinux_2_17_aarch64.whl", hash = 
"sha256:17de4fda50967679677712eec0a5c13e8904b76ec90ac845d83386b65da0ae1e"}, - {file = "grpcio-1.65.4-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3dee50c1b69754a4228e933696408ea87f7e896e8d9797a3ed2aeed8dbd04b74"}, - {file = "grpcio-1.65.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:74c34fc7562bdd169b77966068434a93040bfca990e235f7a67cdf26e1bd5c63"}, - {file = "grpcio-1.65.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:24a2246e80a059b9eb981e4c2a6d8111b1b5e03a44421adbf2736cc1d4988a8a"}, - {file = "grpcio-1.65.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:18c10f0d054d2dce34dd15855fcca7cc44ec3b811139437543226776730c0f28"}, - {file = "grpcio-1.65.4-cp312-cp312-win32.whl", hash = "sha256:d72962788b6c22ddbcdb70b10c11fbb37d60ae598c51eb47ec019db66ccfdff0"}, - {file = "grpcio-1.65.4-cp312-cp312-win_amd64.whl", hash = "sha256:7656376821fed8c89e68206a522522317787a3d9ed66fb5110b1dff736a5e416"}, - {file = "grpcio-1.65.4-cp38-cp38-linux_armv7l.whl", hash = "sha256:4934077b33aa6fe0b451de8b71dabde96bf2d9b4cb2b3187be86e5adebcba021"}, - {file = "grpcio-1.65.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0cef8c919a3359847c357cb4314e50ed1f0cca070f828ee8f878d362fd744d52"}, - {file = "grpcio-1.65.4-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:a925446e6aa12ca37114840d8550f308e29026cdc423a73da3043fd1603a6385"}, - {file = "grpcio-1.65.4-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf53e6247f1e2af93657e62e240e4f12e11ee0b9cef4ddcb37eab03d501ca864"}, - {file = "grpcio-1.65.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdb34278e4ceb224c89704cd23db0d902e5e3c1c9687ec9d7c5bb4c150f86816"}, - {file = "grpcio-1.65.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e6cbdd107e56bde55c565da5fd16f08e1b4e9b0674851d7749e7f32d8645f524"}, - {file = "grpcio-1.65.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:626319a156b1f19513156a3b0dbfe977f5f93db63ca673a0703238ebd40670d7"}, - {file = "grpcio-1.65.4-cp38-cp38-win32.whl", hash = "sha256:3d1bbf7e1dd1096378bd83c83f554d3b93819b91161deaf63e03b7022a85224a"}, - {file = "grpcio-1.65.4-cp38-cp38-win_amd64.whl", hash = "sha256:a99e6dffefd3027b438116f33ed1261c8d360f0dd4f943cb44541a2782eba72f"}, - {file = "grpcio-1.65.4-cp39-cp39-linux_armv7l.whl", hash = "sha256:874acd010e60a2ec1e30d5e505b0651ab12eb968157cd244f852b27c6dbed733"}, - {file = "grpcio-1.65.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b07f36faf01fca5427d4aa23645e2d492157d56c91fab7e06fe5697d7e171ad4"}, - {file = "grpcio-1.65.4-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:b81711bf4ec08a3710b534e8054c7dcf90f2edc22bebe11c1775a23f145595fe"}, - {file = "grpcio-1.65.4-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88fcabc332a4aef8bcefadc34a02e9ab9407ab975d2c7d981a8e12c1aed92aa1"}, - {file = "grpcio-1.65.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9ba3e63108a8749994f02c7c0e156afb39ba5bdf755337de8e75eb685be244b"}, - {file = "grpcio-1.65.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8eb485801957a486bf5de15f2c792d9f9c897a86f2f18db8f3f6795a094b4bb2"}, - {file = "grpcio-1.65.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:075f3903bc1749ace93f2b0664f72964ee5f2da5c15d4b47e0ab68e4f442c257"}, - {file = "grpcio-1.65.4-cp39-cp39-win32.whl", hash = "sha256:0a0720299bdb2cc7306737295d56e41ce8827d5669d4a3cd870af832e3b17c4d"}, - {file = "grpcio-1.65.4-cp39-cp39-win_amd64.whl", hash = 
"sha256:a146bc40fa78769f22e1e9ff4f110ef36ad271b79707577bf2a31e3e931141b9"}, - {file = "grpcio-1.65.4.tar.gz", hash = "sha256:2a4f476209acffec056360d3e647ae0e14ae13dcf3dfb130c227ae1c594cbe39"}, -] - -[package.extras] -protobuf = ["grpcio-tools (>=1.65.4)"] - -[[package]] -name = "grpcio-status" -version = "1.62.3" -description = "Status proto mapping for gRPC" -optional = false -python-versions = ">=3.6" -files = [ - {file = "grpcio-status-1.62.3.tar.gz", hash = "sha256:289bdd7b2459794a12cf95dc0cb727bd4a1742c37bd823f760236c937e53a485"}, - {file = "grpcio_status-1.62.3-py3-none-any.whl", hash = "sha256:f9049b762ba8de6b1086789d8315846e094edac2c50beaf462338b301a8fd4b8"}, -] - -[package.dependencies] -googleapis-common-protos = ">=1.5.5" -grpcio = ">=1.62.3" -protobuf = ">=4.21.6" - -[[package]] -name = "grpcio-tools" -version = "1.62.3" -description = "Protobuf code generator for gRPC" -optional = false -python-versions = ">=3.7" -files = [ - {file = "grpcio-tools-1.62.3.tar.gz", hash = "sha256:7c7136015c3d62c3eef493efabaf9e3380e3e66d24ee8e94c01cb71377f57833"}, - {file = "grpcio_tools-1.62.3-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:2f968b049c2849540751ec2100ab05e8086c24bead769ca734fdab58698408c1"}, - {file = "grpcio_tools-1.62.3-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:0a8c0c4724ae9c2181b7dbc9b186df46e4f62cb18dc184e46d06c0ebeccf569e"}, - {file = "grpcio_tools-1.62.3-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5782883a27d3fae8c425b29a9d3dcf5f47d992848a1b76970da3b5a28d424b26"}, - {file = "grpcio_tools-1.62.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3d812daffd0c2d2794756bd45a353f89e55dc8f91eb2fc840c51b9f6be62667"}, - {file = "grpcio_tools-1.62.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b47d0dda1bdb0a0ba7a9a6de88e5a1ed61f07fad613964879954961e36d49193"}, - {file = "grpcio_tools-1.62.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ca246dffeca0498be9b4e1ee169b62e64694b0f92e6d0be2573e65522f39eea9"}, - {file = "grpcio_tools-1.62.3-cp310-cp310-win32.whl", hash = "sha256:6a56d344b0bab30bf342a67e33d386b0b3c4e65868ffe93c341c51e1a8853ca5"}, - {file = "grpcio_tools-1.62.3-cp310-cp310-win_amd64.whl", hash = "sha256:710fecf6a171dcbfa263a0a3e7070e0df65ba73158d4c539cec50978f11dad5d"}, - {file = "grpcio_tools-1.62.3-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:703f46e0012af83a36082b5f30341113474ed0d91e36640da713355cd0ea5d23"}, - {file = "grpcio_tools-1.62.3-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:7cc83023acd8bc72cf74c2edbe85b52098501d5b74d8377bfa06f3e929803492"}, - {file = "grpcio_tools-1.62.3-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ff7d58a45b75df67d25f8f144936a3e44aabd91afec833ee06826bd02b7fbe7"}, - {file = "grpcio_tools-1.62.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f2483ea232bd72d98a6dc6d7aefd97e5bc80b15cd909b9e356d6f3e326b6e43"}, - {file = "grpcio_tools-1.62.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:962c84b4da0f3b14b3cdb10bc3837ebc5f136b67d919aea8d7bb3fd3df39528a"}, - {file = "grpcio_tools-1.62.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8ad0473af5544f89fc5a1ece8676dd03bdf160fb3230f967e05d0f4bf89620e3"}, - {file = "grpcio_tools-1.62.3-cp311-cp311-win32.whl", hash = "sha256:db3bc9fa39afc5e4e2767da4459df82b095ef0cab2f257707be06c44a1c2c3e5"}, - {file = "grpcio_tools-1.62.3-cp311-cp311-win_amd64.whl", hash = "sha256:e0898d412a434e768a0c7e365acabe13ff1558b767e400936e26b5b6ed1ee51f"}, 
- {file = "grpcio_tools-1.62.3-cp312-cp312-macosx_10_10_universal2.whl", hash = "sha256:d102b9b21c4e1e40af9a2ab3c6d41afba6bd29c0aa50ca013bf85c99cdc44ac5"}, - {file = "grpcio_tools-1.62.3-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:0a52cc9444df978438b8d2332c0ca99000521895229934a59f94f37ed896b133"}, - {file = "grpcio_tools-1.62.3-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141d028bf5762d4a97f981c501da873589df3f7e02f4c1260e1921e565b376fa"}, - {file = "grpcio_tools-1.62.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47a5c093ab256dec5714a7a345f8cc89315cb57c298b276fa244f37a0ba507f0"}, - {file = "grpcio_tools-1.62.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:f6831fdec2b853c9daa3358535c55eed3694325889aa714070528cf8f92d7d6d"}, - {file = "grpcio_tools-1.62.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e02d7c1a02e3814c94ba0cfe43d93e872c758bd8fd5c2797f894d0c49b4a1dfc"}, - {file = "grpcio_tools-1.62.3-cp312-cp312-win32.whl", hash = "sha256:b881fd9505a84457e9f7e99362eeedd86497b659030cf57c6f0070df6d9c2b9b"}, - {file = "grpcio_tools-1.62.3-cp312-cp312-win_amd64.whl", hash = "sha256:11c625eebefd1fd40a228fc8bae385e448c7e32a6ae134e43cf13bbc23f902b7"}, - {file = "grpcio_tools-1.62.3-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:ec6fbded0c61afe6f84e3c2a43e6d656791d95747d6d28b73eff1af64108c434"}, - {file = "grpcio_tools-1.62.3-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:bfda6ee8990997a9df95c5606f3096dae65f09af7ca03a1e9ca28f088caca5cf"}, - {file = "grpcio_tools-1.62.3-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b77f9f9cee87cd798f0fe26b7024344d1b03a7cd2d2cba7035f8433b13986325"}, - {file = "grpcio_tools-1.62.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e02d3b96f2d0e4bab9ceaa30f37d4f75571e40c6272e95364bff3125a64d184"}, - {file = "grpcio_tools-1.62.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1da38070738da53556a4b35ab67c1b9884a5dd48fa2f243db35dc14079ea3d0c"}, - {file = "grpcio_tools-1.62.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ace43b26d88a58dcff16c20d23ff72b04d0a415f64d2820f4ff06b1166f50557"}, - {file = "grpcio_tools-1.62.3-cp37-cp37m-win_amd64.whl", hash = "sha256:350a80485e302daaa95d335a931f97b693e170e02d43767ab06552c708808950"}, - {file = "grpcio_tools-1.62.3-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:c3a1ac9d394f8e229eb28eec2e04b9a6f5433fa19c9d32f1cb6066e3c5114a1d"}, - {file = "grpcio_tools-1.62.3-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:11f363570dea661dde99e04a51bd108a5807b5df32a6f8bdf4860e34e94a4dbf"}, - {file = "grpcio_tools-1.62.3-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9ad9950119d8ae27634e68b7663cc8d340ae535a0f80d85a55e56a6973ab1f"}, - {file = "grpcio_tools-1.62.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c5d22b252dcef11dd1e0fbbe5bbfb9b4ae048e8880d33338215e8ccbdb03edc"}, - {file = "grpcio_tools-1.62.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:27cd9ef5c5d68d5ed104b6dcb96fe9c66b82050e546c9e255716903c3d8f0373"}, - {file = "grpcio_tools-1.62.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f4b1615adf67bd8bb71f3464146a6f9949972d06d21a4f5e87e73f6464d97f57"}, - {file = "grpcio_tools-1.62.3-cp38-cp38-win32.whl", hash = "sha256:e18e15287c31baf574fcdf8251fb7f997d64e96c6ecf467906e576da0a079af6"}, - {file = "grpcio_tools-1.62.3-cp38-cp38-win_amd64.whl", hash = 
"sha256:6c3064610826f50bd69410c63101954676edc703e03f9e8f978a135f1aaf97c1"}, - {file = "grpcio_tools-1.62.3-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:8e62cc7164b0b7c5128e637e394eb2ef3db0e61fc798e80c301de3b2379203ed"}, - {file = "grpcio_tools-1.62.3-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:c8ad5cce554e2fcaf8842dee5d9462583b601a3a78f8b76a153c38c963f58c10"}, - {file = "grpcio_tools-1.62.3-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ec279dcf3518201fc592c65002754f58a6b542798cd7f3ecd4af086422f33f29"}, - {file = "grpcio_tools-1.62.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c989246c2aebc13253f08be32538a4039a64e12d9c18f6d662d7aee641dc8b5"}, - {file = "grpcio_tools-1.62.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ca4f5eeadbb57cf03317d6a2857823239a63a59cc935f5bd6cf6e8b7af7a7ecc"}, - {file = "grpcio_tools-1.62.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0cb3a3436ac119cbd37a7d3331d9bdf85dad21a6ac233a3411dff716dcbf401e"}, - {file = "grpcio_tools-1.62.3-cp39-cp39-win32.whl", hash = "sha256:3eae6ea76d62fcac091e1f15c2dcedf1dc3f114f8df1a972a8a0745e89f4cf61"}, - {file = "grpcio_tools-1.62.3-cp39-cp39-win_amd64.whl", hash = "sha256:eec73a005443061f4759b71a056f745e3b000dc0dc125c9f20560232dfbcbd14"}, -] - -[package.dependencies] -grpcio = ">=1.62.3" -protobuf = ">=4.21.6,<5.0dev" -setuptools = "*" - -[[package]] -name = "gunicorn" -version = "22.0.0" -description = "WSGI HTTP Server for UNIX" -optional = false -python-versions = ">=3.7" -files = [ - {file = "gunicorn-22.0.0-py3-none-any.whl", hash = "sha256:350679f91b24062c86e386e198a15438d53a7a8207235a78ba1b53df4c4378d9"}, - {file = "gunicorn-22.0.0.tar.gz", hash = "sha256:4a0b436239ff76fb33f11c07a16482c521a7e09c1ce3cc293c2330afe01bec63"}, -] - -[package.dependencies] -packaging = "*" - -[package.extras] -eventlet = ["eventlet (>=0.24.1,!=0.36.0)"] -gevent = ["gevent (>=1.4.0)"] -setproctitle = ["setproctitle"] -testing = ["coverage", "eventlet", "gevent", "pytest", "pytest-cov"] -tornado = ["tornado (>=0.2)"] - -[[package]] -name = "h11" -version = "0.14.0" -description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" -optional = false -python-versions = ">=3.7" -files = [ - {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, - {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, -] - -[[package]] -name = "h2" -version = "4.1.0" -description = "HTTP/2 State-Machine based protocol implementation" -optional = false -python-versions = ">=3.6.1" -files = [ - {file = "h2-4.1.0-py3-none-any.whl", hash = "sha256:03a46bcf682256c95b5fd9e9a99c1323584c3eec6440d379b9903d709476bc6d"}, - {file = "h2-4.1.0.tar.gz", hash = "sha256:a83aca08fbe7aacb79fec788c9c0bac936343560ed9ec18b82a13a12c28d2abb"}, -] - -[package.dependencies] -hpack = ">=4.0,<5" -hyperframe = ">=6.0,<7" - -[[package]] -name = "hpack" -version = "4.0.0" -description = "Pure-Python HPACK header compression" -optional = false -python-versions = ">=3.6.1" -files = [ - {file = "hpack-4.0.0-py3-none-any.whl", hash = "sha256:84a076fad3dc9a9f8063ccb8041ef100867b1878b25ef0ee63847a5d53818a6c"}, - {file = "hpack-4.0.0.tar.gz", hash = "sha256:fc41de0c63e687ebffde81187a948221294896f6bdc0ae2312708df339430095"}, -] - -[[package]] -name = "httpcore" -version = "1.0.5" -description = "A minimal low-level HTTP client." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"}, - {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"}, -] - -[package.dependencies] -certifi = "*" -h11 = ">=0.13,<0.15" - -[package.extras] -asyncio = ["anyio (>=4.0,<5.0)"] -http2 = ["h2 (>=3,<5)"] -socks = ["socksio (==1.*)"] -trio = ["trio (>=0.22.0,<0.26.0)"] - -[[package]] -name = "httptools" -version = "0.6.1" -description = "A collection of framework independent HTTP protocol utils." -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "httptools-0.6.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d2f6c3c4cb1948d912538217838f6e9960bc4a521d7f9b323b3da579cd14532f"}, - {file = "httptools-0.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:00d5d4b68a717765b1fabfd9ca755bd12bf44105eeb806c03d1962acd9b8e563"}, - {file = "httptools-0.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:639dc4f381a870c9ec860ce5c45921db50205a37cc3334e756269736ff0aac58"}, - {file = "httptools-0.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e57997ac7fb7ee43140cc03664de5f268813a481dff6245e0075925adc6aa185"}, - {file = "httptools-0.6.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0ac5a0ae3d9f4fe004318d64b8a854edd85ab76cffbf7ef5e32920faef62f142"}, - {file = "httptools-0.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3f30d3ce413088a98b9db71c60a6ada2001a08945cb42dd65a9a9fe228627658"}, - {file = "httptools-0.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:1ed99a373e327f0107cb513b61820102ee4f3675656a37a50083eda05dc9541b"}, - {file = "httptools-0.6.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7a7ea483c1a4485c71cb5f38be9db078f8b0e8b4c4dc0210f531cdd2ddac1ef1"}, - {file = "httptools-0.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:85ed077c995e942b6f1b07583e4eb0a8d324d418954fc6af913d36db7c05a5a0"}, - {file = "httptools-0.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b0bb634338334385351a1600a73e558ce619af390c2b38386206ac6a27fecfc"}, - {file = "httptools-0.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d9ceb2c957320def533671fc9c715a80c47025139c8d1f3797477decbc6edd2"}, - {file = "httptools-0.6.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4f0f8271c0a4db459f9dc807acd0eadd4839934a4b9b892f6f160e94da309837"}, - {file = "httptools-0.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6a4f5ccead6d18ec072ac0b84420e95d27c1cdf5c9f1bc8fbd8daf86bd94f43d"}, - {file = "httptools-0.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:5cceac09f164bcba55c0500a18fe3c47df29b62353198e4f37bbcc5d591172c3"}, - {file = "httptools-0.6.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:75c8022dca7935cba14741a42744eee13ba05db00b27a4b940f0d646bd4d56d0"}, - {file = "httptools-0.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:48ed8129cd9a0d62cf4d1575fcf90fb37e3ff7d5654d3a5814eb3d55f36478c2"}, - {file = "httptools-0.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f58e335a1402fb5a650e271e8c2d03cfa7cea46ae124649346d17bd30d59c90"}, - {file = "httptools-0.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:93ad80d7176aa5788902f207a4e79885f0576134695dfb0fefc15b7a4648d503"}, - {file = "httptools-0.6.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9bb68d3a085c2174c2477eb3ffe84ae9fb4fde8792edb7bcd09a1d8467e30a84"}, - {file = "httptools-0.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b512aa728bc02354e5ac086ce76c3ce635b62f5fbc32ab7082b5e582d27867bb"}, - {file = "httptools-0.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:97662ce7fb196c785344d00d638fc9ad69e18ee4bfb4000b35a52efe5adcc949"}, - {file = "httptools-0.6.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8e216a038d2d52ea13fdd9b9c9c7459fb80d78302b257828285eca1c773b99b3"}, - {file = "httptools-0.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3e802e0b2378ade99cd666b5bffb8b2a7cc8f3d28988685dc300469ea8dd86cb"}, - {file = "httptools-0.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4bd3e488b447046e386a30f07af05f9b38d3d368d1f7b4d8f7e10af85393db97"}, - {file = "httptools-0.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe467eb086d80217b7584e61313ebadc8d187a4d95bb62031b7bab4b205c3ba3"}, - {file = "httptools-0.6.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3c3b214ce057c54675b00108ac42bacf2ab8f85c58e3f324a4e963bbc46424f4"}, - {file = "httptools-0.6.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8ae5b97f690badd2ca27cbf668494ee1b6d34cf1c464271ef7bfa9ca6b83ffaf"}, - {file = "httptools-0.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:405784577ba6540fa7d6ff49e37daf104e04f4b4ff2d1ac0469eaa6a20fde084"}, - {file = "httptools-0.6.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:95fb92dd3649f9cb139e9c56604cc2d7c7bf0fc2e7c8d7fbd58f96e35eddd2a3"}, - {file = "httptools-0.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dcbab042cc3ef272adc11220517278519adf8f53fd3056d0e68f0a6f891ba94e"}, - {file = "httptools-0.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cf2372e98406efb42e93bfe10f2948e467edfd792b015f1b4ecd897903d3e8d"}, - {file = "httptools-0.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:678fcbae74477a17d103b7cae78b74800d795d702083867ce160fc202104d0da"}, - {file = "httptools-0.6.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e0b281cf5a125c35f7f6722b65d8542d2e57331be573e9e88bc8b0115c4a7a81"}, - {file = "httptools-0.6.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:95658c342529bba4e1d3d2b1a874db16c7cca435e8827422154c9da76ac4e13a"}, - {file = "httptools-0.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:7ebaec1bf683e4bf5e9fbb49b8cc36da482033596a415b3e4ebab5a4c0d7ec5e"}, - {file = "httptools-0.6.1.tar.gz", hash = "sha256:c6e26c30455600b95d94b1b836085138e82f177351454ee841c148f93a9bad5a"}, -] - -[package.extras] -test = ["Cython (>=0.29.24,<0.30.0)"] - -[[package]] -name = "httpx" -version = "0.27.0" -description = "The next generation HTTP client." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5"}, - {file = "httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5"}, -] - -[package.dependencies] -anyio = "*" -certifi = "*" -h2 = {version = ">=3,<5", optional = true, markers = "extra == \"http2\""} -httpcore = "==1.*" -idna = "*" -sniffio = "*" - -[package.extras] -brotli = ["brotli", "brotlicffi"] -cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] -http2 = ["h2 (>=3,<5)"] -socks = ["socksio (==1.*)"] - -[[package]] -name = "httpx-sse" -version = "0.4.0" -description = "Consume Server-Sent Event (SSE) messages with HTTPX." -optional = false -python-versions = ">=3.8" -files = [ - {file = "httpx-sse-0.4.0.tar.gz", hash = "sha256:1e81a3a3070ce322add1d3529ed42eb5f70817f45ed6ec915ab753f961139721"}, - {file = "httpx_sse-0.4.0-py3-none-any.whl", hash = "sha256:f329af6eae57eaa2bdfd962b42524764af68075ea87370a2de920af5341e318f"}, -] - -[[package]] -name = "huggingface-hub" -version = "0.24.5" -description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "huggingface_hub-0.24.5-py3-none-any.whl", hash = "sha256:d93fb63b1f1a919a22ce91a14518974e81fc4610bf344dfe7572343ce8d3aced"}, - {file = "huggingface_hub-0.24.5.tar.gz", hash = "sha256:7b45d6744dd53ce9cbf9880957de00e9d10a9ae837f1c9b7255fc8fa4e8264f3"}, -] - -[package.dependencies] -filelock = "*" -fsspec = ">=2023.5.0" -packaging = ">=20.9" -pyyaml = ">=5.1" -requests = "*" -tqdm = ">=4.42.1" -typing-extensions = ">=3.7.4.3" - -[package.extras] -all = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "fastapi", "gradio", "jedi", "minijinja (>=1.0)", "mypy (==1.5.1)", "numpy", "pytest (>=8.1.1,<8.2.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.5.0)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] -cli = ["InquirerPy (==0.3.4)"] -dev = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "fastapi", "gradio", "jedi", "minijinja (>=1.0)", "mypy (==1.5.1)", "numpy", "pytest (>=8.1.1,<8.2.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.5.0)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] -fastai = ["fastai (>=2.4)", "fastcore (>=1.3.27)", "toml"] -hf-transfer = ["hf-transfer (>=0.1.4)"] -inference = ["aiohttp", "minijinja (>=1.0)"] -quality = ["mypy (==1.5.1)", "ruff (>=0.5.0)"] -tensorflow = ["graphviz", "pydot", "tensorflow"] -tensorflow-testing = ["keras (<3.0)", "tensorflow"] -testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "fastapi", "gradio", "jedi", "minijinja (>=1.0)", "numpy", "pytest (>=8.1.1,<8.2.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "soundfile", "urllib3 (<2.0)"] -torch = ["safetensors[torch]", "torch"] -typing = ["types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)"] - -[[package]] -name = "humanfriendly" -version 
= "10.0" -description = "Human friendly output for text interfaces using Python" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477"}, - {file = "humanfriendly-10.0.tar.gz", hash = "sha256:6b0b831ce8f15f7300721aa49829fc4e83921a9a301cc7f606be6686a2288ddc"}, -] - -[package.dependencies] -pyreadline3 = {version = "*", markers = "sys_platform == \"win32\" and python_version >= \"3.8\""} - -[[package]] -name = "hyperframe" -version = "6.0.1" -description = "HTTP/2 framing layer for Python" -optional = false -python-versions = ">=3.6.1" -files = [ - {file = "hyperframe-6.0.1-py3-none-any.whl", hash = "sha256:0ec6bafd80d8ad2195c4f03aacba3a8265e57bc4cff261e802bf39970ed02a15"}, - {file = "hyperframe-6.0.1.tar.gz", hash = "sha256:ae510046231dc8e9ecb1a6586f63d2347bf4c8905914aa84ba585ae85f28a914"}, -] - -[[package]] -name = "identify" -version = "2.6.0" -description = "File identification library for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "identify-2.6.0-py2.py3-none-any.whl", hash = "sha256:e79ae4406387a9d300332b5fd366d8994f1525e8414984e1a59e058b2eda2dd0"}, - {file = "identify-2.6.0.tar.gz", hash = "sha256:cb171c685bdc31bcc4c1734698736a7d5b6c8bf2e0c15117f4d469c8640ae5cf"}, -] - -[package.extras] -license = ["ukkonen"] - -[[package]] -name = "idna" -version = "3.7" -description = "Internationalized Domain Names in Applications (IDNA)" -optional = false -python-versions = ">=3.5" -files = [ - {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, - {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, -] - -[[package]] -name = "importlib-metadata" -version = "7.1.0" -description = "Read metadata from Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "importlib_metadata-7.1.0-py3-none-any.whl", hash = "sha256:30962b96c0c223483ed6cc7280e7f0199feb01a0e40cfae4d4450fc6fab1f570"}, - {file = "importlib_metadata-7.1.0.tar.gz", hash = "sha256:b78938b926ee8d5f020fc4772d487045805a55ddbad2ecf21c6d60938dc7fcd2"}, -] - -[package.dependencies] -zipp = ">=0.5" - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] - -[[package]] -name = "importlib-resources" -version = "6.4.2" -description = "Read resources from Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "importlib_resources-6.4.2-py3-none-any.whl", hash = "sha256:8bba8c54a8a3afaa1419910845fa26ebd706dc716dd208d9b158b4b6966f5c5c"}, - {file = "importlib_resources-6.4.2.tar.gz", hash = "sha256:6cbfbefc449cc6e2095dd184691b7a12a04f40bc75dd4c55d31c34f174cdf57a"}, -] - -[package.extras] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -test = ["jaraco.test (>=5.4)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)", "zipp (>=3.17)"] - -[[package]] -name = 
"iniconfig" -version = "2.0.0" -description = "brain-dead simple config-ini parsing" -optional = false -python-versions = ">=3.7" -files = [ - {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, - {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, -] - -[[package]] -name = "instructor" -version = "1.3.3" -description = "structured outputs for llm" -optional = false -python-versions = "<4.0,>=3.9" -files = [ - {file = "instructor-1.3.3-py3-none-any.whl", hash = "sha256:94b114b39a1181fa348d162e6e4ff5c4d985324736020c0233fed5d4db444dbd"}, - {file = "instructor-1.3.3.tar.gz", hash = "sha256:e27bf3c1187b0b2130ea38ecde7c2b4f571d6a5ce1397fb15c27490988b45441"}, -] - -[package.dependencies] -aiohttp = ">=3.9.1,<4.0.0" -docstring-parser = ">=0.16,<0.17" -jiter = ">=0.4.1,<0.5.0" -openai = ">=1.1.0,<2.0.0" -pydantic = ">=2.7.0,<3.0.0" -pydantic-core = ">=2.18.0,<3.0.0" -rich = ">=13.7.0,<14.0.0" -tenacity = ">=8.2.3,<9.0.0" -typer = ">=0.9.0,<1.0.0" - -[package.extras] -anthropic = ["anthropic (>=0.27.0,<0.28.0)", "xmltodict (>=0.13.0,<0.14.0)"] -cohere = ["cohere (>=5.1.8,<6.0.0)"] -google-generativeai = ["google-generativeai (>=0.5.4,<0.6.0)"] -groq = ["groq (>=0.4.2,<0.5.0)"] -litellm = ["litellm (>=1.35.31,<2.0.0)"] -mistralai = ["mistralai (>=0.1.8,<0.2.0)"] -test-docs = ["anthropic (>=0.27.0,<0.28.0)", "cohere (>=5.1.8,<6.0.0)", "diskcache (>=5.6.3,<6.0.0)", "fastapi (>=0.109.2,<0.110.0)", "groq (>=0.4.2,<0.5.0)", "litellm (>=1.35.31,<2.0.0)", "mistralai (>=0.1.8,<0.2.0)", "pandas (>=2.2.0,<3.0.0)", "pydantic_extra_types (>=2.6.0,<3.0.0)", "redis (>=5.0.1,<6.0.0)", "tabulate (>=0.9.0,<0.10.0)"] -vertexai = ["google-cloud-aiplatform (>=1.52.0,<2.0.0)", "jsonref (>=1.1.0,<2.0.0)"] - -[[package]] -name = "ipykernel" -version = "6.29.5" -description = "IPython Kernel for Jupyter" -optional = false -python-versions = ">=3.8" -files = [ - {file = "ipykernel-6.29.5-py3-none-any.whl", hash = "sha256:afdb66ba5aa354b09b91379bac28ae4afebbb30e8b39510c9690afb7a10421b5"}, - {file = "ipykernel-6.29.5.tar.gz", hash = "sha256:f093a22c4a40f8828f8e330a9c297cb93dcab13bd9678ded6de8e5cf81c56215"}, -] - -[package.dependencies] -appnope = {version = "*", markers = "platform_system == \"Darwin\""} -comm = ">=0.1.1" -debugpy = ">=1.6.5" -ipython = ">=7.23.1" -jupyter-client = ">=6.1.12" -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" -matplotlib-inline = ">=0.1" -nest-asyncio = "*" -packaging = "*" -psutil = "*" -pyzmq = ">=24" -tornado = ">=6.1" -traitlets = ">=5.4.0" - -[package.extras] -cov = ["coverage[toml]", "curio", "matplotlib", "pytest-cov", "trio"] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "trio"] -pyqt5 = ["pyqt5"] -pyside6 = ["pyside6"] -test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.23.5)", "pytest-cov", "pytest-timeout"] - -[[package]] -name = "ipython" -version = "8.26.0" -description = "IPython: Productive Interactive Computing" -optional = false -python-versions = ">=3.10" -files = [ - {file = "ipython-8.26.0-py3-none-any.whl", hash = "sha256:e6b347c27bdf9c32ee9d31ae85defc525755a1869f14057e900675b9e8d6e6ff"}, - {file = "ipython-8.26.0.tar.gz", hash = "sha256:1cec0fbba8404af13facebe83d04436a7434c7400e59f47acf467c64abd0956c"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "sys_platform == \"win32\""} -decorator = 
"*" -exceptiongroup = {version = "*", markers = "python_version < \"3.11\""} -jedi = ">=0.16" -matplotlib-inline = "*" -pexpect = {version = ">4.3", markers = "sys_platform != \"win32\" and sys_platform != \"emscripten\""} -prompt-toolkit = ">=3.0.41,<3.1.0" -pygments = ">=2.4.0" -stack-data = "*" -traitlets = ">=5.13.0" -typing-extensions = {version = ">=4.6", markers = "python_version < \"3.12\""} - -[package.extras] -all = ["ipython[black,doc,kernel,matplotlib,nbconvert,nbformat,notebook,parallel,qtconsole]", "ipython[test,test-extra]"] -black = ["black"] -doc = ["docrepr", "exceptiongroup", "intersphinx-registry", "ipykernel", "ipython[test]", "matplotlib", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "sphinxcontrib-jquery", "tomli", "typing-extensions"] -kernel = ["ipykernel"] -matplotlib = ["matplotlib"] -nbconvert = ["nbconvert"] -nbformat = ["nbformat"] -notebook = ["ipywidgets", "notebook"] -parallel = ["ipyparallel"] -qtconsole = ["qtconsole"] -test = ["packaging", "pickleshare", "pytest", "pytest-asyncio (<0.22)", "testpath"] -test-extra = ["curio", "ipython[test]", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.23)", "pandas", "trio"] - -[[package]] -name = "jedi" -version = "0.19.1" -description = "An autocompletion tool for Python that can be used for text editors." -optional = false -python-versions = ">=3.6" -files = [ - {file = "jedi-0.19.1-py2.py3-none-any.whl", hash = "sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0"}, - {file = "jedi-0.19.1.tar.gz", hash = "sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd"}, -] - -[package.dependencies] -parso = ">=0.8.3,<0.9.0" - -[package.extras] -docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] -qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] -testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] - -[[package]] -name = "jinja2" -version = "3.1.4" -description = "A very fast and expressive template engine." -optional = false -python-versions = ">=3.7" -files = [ - {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, - {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, -] - -[package.dependencies] -MarkupSafe = ">=2.0" - -[package.extras] -i18n = ["Babel (>=2.7)"] - -[[package]] -name = "jiter" -version = "0.4.2" -description = "Fast iterable JSON parser." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "jiter-0.4.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:c2b003ff58d14f5e182b875acd5177b2367245c19a03be9a2230535d296f7550"}, - {file = "jiter-0.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b48c77c25f094707731cd5bad6b776046846b60a27ee20efc8fadfb10a89415f"}, - {file = "jiter-0.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f50ad6b172bde4d45f4d4ea10c49282a337b8bb735afc99763dfa55ea84a743"}, - {file = "jiter-0.4.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:95f6001e86f525fbbc9706db2078dc22be078b0950de55b92d37041930f5f940"}, - {file = "jiter-0.4.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:16646ef23b62b007de80460d303ebb2d81e355dac9389c787cec87cdd7ffef2f"}, - {file = "jiter-0.4.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b4e847c13b0bf1255c711a92330e7a8cb8b5cdd1e37d7db309627bcdd3367ff"}, - {file = "jiter-0.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c536589be60e4c5f2b20fadc4db7e9f55d4c9df3551f29ddf1c4a18dcc9dd54"}, - {file = "jiter-0.4.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b3b2763996167830889a854b4ded30bb90897f9b76be78069c50c3ec4540950e"}, - {file = "jiter-0.4.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:675e8ab98c99495091af6b6e9bf2b6353bcf81f25ab6ce27d36127e315b4505d"}, - {file = "jiter-0.4.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e48e43d9d999aaf55f53406b8846ff8cbe3e47ee4b9dc37e5a10a65ce760809f"}, - {file = "jiter-0.4.2-cp310-none-win32.whl", hash = "sha256:881b6e67c50bc36acb3570eda693763c8cd77d590940e06fa6d325d0da52ec1b"}, - {file = "jiter-0.4.2-cp310-none-win_amd64.whl", hash = "sha256:bb8f7b43259efc6add0d721ade2953e064b24e2026d26d979bc09ec080844cef"}, - {file = "jiter-0.4.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:24ad336ac47f274fa83f6fbedcabff9d3387c80f67c66b992688e6a8ba2c47e9"}, - {file = "jiter-0.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fc392a220095730afe365ce1516f2f88bb085a2fd29ea191be9c6e3c71713d9a"}, - {file = "jiter-0.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1fdc408de36c81460896de0176f2f7b9f3574dcd35693a0b2c00f4ca34c98e4"}, - {file = "jiter-0.4.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c10ad76722ee6a8c820b0db06a793c08b7d679e5201b9563015bd1e06c959a09"}, - {file = "jiter-0.4.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dbb46d1e9c82bba87f0cbda38413e49448a7df35b1e55917124bff9f38974a23"}, - {file = "jiter-0.4.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:194e28ef4b5f3b61408cb2ee6b6dcbcdb0c9063d01b92b01345b7605692849f5"}, - {file = "jiter-0.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f0a447533eccd62748a727e058efa10a8d7cf1de8ffe1a4d705ecb41dad9090"}, - {file = "jiter-0.4.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5f7704d7260bbb88cca3453951af739589132b26e896a3144fa2dae2263716d7"}, - {file = "jiter-0.4.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:01427458bc9550f2eda09d425755330e7d0eb09adce099577433bebf05d28d59"}, - {file = "jiter-0.4.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:159b8416879c0053b17c352f70b67b749ef5b2924c6154318ecf71918aab0905"}, - {file = "jiter-0.4.2-cp311-none-win32.whl", hash = 
"sha256:f2445234acfb79048ce1a0d5d0e181abb9afd9e4a29d8d9988fe26cc5773a81a"}, - {file = "jiter-0.4.2-cp311-none-win_amd64.whl", hash = "sha256:e15a65f233b6b0e5ac10ddf3b97ceb18aa9ffba096259961641d78b4ee321bd5"}, - {file = "jiter-0.4.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:d61d59521aea9745447ce50f74d39a16ef74ec9d6477d9350d77e75a3d774ad2"}, - {file = "jiter-0.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eef607dc0acc251923427808dbd017f1998ae3c1a0430a261527aa5cbb3a942"}, - {file = "jiter-0.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:af6bf39954646e374fc47429c656372ac731a6a26b644158a5a84bcdbed33a47"}, - {file = "jiter-0.4.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8f509d23606e476852ee46a2b65b5c4ad3905f17424d9cc19c1dffa1c94ba3c6"}, - {file = "jiter-0.4.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59672774daa44ee140aada0c781c82bee4d9ac5e522966186cfb6b3c217d8a51"}, - {file = "jiter-0.4.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:24a0458efac5afeca254cf557b8a654e17013075a69905c78f88d557f129d871"}, - {file = "jiter-0.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8860766d1c293e75c1bb4e25b74fa987e3adf199cac3f5f9e6e49c2bebf092f"}, - {file = "jiter-0.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a109f3281b72bbf4921fe43db1005c004a38559ca0b6c4985add81777dfe0a44"}, - {file = "jiter-0.4.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:faa7e667454b77ad2f0ef87db39f4944de759617aadf210ea2b73f26bb24755f"}, - {file = "jiter-0.4.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3512f8b00cafb6780b427cb6282800d2bf8277161d9c917830661bd4ed1d3528"}, - {file = "jiter-0.4.2-cp312-none-win32.whl", hash = "sha256:853b35d508ee5b66d06630473c1c0b7bb5e29bf4785c9d2202437116c94f7e21"}, - {file = "jiter-0.4.2-cp312-none-win_amd64.whl", hash = "sha256:4a3a8197784278eb8b24cb02c45e1cad67c2ce5b5b758adfb19b87f74bbdff9c"}, - {file = "jiter-0.4.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:ca2a4d750aed3154b89f2efb148609fc985fad8db739460797aaf9b478acedda"}, - {file = "jiter-0.4.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0e6c304b3cc6896256727e1fb8991c7179a345eca8224e201795e9cacf4683b0"}, - {file = "jiter-0.4.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7cc34ac708ae1750d077e490321761ec4b9a055b994cbdd1d6fbd37099e4aa7b"}, - {file = "jiter-0.4.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8c93383875ab8d2e4f760aaff335b4a12ff32d4f9cf49c4498d657734f611466"}, - {file = "jiter-0.4.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce197ee044add576afca0955b42142dd0312639adb6ebadbdbe4277f2855614f"}, - {file = "jiter-0.4.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a427716813ff65480ca5b5117cfa099f49b49cd38051f8609bd0d5493013ca0"}, - {file = "jiter-0.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:479990218353356234669e70fac53e5eb6f739a10db25316171aede2c97d9364"}, - {file = "jiter-0.4.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d35a91ec5ac74cf33234c431505299fa91c0a197c2dbafd47400aca7c69489d4"}, - {file = "jiter-0.4.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b27189847193708c94ad10ca0d891309342ae882725d2187cf5d2db02bde8d1b"}, - {file = "jiter-0.4.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:76c255308cd1093fb411a03756b7bb220e48d4a98c30cbc79ed448bf3978e27d"}, - {file = "jiter-0.4.2-cp38-none-win32.whl", hash = "sha256:bb77438060bad49cc251941e6701b31138365c8a0ddaf10cdded2fcc6dd30701"}, - {file = "jiter-0.4.2-cp38-none-win_amd64.whl", hash = "sha256:ce858af19f7ce0d4b51c9f6c0c9d08f1e9dcef1986c5875efd0674a7054292ca"}, - {file = "jiter-0.4.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:6128838a2f357b3921b2a3242d5dc002ae4255ecc8f9f05c20d56d7d2d79c5ad"}, - {file = "jiter-0.4.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f2420cebb9ba856cb57dcab1d2d8def949b464b0db09c22a4e4dbd52fff7b200"}, - {file = "jiter-0.4.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5d13d8128e853b320e00bb18bd4bb8b136cc0936091dc87633648fc688eb705"}, - {file = "jiter-0.4.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eba5d6e54f149c508ba88677f97d3dc7dd75e9980d234bbac8027ac6db0763a3"}, - {file = "jiter-0.4.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0fad5d64af0bc0545237419bf4150d8de56f0bd217434bdd1a59730327252bef"}, - {file = "jiter-0.4.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d179e7bca89cf5719bd761dd37a341ff0f98199ecaa9c14af09792e47e977cc"}, - {file = "jiter-0.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36353caee9f103d8ee7bda077f6400505b0f370e27eabcab33a33d21de12a2a6"}, - {file = "jiter-0.4.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dd146c25bce576ca5db64fc7eccb8862af00f1f0e30108796953f12a53660e4c"}, - {file = "jiter-0.4.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:14b7c08cadbcd703041c66dc30e24e17de2f340281cac0e69374223ecf153aa4"}, - {file = "jiter-0.4.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a90f1a8b3d29aea198f8ea2b01148276ced8056e5103f32525266b3d880e65c9"}, - {file = "jiter-0.4.2-cp39-none-win32.whl", hash = "sha256:25b174997c780337b61ae57b1723455eecae9a17a9659044fd3c3b369190063f"}, - {file = "jiter-0.4.2-cp39-none-win_amd64.whl", hash = "sha256:bef62cea18521c5b99368147040c7e560c55098a35c93456f110678a2d34189a"}, - {file = "jiter-0.4.2.tar.gz", hash = "sha256:29b9d44f23f0c05f46d482f4ebf03213ee290d77999525d0975a17f875bf1eea"}, -] - -[[package]] -name = "jmespath" -version = "1.0.1" -description = "JSON Matching Expressions" -optional = false -python-versions = ">=3.7" -files = [ - {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, - {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, -] - -[[package]] -name = "jq" -version = "1.7.0" -description = "jq is a lightweight and flexible JSON processor." 
-optional = false -python-versions = ">=3.5" -files = [ - {file = "jq-1.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d8fae014fa8b2704322a5baa39c112176d9acb71e22ebdb8e21c1c864ecff654"}, - {file = "jq-1.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:40fe068d1fdf2c712885b69be90ddb3e61bca3e4346ab3994641a4fbbeb7be82"}, - {file = "jq-1.7.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8ec105a0057f2f922d195e1d75d4b0ae41c4b38655ead04d1a3a47988fcb1939"}, - {file = "jq-1.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38e2041ca578275334eff9e1d913ae386210345e5ae71cd9c16e3f208dc81deb"}, - {file = "jq-1.7.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce1df1b6fffeeeb265d4ea3397e9875ab170ba5a7af6b7997c2fd755934df065"}, - {file = "jq-1.7.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:05ebdaa868f068967d9e7cbf76e59e61fbdafa565dbc3579c387fb1f248592bb"}, - {file = "jq-1.7.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b3f916cb812fcd26bb1b006634d9c0eff240090196ca0ebb5d229b344f624e53"}, - {file = "jq-1.7.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9ad7749a16a16bafd6cebafd5e40990b641b4b6b7b661326864677effc44a500"}, - {file = "jq-1.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2e99ea17b708f55e8bed2f4f68c022119184b17eb15987b384db12e8b6702bd5"}, - {file = "jq-1.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:76735cd19de65c15964d330adbc2c84add8e55dea35ebfe17b9acf88a06a7d57"}, - {file = "jq-1.7.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6b841ddd9089429fc0621d07d1c34ff24f7d6a6245c10125b82806f61e36ae8"}, - {file = "jq-1.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d6b1fc2515b7be92195d50b68f82329cc0250c7fbca790b887d74902ba33870"}, - {file = "jq-1.7.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb6546a57a3ceeed41961be2f1417b4e7a5b3170cca7bb82f5974d2ba9acaab6"}, - {file = "jq-1.7.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3427ad0f377f188953958e36b76167c8d11b8c8c61575c22deafa4aba58d601f"}, - {file = "jq-1.7.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:79b9603219fa5082df97d265d71c426613286bd0e5378a8739ce39056fa1e2dc"}, - {file = "jq-1.7.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a2981a24765a747163e0daa23648372b72a006e727895b95d032632aa51094bd"}, - {file = "jq-1.7.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a0cc15b2ed511a1a8784c7c7dc07781e28d84a65934062de52487578732e0514"}, - {file = "jq-1.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:90032c2c4e710157d333d166818ede8b9c8ef0f697e59c9427304edc47146f3d"}, - {file = "jq-1.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e715d5f0bdfc0be0ff33cd0a3f6f51f8bc5ad464fab737e2048a1b46b45bb582"}, - {file = "jq-1.7.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76cc5a1ca3a540a5753dbd592f701c1ec7c9cc256becba604490283c055f3f1c"}, - {file = "jq-1.7.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:293b6e8e4b652d96fdeae7dd5ffb1644199d8b6fc1f95d528c16451925c0482e"}, - {file = "jq-1.7.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f103868b8902d4ee7f643248bdd7a2de9f9396e4b262f42745b9f624c834d07a"}, - {file = "jq-1.7.0-cp312-cp312-musllinux_1_1_i686.whl", hash = 
"sha256:e9c5ccfa3cf65f92b60c5805ef725f7cd799f2dc16e8601c6e8f12f38a9f48f3"}, - {file = "jq-1.7.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0ca25608d51fdbf8bd5c682b433e1cb9f497155a7c1ea5901524df099f1ceff3"}, - {file = "jq-1.7.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:6a2d34d962ce2da5136dab2664fc7efad9f71024d0dc328702f2dc70b4e2735c"}, - {file = "jq-1.7.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:757e8c4cb0cb1175f0aaa227f0a26e4765ba5da04d0bc875b0bd933eff6bd0a0"}, - {file = "jq-1.7.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d097098a628171b87961fb0400117ac340b1eb40cbbee2e58208c4254c23c20"}, - {file = "jq-1.7.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:45bc842806d71bd5839c190a88fd071ac5a0a8a1dd601e83228494a19f14559c"}, - {file = "jq-1.7.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:f0629743417f8709305d1f77d3929493912efdc3fd1cce3a7fcc76b81bc6b82d"}, - {file = "jq-1.7.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:9b9a49e8b14d3a368011ed1412c8c3e193a7135d5eb4310d77ee643470112b47"}, - {file = "jq-1.7.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:a10e3f88b6d2bbb4c47b368f919ec7b648196bf9c60a5cc921d04239d68240c2"}, - {file = "jq-1.7.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:aa85b47effb4152e1cf1120607f475a1c11395d072323ff23e8bb59ce6752713"}, - {file = "jq-1.7.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9413f67ea28037e37ccf8951f9f0b380f31d79162f33e216faa6bd0d8eca0dc7"}, - {file = "jq-1.7.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3daf3b3443c4e871c23ac1e698eb70d1225b46a4ac79c73968234adcd70f3ed8"}, - {file = "jq-1.7.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbe03f95ab02dc045691c3b5c7da8d8c2128e60450fb2124ea8b49034c74f158"}, - {file = "jq-1.7.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a6b2e9f4e63644a30726c58c25d80015f9b83325b125615a46e10d4439b9dc99"}, - {file = "jq-1.7.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9fffcffc8e56585223878edd7c5d719eb8547281d64af2bac43911f1bb9e7029"}, - {file = "jq-1.7.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:95d4bcd5a999ce0aaadaadcaca967989f0efc96c1097a81746b21b6126cf7aaf"}, - {file = "jq-1.7.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0137445eb67c43eb0eb46933aff7e8afbbd6c5aaf8574efd5df536dc9d177d1d"}, - {file = "jq-1.7.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ee0e9307b6d4fe89a8556a92c1db65e0d66218bcc13fdeb92a09645a55ff87a"}, - {file = "jq-1.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e0f95cecb690df66f23a8d76c746d2ed15671de3f6101140e3fe2b98b97e0a8"}, - {file = "jq-1.7.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:95e472aa54efe418d3627dcd2a369ac0b21e1a5e352550144fd5f0c40585a5b7"}, - {file = "jq-1.7.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4be2a2b56fa139f3235cdb8422ea16eccdd48d62bf91d9fac10761cd55d26c84"}, - {file = "jq-1.7.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7db8260ecb57827bb3fb6f44d4a6f0db0570ded990eee95a5fd3ac9ba14f60d7"}, - {file = "jq-1.7.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fdbb7ff2dfce2cc0f421f498dcb64176997bd9d9e6cab474e59577e7bff3090d"}, - {file = "jq-1.7.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:396bef4b4c9c1ebe3e0e04e287bc79a861b991e12db45681c398d3906ee85468"}, - {file = "jq-1.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:18d8a81c6e241585a0bf748903082d65c4eaa6ba80248f507e5cebda36e05c6c"}, - {file = "jq-1.7.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ade00a39990fdfe0acc7d2a900e3e5e6b11a71eb5289954ff0df31ac0afae25b"}, - {file = "jq-1.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c777e88f3cce496c17f5c3bdbc7d74ff12b5cbdaea30f3a374f3cc92e5bba8d"}, - {file = "jq-1.7.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:79957008c67d8f1d9134cd0e01044bff5d795f7e94db9532a9fe9212e1f88a77"}, - {file = "jq-1.7.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2bc5cb77dd12e861296cfa69587aa6797ccfee4f5f3aa571b02f0273ab1efec1"}, - {file = "jq-1.7.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8e10a5937aab9c383632ab151f73d43dc0c4be99f62221a7044988dc8ddd4bdc"}, - {file = "jq-1.7.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1e6e13e0f8d3204aefe861159160116e822c90bae773a3ccdd4d9e79a06e086e"}, - {file = "jq-1.7.0-pp310-pypy310_pp73-macosx_10_13_x86_64.whl", hash = "sha256:0cdbd32463ef632b0b4ca6dab434e2387342bc5c895b411ec6b2a14bbf4b2c12"}, - {file = "jq-1.7.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:558a5c6b4430e05fa59c4b5631c0d3fc0f163100390c03edc1993663f59d8a9b"}, - {file = "jq-1.7.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4bbf77138cdd8d306bf335d998525a0477e4cb6f00eb6f361288f5b82274e84c"}, - {file = "jq-1.7.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2e6919481ff43754ae9b17a98c877995d5e1346be114c71cd0dfd8ff7d0cd60"}, - {file = "jq-1.7.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0b0584ff33b2a9cc021edec325af4e0fa9fbd54cce80c1f7b8e0ba4cf2d75508"}, - {file = "jq-1.7.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a6e7259880ab7e75e845fb4d56c6d18922c68789d25d7cdbb6f433d9e714613a"}, - {file = "jq-1.7.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d472cdd0bcb3d47c87b00ff841edff41c79fe4422523c4a7c8bf913fb950f7f"}, - {file = "jq-1.7.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a3430de179f8a7b0baf5675d5ee400f97344085d79f190a90fc0c7df990cbcc"}, - {file = "jq-1.7.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:acbb375bdb2a44f1a643123b8ec57563bb5542673f0399799ab5662ce90bf4a5"}, - {file = "jq-1.7.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:39a0c71ed2f1ec0462d54678333f1b14d9f25fd62a9f46df140d68552f79d204"}, - {file = "jq-1.7.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:306c1e3ba531d7dc3284e128689f0b75409a4e8e8a3bdac2c51cc26f2d3cca58"}, - {file = "jq-1.7.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88b8b0cc838c7387dc5e8c45b192c7504acd0510514658d2d5cd1716fcf15fe3"}, - {file = "jq-1.7.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c75e16e542f4abaae25727b9fc4eeaf69cb07122be8a2a7672d02feb3a1cc9a"}, - {file = "jq-1.7.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b4828ac689a67fd9c021796bcacd95811bab806939dd6316eb0c2d3de016c584"}, - {file = 
"jq-1.7.0-pp39-pypy39_pp73-macosx_10_13_x86_64.whl", hash = "sha256:c94f95b27720d2db7f1039fdd371f70bc0cac8e204cbfd0626176d7b8a3053d6"}, - {file = "jq-1.7.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:d5ff445fc9b1eb4623a914e04bea9511e654e9143cde82b039383af4f7dc36f2"}, - {file = "jq-1.7.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:07e369ff021fad38a29d6a7a3fc24f7d313e9a239b15ce4eefaffee637466400"}, - {file = "jq-1.7.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:553dfbf674069cb20533d7d74cd8a9d7982bab8e4a5b473fde105d99278df09f"}, - {file = "jq-1.7.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e9fbc76f6fec66e5e58cc84f20a5de80addd3c64ad87a748f5c5f6b4ef01bc8c"}, - {file = "jq-1.7.0.tar.gz", hash = "sha256:f460d1f2c3791617e4fb339fa24efbdbebe672b02c861f057358553642047040"}, -] - -[[package]] -name = "jsonpatch" -version = "1.33" -description = "Apply JSON-Patches (RFC 6902)" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" -files = [ - {file = "jsonpatch-1.33-py2.py3-none-any.whl", hash = "sha256:0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade"}, - {file = "jsonpatch-1.33.tar.gz", hash = "sha256:9fcd4009c41e6d12348b4a0ff2563ba56a2923a7dfee731d004e212e1ee5030c"}, -] - -[package.dependencies] -jsonpointer = ">=1.9" - -[[package]] -name = "jsonpointer" -version = "3.0.0" -description = "Identify specific nodes in a JSON document (RFC 6901)" -optional = false -python-versions = ">=3.7" -files = [ - {file = "jsonpointer-3.0.0-py2.py3-none-any.whl", hash = "sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942"}, - {file = "jsonpointer-3.0.0.tar.gz", hash = "sha256:2b2d729f2091522d61c3b31f82e11870f60b68f43fbc705cb76bf4b832af59ef"}, -] - -[[package]] -name = "jsonref" -version = "1.1.0" -description = "jsonref is a library for automatic dereferencing of JSON Reference objects for Python." -optional = false -python-versions = ">=3.7" -files = [ - {file = "jsonref-1.1.0-py3-none-any.whl", hash = "sha256:590dc7773df6c21cbf948b5dac07a72a251db28b0238ceecce0a2abfa8ec30a9"}, - {file = "jsonref-1.1.0.tar.gz", hash = "sha256:32fe8e1d85af0fdefbebce950af85590b22b60f9e95443176adbde4e1ecea552"}, -] - -[[package]] -name = "jupyter-client" -version = "8.6.2" -description = "Jupyter protocol implementation and client libraries" -optional = false -python-versions = ">=3.8" -files = [ - {file = "jupyter_client-8.6.2-py3-none-any.whl", hash = "sha256:50cbc5c66fd1b8f65ecb66bc490ab73217993632809b6e505687de18e9dea39f"}, - {file = "jupyter_client-8.6.2.tar.gz", hash = "sha256:2bda14d55ee5ba58552a8c53ae43d215ad9868853489213f37da060ced54d8df"}, -] - -[package.dependencies] -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" -python-dateutil = ">=2.8.2" -pyzmq = ">=23.0" -tornado = ">=6.2" -traitlets = ">=5.3" - -[package.extras] -docs = ["ipykernel", "myst-parser", "pydata-sphinx-theme", "sphinx (>=4)", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] -test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pytest (<8.2.0)", "pytest-cov", "pytest-jupyter[client] (>=0.4.1)", "pytest-timeout"] - -[[package]] -name = "jupyter-core" -version = "5.7.2" -description = "Jupyter core package. A base package on which Jupyter projects rely." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "jupyter_core-5.7.2-py3-none-any.whl", hash = "sha256:4f7315d2f6b4bcf2e3e7cb6e46772eba760ae459cd1f59d29eb57b0a01bd7409"}, - {file = "jupyter_core-5.7.2.tar.gz", hash = "sha256:aa5f8d32bbf6b431ac830496da7392035d6f61b4f54872f15c4bd2a9c3f536d9"}, -] - -[package.dependencies] -platformdirs = ">=2.5" -pywin32 = {version = ">=300", markers = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\""} -traitlets = ">=5.3" - -[package.extras] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "traitlets"] -test = ["ipykernel", "pre-commit", "pytest (<8)", "pytest-cov", "pytest-timeout"] - -[[package]] -name = "kubernetes" -version = "30.1.0" -description = "Kubernetes python client" -optional = false -python-versions = ">=3.6" -files = [ - {file = "kubernetes-30.1.0-py2.py3-none-any.whl", hash = "sha256:e212e8b7579031dd2e512168b617373bc1e03888d41ac4e04039240a292d478d"}, - {file = "kubernetes-30.1.0.tar.gz", hash = "sha256:41e4c77af9f28e7a6c314e3bd06a8c6229ddd787cad684e0ab9f69b498e98ebc"}, -] - -[package.dependencies] -certifi = ">=14.05.14" -google-auth = ">=1.0.1" -oauthlib = ">=3.2.2" -python-dateutil = ">=2.5.3" -pyyaml = ">=5.4.1" -requests = "*" -requests-oauthlib = "*" -six = ">=1.9.0" -urllib3 = ">=1.24.2" -websocket-client = ">=0.32.0,<0.40.0 || >0.40.0,<0.41.dev0 || >=0.43.dev0" - -[package.extras] -adal = ["adal (>=1.0.2)"] - -[[package]] -name = "langchain" -version = "0.2.13" -description = "Building applications with LLMs through composability" -optional = false -python-versions = "<4.0,>=3.8.1" -files = [ - {file = "langchain-0.2.13-py3-none-any.whl", hash = "sha256:80f21e48cdada424dd2af9bbf42234fe095744cf181b31eeb63d1da7479e2783"}, - {file = "langchain-0.2.13.tar.gz", hash = "sha256:947e96ac3153a46aa6a0d8207e5f8b6794084c397f60a01bbf4bba78e6838fee"}, -] - -[package.dependencies] -aiohttp = ">=3.8.3,<4.0.0" -async-timeout = {version = ">=4.0.0,<5.0.0", markers = "python_version < \"3.11\""} -langchain-core = ">=0.2.30,<0.3.0" -langchain-text-splitters = ">=0.2.0,<0.3.0" -langsmith = ">=0.1.17,<0.2.0" -numpy = [ - {version = ">=1,<2", markers = "python_version < \"3.12\""}, - {version = ">=1.26.0,<2.0.0", markers = "python_version >= \"3.12\""}, -] -pydantic = ">=1,<3" -PyYAML = ">=5.3" -requests = ">=2,<3" -SQLAlchemy = ">=1.4,<3" -tenacity = ">=8.1.0,<8.4.0 || >8.4.0,<9.0.0" - -[[package]] -name = "langchain-cohere" -version = "0.1.9" -description = "An integration package connecting Cohere and LangChain" -optional = false -python-versions = "<4.0,>=3.8.1" -files = [ - {file = "langchain_cohere-0.1.9-py3-none-any.whl", hash = "sha256:96d6a15125797319474ac84b54024e5024f3f5fc45032ebf228d95d6998c9b13"}, - {file = "langchain_cohere-0.1.9.tar.gz", hash = "sha256:549620d23bc3d77f62d1045787095fe2c1cfa233dba69455139f9a2f65f952fa"}, -] - -[package.dependencies] -cohere = ">=5.5.6,<6.0" -langchain-core = ">=0.2.2,<0.3" -langchain-experimental = ">=0.0.6" -pandas = ">=1.4.3" -tabulate = ">=0.9.0,<0.10.0" - -[package.extras] -langchain-community = ["langchain-community (>=0.2.4)"] - -[[package]] -name = "langchain-community" -version = "0.2.12" -description = "Community contributed LangChain integrations." 
-optional = false -python-versions = "<4.0,>=3.8.1" -files = [ - {file = "langchain_community-0.2.12-py3-none-any.whl", hash = "sha256:50e74473dd2309bdef561760afbbf0c5ea17ed91fc4dfa0d52279dd16d6d34e0"}, - {file = "langchain_community-0.2.12.tar.gz", hash = "sha256:d671cfc6a4f3b65f49a2e59ab420d0164f109d0a56fc4b4996518205c63b8c7e"}, -] - -[package.dependencies] -aiohttp = ">=3.8.3,<4.0.0" -dataclasses-json = ">=0.5.7,<0.7" -langchain = ">=0.2.13,<0.3.0" -langchain-core = ">=0.2.30,<0.3.0" -langsmith = ">=0.1.0,<0.2.0" -numpy = [ - {version = ">=1,<2", markers = "python_version < \"3.12\""}, - {version = ">=1.26.0,<2.0.0", markers = "python_version >= \"3.12\""}, -] -PyYAML = ">=5.3" -requests = ">=2,<3" -SQLAlchemy = ">=1.4,<3" -tenacity = ">=8.1.0,<8.4.0 || >8.4.0,<9.0.0" - -[[package]] -name = "langchain-core" -version = "0.2.32" -description = "Building applications with LLMs through composability" -optional = false -python-versions = "<4.0,>=3.8.1" -files = [ - {file = "langchain_core-0.2.32-py3-none-any.whl", hash = "sha256:1f5584cf0034909e35ea17010a847d4079417e0ddcb5a9eb3fbb2bd55f3268c0"}, - {file = "langchain_core-0.2.32.tar.gz", hash = "sha256:d82cdc350bbbe74261330d87056b7d9f1fb567828e9e03f708d23a48b941819e"}, -] - -[package.dependencies] -jsonpatch = ">=1.33,<2.0" -langsmith = ">=0.1.75,<0.2.0" -packaging = ">=23.2,<25" -pydantic = [ - {version = ">=1,<3", markers = "python_full_version < \"3.12.4\""}, - {version = ">=2.7.4,<3.0.0", markers = "python_full_version >= \"3.12.4\""}, -] -PyYAML = ">=5.3" -tenacity = ">=8.1.0,<8.4.0 || >8.4.0,<9.0.0" -typing-extensions = ">=4.7" - -[[package]] -name = "langchain-experimental" -version = "0.0.61" -description = "Building applications with LLMs through composability" -optional = false -python-versions = "<4.0,>=3.8.1" -files = [ - {file = "langchain_experimental-0.0.61-py3-none-any.whl", hash = "sha256:f9c516f528f55919743bd56fe1689a53bf74ae7f8902d64b9d8aebc61249cbe2"}, - {file = "langchain_experimental-0.0.61.tar.gz", hash = "sha256:e9538efb994be5db3045cc582cddb9787c8299c86ffeee9d3779b7f58eef2226"}, -] - -[package.dependencies] -langchain-community = ">=0.2.5,<0.3.0" -langchain-core = ">=0.2.7,<0.3.0" - -[[package]] -name = "langchain-openai" -version = "0.1.21" -description = "An integration package connecting OpenAI and LangChain" -optional = false -python-versions = "<4.0,>=3.8.1" -files = [ - {file = "langchain_openai-0.1.21-py3-none-any.whl", hash = "sha256:44420f0c84859ae236a80c8ac8754a16d5b660c24377c27ba98308145d346352"}, - {file = "langchain_openai-0.1.21.tar.gz", hash = "sha256:2c65feaf12bb284eccf7bce35725fd06f3035fa751babad6aa84af2f99867f88"}, -] - -[package.dependencies] -langchain-core = ">=0.2.29,<0.3.0" -openai = ">=1.40.0,<2.0.0" -tiktoken = ">=0.7,<1" - -[[package]] -name = "langchain-text-splitters" -version = "0.2.2" -description = "LangChain text splitting utilities" -optional = false -python-versions = "<4.0,>=3.8.1" -files = [ - {file = "langchain_text_splitters-0.2.2-py3-none-any.whl", hash = "sha256:1c80d4b11b55e2995f02d2a326c0323ee1eeff24507329bb22924e420c782dff"}, - {file = "langchain_text_splitters-0.2.2.tar.gz", hash = "sha256:a1e45de10919fa6fb080ef0525deab56557e9552083600455cb9fa4238076140"}, -] - -[package.dependencies] -langchain-core = ">=0.2.10,<0.3.0" - -[[package]] -name = "langchainhub" -version = "0.1.21" -description = "The LangChain Hub API client" -optional = false -python-versions = "<4.0,>=3.8.1" -files = [ - {file = "langchainhub-0.1.21-py3-none-any.whl", hash = 
"sha256:1cc002dc31e0d132a776afd044361e2b698743df5202618cf2bad399246b895f"}, - {file = "langchainhub-0.1.21.tar.gz", hash = "sha256:723383b3964a47dbaea6ad5d0ef728accefbc9d2c07480e800bdec43510a8c10"}, -] - -[package.dependencies] -packaging = ">=23.2,<25" -requests = ">=2,<3" -types-requests = ">=2.31.0.2,<3.0.0.0" - -[[package]] -name = "langsmith" -version = "0.1.99" -description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." -optional = false -python-versions = "<4.0,>=3.8.1" -files = [ - {file = "langsmith-0.1.99-py3-none-any.whl", hash = "sha256:ef8d1d74a2674c514aa429b0171a9fbb661207dc3835142cca0e8f1bf97b26b0"}, - {file = "langsmith-0.1.99.tar.gz", hash = "sha256:b5c6a1f158abda61600a4a445081ee848b4a28b758d91f2793dc02aeffafcaf1"}, -] - -[package.dependencies] -orjson = ">=3.9.14,<4.0.0" -pydantic = [ - {version = ">=1,<3", markers = "python_full_version < \"3.12.4\""}, - {version = ">=2.7.4,<3.0.0", markers = "python_full_version >= \"3.12.4\""}, -] -requests = ">=2,<3" - -[[package]] -name = "loguru" -version = "0.7.2" -description = "Python logging made (stupidly) simple" -optional = false -python-versions = ">=3.5" -files = [ - {file = "loguru-0.7.2-py3-none-any.whl", hash = "sha256:003d71e3d3ed35f0f8984898359d65b79e5b21943f78af86aa5491210429b8eb"}, - {file = "loguru-0.7.2.tar.gz", hash = "sha256:e671a53522515f34fd406340ee968cb9ecafbc4b36c679da03c18fd8d0bd51ac"}, -] - -[package.dependencies] -colorama = {version = ">=0.3.4", markers = "sys_platform == \"win32\""} -win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""} - -[package.extras] -dev = ["Sphinx (==7.2.5)", "colorama (==0.4.5)", "colorama (==0.4.6)", "exceptiongroup (==1.1.3)", "freezegun (==1.1.0)", "freezegun (==1.2.2)", "mypy (==v0.910)", "mypy (==v0.971)", "mypy (==v1.4.1)", "mypy (==v1.5.1)", "pre-commit (==3.4.0)", "pytest (==6.1.2)", "pytest (==7.4.0)", "pytest-cov (==2.12.1)", "pytest-cov (==4.1.0)", "pytest-mypy-plugins (==1.9.3)", "pytest-mypy-plugins (==3.0.0)", "sphinx-autobuild (==2021.3.14)", "sphinx-rtd-theme (==1.3.0)", "tox (==3.27.1)", "tox (==4.11.0)"] - -[[package]] -name = "lxml" -version = "5.3.0" -description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." 
-optional = false -python-versions = ">=3.6" -files = [ - {file = "lxml-5.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:dd36439be765e2dde7660212b5275641edbc813e7b24668831a5c8ac91180656"}, - {file = "lxml-5.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ae5fe5c4b525aa82b8076c1a59d642c17b6e8739ecf852522c6321852178119d"}, - {file = "lxml-5.3.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:501d0d7e26b4d261fca8132854d845e4988097611ba2531408ec91cf3fd9d20a"}, - {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb66442c2546446944437df74379e9cf9e9db353e61301d1a0e26482f43f0dd8"}, - {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9e41506fec7a7f9405b14aa2d5c8abbb4dbbd09d88f9496958b6d00cb4d45330"}, - {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f7d4a670107d75dfe5ad080bed6c341d18c4442f9378c9f58e5851e86eb79965"}, - {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41ce1f1e2c7755abfc7e759dc34d7d05fd221723ff822947132dc934d122fe22"}, - {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:44264ecae91b30e5633013fb66f6ddd05c006d3e0e884f75ce0b4755b3e3847b"}, - {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:3c174dc350d3ec52deb77f2faf05c439331d6ed5e702fc247ccb4e6b62d884b7"}, - {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:2dfab5fa6a28a0b60a20638dc48e6343c02ea9933e3279ccb132f555a62323d8"}, - {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:b1c8c20847b9f34e98080da785bb2336ea982e7f913eed5809e5a3c872900f32"}, - {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2c86bf781b12ba417f64f3422cfc302523ac9cd1d8ae8c0f92a1c66e56ef2e86"}, - {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:c162b216070f280fa7da844531169be0baf9ccb17263cf5a8bf876fcd3117fa5"}, - {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:36aef61a1678cb778097b4a6eeae96a69875d51d1e8f4d4b491ab3cfb54b5a03"}, - {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f65e5120863c2b266dbcc927b306c5b78e502c71edf3295dfcb9501ec96e5fc7"}, - {file = "lxml-5.3.0-cp310-cp310-win32.whl", hash = "sha256:ef0c1fe22171dd7c7c27147f2e9c3e86f8bdf473fed75f16b0c2e84a5030ce80"}, - {file = "lxml-5.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:052d99051e77a4f3e8482c65014cf6372e61b0a6f4fe9edb98503bb5364cfee3"}, - {file = "lxml-5.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:74bcb423462233bc5d6066e4e98b0264e7c1bed7541fff2f4e34fe6b21563c8b"}, - {file = "lxml-5.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a3d819eb6f9b8677f57f9664265d0a10dd6551d227afb4af2b9cd7bdc2ccbf18"}, - {file = "lxml-5.3.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b8f5db71b28b8c404956ddf79575ea77aa8b1538e8b2ef9ec877945b3f46442"}, - {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c3406b63232fc7e9b8783ab0b765d7c59e7c59ff96759d8ef9632fca27c7ee4"}, - {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ecdd78ab768f844c7a1d4a03595038c166b609f6395e25af9b0f3f26ae1230f"}, - {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:168f2dfcfdedf611eb285efac1516c8454c8c99caf271dccda8943576b67552e"}, - {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa617107a410245b8660028a7483b68e7914304a6d4882b5ff3d2d3eb5948d8c"}, - {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:69959bd3167b993e6e710b99051265654133a98f20cec1d9b493b931942e9c16"}, - {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:bd96517ef76c8654446fc3db9242d019a1bb5fe8b751ba414765d59f99210b79"}, - {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:ab6dd83b970dc97c2d10bc71aa925b84788c7c05de30241b9e96f9b6d9ea3080"}, - {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:eec1bb8cdbba2925bedc887bc0609a80e599c75b12d87ae42ac23fd199445654"}, - {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6a7095eeec6f89111d03dabfe5883a1fd54da319c94e0fb104ee8f23616b572d"}, - {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:6f651ebd0b21ec65dfca93aa629610a0dbc13dbc13554f19b0113da2e61a4763"}, - {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:f422a209d2455c56849442ae42f25dbaaba1c6c3f501d58761c619c7836642ec"}, - {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:62f7fdb0d1ed2065451f086519865b4c90aa19aed51081979ecd05a21eb4d1be"}, - {file = "lxml-5.3.0-cp311-cp311-win32.whl", hash = "sha256:c6379f35350b655fd817cd0d6cbeef7f265f3ae5fedb1caae2eb442bbeae9ab9"}, - {file = "lxml-5.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:9c52100e2c2dbb0649b90467935c4b0de5528833c76a35ea1a2691ec9f1ee7a1"}, - {file = "lxml-5.3.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:e99f5507401436fdcc85036a2e7dc2e28d962550afe1cbfc07c40e454256a859"}, - {file = "lxml-5.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:384aacddf2e5813a36495233b64cb96b1949da72bef933918ba5c84e06af8f0e"}, - {file = "lxml-5.3.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:874a216bf6afaf97c263b56371434e47e2c652d215788396f60477540298218f"}, - {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65ab5685d56914b9a2a34d67dd5488b83213d680b0c5d10b47f81da5a16b0b0e"}, - {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aac0bbd3e8dd2d9c45ceb82249e8bdd3ac99131a32b4d35c8af3cc9db1657179"}, - {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b369d3db3c22ed14c75ccd5af429086f166a19627e84a8fdade3f8f31426e52a"}, - {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c24037349665434f375645fa9d1f5304800cec574d0310f618490c871fd902b3"}, - {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:62d172f358f33a26d6b41b28c170c63886742f5b6772a42b59b4f0fa10526cb1"}, - {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:c1f794c02903c2824fccce5b20c339a1a14b114e83b306ff11b597c5f71a1c8d"}, - {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:5d6a6972b93c426ace71e0be9a6f4b2cfae9b1baed2eed2006076a746692288c"}, - {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:3879cc6ce938ff4eb4900d901ed63555c778731a96365e53fadb36437a131a99"}, - {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:74068c601baff6ff021c70f0935b0c7bc528baa8ea210c202e03757c68c5a4ff"}, - {file = 
"lxml-5.3.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ecd4ad8453ac17bc7ba3868371bffb46f628161ad0eefbd0a855d2c8c32dd81a"}, - {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:7e2f58095acc211eb9d8b5771bf04df9ff37d6b87618d1cbf85f92399c98dae8"}, - {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e63601ad5cd8f860aa99d109889b5ac34de571c7ee902d6812d5d9ddcc77fa7d"}, - {file = "lxml-5.3.0-cp312-cp312-win32.whl", hash = "sha256:17e8d968d04a37c50ad9c456a286b525d78c4a1c15dd53aa46c1d8e06bf6fa30"}, - {file = "lxml-5.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:c1a69e58a6bb2de65902051d57fde951febad631a20a64572677a1052690482f"}, - {file = "lxml-5.3.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8c72e9563347c7395910de6a3100a4840a75a6f60e05af5e58566868d5eb2d6a"}, - {file = "lxml-5.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e92ce66cd919d18d14b3856906a61d3f6b6a8500e0794142338da644260595cd"}, - {file = "lxml-5.3.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d04f064bebdfef9240478f7a779e8c5dc32b8b7b0b2fc6a62e39b928d428e51"}, - {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c2fb570d7823c2bbaf8b419ba6e5662137f8166e364a8b2b91051a1fb40ab8b"}, - {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0c120f43553ec759f8de1fee2f4794452b0946773299d44c36bfe18e83caf002"}, - {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:562e7494778a69086f0312ec9689f6b6ac1c6b65670ed7d0267e49f57ffa08c4"}, - {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:423b121f7e6fa514ba0c7918e56955a1d4470ed35faa03e3d9f0e3baa4c7e492"}, - {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:c00f323cc00576df6165cc9d21a4c21285fa6b9989c5c39830c3903dc4303ef3"}, - {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_ppc64le.whl", hash = "sha256:1fdc9fae8dd4c763e8a31e7630afef517eab9f5d5d31a278df087f307bf601f4"}, - {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_s390x.whl", hash = "sha256:658f2aa69d31e09699705949b5fc4719cbecbd4a97f9656a232e7d6c7be1a367"}, - {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:1473427aff3d66a3fa2199004c3e601e6c4500ab86696edffdbc84954c72d832"}, - {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a87de7dd873bf9a792bf1e58b1c3887b9264036629a5bf2d2e6579fe8e73edff"}, - {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:0d7b36afa46c97875303a94e8f3ad932bf78bace9e18e603f2085b652422edcd"}, - {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:cf120cce539453ae086eacc0130a324e7026113510efa83ab42ef3fcfccac7fb"}, - {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:df5c7333167b9674aa8ae1d4008fa4bc17a313cc490b2cca27838bbdcc6bb15b"}, - {file = "lxml-5.3.0-cp313-cp313-win32.whl", hash = "sha256:c802e1c2ed9f0c06a65bc4ed0189d000ada8049312cfeab6ca635e39c9608957"}, - {file = "lxml-5.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:406246b96d552e0503e17a1006fd27edac678b3fcc9f1be71a2f94b4ff61528d"}, - {file = "lxml-5.3.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:8f0de2d390af441fe8b2c12626d103540b5d850d585b18fcada58d972b74a74e"}, - {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:1afe0a8c353746e610bd9031a630a95bcfb1a720684c3f2b36c4710a0a96528f"}, - {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56b9861a71575f5795bde89256e7467ece3d339c9b43141dbdd54544566b3b94"}, - {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:9fb81d2824dff4f2e297a276297e9031f46d2682cafc484f49de182aa5e5df99"}, - {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:2c226a06ecb8cdef28845ae976da407917542c5e6e75dcac7cc33eb04aaeb237"}, - {file = "lxml-5.3.0-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:7d3d1ca42870cdb6d0d29939630dbe48fa511c203724820fc0fd507b2fb46577"}, - {file = "lxml-5.3.0-cp36-cp36m-win32.whl", hash = "sha256:094cb601ba9f55296774c2d57ad68730daa0b13dc260e1f941b4d13678239e70"}, - {file = "lxml-5.3.0-cp36-cp36m-win_amd64.whl", hash = "sha256:eafa2c8658f4e560b098fe9fc54539f86528651f61849b22111a9b107d18910c"}, - {file = "lxml-5.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cb83f8a875b3d9b458cada4f880fa498646874ba4011dc974e071a0a84a1b033"}, - {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25f1b69d41656b05885aa185f5fdf822cb01a586d1b32739633679699f220391"}, - {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23e0553b8055600b3bf4a00b255ec5c92e1e4aebf8c2c09334f8368e8bd174d6"}, - {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ada35dd21dc6c039259596b358caab6b13f4db4d4a7f8665764d616daf9cc1d"}, - {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:81b4e48da4c69313192d8c8d4311e5d818b8be1afe68ee20f6385d0e96fc9512"}, - {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:2bc9fd5ca4729af796f9f59cd8ff160fe06a474da40aca03fcc79655ddee1a8b"}, - {file = "lxml-5.3.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:07da23d7ee08577760f0a71d67a861019103e4812c87e2fab26b039054594cc5"}, - {file = "lxml-5.3.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:ea2e2f6f801696ad7de8aec061044d6c8c0dd4037608c7cab38a9a4d316bfb11"}, - {file = "lxml-5.3.0-cp37-cp37m-win32.whl", hash = "sha256:5c54afdcbb0182d06836cc3d1be921e540be3ebdf8b8a51ee3ef987537455f84"}, - {file = "lxml-5.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:f2901429da1e645ce548bf9171784c0f74f0718c3f6150ce166be39e4dd66c3e"}, - {file = "lxml-5.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c56a1d43b2f9ee4786e4658c7903f05da35b923fb53c11025712562d5cc02753"}, - {file = "lxml-5.3.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ee8c39582d2652dcd516d1b879451500f8db3fe3607ce45d7c5957ab2596040"}, - {file = "lxml-5.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fdf3a3059611f7585a78ee10399a15566356116a4288380921a4b598d807a22"}, - {file = "lxml-5.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:146173654d79eb1fc97498b4280c1d3e1e5d58c398fa530905c9ea50ea849b22"}, - {file = "lxml-5.3.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:0a7056921edbdd7560746f4221dca89bb7a3fe457d3d74267995253f46343f15"}, - {file = "lxml-5.3.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:9e4b47ac0f5e749cfc618efdf4726269441014ae1d5583e047b452a32e221920"}, - {file = "lxml-5.3.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = 
"sha256:f914c03e6a31deb632e2daa881fe198461f4d06e57ac3d0e05bbcab8eae01945"}, - {file = "lxml-5.3.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:213261f168c5e1d9b7535a67e68b1f59f92398dd17a56d934550837143f79c42"}, - {file = "lxml-5.3.0-cp38-cp38-win32.whl", hash = "sha256:218c1b2e17a710e363855594230f44060e2025b05c80d1f0661258142b2add2e"}, - {file = "lxml-5.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:315f9542011b2c4e1d280e4a20ddcca1761993dda3afc7a73b01235f8641e903"}, - {file = "lxml-5.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1ffc23010330c2ab67fac02781df60998ca8fe759e8efde6f8b756a20599c5de"}, - {file = "lxml-5.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2b3778cb38212f52fac9fe913017deea2fdf4eb1a4f8e4cfc6b009a13a6d3fcc"}, - {file = "lxml-5.3.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b0c7a688944891086ba192e21c5229dea54382f4836a209ff8d0a660fac06be"}, - {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:747a3d3e98e24597981ca0be0fd922aebd471fa99d0043a3842d00cdcad7ad6a"}, - {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86a6b24b19eaebc448dc56b87c4865527855145d851f9fc3891673ff97950540"}, - {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b11a5d918a6216e521c715b02749240fb07ae5a1fefd4b7bf12f833bc8b4fe70"}, - {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68b87753c784d6acb8a25b05cb526c3406913c9d988d51f80adecc2b0775d6aa"}, - {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:109fa6fede314cc50eed29e6e56c540075e63d922455346f11e4d7a036d2b8cf"}, - {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_ppc64le.whl", hash = "sha256:02ced472497b8362c8e902ade23e3300479f4f43e45f4105c85ef43b8db85229"}, - {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_s390x.whl", hash = "sha256:6b038cc86b285e4f9fea2ba5ee76e89f21ed1ea898e287dc277a25884f3a7dfe"}, - {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:7437237c6a66b7ca341e868cda48be24b8701862757426852c9b3186de1da8a2"}, - {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7f41026c1d64043a36fda21d64c5026762d53a77043e73e94b71f0521939cc71"}, - {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:482c2f67761868f0108b1743098640fbb2a28a8e15bf3f47ada9fa59d9fe08c3"}, - {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:1483fd3358963cc5c1c9b122c80606a3a79ee0875bcac0204149fa09d6ff2727"}, - {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2dec2d1130a9cda5b904696cec33b2cfb451304ba9081eeda7f90f724097300a"}, - {file = "lxml-5.3.0-cp39-cp39-win32.whl", hash = "sha256:a0eabd0a81625049c5df745209dc7fcef6e2aea7793e5f003ba363610aa0a3ff"}, - {file = "lxml-5.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:89e043f1d9d341c52bf2af6d02e6adde62e0a46e6755d5eb60dc6e4f0b8aeca2"}, - {file = "lxml-5.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7b1cd427cb0d5f7393c31b7496419da594fe600e6fdc4b105a54f82405e6626c"}, - {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:51806cfe0279e06ed8500ce19479d757db42a30fd509940b1701be9c86a5ff9a"}, - {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee70d08fd60c9565ba8190f41a46a54096afa0eeb8f76bd66f2c25d3b1b83005"}, - {file = 
"lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:8dc2c0395bea8254d8daebc76dcf8eb3a95ec2a46fa6fae5eaccee366bfe02ce"}, - {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6ba0d3dcac281aad8a0e5b14c7ed6f9fa89c8612b47939fc94f80b16e2e9bc83"}, - {file = "lxml-5.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:6e91cf736959057f7aac7adfc83481e03615a8e8dd5758aa1d95ea69e8931dba"}, - {file = "lxml-5.3.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:94d6c3782907b5e40e21cadf94b13b0842ac421192f26b84c45f13f3c9d5dc27"}, - {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c300306673aa0f3ed5ed9372b21867690a17dba38c68c44b287437c362ce486b"}, - {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78d9b952e07aed35fe2e1a7ad26e929595412db48535921c5013edc8aa4a35ce"}, - {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:01220dca0d066d1349bd6a1726856a78f7929f3878f7e2ee83c296c69495309e"}, - {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:2d9b8d9177afaef80c53c0a9e30fa252ff3036fb1c6494d427c066a4ce6a282f"}, - {file = "lxml-5.3.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:20094fc3f21ea0a8669dc4c61ed7fa8263bd37d97d93b90f28fc613371e7a875"}, - {file = "lxml-5.3.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ace2c2326a319a0bb8a8b0e5b570c764962e95818de9f259ce814ee666603f19"}, - {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92e67a0be1639c251d21e35fe74df6bcc40cba445c2cda7c4a967656733249e2"}, - {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd5350b55f9fecddc51385463a4f67a5da829bc741e38cf689f38ec9023f54ab"}, - {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:4c1fefd7e3d00921c44dc9ca80a775af49698bbfd92ea84498e56acffd4c5469"}, - {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:71a8dd38fbd2f2319136d4ae855a7078c69c9a38ae06e0c17c73fd70fc6caad8"}, - {file = "lxml-5.3.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:97acf1e1fd66ab53dacd2c35b319d7e548380c2e9e8c54525c6e76d21b1ae3b1"}, - {file = "lxml-5.3.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:68934b242c51eb02907c5b81d138cb977b2129a0a75a8f8b60b01cb8586c7b21"}, - {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b710bc2b8292966b23a6a0121f7a6c51d45d2347edcc75f016ac123b8054d3f2"}, - {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18feb4b93302091b1541221196a2155aa296c363fd233814fa11e181adebc52f"}, - {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:3eb44520c4724c2e1a57c0af33a379eee41792595023f367ba3952a2d96c2aab"}, - {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:609251a0ca4770e5a8768ff902aa02bf636339c5a93f9349b48eb1f606f7f3e9"}, - {file = "lxml-5.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:516f491c834eb320d6c843156440fe7fc0d50b33e44387fcec5b02f0bc118a4c"}, - {file = "lxml-5.3.0.tar.gz", hash = "sha256:4e109ca30d1edec1ac60cdbe341905dc3b8f55b16855e03a54aaf59e51ec8c6f"}, -] - -[package.extras] -cssselect = ["cssselect (>=0.7)"] -html-clean = ["lxml-html-clean"] -html5 = ["html5lib"] -htmlsoup = ["BeautifulSoup4"] -source = ["Cython (>=3.0.11)"] - -[[package]] -name = "mako" 
-version = "1.3.5" -description = "A super-fast templating language that borrows the best ideas from the existing templating languages." -optional = false -python-versions = ">=3.8" -files = [ - {file = "Mako-1.3.5-py3-none-any.whl", hash = "sha256:260f1dbc3a519453a9c856dedfe4beb4e50bd5a26d96386cb6c80856556bb91a"}, - {file = "Mako-1.3.5.tar.gz", hash = "sha256:48dbc20568c1d276a2698b36d968fa76161bf127194907ea6fc594fa81f943bc"}, -] - -[package.dependencies] -MarkupSafe = ">=0.9.2" - -[package.extras] -babel = ["Babel"] -lingua = ["lingua"] -testing = ["pytest"] - -[[package]] -name = "markdown-it-py" -version = "3.0.0" -description = "Python port of markdown-it. Markdown parsing, done right!" -optional = false -python-versions = ">=3.8" -files = [ - {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, - {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, -] - -[package.dependencies] -mdurl = ">=0.1,<1.0" - -[package.extras] -benchmarking = ["psutil", "pytest", "pytest-benchmark"] -code-style = ["pre-commit (>=3.0,<4.0)"] -compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] -linkify = ["linkify-it-py (>=1,<3)"] -plugins = ["mdit-py-plugins"] -profiling = ["gprof2dot"] -rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] -testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] - -[[package]] -name = "markupsafe" -version = "2.1.5" -description = "Safely add untrusted strings to HTML/XML markup." -optional = false -python-versions = ">=3.7" -files = [ - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, - {file = 
"MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, - {file = 
"MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, - {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, -] - -[[package]] -name = "marshmallow" -version = "3.21.3" -description = "A lightweight library for converting complex datatypes to and from native Python datatypes." -optional = false -python-versions = ">=3.8" -files = [ - {file = "marshmallow-3.21.3-py3-none-any.whl", hash = "sha256:86ce7fb914aa865001a4b2092c4c2872d13bc347f3d42673272cabfdbad386f1"}, - {file = "marshmallow-3.21.3.tar.gz", hash = "sha256:4f57c5e050a54d66361e826f94fba213eb10b67b2fdb02c3e0343ce207ba1662"}, -] - -[package.dependencies] -packaging = ">=17.0" - -[package.extras] -dev = ["marshmallow[tests]", "pre-commit (>=3.5,<4.0)", "tox"] -docs = ["alabaster (==0.7.16)", "autodocsumm (==0.2.12)", "sphinx (==7.3.7)", "sphinx-issues (==4.1.0)", "sphinx-version-warning (==1.1.2)"] -tests = ["pytest", "pytz", "simplejson"] - -[[package]] -name = "matplotlib-inline" -version = "0.1.7" -description = "Inline Matplotlib backend for Jupyter" -optional = false -python-versions = ">=3.8" -files = [ - {file = "matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca"}, - {file = "matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90"}, -] - -[package.dependencies] -traitlets = "*" - -[[package]] -name = "mdurl" -version = "0.1.2" -description = "Markdown URL utilities" -optional = false -python-versions = ">=3.7" -files = [ - {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, - {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, -] - -[[package]] -name = "mem0ai" -version = "0.0.9" -description = "Long-term memory for AI Agents" -optional = false -python-versions = "<4.0,>=3.8" -files = [ - {file = "mem0ai-0.0.9-py3-none-any.whl", hash = "sha256:d4de435729af4fd3d597d022ffb2af89a0630d6c3b4769792bbe27d2ce816858"}, - {file = "mem0ai-0.0.9.tar.gz", hash = "sha256:e4374d5d04aa3f543cd3325f700e4b62f5358ae1c6fa5c44b2ff790c10c4e5f1"}, -] - -[package.dependencies] -openai = ">=1.33.0,<2.0.0" -posthog = ">=3.5.0,<4.0.0" -pydantic = ">=2.7.3,<3.0.0" -qdrant-client = ">=1.9.1,<2.0.0" - -[[package]] -name = "mmh3" -version = "4.1.0" -description = "Python extension for MurmurHash (MurmurHash3), a set of fast and robust hash functions." 
-optional = false -python-versions = "*" -files = [ - {file = "mmh3-4.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:be5ac76a8b0cd8095784e51e4c1c9c318c19edcd1709a06eb14979c8d850c31a"}, - {file = "mmh3-4.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:98a49121afdfab67cd80e912b36404139d7deceb6773a83620137aaa0da5714c"}, - {file = "mmh3-4.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5259ac0535874366e7d1a5423ef746e0d36a9e3c14509ce6511614bdc5a7ef5b"}, - {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5950827ca0453a2be357696da509ab39646044e3fa15cad364eb65d78797437"}, - {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1dd0f652ae99585b9dd26de458e5f08571522f0402155809fd1dc8852a613a39"}, - {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:99d25548070942fab1e4a6f04d1626d67e66d0b81ed6571ecfca511f3edf07e6"}, - {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53db8d9bad3cb66c8f35cbc894f336273f63489ce4ac416634932e3cbe79eb5b"}, - {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75da0f615eb55295a437264cc0b736753f830b09d102aa4c2a7d719bc445ec05"}, - {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b926b07fd678ea84b3a2afc1fa22ce50aeb627839c44382f3d0291e945621e1a"}, - {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c5b053334f9b0af8559d6da9dc72cef0a65b325ebb3e630c680012323c950bb6"}, - {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:5bf33dc43cd6de2cb86e0aa73a1cc6530f557854bbbe5d59f41ef6de2e353d7b"}, - {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fa7eacd2b830727ba3dd65a365bed8a5c992ecd0c8348cf39a05cc77d22f4970"}, - {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:42dfd6742b9e3eec599f85270617debfa0bbb913c545bb980c8a4fa7b2d047da"}, - {file = "mmh3-4.1.0-cp310-cp310-win32.whl", hash = "sha256:2974ad343f0d39dcc88e93ee6afa96cedc35a9883bc067febd7ff736e207fa47"}, - {file = "mmh3-4.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:74699a8984ded645c1a24d6078351a056f5a5f1fe5838870412a68ac5e28d865"}, - {file = "mmh3-4.1.0-cp310-cp310-win_arm64.whl", hash = "sha256:f0dc874cedc23d46fc488a987faa6ad08ffa79e44fb08e3cd4d4cf2877c00a00"}, - {file = "mmh3-4.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3280a463855b0eae64b681cd5b9ddd9464b73f81151e87bb7c91a811d25619e6"}, - {file = "mmh3-4.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:97ac57c6c3301769e757d444fa7c973ceb002cb66534b39cbab5e38de61cd896"}, - {file = "mmh3-4.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a7b6502cdb4dbd880244818ab363c8770a48cdccecf6d729ade0241b736b5ec0"}, - {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52ba2da04671a9621580ddabf72f06f0e72c1c9c3b7b608849b58b11080d8f14"}, - {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5a5fef4c4ecc782e6e43fbeab09cff1bac82c998a1773d3a5ee6a3605cde343e"}, - {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5135358a7e00991f73b88cdc8eda5203bf9de22120d10a834c5761dbeb07dd13"}, - {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:cff9ae76a54f7c6fe0167c9c4028c12c1f6de52d68a31d11b6790bb2ae685560"}, - {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6f02576a4d106d7830ca90278868bf0983554dd69183b7bbe09f2fcd51cf54f"}, - {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:073d57425a23721730d3ff5485e2da489dd3c90b04e86243dd7211f889898106"}, - {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:71e32ddec7f573a1a0feb8d2cf2af474c50ec21e7a8263026e8d3b4b629805db"}, - {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7cbb20b29d57e76a58b40fd8b13a9130db495a12d678d651b459bf61c0714cea"}, - {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:a42ad267e131d7847076bb7e31050f6c4378cd38e8f1bf7a0edd32f30224d5c9"}, - {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4a013979fc9390abadc445ea2527426a0e7a4495c19b74589204f9b71bcaafeb"}, - {file = "mmh3-4.1.0-cp311-cp311-win32.whl", hash = "sha256:1d3b1cdad7c71b7b88966301789a478af142bddcb3a2bee563f7a7d40519a00f"}, - {file = "mmh3-4.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:0dc6dc32eb03727467da8e17deffe004fbb65e8b5ee2b502d36250d7a3f4e2ec"}, - {file = "mmh3-4.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:9ae3a5c1b32dda121c7dc26f9597ef7b01b4c56a98319a7fe86c35b8bc459ae6"}, - {file = "mmh3-4.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0033d60c7939168ef65ddc396611077a7268bde024f2c23bdc283a19123f9e9c"}, - {file = "mmh3-4.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d6af3e2287644b2b08b5924ed3a88c97b87b44ad08e79ca9f93d3470a54a41c5"}, - {file = "mmh3-4.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d82eb4defa245e02bb0b0dc4f1e7ee284f8d212633389c91f7fba99ba993f0a2"}, - {file = "mmh3-4.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba245e94b8d54765e14c2d7b6214e832557e7856d5183bc522e17884cab2f45d"}, - {file = "mmh3-4.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb04e2feeabaad6231e89cd43b3d01a4403579aa792c9ab6fdeef45cc58d4ec0"}, - {file = "mmh3-4.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1e3b1a27def545ce11e36158ba5d5390cdbc300cfe456a942cc89d649cf7e3b2"}, - {file = "mmh3-4.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce0ab79ff736d7044e5e9b3bfe73958a55f79a4ae672e6213e92492ad5e734d5"}, - {file = "mmh3-4.1.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b02268be6e0a8eeb8a924d7db85f28e47344f35c438c1e149878bb1c47b1cd3"}, - {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:deb887f5fcdaf57cf646b1e062d56b06ef2f23421c80885fce18b37143cba828"}, - {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:99dd564e9e2b512eb117bd0cbf0f79a50c45d961c2a02402787d581cec5448d5"}, - {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:08373082dfaa38fe97aa78753d1efd21a1969e51079056ff552e687764eafdfe"}, - {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:54b9c6a2ea571b714e4fe28d3e4e2db37abfd03c787a58074ea21ee9a8fd1740"}, - {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a7b1edf24c69e3513f879722b97ca85e52f9032f24a52284746877f6a7304086"}, - {file = "mmh3-4.1.0-cp312-cp312-win32.whl", hash = 
"sha256:411da64b951f635e1e2284b71d81a5a83580cea24994b328f8910d40bed67276"}, - {file = "mmh3-4.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:bebc3ecb6ba18292e3d40c8712482b4477abd6981c2ebf0e60869bd90f8ac3a9"}, - {file = "mmh3-4.1.0-cp312-cp312-win_arm64.whl", hash = "sha256:168473dd608ade6a8d2ba069600b35199a9af837d96177d3088ca91f2b3798e3"}, - {file = "mmh3-4.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:372f4b7e1dcde175507640679a2a8790185bb71f3640fc28a4690f73da986a3b"}, - {file = "mmh3-4.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:438584b97f6fe13e944faf590c90fc127682b57ae969f73334040d9fa1c7ffa5"}, - {file = "mmh3-4.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6e27931b232fc676675fac8641c6ec6b596daa64d82170e8597f5a5b8bdcd3b6"}, - {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:571a92bad859d7b0330e47cfd1850b76c39b615a8d8e7aa5853c1f971fd0c4b1"}, - {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a69d6afe3190fa08f9e3a58e5145549f71f1f3fff27bd0800313426929c7068"}, - {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:afb127be0be946b7630220908dbea0cee0d9d3c583fa9114a07156f98566dc28"}, - {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:940d86522f36348ef1a494cbf7248ab3f4a1638b84b59e6c9e90408bd11ad729"}, - {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3dcccc4935686619a8e3d1f7b6e97e3bd89a4a796247930ee97d35ea1a39341"}, - {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:01bb9b90d61854dfc2407c5e5192bfb47222d74f29d140cb2dd2a69f2353f7cc"}, - {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:bcb1b8b951a2c0b0fb8a5426c62a22557e2ffc52539e0a7cc46eb667b5d606a9"}, - {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6477a05d5e5ab3168e82e8b106e316210ac954134f46ec529356607900aea82a"}, - {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:da5892287e5bea6977364b15712a2573c16d134bc5fdcdd4cf460006cf849278"}, - {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:99180d7fd2327a6fffbaff270f760576839dc6ee66d045fa3a450f3490fda7f5"}, - {file = "mmh3-4.1.0-cp38-cp38-win32.whl", hash = "sha256:9b0d4f3949913a9f9a8fb1bb4cc6ecd52879730aab5ff8c5a3d8f5b593594b73"}, - {file = "mmh3-4.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:598c352da1d945108aee0c3c3cfdd0e9b3edef74108f53b49d481d3990402169"}, - {file = "mmh3-4.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:475d6d1445dd080f18f0f766277e1237fa2914e5fe3307a3b2a3044f30892103"}, - {file = "mmh3-4.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5ca07c41e6a2880991431ac717c2a049056fff497651a76e26fc22224e8b5732"}, - {file = "mmh3-4.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0ebe052fef4bbe30c0548d12ee46d09f1b69035ca5208a7075e55adfe091be44"}, - {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eaefd42e85afb70f2b855a011f7b4d8a3c7e19c3f2681fa13118e4d8627378c5"}, - {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0ae43caae5a47afe1b63a1ae3f0986dde54b5fb2d6c29786adbfb8edc9edfb"}, - {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6218666f74c8c013c221e7f5f8a693ac9cf68e5ac9a03f2373b32d77c48904de"}, - {file = 
"mmh3-4.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ac59294a536ba447b5037f62d8367d7d93b696f80671c2c45645fa9f1109413c"}, - {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:086844830fcd1e5c84fec7017ea1ee8491487cfc877847d96f86f68881569d2e"}, - {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e42b38fad664f56f77f6fbca22d08450f2464baa68acdbf24841bf900eb98e87"}, - {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d08b790a63a9a1cde3b5d7d733ed97d4eb884bfbc92f075a091652d6bfd7709a"}, - {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:73ea4cc55e8aea28c86799ecacebca09e5f86500414870a8abaedfcbaf74d288"}, - {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:f90938ff137130e47bcec8dc1f4ceb02f10178c766e2ef58a9f657ff1f62d124"}, - {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:aa1f13e94b8631c8cd53259250556edcf1de71738936b60febba95750d9632bd"}, - {file = "mmh3-4.1.0-cp39-cp39-win32.whl", hash = "sha256:a3b680b471c181490cf82da2142029edb4298e1bdfcb67c76922dedef789868d"}, - {file = "mmh3-4.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:fefef92e9c544a8dbc08f77a8d1b6d48006a750c4375bbcd5ff8199d761e263b"}, - {file = "mmh3-4.1.0-cp39-cp39-win_arm64.whl", hash = "sha256:8e2c1f6a2b41723a4f82bd5a762a777836d29d664fc0095f17910bea0adfd4a6"}, - {file = "mmh3-4.1.0.tar.gz", hash = "sha256:a1cf25348b9acd229dda464a094d6170f47d2850a1fcb762a3b6172d2ce6ca4a"}, -] - -[package.extras] -test = ["mypy (>=1.0)", "pytest (>=7.0.0)"] - -[[package]] -name = "monotonic" -version = "1.6" -description = "An implementation of time.monotonic() for Python 2 & < 3.3" -optional = false -python-versions = "*" -files = [ - {file = "monotonic-1.6-py2.py3-none-any.whl", hash = "sha256:68687e19a14f11f26d140dd5c86f3dba4bf5df58003000ed467e0e2a69bca96c"}, - {file = "monotonic-1.6.tar.gz", hash = "sha256:3a55207bcfed53ddd5c5bae174524062935efed17792e9de2ad0205ce9ad63f7"}, -] - -[[package]] -name = "mpmath" -version = "1.3.0" -description = "Python library for arbitrary-precision floating-point arithmetic" -optional = false -python-versions = "*" -files = [ - {file = "mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c"}, - {file = "mpmath-1.3.0.tar.gz", hash = "sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f"}, -] - -[package.extras] -develop = ["codecov", "pycodestyle", "pytest (>=4.6)", "pytest-cov", "wheel"] -docs = ["sphinx"] -gmpy = ["gmpy2 (>=2.1.0a4)"] -tests = ["pytest (>=4.6)"] - -[[package]] -name = "multidict" -version = "6.0.5" -description = "multidict implementation" -optional = false -python-versions = ">=3.7" -files = [ - {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"}, - {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"}, - {file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"}, - {file = 
"multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"}, - {file = "multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"}, - {file = "multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1"}, - {file = 
"multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e"}, - {file = "multidict-6.0.5-cp311-cp311-win32.whl", hash = "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c"}, - {file = "multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea"}, - {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e"}, - {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b"}, - {file = "multidict-6.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda"}, - {file = "multidict-6.0.5-cp312-cp312-win32.whl", hash = "sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5"}, - {file = "multidict-6.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556"}, - {file = "multidict-6.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:19fe01cea168585ba0f678cad6f58133db2aa14eccaf22f88e4a6dccadfad8b3"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf7a982604375a8d49b6cc1b781c1747f243d91b81035a9b43a2126c04766f5"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:107c0cdefe028703fb5dafe640a409cb146d44a6ae201e55b35a4af8e95457dd"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:403c0911cd5d5791605808b942c88a8155c2592e05332d2bf78f18697a5fa15e"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aeaf541ddbad8311a87dd695ed9642401131ea39ad7bc8cf3ef3967fd093b626"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4972624066095e52b569e02b5ca97dbd7a7ddd4294bf4e7247d52635630dd83"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d946b0a9eb8aaa590df1fe082cee553ceab173e6cb5b03239716338629c50c7a"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b55358304d7a73d7bdf5de62494aaf70bd33015831ffd98bc498b433dfe5b10c"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:a3145cb08d8625b2d3fee1b2d596a8766352979c9bffe5d7833e0503d0f0b5e5"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d65f25da8e248202bd47445cec78e0025c0fe7582b23ec69c3b27a640dd7a8e3"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c9bf56195c6bbd293340ea82eafd0071cb3d450c703d2c93afb89f93b8386ccc"}, - {file = "multidict-6.0.5-cp37-cp37m-win32.whl", hash = "sha256:69db76c09796b313331bb7048229e3bee7928eb62bab5e071e9f7fcc4879caee"}, - {file = "multidict-6.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:fce28b3c8a81b6b36dfac9feb1de115bab619b3c13905b419ec71d03a3fc1423"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76f067f5121dcecf0d63a67f29080b26c43c71a98b10c701b0677e4a065fbd54"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b82cc8ace10ab5bd93235dfaab2021c70637005e1ac787031f4d1da63d493c1d"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cb241881eefd96b46f89b1a056187ea8e9ba14ab88ba632e68d7a2ecb7aadf7"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e94e6912639a02ce173341ff62cc1201232ab86b8a8fcc05572741a5dc7d93"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09a892e4a9fb47331da06948690ae38eaa2426de97b4ccbfafbdcbe5c8f37ff8"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55205d03e8a598cfc688c71ca8ea5f66447164efff8869517f175ea632c7cb7b"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37b15024f864916b4951adb95d3a80c9431299080341ab9544ed148091b53f50"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2a1dee728b52b33eebff5072817176c172050d44d67befd681609b4746e1c2e"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:edd08e6f2f1a390bf137080507e44ccc086353c8e98c657e666c017718561b89"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:60d698e8179a42ec85172d12f50b1668254628425a6bd611aba022257cac1386"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3d25f19500588cbc47dc19081d78131c32637c25804df8414463ec908631e453"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4cc0ef8b962ac7a5e62b9e826bd0cd5040e7d401bc45a6835910ed699037a461"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:eca2e9d0cc5a889850e9bbd68e98314ada174ff6ccd1129500103df7a94a7a44"}, - {file = "multidict-6.0.5-cp38-cp38-win32.whl", 
hash = "sha256:4a6a4f196f08c58c59e0b8ef8ec441d12aee4125a7d4f4fef000ccb22f8d7241"}, - {file = "multidict-6.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:0275e35209c27a3f7951e1ce7aaf93ce0d163b28948444bec61dd7badc6d3f8c"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e7be68734bd8c9a513f2b0cfd508802d6609da068f40dc57d4e3494cefc92929"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d9ea7a7e779d7a3561aade7d596649fbecfa5c08a7674b11b423783217933f9"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ea1456df2a27c73ce51120fa2f519f1bea2f4a03a917f4a43c8707cf4cbbae1a"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf590b134eb70629e350691ecca88eac3e3b8b3c86992042fb82e3cb1830d5e1"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c0631926c4f58e9a5ccce555ad7747d9a9f8b10619621f22f9635f069f6233e"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dce1c6912ab9ff5f179eaf6efe7365c1f425ed690b03341911bf4939ef2f3046"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0868d64af83169e4d4152ec612637a543f7a336e4a307b119e98042e852ad9c"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141b43360bfd3bdd75f15ed811850763555a251e38b2405967f8e25fb43f7d40"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7df704ca8cf4a073334e0427ae2345323613e4df18cc224f647f251e5e75a527"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6214c5a5571802c33f80e6c84713b2c79e024995b9c5897f794b43e714daeec9"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd6c8fca38178e12c00418de737aef1261576bd1b6e8c6134d3e729a4e858b38"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e02021f87a5b6932fa6ce916ca004c4d441509d33bbdbeca70d05dff5e9d2479"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ebd8d160f91a764652d3e51ce0d2956b38efe37c9231cd82cfc0bed2e40b581c"}, - {file = "multidict-6.0.5-cp39-cp39-win32.whl", hash = "sha256:04da1bb8c8dbadf2a18a452639771951c662c5ad03aefe4884775454be322c9b"}, - {file = "multidict-6.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:d6f6d4f185481c9669b9447bf9d9cf3b95a0e9df9d169bbc17e363b7d5487755"}, - {file = "multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7"}, - {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"}, -] - -[[package]] -name = "multiprocess" -version = "0.70.16" -description = "better multiprocessing and multithreading in Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "multiprocess-0.70.16-pp310-pypy310_pp73-macosx_10_13_x86_64.whl", hash = "sha256:476887be10e2f59ff183c006af746cb6f1fd0eadcfd4ef49e605cbe2659920ee"}, - {file = "multiprocess-0.70.16-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d951bed82c8f73929ac82c61f01a7b5ce8f3e5ef40f5b52553b4f547ce2b08ec"}, - {file = "multiprocess-0.70.16-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:37b55f71c07e2d741374998c043b9520b626a8dddc8b3129222ca4f1a06ef67a"}, - {file = "multiprocess-0.70.16-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = 
"sha256:ba8c31889abf4511c7308a8c52bb4a30b9d590e7f58523302ba00237702ca054"}, - {file = "multiprocess-0.70.16-pp39-pypy39_pp73-macosx_10_13_x86_64.whl", hash = "sha256:0dfd078c306e08d46d7a8d06fb120313d87aa43af60d66da43ffff40b44d2f41"}, - {file = "multiprocess-0.70.16-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e7b9d0f307cd9bd50851afaac0dba2cb6c44449efff697df7c7645f7d3f2be3a"}, - {file = "multiprocess-0.70.16-py310-none-any.whl", hash = "sha256:c4a9944c67bd49f823687463660a2d6daae94c289adff97e0f9d696ba6371d02"}, - {file = "multiprocess-0.70.16-py311-none-any.whl", hash = "sha256:af4cabb0dac72abfb1e794fa7855c325fd2b55a10a44628a3c1ad3311c04127a"}, - {file = "multiprocess-0.70.16-py312-none-any.whl", hash = "sha256:fc0544c531920dde3b00c29863377f87e1632601092ea2daca74e4beb40faa2e"}, - {file = "multiprocess-0.70.16-py38-none-any.whl", hash = "sha256:a71d82033454891091a226dfc319d0cfa8019a4e888ef9ca910372a446de4435"}, - {file = "multiprocess-0.70.16-py39-none-any.whl", hash = "sha256:a0bafd3ae1b732eac64be2e72038231c1ba97724b60b09400d68f229fcc2fbf3"}, - {file = "multiprocess-0.70.16.tar.gz", hash = "sha256:161af703d4652a0e1410be6abccecde4a7ddffd19341be0a7011b94aeb171ac1"}, -] - -[package.dependencies] -dill = ">=0.3.8" - -[[package]] -name = "mypy" -version = "1.11.1" -description = "Optional static typing for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "mypy-1.11.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a32fc80b63de4b5b3e65f4be82b4cfa362a46702672aa6a0f443b4689af7008c"}, - {file = "mypy-1.11.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c1952f5ea8a5a959b05ed5f16452fddadbaae48b5d39235ab4c3fc444d5fd411"}, - {file = "mypy-1.11.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1e30dc3bfa4e157e53c1d17a0dad20f89dc433393e7702b813c10e200843b03"}, - {file = "mypy-1.11.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2c63350af88f43a66d3dfeeeb8d77af34a4f07d760b9eb3a8697f0386c7590b4"}, - {file = "mypy-1.11.1-cp310-cp310-win_amd64.whl", hash = "sha256:a831671bad47186603872a3abc19634f3011d7f83b083762c942442d51c58d58"}, - {file = "mypy-1.11.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7b6343d338390bb946d449677726edf60102a1c96079b4f002dedff375953fc5"}, - {file = "mypy-1.11.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e4fe9f4e5e521b458d8feb52547f4bade7ef8c93238dfb5bbc790d9ff2d770ca"}, - {file = "mypy-1.11.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:886c9dbecc87b9516eff294541bf7f3655722bf22bb898ee06985cd7269898de"}, - {file = "mypy-1.11.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fca4a60e1dd9fd0193ae0067eaeeb962f2d79e0d9f0f66223a0682f26ffcc809"}, - {file = "mypy-1.11.1-cp311-cp311-win_amd64.whl", hash = "sha256:0bd53faf56de9643336aeea1c925012837432b5faf1701ccca7fde70166ccf72"}, - {file = "mypy-1.11.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f39918a50f74dc5969807dcfaecafa804fa7f90c9d60506835036cc1bc891dc8"}, - {file = "mypy-1.11.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0bc71d1fb27a428139dd78621953effe0d208aed9857cb08d002280b0422003a"}, - {file = "mypy-1.11.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b868d3bcff720dd7217c383474008ddabaf048fad8d78ed948bb4b624870a417"}, - {file = "mypy-1.11.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a707ec1527ffcdd1c784d0924bf5cb15cd7f22683b919668a04d2b9c34549d2e"}, - {file = 
"mypy-1.11.1-cp312-cp312-win_amd64.whl", hash = "sha256:64f4a90e3ea07f590c5bcf9029035cf0efeae5ba8be511a8caada1a4893f5525"}, - {file = "mypy-1.11.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:749fd3213916f1751fff995fccf20c6195cae941dc968f3aaadf9bb4e430e5a2"}, - {file = "mypy-1.11.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b639dce63a0b19085213ec5fdd8cffd1d81988f47a2dec7100e93564f3e8fb3b"}, - {file = "mypy-1.11.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c956b49c5d865394d62941b109728c5c596a415e9c5b2be663dd26a1ff07bc0"}, - {file = "mypy-1.11.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45df906e8b6804ef4b666af29a87ad9f5921aad091c79cc38e12198e220beabd"}, - {file = "mypy-1.11.1-cp38-cp38-win_amd64.whl", hash = "sha256:d44be7551689d9d47b7abc27c71257adfdb53f03880841a5db15ddb22dc63edb"}, - {file = "mypy-1.11.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2684d3f693073ab89d76da8e3921883019ea8a3ec20fa5d8ecca6a2db4c54bbe"}, - {file = "mypy-1.11.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:79c07eb282cb457473add5052b63925e5cc97dfab9812ee65a7c7ab5e3cb551c"}, - {file = "mypy-1.11.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11965c2f571ded6239977b14deebd3f4c3abd9a92398712d6da3a772974fad69"}, - {file = "mypy-1.11.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a2b43895a0f8154df6519706d9bca8280cda52d3d9d1514b2d9c3e26792a0b74"}, - {file = "mypy-1.11.1-cp39-cp39-win_amd64.whl", hash = "sha256:1a81cf05975fd61aec5ae16501a091cfb9f605dc3e3c878c0da32f250b74760b"}, - {file = "mypy-1.11.1-py3-none-any.whl", hash = "sha256:0624bdb940255d2dd24e829d99a13cfeb72e4e9031f9492148f410ed30bcab54"}, - {file = "mypy-1.11.1.tar.gz", hash = "sha256:f404a0b069709f18bbdb702eb3dcfe51910602995de00bd39cea3050b5772d08"}, -] - -[package.dependencies] -mypy-extensions = ">=1.0.0" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = ">=4.6.0" - -[package.extras] -dmypy = ["psutil (>=4.0)"] -install-types = ["pip"] -mypyc = ["setuptools (>=50)"] -reports = ["lxml"] - -[[package]] -name = "mypy-extensions" -version = "1.0.0" -description = "Type system extensions for programs checked with the mypy type checker." 
-optional = false -python-versions = ">=3.5" -files = [ - {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, - {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, -] - -[[package]] -name = "nanoid" -version = "2.0.0" -description = "A tiny, secure, URL-friendly, unique string ID generator for Python" -optional = false -python-versions = "*" -files = [ - {file = "nanoid-2.0.0-py3-none-any.whl", hash = "sha256:90aefa650e328cffb0893bbd4c236cfd44c48bc1f2d0b525ecc53c3187b653bb"}, - {file = "nanoid-2.0.0.tar.gz", hash = "sha256:5a80cad5e9c6e9ae3a41fa2fb34ae189f7cb420b2a5d8f82bd9d23466e4efa68"}, -] - -[[package]] -name = "nest-asyncio" -version = "1.6.0" -description = "Patch asyncio to allow nested event loops" -optional = false -python-versions = ">=3.5" -files = [ - {file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"}, - {file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"}, -] - -[[package]] -name = "nodeenv" -version = "1.9.1" -description = "Node.js virtual environment builder" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -files = [ - {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, - {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, -] - -[[package]] -name = "numpy" -version = "1.26.4" -description = "Fundamental package for array computing in Python" -optional = false -python-versions = ">=3.9" -files = [ - {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, - {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, - {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, - {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, - {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, - {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, - {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, - {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, - {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, - {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, - {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, - {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash 
= "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, - {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, - {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, - {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, - {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, - {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, - {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, - {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, - {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, - {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, - {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, - {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, - {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, - {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, - {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, - {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, - {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, - {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, - {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, - {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, - {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, - {file = "numpy-1.26.4.tar.gz", hash = 
"sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, -] - -[[package]] -name = "oauthlib" -version = "3.2.2" -description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" -optional = false -python-versions = ">=3.6" -files = [ - {file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"}, - {file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"}, -] - -[package.extras] -rsa = ["cryptography (>=3.0.0)"] -signals = ["blinker (>=1.4.0)"] -signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] - -[[package]] -name = "onnxruntime" -version = "1.18.1" -description = "ONNX Runtime is a runtime accelerator for Machine Learning models" -optional = false -python-versions = "*" -files = [ - {file = "onnxruntime-1.18.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:29ef7683312393d4ba04252f1b287d964bd67d5e6048b94d2da3643986c74d80"}, - {file = "onnxruntime-1.18.1-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fc706eb1df06ddf55776e15a30519fb15dda7697f987a2bbda4962845e3cec05"}, - {file = "onnxruntime-1.18.1-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7de69f5ced2a263531923fa68bbec52a56e793b802fcd81a03487b5e292bc3a"}, - {file = "onnxruntime-1.18.1-cp310-cp310-win32.whl", hash = "sha256:221e5b16173926e6c7de2cd437764492aa12b6811f45abd37024e7cf2ae5d7e3"}, - {file = "onnxruntime-1.18.1-cp310-cp310-win_amd64.whl", hash = "sha256:75211b619275199c861ee94d317243b8a0fcde6032e5a80e1aa9ded8ab4c6060"}, - {file = "onnxruntime-1.18.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:f26582882f2dc581b809cfa41a125ba71ad9e715738ec6402418df356969774a"}, - {file = "onnxruntime-1.18.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ef36f3a8b768506d02be349ac303fd95d92813ba3ba70304d40c3cd5c25d6a4c"}, - {file = "onnxruntime-1.18.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:170e711393e0618efa8ed27b59b9de0ee2383bd2a1f93622a97006a5ad48e434"}, - {file = "onnxruntime-1.18.1-cp311-cp311-win32.whl", hash = "sha256:9b6a33419b6949ea34e0dc009bc4470e550155b6da644571ecace4b198b0d88f"}, - {file = "onnxruntime-1.18.1-cp311-cp311-win_amd64.whl", hash = "sha256:5c1380a9f1b7788da742c759b6a02ba771fe1ce620519b2b07309decbd1a2fe1"}, - {file = "onnxruntime-1.18.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:31bd57a55e3f983b598675dfc7e5d6f0877b70ec9864b3cc3c3e1923d0a01919"}, - {file = "onnxruntime-1.18.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b9e03c4ba9f734500691a4d7d5b381cd71ee2f3ce80a1154ac8f7aed99d1ecaa"}, - {file = "onnxruntime-1.18.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:781aa9873640f5df24524f96f6070b8c550c66cb6af35710fd9f92a20b4bfbf6"}, - {file = "onnxruntime-1.18.1-cp312-cp312-win32.whl", hash = "sha256:3a2d9ab6254ca62adbb448222e630dc6883210f718065063518c8f93a32432be"}, - {file = "onnxruntime-1.18.1-cp312-cp312-win_amd64.whl", hash = "sha256:ad93c560b1c38c27c0275ffd15cd7f45b3ad3fc96653c09ce2931179982ff204"}, - {file = "onnxruntime-1.18.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:3b55dc9d3c67626388958a3eb7ad87eb7c70f75cb0f7ff4908d27b8b42f2475c"}, - {file = "onnxruntime-1.18.1-cp38-cp38-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:f80dbcfb6763cc0177a31168b29b4bd7662545b99a19e211de8c734b657e0669"}, - {file = "onnxruntime-1.18.1-cp38-cp38-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f1ff2c61a16d6c8631796c54139bafea41ee7736077a0fc64ee8ae59432f5c58"}, - {file = "onnxruntime-1.18.1-cp38-cp38-win32.whl", hash = "sha256:219855bd272fe0c667b850bf1a1a5a02499269a70d59c48e6f27f9c8bcb25d02"}, - {file = "onnxruntime-1.18.1-cp38-cp38-win_amd64.whl", hash = "sha256:afdf16aa607eb9a2c60d5ca2d5abf9f448e90c345b6b94c3ed14f4fb7e6a2d07"}, - {file = "onnxruntime-1.18.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:128df253ade673e60cea0955ec9d0e89617443a6d9ce47c2d79eb3f72a3be3de"}, - {file = "onnxruntime-1.18.1-cp39-cp39-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9839491e77e5c5a175cab3621e184d5a88925ee297ff4c311b68897197f4cde9"}, - {file = "onnxruntime-1.18.1-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ad3187c1faff3ac15f7f0e7373ef4788c582cafa655a80fdbb33eaec88976c66"}, - {file = "onnxruntime-1.18.1-cp39-cp39-win32.whl", hash = "sha256:34657c78aa4e0b5145f9188b550ded3af626651b15017bf43d280d7e23dbf195"}, - {file = "onnxruntime-1.18.1-cp39-cp39-win_amd64.whl", hash = "sha256:9c14fd97c3ddfa97da5feef595e2c73f14c2d0ec1d4ecbea99c8d96603c89589"}, -] - -[package.dependencies] -coloredlogs = "*" -flatbuffers = "*" -numpy = ">=1.21.6,<2.0" -packaging = "*" -protobuf = "*" -sympy = "*" - -[[package]] -name = "openai" -version = "1.40.6" -description = "The official Python library for the openai API" -optional = false -python-versions = ">=3.7.1" -files = [ - {file = "openai-1.40.6-py3-none-any.whl", hash = "sha256:b36372124a779381a420a34dd96f762baa748b6bdfaf83a6b9f2745f72ccc1c5"}, - {file = "openai-1.40.6.tar.gz", hash = "sha256:2239232bcb7f4bd4ce8e02544b5769618582411cf399816d96686d1b6c1e5c8d"}, -] - -[package.dependencies] -anyio = ">=3.5.0,<5" -distro = ">=1.7.0,<2" -httpx = ">=0.23.0,<1" -jiter = ">=0.4.0,<1" -pydantic = ">=1.9.0,<3" -sniffio = "*" -tqdm = ">4" -typing-extensions = ">=4.11,<5" - -[package.extras] -datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"] - -[[package]] -name = "opentelemetry-api" -version = "1.25.0" -description = "OpenTelemetry Python API" -optional = false -python-versions = ">=3.8" -files = [ - {file = "opentelemetry_api-1.25.0-py3-none-any.whl", hash = "sha256:757fa1aa020a0f8fa139f8959e53dec2051cc26b832e76fa839a6d76ecefd737"}, - {file = "opentelemetry_api-1.25.0.tar.gz", hash = "sha256:77c4985f62f2614e42ce77ee4c9da5fa5f0bc1e1821085e9a47533a9323ae869"}, -] - -[package.dependencies] -deprecated = ">=1.2.6" -importlib-metadata = ">=6.0,<=7.1" - -[[package]] -name = "opentelemetry-exporter-otlp-proto-common" -version = "1.25.0" -description = "OpenTelemetry Protobuf encoding" -optional = false -python-versions = ">=3.8" -files = [ - {file = "opentelemetry_exporter_otlp_proto_common-1.25.0-py3-none-any.whl", hash = "sha256:15637b7d580c2675f70246563363775b4e6de947871e01d0f4e3881d1848d693"}, - {file = "opentelemetry_exporter_otlp_proto_common-1.25.0.tar.gz", hash = "sha256:c93f4e30da4eee02bacd1e004eb82ce4da143a2f8e15b987a9f603e0a85407d3"}, -] - -[package.dependencies] -opentelemetry-proto = "1.25.0" - -[[package]] -name = "opentelemetry-exporter-otlp-proto-grpc" -version = "1.25.0" -description = "OpenTelemetry Collector Protobuf over gRPC Exporter" -optional = false -python-versions = ">=3.8" -files = [ - {file = "opentelemetry_exporter_otlp_proto_grpc-1.25.0-py3-none-any.whl", hash = 
"sha256:3131028f0c0a155a64c430ca600fd658e8e37043cb13209f0109db5c1a3e4eb4"}, - {file = "opentelemetry_exporter_otlp_proto_grpc-1.25.0.tar.gz", hash = "sha256:c0b1661415acec5af87625587efa1ccab68b873745ca0ee96b69bb1042087eac"}, -] - -[package.dependencies] -deprecated = ">=1.2.6" -googleapis-common-protos = ">=1.52,<2.0" -grpcio = ">=1.0.0,<2.0.0" -opentelemetry-api = ">=1.15,<2.0" -opentelemetry-exporter-otlp-proto-common = "1.25.0" -opentelemetry-proto = "1.25.0" -opentelemetry-sdk = ">=1.25.0,<1.26.0" - -[[package]] -name = "opentelemetry-exporter-otlp-proto-http" -version = "1.25.0" -description = "OpenTelemetry Collector Protobuf over HTTP Exporter" -optional = false -python-versions = ">=3.8" -files = [ - {file = "opentelemetry_exporter_otlp_proto_http-1.25.0-py3-none-any.whl", hash = "sha256:2eca686ee11b27acd28198b3ea5e5863a53d1266b91cda47c839d95d5e0541a6"}, - {file = "opentelemetry_exporter_otlp_proto_http-1.25.0.tar.gz", hash = "sha256:9f8723859e37c75183ea7afa73a3542f01d0fd274a5b97487ea24cb683d7d684"}, -] - -[package.dependencies] -deprecated = ">=1.2.6" -googleapis-common-protos = ">=1.52,<2.0" -opentelemetry-api = ">=1.15,<2.0" -opentelemetry-exporter-otlp-proto-common = "1.25.0" -opentelemetry-proto = "1.25.0" -opentelemetry-sdk = ">=1.25.0,<1.26.0" -requests = ">=2.7,<3.0" - -[[package]] -name = "opentelemetry-exporter-prometheus" -version = "0.46b0" -description = "Prometheus Metric Exporter for OpenTelemetry" -optional = false -python-versions = ">=3.8" -files = [ - {file = "opentelemetry_exporter_prometheus-0.46b0-py3-none-any.whl", hash = "sha256:caefdeea5c4d52b72479710d22cc4c469d42fa1dba2f4a2e46ae0ebeaf51cd96"}, - {file = "opentelemetry_exporter_prometheus-0.46b0.tar.gz", hash = "sha256:28cc6456a5d5bf49c34be2f1d22bbc761c36af9b32d909ea5b4c13fe6deac47b"}, -] - -[package.dependencies] -opentelemetry-api = ">=1.12,<2.0" -opentelemetry-sdk = ">=1.25.0,<1.26.0" -prometheus-client = ">=0.5.0,<1.0.0" - -[[package]] -name = "opentelemetry-instrumentation" -version = "0.46b0" -description = "Instrumentation Tools & Auto Instrumentation for OpenTelemetry Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "opentelemetry_instrumentation-0.46b0-py3-none-any.whl", hash = "sha256:89cd721b9c18c014ca848ccd11181e6b3fd3f6c7669e35d59c48dc527408c18b"}, - {file = "opentelemetry_instrumentation-0.46b0.tar.gz", hash = "sha256:974e0888fb2a1e01c38fbacc9483d024bb1132aad92d6d24e2e5543887a7adda"}, -] - -[package.dependencies] -opentelemetry-api = ">=1.4,<2.0" -setuptools = ">=16.0" -wrapt = ">=1.0.0,<2.0.0" - -[[package]] -name = "opentelemetry-instrumentation-asgi" -version = "0.46b0" -description = "ASGI instrumentation for OpenTelemetry" -optional = false -python-versions = ">=3.8" -files = [ - {file = "opentelemetry_instrumentation_asgi-0.46b0-py3-none-any.whl", hash = "sha256:f13c55c852689573057837a9500aeeffc010c4ba59933c322e8f866573374759"}, - {file = "opentelemetry_instrumentation_asgi-0.46b0.tar.gz", hash = "sha256:02559f30cf4b7e2a737ab17eb52aa0779bcf4cc06573064f3e2cb4dcc7d3040a"}, -] - -[package.dependencies] -asgiref = ">=3.0,<4.0" -opentelemetry-api = ">=1.12,<2.0" -opentelemetry-instrumentation = "0.46b0" -opentelemetry-semantic-conventions = "0.46b0" -opentelemetry-util-http = "0.46b0" - -[package.extras] -instruments = ["asgiref (>=3.0,<4.0)"] - -[[package]] -name = "opentelemetry-instrumentation-fastapi" -version = "0.46b0" -description = "OpenTelemetry FastAPI Instrumentation" -optional = false -python-versions = ">=3.8" -files = [ - {file = 
"opentelemetry_instrumentation_fastapi-0.46b0-py3-none-any.whl", hash = "sha256:e0f5d150c6c36833dd011f0e6ef5ede6d7406c1aed0c7c98b2d3b38a018d1b33"}, - {file = "opentelemetry_instrumentation_fastapi-0.46b0.tar.gz", hash = "sha256:928a883a36fc89f9702f15edce43d1a7104da93d740281e32d50ffd03dbb4365"}, -] - -[package.dependencies] -opentelemetry-api = ">=1.12,<2.0" -opentelemetry-instrumentation = "0.46b0" -opentelemetry-instrumentation-asgi = "0.46b0" -opentelemetry-semantic-conventions = "0.46b0" -opentelemetry-util-http = "0.46b0" - -[package.extras] -instruments = ["fastapi (>=0.58,<1.0)"] - -[[package]] -name = "opentelemetry-proto" -version = "1.25.0" -description = "OpenTelemetry Python Proto" -optional = false -python-versions = ">=3.8" -files = [ - {file = "opentelemetry_proto-1.25.0-py3-none-any.whl", hash = "sha256:f07e3341c78d835d9b86665903b199893befa5e98866f63d22b00d0b7ca4972f"}, - {file = "opentelemetry_proto-1.25.0.tar.gz", hash = "sha256:35b6ef9dc4a9f7853ecc5006738ad40443701e52c26099e197895cbda8b815a3"}, -] - -[package.dependencies] -protobuf = ">=3.19,<5.0" - -[[package]] -name = "opentelemetry-sdk" -version = "1.25.0" -description = "OpenTelemetry Python SDK" -optional = false -python-versions = ">=3.8" -files = [ - {file = "opentelemetry_sdk-1.25.0-py3-none-any.whl", hash = "sha256:d97ff7ec4b351692e9d5a15af570c693b8715ad78b8aafbec5c7100fe966b4c9"}, - {file = "opentelemetry_sdk-1.25.0.tar.gz", hash = "sha256:ce7fc319c57707ef5bf8b74fb9f8ebdb8bfafbe11898410e0d2a761d08a98ec7"}, -] - -[package.dependencies] -opentelemetry-api = "1.25.0" -opentelemetry-semantic-conventions = "0.46b0" -typing-extensions = ">=3.7.4" - -[[package]] -name = "opentelemetry-semantic-conventions" -version = "0.46b0" -description = "OpenTelemetry Semantic Conventions" -optional = false -python-versions = ">=3.8" -files = [ - {file = "opentelemetry_semantic_conventions-0.46b0-py3-none-any.whl", hash = "sha256:6daef4ef9fa51d51855d9f8e0ccd3a1bd59e0e545abe99ac6203804e36ab3e07"}, - {file = "opentelemetry_semantic_conventions-0.46b0.tar.gz", hash = "sha256:fbc982ecbb6a6e90869b15c1673be90bd18c8a56ff1cffc0864e38e2edffaefa"}, -] - -[package.dependencies] -opentelemetry-api = "1.25.0" - -[[package]] -name = "opentelemetry-util-http" -version = "0.46b0" -description = "Web util for OpenTelemetry" -optional = false -python-versions = ">=3.8" -files = [ - {file = "opentelemetry_util_http-0.46b0-py3-none-any.whl", hash = "sha256:8dc1949ce63caef08db84ae977fdc1848fe6dc38e6bbaad0ae3e6ecd0d451629"}, - {file = "opentelemetry_util_http-0.46b0.tar.gz", hash = "sha256:03b6e222642f9c7eae58d9132343e045b50aca9761fcb53709bd2b663571fdf6"}, -] - -[[package]] -name = "orjson" -version = "3.10.0" -description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" -optional = false -python-versions = ">=3.8" -files = [ - {file = "orjson-3.10.0-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:47af5d4b850a2d1328660661f0881b67fdbe712aea905dadd413bdea6f792c33"}, - {file = "orjson-3.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c90681333619d78360d13840c7235fdaf01b2b129cb3a4f1647783b1971542b6"}, - {file = "orjson-3.10.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:400c5b7c4222cb27b5059adf1fb12302eebcabf1978f33d0824aa5277ca899bd"}, - {file = "orjson-3.10.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5dcb32e949eae80fb335e63b90e5808b4b0f64e31476b3777707416b41682db5"}, - {file 
= "orjson-3.10.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa7d507c7493252c0a0264b5cc7e20fa2f8622b8a83b04d819b5ce32c97cf57b"}, - {file = "orjson-3.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e286a51def6626f1e0cc134ba2067dcf14f7f4b9550f6dd4535fd9d79000040b"}, - {file = "orjson-3.10.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:8acd4b82a5f3a3ec8b1dc83452941d22b4711964c34727eb1e65449eead353ca"}, - {file = "orjson-3.10.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:30707e646080dd3c791f22ce7e4a2fc2438765408547c10510f1f690bd336217"}, - {file = "orjson-3.10.0-cp310-none-win32.whl", hash = "sha256:115498c4ad34188dcb73464e8dc80e490a3e5e88a925907b6fedcf20e545001a"}, - {file = "orjson-3.10.0-cp310-none-win_amd64.whl", hash = "sha256:6735dd4a5a7b6df00a87d1d7a02b84b54d215fb7adac50dd24da5997ffb4798d"}, - {file = "orjson-3.10.0-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9587053e0cefc284e4d1cd113c34468b7d3f17666d22b185ea654f0775316a26"}, - {file = "orjson-3.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bef1050b1bdc9ea6c0d08468e3e61c9386723633b397e50b82fda37b3563d72"}, - {file = "orjson-3.10.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d16c6963ddf3b28c0d461641517cd312ad6b3cf303d8b87d5ef3fa59d6844337"}, - {file = "orjson-3.10.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4251964db47ef090c462a2d909f16c7c7d5fe68e341dabce6702879ec26d1134"}, - {file = "orjson-3.10.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:73bbbdc43d520204d9ef0817ac03fa49c103c7f9ea94f410d2950755be2c349c"}, - {file = "orjson-3.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:414e5293b82373606acf0d66313aecb52d9c8c2404b1900683eb32c3d042dbd7"}, - {file = "orjson-3.10.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:feaed5bb09877dc27ed0d37f037ddef6cb76d19aa34b108db270d27d3d2ef747"}, - {file = "orjson-3.10.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5127478260db640323cea131ee88541cb1a9fbce051f0b22fa2f0892f44da302"}, - {file = "orjson-3.10.0-cp311-none-win32.whl", hash = "sha256:b98345529bafe3c06c09996b303fc0a21961820d634409b8639bc16bd4f21b63"}, - {file = "orjson-3.10.0-cp311-none-win_amd64.whl", hash = "sha256:658ca5cee3379dd3d37dbacd43d42c1b4feee99a29d847ef27a1cb18abdfb23f"}, - {file = "orjson-3.10.0-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:4329c1d24fd130ee377e32a72dc54a3c251e6706fccd9a2ecb91b3606fddd998"}, - {file = "orjson-3.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef0f19fdfb6553342b1882f438afd53c7cb7aea57894c4490c43e4431739c700"}, - {file = "orjson-3.10.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c4f60db24161534764277f798ef53b9d3063092f6d23f8f962b4a97edfa997a0"}, - {file = "orjson-3.10.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1de3fd5c7b208d836f8ecb4526995f0d5877153a4f6f12f3e9bf11e49357de98"}, - {file = "orjson-3.10.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f93e33f67729d460a177ba285002035d3f11425ed3cebac5f6ded4ef36b28344"}, - {file = "orjson-3.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:237ba922aef472761acd697eef77fef4831ab769a42e83c04ac91e9f9e08fa0e"}, - {file = 
"orjson-3.10.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:98c1bfc6a9bec52bc8f0ab9b86cc0874b0299fccef3562b793c1576cf3abb570"}, - {file = "orjson-3.10.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:30d795a24be16c03dca0c35ca8f9c8eaaa51e3342f2c162d327bd0225118794a"}, - {file = "orjson-3.10.0-cp312-none-win32.whl", hash = "sha256:6a3f53dc650bc860eb26ec293dfb489b2f6ae1cbfc409a127b01229980e372f7"}, - {file = "orjson-3.10.0-cp312-none-win_amd64.whl", hash = "sha256:983db1f87c371dc6ffc52931eb75f9fe17dc621273e43ce67bee407d3e5476e9"}, - {file = "orjson-3.10.0-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9a667769a96a72ca67237224a36faf57db0c82ab07d09c3aafc6f956196cfa1b"}, - {file = "orjson-3.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ade1e21dfde1d37feee8cf6464c20a2f41fa46c8bcd5251e761903e46102dc6b"}, - {file = "orjson-3.10.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:23c12bb4ced1c3308eff7ba5c63ef8f0edb3e4c43c026440247dd6c1c61cea4b"}, - {file = "orjson-3.10.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2d014cf8d4dc9f03fc9f870de191a49a03b1bcda51f2a957943fb9fafe55aac"}, - {file = "orjson-3.10.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eadecaa16d9783affca33597781328e4981b048615c2ddc31c47a51b833d6319"}, - {file = "orjson-3.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd583341218826f48bd7c6ebf3310b4126216920853cbc471e8dbeaf07b0b80e"}, - {file = "orjson-3.10.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:90bfc137c75c31d32308fd61951d424424426ddc39a40e367704661a9ee97095"}, - {file = "orjson-3.10.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:13b5d3c795b09a466ec9fcf0bd3ad7b85467d91a60113885df7b8d639a9d374b"}, - {file = "orjson-3.10.0-cp38-none-win32.whl", hash = "sha256:5d42768db6f2ce0162544845facb7c081e9364a5eb6d2ef06cd17f6050b048d8"}, - {file = "orjson-3.10.0-cp38-none-win_amd64.whl", hash = "sha256:33e6655a2542195d6fd9f850b428926559dee382f7a862dae92ca97fea03a5ad"}, - {file = "orjson-3.10.0-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:4050920e831a49d8782a1720d3ca2f1c49b150953667eed6e5d63a62e80f46a2"}, - {file = "orjson-3.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1897aa25a944cec774ce4a0e1c8e98fb50523e97366c637b7d0cddabc42e6643"}, - {file = "orjson-3.10.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9bf565a69e0082ea348c5657401acec3cbbb31564d89afebaee884614fba36b4"}, - {file = "orjson-3.10.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b6ebc17cfbbf741f5c1a888d1854354536f63d84bee537c9a7c0335791bb9009"}, - {file = "orjson-3.10.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d2817877d0b69f78f146ab305c5975d0618df41acf8811249ee64231f5953fee"}, - {file = "orjson-3.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57d017863ec8aa4589be30a328dacd13c2dc49de1c170bc8d8c8a98ece0f2925"}, - {file = "orjson-3.10.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:22c2f7e377ac757bd3476ecb7480c8ed79d98ef89648f0176deb1da5cd014eb7"}, - {file = "orjson-3.10.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e62ba42bfe64c60c1bc84799944f80704e996592c6b9e14789c8e2a303279912"}, - {file = "orjson-3.10.0-cp39-none-win32.whl", hash = 
"sha256:60c0b1bdbccd959ebd1575bd0147bd5e10fc76f26216188be4a36b691c937077"}, - {file = "orjson-3.10.0-cp39-none-win_amd64.whl", hash = "sha256:175a41500ebb2fdf320bf78e8b9a75a1279525b62ba400b2b2444e274c2c8bee"}, - {file = "orjson-3.10.0.tar.gz", hash = "sha256:ba4d8cac5f2e2cff36bea6b6481cdb92b38c202bcec603d6f5ff91960595a1ed"}, -] - -[[package]] -name = "overrides" -version = "7.7.0" -description = "A decorator to automatically detect mismatch when overriding a method." -optional = false -python-versions = ">=3.6" -files = [ - {file = "overrides-7.7.0-py3-none-any.whl", hash = "sha256:c7ed9d062f78b8e4c1a7b70bd8796b35ead4d9f510227ef9c5dc7626c60d7e49"}, - {file = "overrides-7.7.0.tar.gz", hash = "sha256:55158fa3d93b98cc75299b1e67078ad9003ca27945c76162c1c0766d6f91820a"}, -] - -[[package]] -name = "packaging" -version = "24.1" -description = "Core utilities for Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, - {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, -] - -[[package]] -name = "pandas" -version = "2.2.2" -description = "Powerful data structures for data analysis, time series, and statistics" -optional = false -python-versions = ">=3.9" -files = [ - {file = "pandas-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90c6fca2acf139569e74e8781709dccb6fe25940488755716d1d354d6bc58bce"}, - {file = "pandas-2.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7adfc142dac335d8c1e0dcbd37eb8617eac386596eb9e1a1b77791cf2498238"}, - {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4abfe0be0d7221be4f12552995e58723c7422c80a659da13ca382697de830c08"}, - {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8635c16bf3d99040fdf3ca3db669a7250ddf49c55dc4aa8fe0ae0fa8d6dcc1f0"}, - {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:40ae1dffb3967a52203105a077415a86044a2bea011b5f321c6aa64b379a3f51"}, - {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8e5a0b00e1e56a842f922e7fae8ae4077aee4af0acb5ae3622bd4b4c30aedf99"}, - {file = "pandas-2.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:ddf818e4e6c7c6f4f7c8a12709696d193976b591cc7dc50588d3d1a6b5dc8772"}, - {file = "pandas-2.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:696039430f7a562b74fa45f540aca068ea85fa34c244d0deee539cb6d70aa288"}, - {file = "pandas-2.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8e90497254aacacbc4ea6ae5e7a8cd75629d6ad2b30025a4a8b09aa4faf55151"}, - {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58b84b91b0b9f4bafac2a0ac55002280c094dfc6402402332c0913a59654ab2b"}, - {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2123dc9ad6a814bcdea0f099885276b31b24f7edf40f6cdbc0912672e22eee"}, - {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2925720037f06e89af896c70bca73459d7e6a4be96f9de79e2d440bd499fe0db"}, - {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0cace394b6ea70c01ca1595f839cf193df35d1575986e484ad35c4aeae7266c1"}, - {file = "pandas-2.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:873d13d177501a28b2756375d59816c365e42ed8417b41665f346289adc68d24"}, - {file = "pandas-2.2.2-cp312-cp312-macosx_10_9_x86_64.whl", 
hash = "sha256:9dfde2a0ddef507a631dc9dc4af6a9489d5e2e740e226ad426a05cabfbd7c8ef"}, - {file = "pandas-2.2.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e9b79011ff7a0f4b1d6da6a61aa1aa604fb312d6647de5bad20013682d1429ce"}, - {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cb51fe389360f3b5a4d57dbd2848a5f033350336ca3b340d1c53a1fad33bcad"}, - {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eee3a87076c0756de40b05c5e9a6069c035ba43e8dd71c379e68cab2c20f16ad"}, - {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3e374f59e440d4ab45ca2fffde54b81ac3834cf5ae2cdfa69c90bc03bde04d76"}, - {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:43498c0bdb43d55cb162cdc8c06fac328ccb5d2eabe3cadeb3529ae6f0517c32"}, - {file = "pandas-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:d187d355ecec3629624fccb01d104da7d7f391db0311145817525281e2804d23"}, - {file = "pandas-2.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0ca6377b8fca51815f382bd0b697a0814c8bda55115678cbc94c30aacbb6eff2"}, - {file = "pandas-2.2.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9057e6aa78a584bc93a13f0a9bf7e753a5e9770a30b4d758b8d5f2a62a9433cd"}, - {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:001910ad31abc7bf06f49dcc903755d2f7f3a9186c0c040b827e522e9cef0863"}, - {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66b479b0bd07204e37583c191535505410daa8df638fd8e75ae1b383851fe921"}, - {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a77e9d1c386196879aa5eb712e77461aaee433e54c68cf253053a73b7e49c33a"}, - {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92fd6b027924a7e178ac202cfbe25e53368db90d56872d20ffae94b96c7acc57"}, - {file = "pandas-2.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:640cef9aa381b60e296db324337a554aeeb883ead99dc8f6c18e81a93942f5f4"}, - {file = "pandas-2.2.2.tar.gz", hash = "sha256:9e79019aba43cb4fda9e4d983f8e88ca0373adbb697ae9c6c43093218de28b54"}, -] - -[package.dependencies] -numpy = [ - {version = ">=1.22.4", markers = "python_version < \"3.11\""}, - {version = ">=1.23.2", markers = "python_version == \"3.11\""}, - {version = ">=1.26.0", markers = "python_version >= \"3.12\""}, -] -python-dateutil = ">=2.8.2" -pytz = ">=2020.1" -tzdata = ">=2022.7" - -[package.extras] -all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] -aws = ["s3fs (>=2022.11.0)"] -clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] -compression = ["zstandard (>=0.19.0)"] -computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] -consortium-standard = 
["dataframe-api-compat (>=0.1.7)"] -excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] -feather = ["pyarrow (>=10.0.1)"] -fss = ["fsspec (>=2022.11.0)"] -gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] -hdf5 = ["tables (>=3.8.0)"] -html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] -mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] -output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] -parquet = ["pyarrow (>=10.0.1)"] -performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] -plot = ["matplotlib (>=3.6.3)"] -postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] -pyarrow = ["pyarrow (>=10.0.1)"] -spss = ["pyreadstat (>=1.2.0)"] -sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] -test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] -xml = ["lxml (>=4.9.2)"] - -[[package]] -name = "pandas-stubs" -version = "2.2.2.240807" -description = "Type annotations for pandas" -optional = false -python-versions = ">=3.9" -files = [ - {file = "pandas_stubs-2.2.2.240807-py3-none-any.whl", hash = "sha256:893919ad82be4275f0d07bb47a95d08bae580d3fdea308a7acfcb3f02e76186e"}, - {file = "pandas_stubs-2.2.2.240807.tar.gz", hash = "sha256:64a559725a57a449f46225fbafc422520b7410bff9252b661a225b5559192a93"}, -] - -[package.dependencies] -numpy = ">=1.23.5" -types-pytz = ">=2022.1.1" - -[[package]] -name = "parameterized" -version = "0.9.0" -description = "Parameterized testing with any Python test framework" -optional = false -python-versions = ">=3.7" -files = [ - {file = "parameterized-0.9.0-py2.py3-none-any.whl", hash = "sha256:4e0758e3d41bea3bbd05ec14fc2c24736723f243b28d702081aef438c9372b1b"}, - {file = "parameterized-0.9.0.tar.gz", hash = "sha256:7fc905272cefa4f364c1a3429cbbe9c0f98b793988efb5bf90aac80f08db09b1"}, -] - -[package.extras] -dev = ["jinja2"] - -[[package]] -name = "parso" -version = "0.8.4" -description = "A Python Parser" -optional = false -python-versions = ">=3.6" -files = [ - {file = "parso-0.8.4-py2.py3-none-any.whl", hash = "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18"}, - {file = "parso-0.8.4.tar.gz", hash = "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d"}, -] - -[package.extras] -qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] -testing = ["docopt", "pytest"] - -[[package]] -name = "passlib" -version = "1.7.4" -description = "comprehensive password hashing framework supporting over 30 schemes" -optional = false -python-versions = "*" -files = [ - {file = "passlib-1.7.4-py2.py3-none-any.whl", hash = "sha256:aa6bca462b8d8bda89c70b382f0c298a20b5560af6cbfa2dce410c0a2fb669f1"}, - {file = "passlib-1.7.4.tar.gz", hash = "sha256:defd50f72b65c5402ab2c573830a6978e5f202ad0d984793c8dde2c4152ebe04"}, -] - -[package.extras] -argon2 = ["argon2-cffi (>=18.2.0)"] -bcrypt = ["bcrypt (>=3.1.0)"] -build-docs = ["cloud-sptheme (>=1.10.1)", "sphinx (>=1.6)", "sphinxcontrib-fulltoc (>=1.2.0)"] -totp = ["cryptography"] - -[[package]] -name = "pexpect" -version = "4.9.0" -description = "Pexpect allows easy control of interactive console applications." 
-optional = false -python-versions = "*" -files = [ - {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"}, - {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"}, -] - -[package.dependencies] -ptyprocess = ">=0.5" - -[[package]] -name = "pillow" -version = "10.4.0" -description = "Python Imaging Library (Fork)" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pillow-10.4.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:4d9667937cfa347525b319ae34375c37b9ee6b525440f3ef48542fcf66f2731e"}, - {file = "pillow-10.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:543f3dc61c18dafb755773efc89aae60d06b6596a63914107f75459cf984164d"}, - {file = "pillow-10.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7928ecbf1ece13956b95d9cbcfc77137652b02763ba384d9ab508099a2eca856"}, - {file = "pillow-10.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4d49b85c4348ea0b31ea63bc75a9f3857869174e2bf17e7aba02945cd218e6f"}, - {file = "pillow-10.4.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:6c762a5b0997f5659a5ef2266abc1d8851ad7749ad9a6a5506eb23d314e4f46b"}, - {file = "pillow-10.4.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a985e028fc183bf12a77a8bbf36318db4238a3ded7fa9df1b9a133f1cb79f8fc"}, - {file = "pillow-10.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:812f7342b0eee081eaec84d91423d1b4650bb9828eb53d8511bcef8ce5aecf1e"}, - {file = "pillow-10.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ac1452d2fbe4978c2eec89fb5a23b8387aba707ac72810d9490118817d9c0b46"}, - {file = "pillow-10.4.0-cp310-cp310-win32.whl", hash = "sha256:bcd5e41a859bf2e84fdc42f4edb7d9aba0a13d29a2abadccafad99de3feff984"}, - {file = "pillow-10.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:ecd85a8d3e79cd7158dec1c9e5808e821feea088e2f69a974db5edf84dc53141"}, - {file = "pillow-10.4.0-cp310-cp310-win_arm64.whl", hash = "sha256:ff337c552345e95702c5fde3158acb0625111017d0e5f24bf3acdb9cc16b90d1"}, - {file = "pillow-10.4.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:0a9ec697746f268507404647e531e92889890a087e03681a3606d9b920fbee3c"}, - {file = "pillow-10.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dfe91cb65544a1321e631e696759491ae04a2ea11d36715eca01ce07284738be"}, - {file = "pillow-10.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5dc6761a6efc781e6a1544206f22c80c3af4c8cf461206d46a1e6006e4429ff3"}, - {file = "pillow-10.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e84b6cc6a4a3d76c153a6b19270b3526a5a8ed6b09501d3af891daa2a9de7d6"}, - {file = "pillow-10.4.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:bbc527b519bd3aa9d7f429d152fea69f9ad37c95f0b02aebddff592688998abe"}, - {file = "pillow-10.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:76a911dfe51a36041f2e756b00f96ed84677cdeb75d25c767f296c1c1eda1319"}, - {file = "pillow-10.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:59291fb29317122398786c2d44427bbd1a6d7ff54017075b22be9d21aa59bd8d"}, - {file = "pillow-10.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:416d3a5d0e8cfe4f27f574362435bc9bae57f679a7158e0096ad2beb427b8696"}, - {file = "pillow-10.4.0-cp311-cp311-win32.whl", hash = "sha256:7086cc1d5eebb91ad24ded9f58bec6c688e9f0ed7eb3dbbf1e4800280a896496"}, - {file = "pillow-10.4.0-cp311-cp311-win_amd64.whl", hash = 
"sha256:cbed61494057c0f83b83eb3a310f0bf774b09513307c434d4366ed64f4128a91"}, - {file = "pillow-10.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:f5f0c3e969c8f12dd2bb7e0b15d5c468b51e5017e01e2e867335c81903046a22"}, - {file = "pillow-10.4.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:673655af3eadf4df6b5457033f086e90299fdd7a47983a13827acf7459c15d94"}, - {file = "pillow-10.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:866b6942a92f56300012f5fbac71f2d610312ee65e22f1aa2609e491284e5597"}, - {file = "pillow-10.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29dbdc4207642ea6aad70fbde1a9338753d33fb23ed6956e706936706f52dd80"}, - {file = "pillow-10.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf2342ac639c4cf38799a44950bbc2dfcb685f052b9e262f446482afaf4bffca"}, - {file = "pillow-10.4.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:f5b92f4d70791b4a67157321c4e8225d60b119c5cc9aee8ecf153aace4aad4ef"}, - {file = "pillow-10.4.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:86dcb5a1eb778d8b25659d5e4341269e8590ad6b4e8b44d9f4b07f8d136c414a"}, - {file = "pillow-10.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:780c072c2e11c9b2c7ca37f9a2ee8ba66f44367ac3e5c7832afcfe5104fd6d1b"}, - {file = "pillow-10.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:37fb69d905be665f68f28a8bba3c6d3223c8efe1edf14cc4cfa06c241f8c81d9"}, - {file = "pillow-10.4.0-cp312-cp312-win32.whl", hash = "sha256:7dfecdbad5c301d7b5bde160150b4db4c659cee2b69589705b6f8a0c509d9f42"}, - {file = "pillow-10.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:1d846aea995ad352d4bdcc847535bd56e0fd88d36829d2c90be880ef1ee4668a"}, - {file = "pillow-10.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:e553cad5179a66ba15bb18b353a19020e73a7921296a7979c4a2b7f6a5cd57f9"}, - {file = "pillow-10.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8bc1a764ed8c957a2e9cacf97c8b2b053b70307cf2996aafd70e91a082e70df3"}, - {file = "pillow-10.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6209bb41dc692ddfee4942517c19ee81b86c864b626dbfca272ec0f7cff5d9fb"}, - {file = "pillow-10.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bee197b30783295d2eb680b311af15a20a8b24024a19c3a26431ff83eb8d1f70"}, - {file = "pillow-10.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ef61f5dd14c300786318482456481463b9d6b91ebe5ef12f405afbba77ed0be"}, - {file = "pillow-10.4.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:297e388da6e248c98bc4a02e018966af0c5f92dfacf5a5ca22fa01cb3179bca0"}, - {file = "pillow-10.4.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:e4db64794ccdf6cb83a59d73405f63adbe2a1887012e308828596100a0b2f6cc"}, - {file = "pillow-10.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd2880a07482090a3bcb01f4265f1936a903d70bc740bfcb1fd4e8a2ffe5cf5a"}, - {file = "pillow-10.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b35b21b819ac1dbd1233317adeecd63495f6babf21b7b2512d244ff6c6ce309"}, - {file = "pillow-10.4.0-cp313-cp313-win32.whl", hash = "sha256:551d3fd6e9dc15e4c1eb6fc4ba2b39c0c7933fa113b220057a34f4bb3268a060"}, - {file = "pillow-10.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:030abdbe43ee02e0de642aee345efa443740aa4d828bfe8e2eb11922ea6a21ea"}, - {file = "pillow-10.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:5b001114dd152cfd6b23befeb28d7aee43553e2402c9f159807bf55f33af8a8d"}, - {file = "pillow-10.4.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = 
"sha256:8d4d5063501b6dd4024b8ac2f04962d661222d120381272deea52e3fc52d3736"}, - {file = "pillow-10.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7c1ee6f42250df403c5f103cbd2768a28fe1a0ea1f0f03fe151c8741e1469c8b"}, - {file = "pillow-10.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b15e02e9bb4c21e39876698abf233c8c579127986f8207200bc8a8f6bb27acf2"}, - {file = "pillow-10.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a8d4bade9952ea9a77d0c3e49cbd8b2890a399422258a77f357b9cc9be8d680"}, - {file = "pillow-10.4.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:43efea75eb06b95d1631cb784aa40156177bf9dd5b4b03ff38979e048258bc6b"}, - {file = "pillow-10.4.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:950be4d8ba92aca4b2bb0741285a46bfae3ca699ef913ec8416c1b78eadd64cd"}, - {file = "pillow-10.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d7480af14364494365e89d6fddc510a13e5a2c3584cb19ef65415ca57252fb84"}, - {file = "pillow-10.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:73664fe514b34c8f02452ffb73b7a92c6774e39a647087f83d67f010eb9a0cf0"}, - {file = "pillow-10.4.0-cp38-cp38-win32.whl", hash = "sha256:e88d5e6ad0d026fba7bdab8c3f225a69f063f116462c49892b0149e21b6c0a0e"}, - {file = "pillow-10.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:5161eef006d335e46895297f642341111945e2c1c899eb406882a6c61a4357ab"}, - {file = "pillow-10.4.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:0ae24a547e8b711ccaaf99c9ae3cd975470e1a30caa80a6aaee9a2f19c05701d"}, - {file = "pillow-10.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:298478fe4f77a4408895605f3482b6cc6222c018b2ce565c2b6b9c354ac3229b"}, - {file = "pillow-10.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:134ace6dc392116566980ee7436477d844520a26a4b1bd4053f6f47d096997fd"}, - {file = "pillow-10.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:930044bb7679ab003b14023138b50181899da3f25de50e9dbee23b61b4de2126"}, - {file = "pillow-10.4.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:c76e5786951e72ed3686e122d14c5d7012f16c8303a674d18cdcd6d89557fc5b"}, - {file = "pillow-10.4.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b2724fdb354a868ddf9a880cb84d102da914e99119211ef7ecbdc613b8c96b3c"}, - {file = "pillow-10.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dbc6ae66518ab3c5847659e9988c3b60dc94ffb48ef9168656e0019a93dbf8a1"}, - {file = "pillow-10.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:06b2f7898047ae93fad74467ec3d28fe84f7831370e3c258afa533f81ef7f3df"}, - {file = "pillow-10.4.0-cp39-cp39-win32.whl", hash = "sha256:7970285ab628a3779aecc35823296a7869f889b8329c16ad5a71e4901a3dc4ef"}, - {file = "pillow-10.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:961a7293b2457b405967af9c77dcaa43cc1a8cd50d23c532e62d48ab6cdd56f5"}, - {file = "pillow-10.4.0-cp39-cp39-win_arm64.whl", hash = "sha256:32cda9e3d601a52baccb2856b8ea1fc213c90b340c542dcef77140dfa3278a9e"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:5b4815f2e65b30f5fbae9dfffa8636d992d49705723fe86a3661806e069352d4"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8f0aef4ef59694b12cadee839e2ba6afeab89c0f39a3adc02ed51d109117b8da"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f4727572e2918acaa9077c919cbbeb73bd2b3ebcfe033b72f858fc9fbef0026"}, - {file = 
"pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff25afb18123cea58a591ea0244b92eb1e61a1fd497bf6d6384f09bc3262ec3e"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dc3e2db6ba09ffd7d02ae9141cfa0ae23393ee7687248d46a7507b75d610f4f5"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:02a2be69f9c9b8c1e97cf2713e789d4e398c751ecfd9967c18d0ce304efbf885"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0755ffd4a0c6f267cccbae2e9903d95477ca2f77c4fcf3a3a09570001856c8a5"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:a02364621fe369e06200d4a16558e056fe2805d3468350df3aef21e00d26214b"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:1b5dea9831a90e9d0721ec417a80d4cbd7022093ac38a568db2dd78363b00908"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b885f89040bb8c4a1573566bbb2f44f5c505ef6e74cec7ab9068c900047f04b"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87dd88ded2e6d74d31e1e0a99a726a6765cda32d00ba72dc37f0651f306daaa8"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:2db98790afc70118bd0255c2eeb465e9767ecf1f3c25f9a1abb8ffc8cfd1fe0a"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f7baece4ce06bade126fb84b8af1c33439a76d8a6fd818970215e0560ca28c27"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:cfdd747216947628af7b259d274771d84db2268ca062dd5faf373639d00113a3"}, - {file = "pillow-10.4.0.tar.gz", hash = "sha256:166c1cd4d24309b30d61f79f4a9114b7b2313d7450912277855ff5dfd7cd4a06"}, -] - -[package.extras] -docs = ["furo", "olefile", "sphinx (>=7.3)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinxext-opengraph"] -fpx = ["olefile"] -mic = ["olefile"] -tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] -typing = ["typing-extensions"] -xmp = ["defusedxml"] - -[[package]] -name = "platformdirs" -version = "4.2.2" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, - {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, -] - -[package.extras] -docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] -type = ["mypy (>=1.8)"] - -[[package]] -name = "pluggy" -version = "1.5.0" -description = "plugin and hook calling mechanisms for python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, - {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, -] - -[package.extras] -dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] - -[[package]] -name = "portalocker" -version = "2.10.1" -description = "Wraps the portalocker recipe for easy usage" -optional = false -python-versions = ">=3.8" -files = [ - {file = "portalocker-2.10.1-py3-none-any.whl", hash = "sha256:53a5984ebc86a025552264b459b46a2086e269b21823cb572f8f28ee759e45bf"}, - {file = "portalocker-2.10.1.tar.gz", hash = "sha256:ef1bf844e878ab08aee7e40184156e1151f228f103aa5c6bd0724cc330960f8f"}, -] - -[package.dependencies] -pywin32 = {version = ">=226", markers = "platform_system == \"Windows\""} - -[package.extras] -docs = ["sphinx (>=1.7.1)"] -redis = ["redis"] -tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "pytest-mypy (>=0.8.0)", "pytest-timeout (>=2.1.0)", "redis", "sphinx (>=6.0.0)", "types-redis"] - -[[package]] -name = "posthog" -version = "3.5.0" -description = "Integrate PostHog into any python application." -optional = false -python-versions = "*" -files = [ - {file = "posthog-3.5.0-py2.py3-none-any.whl", hash = "sha256:3c672be7ba6f95d555ea207d4486c171d06657eb34b3ce25eb043bfe7b6b5b76"}, - {file = "posthog-3.5.0.tar.gz", hash = "sha256:8f7e3b2c6e8714d0c0c542a2109b83a7549f63b7113a133ab2763a89245ef2ef"}, -] - -[package.dependencies] -backoff = ">=1.10.0" -monotonic = ">=1.5" -python-dateutil = ">2.1" -requests = ">=2.7,<3.0" -six = ">=1.5" - -[package.extras] -dev = ["black", "flake8", "flake8-print", "isort", "pre-commit"] -sentry = ["django", "sentry-sdk"] -test = ["coverage", "flake8", "freezegun (==0.3.15)", "mock (>=2.0.0)", "pylint", "pytest", "pytest-timeout"] - -[[package]] -name = "pre-commit" -version = "3.8.0" -description = "A framework for managing and maintaining multi-language pre-commit hooks." -optional = false -python-versions = ">=3.9" -files = [ - {file = "pre_commit-3.8.0-py2.py3-none-any.whl", hash = "sha256:9a90a53bf82fdd8778d58085faf8d83df56e40dfe18f45b19446e26bf1b3a63f"}, - {file = "pre_commit-3.8.0.tar.gz", hash = "sha256:8bb6494d4a20423842e198980c9ecf9f96607a07ea29549e180eef9ae80fe7af"}, -] - -[package.dependencies] -cfgv = ">=2.0.0" -identify = ">=1.0.0" -nodeenv = ">=0.11.1" -pyyaml = ">=5.1" -virtualenv = ">=20.10.0" - -[[package]] -name = "prometheus-client" -version = "0.20.0" -description = "Python client for the Prometheus monitoring system." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "prometheus_client-0.20.0-py3-none-any.whl", hash = "sha256:cde524a85bce83ca359cc837f28b8c0db5cac7aa653a588fd7e84ba061c329e7"}, - {file = "prometheus_client-0.20.0.tar.gz", hash = "sha256:287629d00b147a32dcb2be0b9df905da599b2d82f80377083ec8463309a4bb89"}, -] - -[package.extras] -twisted = ["twisted"] - -[[package]] -name = "prompt-toolkit" -version = "3.0.47" -description = "Library for building powerful interactive command lines in Python" -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "prompt_toolkit-3.0.47-py3-none-any.whl", hash = "sha256:0d7bfa67001d5e39d02c224b663abc33687405033a8c422d0d675a5a13361d10"}, - {file = "prompt_toolkit-3.0.47.tar.gz", hash = "sha256:1e1b29cb58080b1e69f207c893a1a7bf16d127a5c30c9d17a25a5d77792e5360"}, -] - -[package.dependencies] -wcwidth = "*" - -[[package]] -name = "proto-plus" -version = "1.24.0" -description = "Beautiful, Pythonic protocol buffers." -optional = false -python-versions = ">=3.7" -files = [ - {file = "proto-plus-1.24.0.tar.gz", hash = "sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445"}, - {file = "proto_plus-1.24.0-py3-none-any.whl", hash = "sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12"}, -] - -[package.dependencies] -protobuf = ">=3.19.0,<6.0.0dev" - -[package.extras] -testing = ["google-api-core (>=1.31.5)"] - -[[package]] -name = "protobuf" -version = "4.25.4" -description = "" -optional = false -python-versions = ">=3.8" -files = [ - {file = "protobuf-4.25.4-cp310-abi3-win32.whl", hash = "sha256:db9fd45183e1a67722cafa5c1da3e85c6492a5383f127c86c4c4aa4845867dc4"}, - {file = "protobuf-4.25.4-cp310-abi3-win_amd64.whl", hash = "sha256:ba3d8504116a921af46499471c63a85260c1a5fc23333154a427a310e015d26d"}, - {file = "protobuf-4.25.4-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:eecd41bfc0e4b1bd3fa7909ed93dd14dd5567b98c941d6c1ad08fdcab3d6884b"}, - {file = "protobuf-4.25.4-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:4c8a70fdcb995dcf6c8966cfa3a29101916f7225e9afe3ced4395359955d3835"}, - {file = "protobuf-4.25.4-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:3319e073562e2515c6ddc643eb92ce20809f5d8f10fead3332f71c63be6a7040"}, - {file = "protobuf-4.25.4-cp38-cp38-win32.whl", hash = "sha256:7e372cbbda66a63ebca18f8ffaa6948455dfecc4e9c1029312f6c2edcd86c4e1"}, - {file = "protobuf-4.25.4-cp38-cp38-win_amd64.whl", hash = "sha256:051e97ce9fa6067a4546e75cb14f90cf0232dcb3e3d508c448b8d0e4265b61c1"}, - {file = "protobuf-4.25.4-cp39-cp39-win32.whl", hash = "sha256:90bf6fd378494eb698805bbbe7afe6c5d12c8e17fca817a646cd6a1818c696ca"}, - {file = "protobuf-4.25.4-cp39-cp39-win_amd64.whl", hash = "sha256:ac79a48d6b99dfed2729ccccee547b34a1d3d63289c71cef056653a846a2240f"}, - {file = "protobuf-4.25.4-py3-none-any.whl", hash = "sha256:bfbebc1c8e4793cfd58589acfb8a1026be0003e852b9da7db5a4285bde996978"}, - {file = "protobuf-4.25.4.tar.gz", hash = "sha256:0dc4a62cc4052a036ee2204d26fe4d835c62827c855c8a03f29fe6da146b380d"}, -] - -[[package]] -name = "psutil" -version = "6.0.0" -description = "Cross-platform lib for process and system monitoring in Python." 
-optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" -files = [ - {file = "psutil-6.0.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a021da3e881cd935e64a3d0a20983bda0bb4cf80e4f74fa9bfcb1bc5785360c6"}, - {file = "psutil-6.0.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:1287c2b95f1c0a364d23bc6f2ea2365a8d4d9b726a3be7294296ff7ba97c17f0"}, - {file = "psutil-6.0.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:a9a3dbfb4de4f18174528d87cc352d1f788b7496991cca33c6996f40c9e3c92c"}, - {file = "psutil-6.0.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:6ec7588fb3ddaec7344a825afe298db83fe01bfaaab39155fa84cf1c0d6b13c3"}, - {file = "psutil-6.0.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:1e7c870afcb7d91fdea2b37c24aeb08f98b6d67257a5cb0a8bc3ac68d0f1a68c"}, - {file = "psutil-6.0.0-cp27-none-win32.whl", hash = "sha256:02b69001f44cc73c1c5279d02b30a817e339ceb258ad75997325e0e6169d8b35"}, - {file = "psutil-6.0.0-cp27-none-win_amd64.whl", hash = "sha256:21f1fb635deccd510f69f485b87433460a603919b45e2a324ad65b0cc74f8fb1"}, - {file = "psutil-6.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:c588a7e9b1173b6e866756dde596fd4cad94f9399daf99ad8c3258b3cb2b47a0"}, - {file = "psutil-6.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ed2440ada7ef7d0d608f20ad89a04ec47d2d3ab7190896cd62ca5fc4fe08bf0"}, - {file = "psutil-6.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fd9a97c8e94059b0ef54a7d4baf13b405011176c3b6ff257c247cae0d560ecd"}, - {file = "psutil-6.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2e8d0054fc88153ca0544f5c4d554d42e33df2e009c4ff42284ac9ebdef4132"}, - {file = "psutil-6.0.0-cp36-cp36m-win32.whl", hash = "sha256:fc8c9510cde0146432bbdb433322861ee8c3efbf8589865c8bf8d21cb30c4d14"}, - {file = "psutil-6.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:34859b8d8f423b86e4385ff3665d3f4d94be3cdf48221fbe476e883514fdb71c"}, - {file = "psutil-6.0.0-cp37-abi3-win32.whl", hash = "sha256:a495580d6bae27291324fe60cea0b5a7c23fa36a7cd35035a16d93bdcf076b9d"}, - {file = "psutil-6.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:33ea5e1c975250a720b3a6609c490db40dae5d83a4eb315170c4fe0d8b1f34b3"}, - {file = "psutil-6.0.0-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:ffe7fc9b6b36beadc8c322f84e1caff51e8703b88eee1da46d1e3a6ae11b4fd0"}, - {file = "psutil-6.0.0.tar.gz", hash = "sha256:8faae4f310b6d969fa26ca0545338b21f73c6b15db7c4a8d934a5482faa818f2"}, -] - -[package.extras] -test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] - -[[package]] -name = "ptyprocess" -version = "0.7.0" -description = "Run a subprocess in a pseudo terminal" -optional = false -python-versions = "*" -files = [ - {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, - {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, -] - -[[package]] -name = "pulsar-client" -version = "3.5.0" -description = "Apache Pulsar Python client library" -optional = false -python-versions = "*" -files = [ - {file = "pulsar_client-3.5.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:c18552edb2f785de85280fe624bc507467152bff810fc81d7660fa2dfa861f38"}, - {file = "pulsar_client-3.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:18d438e456c146f01be41ef146f649dedc8f7bc714d9eaef94cff2e34099812b"}, - {file = "pulsar_client-3.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18a26a0719841103c7a89eb1492c4a8fedf89adaa386375baecbb4fa2707e88f"}, - {file = "pulsar_client-3.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ab0e1605dc5f44a126163fd06cd0a768494ad05123f6e0de89a2c71d6e2d2319"}, - {file = "pulsar_client-3.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cdef720891b97656fdce3bf5913ea7729b2156b84ba64314f432c1e72c6117fa"}, - {file = "pulsar_client-3.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:a42544e38773191fe550644a90e8050579476bb2dcf17ac69a4aed62a6cb70e7"}, - {file = "pulsar_client-3.5.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:fd94432ea5d398ea78f8f2e09a217ec5058d26330c137a22690478c031e116da"}, - {file = "pulsar_client-3.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6252ae462e07ece4071213fdd9c76eab82ca522a749f2dc678037d4cbacd40b"}, - {file = "pulsar_client-3.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03b4d440b2d74323784328b082872ee2f206c440b5d224d7941eb3c083ec06c6"}, - {file = "pulsar_client-3.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f60af840b8d64a2fac5a0c1ce6ae0ddffec5f42267c6ded2c5e74bad8345f2a1"}, - {file = "pulsar_client-3.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2277a447c3b7f6571cb1eb9fc5c25da3fdd43d0b2fb91cf52054adfadc7d6842"}, - {file = "pulsar_client-3.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:f20f3e9dd50db2a37059abccad42078b7a4754b8bc1d3ae6502e71c1ad2209f0"}, - {file = "pulsar_client-3.5.0-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:d61f663d85308e12f44033ba95af88730f581a7e8da44f7a5c080a3aaea4878d"}, - {file = "pulsar_client-3.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a1ba0be25b6f747bcb28102b7d906ec1de48dc9f1a2d9eacdcc6f44ab2c9e17"}, - {file = "pulsar_client-3.5.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a181e3e60ac39df72ccb3c415d7aeac61ad0286497a6e02739a560d5af28393a"}, - {file = "pulsar_client-3.5.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3c72895ff7f51347e4f78b0375b2213fa70dd4790bbb78177b4002846f1fd290"}, - {file = "pulsar_client-3.5.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:547dba1b185a17eba915e51d0a3aca27c80747b6187e5cd7a71a3ca33921decc"}, - {file = "pulsar_client-3.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:443b786eed96bc86d2297a6a42e79f39d1abf217ec603e0bd303f3488c0234af"}, - {file = "pulsar_client-3.5.0-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:15b58f5d759dd6166db8a2d90ed05a38063b05cda76c36d190d86ef5c9249397"}, - {file = "pulsar_client-3.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:af34bfe813dddf772a8a298117fa0a036ee963595d8bc8f00d969a0329ae6ed9"}, - {file = "pulsar_client-3.5.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27a0fec1dd74e1367d3742ce16679c1807994df60f5e666f440cf39323938fad"}, - {file = "pulsar_client-3.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dbcd26ef9c03f96fb9cd91baec3bbd3c4b997834eb3556670d31f41cc25b5f64"}, - {file = "pulsar_client-3.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:afea1d0b6e793fd56e56463145751ff3aa79fdcd5b26e90d0da802a1bbabe07e"}, - {file = "pulsar_client-3.5.0-cp38-cp38-win_amd64.whl", hash = 
"sha256:da1ab2fb1bef64b966e9403a0a186ebc90368d99e054ce2cae5b1128478f4ef4"}, - {file = "pulsar_client-3.5.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:9ad5dcc0eb8d2a7c0fb8e1fa146a0c6d4bdaf934f1169080b2c64b2f0573e086"}, - {file = "pulsar_client-3.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5870c6805b1a57962ed908d1173e97e13470415998393925c86a43694420389"}, - {file = "pulsar_client-3.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:29cb5fedb969895b78301dc00a979133e69940812b8332e4de948bb0ad3db7cb"}, - {file = "pulsar_client-3.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e53c74bfa59b20c66adea95023169060f5048dd8d843e6ef9cd3b8ee2d23e93b"}, - {file = "pulsar_client-3.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:99dbadb13967f1add57010971ed36b5a77d24afcdaea01960d0e55e56cf4ba6f"}, - {file = "pulsar_client-3.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:058887661d438796f42307dcc8054c84dea88a37683dae36498b95d7e1c39b37"}, -] - -[package.dependencies] -certifi = "*" - -[package.extras] -all = ["apache-bookkeeper-client (>=4.16.1)", "fastavro (>=1.9.2)", "grpcio (>=1.60.0)", "prometheus-client", "protobuf (>=3.6.1,<=3.20.3)", "ratelimit"] -avro = ["fastavro (>=1.9.2)"] -functions = ["apache-bookkeeper-client (>=4.16.1)", "grpcio (>=1.60.0)", "prometheus-client", "protobuf (>=3.6.1,<=3.20.3)", "ratelimit"] - -[[package]] -name = "pure-eval" -version = "0.2.3" -description = "Safely evaluate AST nodes without side effects" -optional = false -python-versions = "*" -files = [ - {file = "pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0"}, - {file = "pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42"}, -] - -[package.extras] -tests = ["pytest"] - -[[package]] -name = "pyasn1" -version = "0.6.0" -description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pyasn1-0.6.0-py2.py3-none-any.whl", hash = "sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473"}, - {file = "pyasn1-0.6.0.tar.gz", hash = "sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c"}, -] - -[[package]] -name = "pyasn1-modules" -version = "0.4.0" -description = "A collection of ASN.1-based protocols modules" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pyasn1_modules-0.4.0-py3-none-any.whl", hash = "sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b"}, - {file = "pyasn1_modules-0.4.0.tar.gz", hash = "sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6"}, -] - -[package.dependencies] -pyasn1 = ">=0.4.6,<0.7.0" - -[[package]] -name = "pycparser" -version = "2.22" -description = "C parser in Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, - {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, -] - -[[package]] -name = "pydantic" -version = "2.8.2" -description = "Data validation using Python type hints" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pydantic-2.8.2-py3-none-any.whl", hash = "sha256:73ee9fddd406dc318b885c7a2eab8a6472b68b8fb5ba8150949fc3db939f23c8"}, - {file = "pydantic-2.8.2.tar.gz", hash = 
"sha256:6f62c13d067b0755ad1c21a34bdd06c0c12625a22b0fc09c6b149816604f7c2a"}, -] - -[package.dependencies] -annotated-types = ">=0.4.0" -pydantic-core = "2.20.1" -typing-extensions = {version = ">=4.6.1", markers = "python_version < \"3.13\""} - -[package.extras] -email = ["email-validator (>=2.0.0)"] - -[[package]] -name = "pydantic-core" -version = "2.20.1" -description = "Core functionality for Pydantic validation and serialization" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pydantic_core-2.20.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3acae97ffd19bf091c72df4d726d552c473f3576409b2a7ca36b2f535ffff4a3"}, - {file = "pydantic_core-2.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:41f4c96227a67a013e7de5ff8f20fb496ce573893b7f4f2707d065907bffdbd6"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f239eb799a2081495ea659d8d4a43a8f42cd1fe9ff2e7e436295c38a10c286a"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53e431da3fc53360db73eedf6f7124d1076e1b4ee4276b36fb25514544ceb4a3"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1f62b2413c3a0e846c3b838b2ecd6c7a19ec6793b2a522745b0869e37ab5bc1"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d41e6daee2813ecceea8eda38062d69e280b39df793f5a942fa515b8ed67953"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d482efec8b7dc6bfaedc0f166b2ce349df0011f5d2f1f25537ced4cfc34fd98"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e93e1a4b4b33daed65d781a57a522ff153dcf748dee70b40c7258c5861e1768a"}, - {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7c4ea22b6739b162c9ecaaa41d718dfad48a244909fe7ef4b54c0b530effc5a"}, - {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4f2790949cf385d985a31984907fecb3896999329103df4e4983a4a41e13e840"}, - {file = "pydantic_core-2.20.1-cp310-none-win32.whl", hash = "sha256:5e999ba8dd90e93d57410c5e67ebb67ffcaadcea0ad973240fdfd3a135506250"}, - {file = "pydantic_core-2.20.1-cp310-none-win_amd64.whl", hash = "sha256:512ecfbefef6dac7bc5eaaf46177b2de58cdf7acac8793fe033b24ece0b9566c"}, - {file = "pydantic_core-2.20.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d2a8fa9d6d6f891f3deec72f5cc668e6f66b188ab14bb1ab52422fe8e644f312"}, - {file = "pydantic_core-2.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:175873691124f3d0da55aeea1d90660a6ea7a3cfea137c38afa0a5ffabe37b88"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37eee5b638f0e0dcd18d21f59b679686bbd18917b87db0193ae36f9c23c355fc"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25e9185e2d06c16ee438ed39bf62935ec436474a6ac4f9358524220f1b236e43"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:150906b40ff188a3260cbee25380e7494ee85048584998c1e66df0c7a11c17a6"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ad4aeb3e9a97286573c03df758fc7627aecdd02f1da04516a86dc159bf70121"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:d3f3ed29cd9f978c604708511a1f9c2fdcb6c38b9aae36a51905b8811ee5cbf1"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0dae11d8f5ded51699c74d9548dcc5938e0804cc8298ec0aa0da95c21fff57b"}, - {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:faa6b09ee09433b87992fb5a2859efd1c264ddc37280d2dd5db502126d0e7f27"}, - {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9dc1b507c12eb0481d071f3c1808f0529ad41dc415d0ca11f7ebfc666e66a18b"}, - {file = "pydantic_core-2.20.1-cp311-none-win32.whl", hash = "sha256:fa2fddcb7107e0d1808086ca306dcade7df60a13a6c347a7acf1ec139aa6789a"}, - {file = "pydantic_core-2.20.1-cp311-none-win_amd64.whl", hash = "sha256:40a783fb7ee353c50bd3853e626f15677ea527ae556429453685ae32280c19c2"}, - {file = "pydantic_core-2.20.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:595ba5be69b35777474fa07f80fc260ea71255656191adb22a8c53aba4479231"}, - {file = "pydantic_core-2.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a4f55095ad087474999ee28d3398bae183a66be4823f753cd7d67dd0153427c9"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9aa05d09ecf4c75157197f27cdc9cfaeb7c5f15021c6373932bf3e124af029f"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e97fdf088d4b31ff4ba35db26d9cc472ac7ef4a2ff2badeabf8d727b3377fc52"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bc633a9fe1eb87e250b5c57d389cf28998e4292336926b0b6cdaee353f89a237"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d573faf8eb7e6b1cbbcb4f5b247c60ca8be39fe2c674495df0eb4318303137fe"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26dc97754b57d2fd00ac2b24dfa341abffc380b823211994c4efac7f13b9e90e"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:33499e85e739a4b60c9dac710c20a08dc73cb3240c9a0e22325e671b27b70d24"}, - {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bebb4d6715c814597f85297c332297c6ce81e29436125ca59d1159b07f423eb1"}, - {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:516d9227919612425c8ef1c9b869bbbee249bc91912c8aaffb66116c0b447ebd"}, - {file = "pydantic_core-2.20.1-cp312-none-win32.whl", hash = "sha256:469f29f9093c9d834432034d33f5fe45699e664f12a13bf38c04967ce233d688"}, - {file = "pydantic_core-2.20.1-cp312-none-win_amd64.whl", hash = "sha256:035ede2e16da7281041f0e626459bcae33ed998cca6a0a007a5ebb73414ac72d"}, - {file = "pydantic_core-2.20.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:0827505a5c87e8aa285dc31e9ec7f4a17c81a813d45f70b1d9164e03a813a686"}, - {file = "pydantic_core-2.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:19c0fa39fa154e7e0b7f82f88ef85faa2a4c23cc65aae2f5aea625e3c13c735a"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa223cd1e36b642092c326d694d8bf59b71ddddc94cdb752bbbb1c5c91d833b"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c336a6d235522a62fef872c6295a42ecb0c4e1d0f1a3e500fe949415761b8a19"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:7eb6a0587eded33aeefea9f916899d42b1799b7b14b8f8ff2753c0ac1741edac"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:70c8daf4faca8da5a6d655f9af86faf6ec2e1768f4b8b9d0226c02f3d6209703"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9fa4c9bf273ca41f940bceb86922a7667cd5bf90e95dbb157cbb8441008482c"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:11b71d67b4725e7e2a9f6e9c0ac1239bbc0c48cce3dc59f98635efc57d6dac83"}, - {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:270755f15174fb983890c49881e93f8f1b80f0b5e3a3cc1394a255706cabd203"}, - {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c81131869240e3e568916ef4c307f8b99583efaa60a8112ef27a366eefba8ef0"}, - {file = "pydantic_core-2.20.1-cp313-none-win32.whl", hash = "sha256:b91ced227c41aa29c672814f50dbb05ec93536abf8f43cd14ec9521ea09afe4e"}, - {file = "pydantic_core-2.20.1-cp313-none-win_amd64.whl", hash = "sha256:65db0f2eefcaad1a3950f498aabb4875c8890438bc80b19362cf633b87a8ab20"}, - {file = "pydantic_core-2.20.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:4745f4ac52cc6686390c40eaa01d48b18997cb130833154801a442323cc78f91"}, - {file = "pydantic_core-2.20.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a8ad4c766d3f33ba8fd692f9aa297c9058970530a32c728a2c4bfd2616d3358b"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41e81317dd6a0127cabce83c0c9c3fbecceae981c8391e6f1dec88a77c8a569a"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04024d270cf63f586ad41fff13fde4311c4fc13ea74676962c876d9577bcc78f"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eaad4ff2de1c3823fddf82f41121bdf453d922e9a238642b1dedb33c4e4f98ad"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:26ab812fa0c845df815e506be30337e2df27e88399b985d0bb4e3ecfe72df31c"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c5ebac750d9d5f2706654c638c041635c385596caf68f81342011ddfa1e5598"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2aafc5a503855ea5885559eae883978c9b6d8c8993d67766ee73d82e841300dd"}, - {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4868f6bd7c9d98904b748a2653031fc9c2f85b6237009d475b1008bfaeb0a5aa"}, - {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa2f457b4af386254372dfa78a2eda2563680d982422641a85f271c859df1987"}, - {file = "pydantic_core-2.20.1-cp38-none-win32.whl", hash = "sha256:225b67a1f6d602de0ce7f6c1c3ae89a4aa25d3de9be857999e9124f15dab486a"}, - {file = "pydantic_core-2.20.1-cp38-none-win_amd64.whl", hash = "sha256:6b507132dcfc0dea440cce23ee2182c0ce7aba7054576efc65634f080dbe9434"}, - {file = "pydantic_core-2.20.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b03f7941783b4c4a26051846dea594628b38f6940a2fdc0df00b221aed39314c"}, - {file = "pydantic_core-2.20.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1eedfeb6089ed3fad42e81a67755846ad4dcc14d73698c120a82e4ccf0f1f9f6"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:635fee4e041ab9c479e31edda27fcf966ea9614fff1317e280d99eb3e5ab6fe2"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:77bf3ac639c1ff567ae3b47f8d4cc3dc20f9966a2a6dd2311dcc055d3d04fb8a"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ed1b0132f24beeec5a78b67d9388656d03e6a7c837394f99257e2d55b461611"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6514f963b023aeee506678a1cf821fe31159b925c4b76fe2afa94cc70b3222b"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10d4204d8ca33146e761c79f83cc861df20e7ae9f6487ca290a97702daf56006"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2d036c7187b9422ae5b262badb87a20a49eb6c5238b2004e96d4da1231badef1"}, - {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9ebfef07dbe1d93efb94b4700f2d278494e9162565a54f124c404a5656d7ff09"}, - {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6b9d9bb600328a1ce523ab4f454859e9d439150abb0906c5a1983c146580ebab"}, - {file = "pydantic_core-2.20.1-cp39-none-win32.whl", hash = "sha256:784c1214cb6dd1e3b15dd8b91b9a53852aed16671cc3fbe4786f4f1db07089e2"}, - {file = "pydantic_core-2.20.1-cp39-none-win_amd64.whl", hash = "sha256:d2fe69c5434391727efa54b47a1e7986bb0186e72a41b203df8f5b0a19a4f669"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a45f84b09ac9c3d35dfcf6a27fd0634d30d183205230a0ebe8373a0e8cfa0906"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d02a72df14dfdbaf228424573a07af10637bd490f0901cee872c4f434a735b94"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2b27e6af28f07e2f195552b37d7d66b150adbaa39a6d327766ffd695799780f"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:084659fac3c83fd674596612aeff6041a18402f1e1bc19ca39e417d554468482"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:242b8feb3c493ab78be289c034a1f659e8826e2233786e36f2893a950a719bb6"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:38cf1c40a921d05c5edc61a785c0ddb4bed67827069f535d794ce6bcded919fc"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e0bbdd76ce9aa5d4209d65f2b27fc6e5ef1312ae6c5333c26db3f5ade53a1e99"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:254ec27fdb5b1ee60684f91683be95e5133c994cc54e86a0b0963afa25c8f8a6"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:407653af5617f0757261ae249d3fba09504d7a71ab36ac057c938572d1bc9331"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:c693e916709c2465b02ca0ad7b387c4f8423d1db7b4649c551f27a529181c5ad"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b5ff4911aea936a47d9376fd3ab17e970cc543d1b68921886e7f64bd28308d1"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177f55a886d74f1808763976ac4efd29b7ed15c69f4d838bbd74d9d09cf6fa86"}, - {file = 
"pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:964faa8a861d2664f0c7ab0c181af0bea66098b1919439815ca8803ef136fc4e"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4dd484681c15e6b9a977c785a345d3e378d72678fd5f1f3c0509608da24f2ac0"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f6d6cff3538391e8486a431569b77921adfcdef14eb18fbf19b7c0a5294d4e6a"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a6d511cc297ff0883bc3708b465ff82d7560193169a8b93260f74ecb0a5e08a7"}, - {file = "pydantic_core-2.20.1.tar.gz", hash = "sha256:26ca695eeee5f9f1aeeb211ffc12f10bcb6f71e2989988fda61dabd65db878d4"}, -] - -[package.dependencies] -typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" - -[[package]] -name = "pydantic-settings" -version = "2.4.0" -description = "Settings management using Pydantic" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pydantic_settings-2.4.0-py3-none-any.whl", hash = "sha256:bb6849dc067f1687574c12a639e231f3a6feeed0a12d710c1382045c5db1c315"}, - {file = "pydantic_settings-2.4.0.tar.gz", hash = "sha256:ed81c3a0f46392b4d7c0a565c05884e6e54b3456e6f0fe4d8814981172dc9a88"}, -] - -[package.dependencies] -pydantic = ">=2.7.0" -python-dotenv = ">=0.21.0" - -[package.extras] -azure-key-vault = ["azure-identity (>=1.16.0)", "azure-keyvault-secrets (>=4.8.0)"] -toml = ["tomli (>=2.0.1)"] -yaml = ["pyyaml (>=6.0.1)"] - -[[package]] -name = "pygments" -version = "2.18.0" -description = "Pygments is a syntax highlighting package written in Python." -optional = false -python-versions = ">=3.8" -files = [ - {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, - {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, -] - -[package.extras] -windows-terminal = ["colorama (>=0.4.6)"] - -[[package]] -name = "pyparsing" -version = "3.1.2" -description = "pyparsing module - Classes and methods to define and execute parsing grammars" -optional = false -python-versions = ">=3.6.8" -files = [ - {file = "pyparsing-3.1.2-py3-none-any.whl", hash = "sha256:f9db75911801ed778fe61bb643079ff86601aca99fcae6345aa67292038fb742"}, - {file = "pyparsing-3.1.2.tar.gz", hash = "sha256:a1bac0ce561155ecc3ed78ca94d3c9378656ad4c94c1270de543f621420f94ad"}, -] - -[package.extras] -diagrams = ["jinja2", "railroad-diagrams"] - -[[package]] -name = "pypdf" -version = "4.3.1" -description = "A pure-python PDF library capable of splitting, merging, cropping, and transforming PDF files" -optional = false -python-versions = ">=3.6" -files = [ - {file = "pypdf-4.3.1-py3-none-any.whl", hash = "sha256:64b31da97eda0771ef22edb1bfecd5deee4b72c3d1736b7df2689805076d6418"}, - {file = "pypdf-4.3.1.tar.gz", hash = "sha256:b2f37fe9a3030aa97ca86067a56ba3f9d3565f9a791b305c7355d8392c30d91b"}, -] - -[package.dependencies] -typing_extensions = {version = ">=4.0", markers = "python_version < \"3.11\""} - -[package.extras] -crypto = ["PyCryptodome", "cryptography"] -dev = ["black", "flit", "pip-tools", "pre-commit (<2.18.0)", "pytest-cov", "pytest-socket", "pytest-timeout", "pytest-xdist", "wheel"] -docs = ["myst_parser", "sphinx", "sphinx_rtd_theme"] -full = ["Pillow (>=8.0.0)", "PyCryptodome", "cryptography"] -image = ["Pillow (>=8.0.0)"] - -[[package]] -name = "pyperclip" -version = "1.9.0" -description = "A cross-platform clipboard module for 
Python. (Only handles plain text for now.)" -optional = false -python-versions = "*" -files = [ - {file = "pyperclip-1.9.0.tar.gz", hash = "sha256:b7de0142ddc81bfc5c7507eea19da920b92252b548b96186caf94a5e2527d310"}, -] - -[[package]] -name = "pypika" -version = "0.48.9" -description = "A SQL query builder API for Python" -optional = false -python-versions = "*" -files = [ - {file = "PyPika-0.48.9.tar.gz", hash = "sha256:838836a61747e7c8380cd1b7ff638694b7a7335345d0f559b04b2cd832ad5378"}, -] - -[[package]] -name = "pyproject-hooks" -version = "1.1.0" -description = "Wrappers to call pyproject.toml-based build backend hooks." -optional = false -python-versions = ">=3.7" -files = [ - {file = "pyproject_hooks-1.1.0-py3-none-any.whl", hash = "sha256:7ceeefe9aec63a1064c18d939bdc3adf2d8aa1988a510afec15151578b232aa2"}, - {file = "pyproject_hooks-1.1.0.tar.gz", hash = "sha256:4b37730834edbd6bd37f26ece6b44802fb1c1ee2ece0e54ddff8bfc06db86965"}, -] - -[[package]] -name = "pyreadline3" -version = "3.4.1" -description = "A python implementation of GNU readline." -optional = false -python-versions = "*" -files = [ - {file = "pyreadline3-3.4.1-py3-none-any.whl", hash = "sha256:b0efb6516fd4fb07b45949053826a62fa4cb353db5be2bbb4a7aa1fdd1e345fb"}, - {file = "pyreadline3-3.4.1.tar.gz", hash = "sha256:6f3d1f7b8a31ba32b73917cefc1f28cc660562f39aea8646d30bd6eff21f7bae"}, -] - -[[package]] -name = "pysbd" -version = "0.3.4" -description = "pysbd (Python Sentence Boundary Disambiguation) is a rule-based sentence boundary detection that works out-of-the-box across many languages." -optional = false -python-versions = ">=3" -files = [ - {file = "pysbd-0.3.4-py3-none-any.whl", hash = "sha256:cd838939b7b0b185fcf86b0baf6636667dfb6e474743beeff878e9f42e022953"}, -] - -[[package]] -name = "pytest" -version = "8.3.2" -description = "pytest: simple powerful testing with Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pytest-8.3.2-py3-none-any.whl", hash = "sha256:4ba08f9ae7dcf84ded419494d229b48d0903ea6407b030eaec46df5e6a73bba5"}, - {file = "pytest-8.3.2.tar.gz", hash = "sha256:c132345d12ce551242c87269de812483f5bcc87cdbb4722e48487ba194f9fdce"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "sys_platform == \"win32\""} -exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} -iniconfig = "*" -packaging = "*" -pluggy = ">=1.5,<2" -tomli = {version = ">=1", markers = "python_version < \"3.11\""} - -[package.extras] -dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] - -[[package]] -name = "pytest-asyncio" -version = "0.23.8" -description = "Pytest support for asyncio" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pytest_asyncio-0.23.8-py3-none-any.whl", hash = "sha256:50265d892689a5faefb84df80819d1ecef566eb3549cf915dfb33569359d1ce2"}, - {file = "pytest_asyncio-0.23.8.tar.gz", hash = "sha256:759b10b33a6dc61cce40a8bd5205e302978bbbcc00e279a8b61d9a6a3c82e4d3"}, -] - -[package.dependencies] -pytest = ">=7.0.0,<9" - -[package.extras] -docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] -testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] - -[[package]] -name = "pytest-cov" -version = "5.0.0" -description = "Pytest plugin for measuring coverage." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "pytest-cov-5.0.0.tar.gz", hash = "sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857"}, - {file = "pytest_cov-5.0.0-py3-none-any.whl", hash = "sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652"}, -] - -[package.dependencies] -coverage = {version = ">=5.2.1", extras = ["toml"]} -pytest = ">=4.6" - -[package.extras] -testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"] - -[[package]] -name = "pytest-flakefinder" -version = "1.1.0" -description = "Runs tests multiple times to expose flakiness." -optional = false -python-versions = ">=3.5" -files = [ - {file = "pytest-flakefinder-1.1.0.tar.gz", hash = "sha256:e2412a1920bdb8e7908783b20b3d57e9dad590cc39a93e8596ffdd493b403e0e"}, - {file = "pytest_flakefinder-1.1.0-py2.py3-none-any.whl", hash = "sha256:741e0e8eea427052f5b8c89c2b3c3019a50c39a59ce4df6a305a2c2d9ba2bd13"}, -] - -[package.dependencies] -pytest = ">=2.7.1" - -[[package]] -name = "pytest-instafail" -version = "0.5.0" -description = "pytest plugin to show failures instantly" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pytest-instafail-0.5.0.tar.gz", hash = "sha256:33a606f7e0c8e646dc3bfee0d5e3a4b7b78ef7c36168cfa1f3d93af7ca706c9e"}, - {file = "pytest_instafail-0.5.0-py3-none-any.whl", hash = "sha256:6855414487e9e4bb76a118ce952c3c27d3866af15487506c4ded92eb72387819"}, -] - -[package.dependencies] -pytest = ">=5" - -[[package]] -name = "pytest-mock" -version = "3.14.0" -description = "Thin-wrapper around the mock package for easier use with pytest" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, - {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, -] - -[package.dependencies] -pytest = ">=6.2.5" - -[package.extras] -dev = ["pre-commit", "pytest-asyncio", "tox"] - -[[package]] -name = "pytest-profiling" -version = "1.7.0" -description = "Profiling plugin for py.test" -optional = false -python-versions = "*" -files = [ - {file = "pytest-profiling-1.7.0.tar.gz", hash = "sha256:93938f147662225d2b8bd5af89587b979652426a8a6ffd7e73ec4a23e24b7f29"}, - {file = "pytest_profiling-1.7.0-py2.py3-none-any.whl", hash = "sha256:999cc9ac94f2e528e3f5d43465da277429984a1c237ae9818f8cfd0b06acb019"}, -] - -[package.dependencies] -gprof2dot = "*" -pytest = "*" -six = "*" - -[package.extras] -tests = ["pytest-virtualenv"] - -[[package]] -name = "pytest-split" -version = "0.9.0" -description = "Pytest plugin which splits the test suite to equally sized sub suites based on test execution time." -optional = false -python-versions = "<4.0,>=3.8.1" -files = [ - {file = "pytest_split-0.9.0-py3-none-any.whl", hash = "sha256:9e197df601828d76a1ab615158d9c6253ec9f96e46c1d3ea27187aa5ac0ef9de"}, - {file = "pytest_split-0.9.0.tar.gz", hash = "sha256:ca52527e4d9024f6ec3aba723527bd276d12096024999b1f5b8445a38da1e81c"}, -] - -[package.dependencies] -pytest = ">=5,<9" - -[[package]] -name = "pytest-sugar" -version = "1.0.0" -description = "pytest-sugar is a plugin for pytest that changes the default look and feel of pytest (e.g. progressbar, show tests that fail instantly)." 
-optional = false -python-versions = "*" -files = [ - {file = "pytest-sugar-1.0.0.tar.gz", hash = "sha256:6422e83258f5b0c04ce7c632176c7732cab5fdb909cb39cca5c9139f81276c0a"}, - {file = "pytest_sugar-1.0.0-py3-none-any.whl", hash = "sha256:70ebcd8fc5795dc457ff8b69d266a4e2e8a74ae0c3edc749381c64b5246c8dfd"}, -] - -[package.dependencies] -packaging = ">=21.3" -pytest = ">=6.2.0" -termcolor = ">=2.1.0" - -[package.extras] -dev = ["black", "flake8", "pre-commit"] - -[[package]] -name = "pytest-xdist" -version = "3.6.1" -description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pytest_xdist-3.6.1-py3-none-any.whl", hash = "sha256:9ed4adfb68a016610848639bb7e02c9352d5d9f03d04809919e2dafc3be4cca7"}, - {file = "pytest_xdist-3.6.1.tar.gz", hash = "sha256:ead156a4db231eec769737f57668ef58a2084a34b2e55c4a8fa20d861107300d"}, -] - -[package.dependencies] -execnet = ">=2.1" -pytest = ">=7.0.0" - -[package.extras] -psutil = ["psutil (>=3.0)"] -setproctitle = ["setproctitle"] -testing = ["filelock"] - -[[package]] -name = "python-dateutil" -version = "2.9.0.post0" -description = "Extensions to the standard Python datetime module" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -files = [ - {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, - {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, -] - -[package.dependencies] -six = ">=1.5" - -[[package]] -name = "python-docx" -version = "1.1.2" -description = "Create, read, and update Microsoft Word .docx files." -optional = false -python-versions = ">=3.7" -files = [ - {file = "python_docx-1.1.2-py3-none-any.whl", hash = "sha256:08c20d6058916fb19853fcf080f7f42b6270d89eac9fa5f8c15f691c0017fabe"}, - {file = "python_docx-1.1.2.tar.gz", hash = "sha256:0cf1f22e95b9002addca7948e16f2cd7acdfd498047f1941ca5d293db7762efd"}, -] - -[package.dependencies] -lxml = ">=3.1.0" -typing-extensions = ">=4.9.0" - -[[package]] -name = "python-dotenv" -version = "1.0.1" -description = "Read key-value pairs from a .env file and set them as environment variables" -optional = false -python-versions = ">=3.8" -files = [ - {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, - {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, -] - -[package.extras] -cli = ["click (>=5.0)"] - -[[package]] -name = "python-jose" -version = "3.3.0" -description = "JOSE implementation in Python" -optional = false -python-versions = "*" -files = [ - {file = "python-jose-3.3.0.tar.gz", hash = "sha256:55779b5e6ad599c6336191246e95eb2293a9ddebd555f796a65f838f07e5d78a"}, - {file = "python_jose-3.3.0-py2.py3-none-any.whl", hash = "sha256:9b1376b023f8b298536eedd47ae1089bcdb848f1535ab30555cd92002d78923a"}, -] - -[package.dependencies] -ecdsa = "!=0.15" -pyasn1 = "*" -rsa = "*" - -[package.extras] -cryptography = ["cryptography (>=3.4.0)"] -pycrypto = ["pyasn1", "pycrypto (>=2.6.0,<2.7.0)"] -pycryptodome = ["pyasn1", "pycryptodome (>=3.3.1,<4.0.0)"] - -[[package]] -name = "python-multipart" -version = "0.0.7" -description = "A streaming multipart parser for Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "python_multipart-0.0.7-py3-none-any.whl", hash = 
"sha256:b1fef9a53b74c795e2347daac8c54b252d9e0df9c619712691c1cc8021bd3c49"}, - {file = "python_multipart-0.0.7.tar.gz", hash = "sha256:288a6c39b06596c1b988bb6794c6fbc80e6c369e35e5062637df256bee0c9af9"}, -] - -[package.extras] -dev = ["atomicwrites (==1.2.1)", "attrs (==19.2.0)", "coverage (==6.5.0)", "hatch", "invoke (==2.2.0)", "more-itertools (==4.3.0)", "pbr (==4.3.0)", "pluggy (==1.0.0)", "py (==1.11.0)", "pytest (==7.2.0)", "pytest-cov (==4.0.0)", "pytest-timeout (==2.1.0)", "pyyaml (==5.1)"] - -[[package]] -name = "pytz" -version = "2024.1" -description = "World timezone definitions, modern and historical" -optional = false -python-versions = "*" -files = [ - {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, - {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, -] - -[[package]] -name = "pywin32" -version = "306" -description = "Python for Window Extensions" -optional = false -python-versions = "*" -files = [ - {file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"}, - {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"}, - {file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"}, - {file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"}, - {file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"}, - {file = "pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"}, - {file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"}, - {file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"}, - {file = "pywin32-306-cp37-cp37m-win32.whl", hash = "sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"}, - {file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"}, - {file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"}, - {file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"}, - {file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"}, - {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, -] - -[[package]] -name = "pyyaml" -version = "6.0.2" -description = "YAML parser and emitter for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, - {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, - {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, - {file = 
"PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, - {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, - {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, - {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, - {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, - {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, - {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, - {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, - {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, - {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, - {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, - {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, - {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, - {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, - {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, - {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, - {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, - {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = 
"sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, - {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, - {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, - {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, - {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, - {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, - {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, - {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, - {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, - {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, - {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, - {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, - {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, - {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, - {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, - {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, - {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, - {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, - {file = 
"PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, - {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, - {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, -] - -[[package]] -name = "pyzmq" -version = "26.1.0" -description = "Python bindings for 0MQ" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pyzmq-26.1.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:263cf1e36862310bf5becfbc488e18d5d698941858860c5a8c079d1511b3b18e"}, - {file = "pyzmq-26.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d5c8b17f6e8f29138678834cf8518049e740385eb2dbf736e8f07fc6587ec682"}, - {file = "pyzmq-26.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:75a95c2358fcfdef3374cb8baf57f1064d73246d55e41683aaffb6cfe6862917"}, - {file = "pyzmq-26.1.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f99de52b8fbdb2a8f5301ae5fc0f9e6b3ba30d1d5fc0421956967edcc6914242"}, - {file = "pyzmq-26.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bcbfbab4e1895d58ab7da1b5ce9a327764f0366911ba5b95406c9104bceacb0"}, - {file = "pyzmq-26.1.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:77ce6a332c7e362cb59b63f5edf730e83590d0ab4e59c2aa5bd79419a42e3449"}, - {file = "pyzmq-26.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ba0a31d00e8616149a5ab440d058ec2da621e05d744914774c4dde6837e1f545"}, - {file = "pyzmq-26.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8b88641384e84a258b740801cd4dbc45c75f148ee674bec3149999adda4a8598"}, - {file = "pyzmq-26.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2fa76ebcebe555cce90f16246edc3ad83ab65bb7b3d4ce408cf6bc67740c4f88"}, - {file = "pyzmq-26.1.0-cp310-cp310-win32.whl", hash = "sha256:fbf558551cf415586e91160d69ca6416f3fce0b86175b64e4293644a7416b81b"}, - {file = "pyzmq-26.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:a7b8aab50e5a288c9724d260feae25eda69582be84e97c012c80e1a5e7e03fb2"}, - {file = "pyzmq-26.1.0-cp310-cp310-win_arm64.whl", hash = "sha256:08f74904cb066e1178c1ec706dfdb5c6c680cd7a8ed9efebeac923d84c1f13b1"}, - {file = "pyzmq-26.1.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:46d6800b45015f96b9d92ece229d92f2aef137d82906577d55fadeb9cf5fcb71"}, - {file = "pyzmq-26.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5bc2431167adc50ba42ea3e5e5f5cd70d93e18ab7b2f95e724dd8e1bd2c38120"}, - {file = "pyzmq-26.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b3bb34bebaa1b78e562931a1687ff663d298013f78f972a534f36c523311a84d"}, - {file = "pyzmq-26.1.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd3f6329340cef1c7ba9611bd038f2d523cea79f09f9c8f6b0553caba59ec562"}, - {file = "pyzmq-26.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:471880c4c14e5a056a96cd224f5e71211997d40b4bf5e9fdded55dafab1f98f2"}, - {file = "pyzmq-26.1.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:ce6f2b66799971cbae5d6547acefa7231458289e0ad481d0be0740535da38d8b"}, - {file = "pyzmq-26.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0a1f6ea5b1d6cdbb8cfa0536f0d470f12b4b41ad83625012e575f0e3ecfe97f0"}, - {file = "pyzmq-26.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b45e6445ac95ecb7d728604bae6538f40ccf4449b132b5428c09918523abc96d"}, - {file = 
"pyzmq-26.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:94c4262626424683feea0f3c34951d39d49d354722db2745c42aa6bb50ecd93b"}, - {file = "pyzmq-26.1.0-cp311-cp311-win32.whl", hash = "sha256:a0f0ab9df66eb34d58205913f4540e2ad17a175b05d81b0b7197bc57d000e829"}, - {file = "pyzmq-26.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:8efb782f5a6c450589dbab4cb0f66f3a9026286333fe8f3a084399149af52f29"}, - {file = "pyzmq-26.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:f133d05aaf623519f45e16ab77526e1e70d4e1308e084c2fb4cedb1a0c764bbb"}, - {file = "pyzmq-26.1.0-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:3d3146b1c3dcc8a1539e7cc094700b2be1e605a76f7c8f0979b6d3bde5ad4072"}, - {file = "pyzmq-26.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d9270fbf038bf34ffca4855bcda6e082e2c7f906b9eb8d9a8ce82691166060f7"}, - {file = "pyzmq-26.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:995301f6740a421afc863a713fe62c0aaf564708d4aa057dfdf0f0f56525294b"}, - {file = "pyzmq-26.1.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7eca8b89e56fb8c6c26dd3e09bd41b24789022acf1cf13358e96f1cafd8cae3"}, - {file = "pyzmq-26.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d4feb2e83dfe9ace6374a847e98ee9d1246ebadcc0cb765482e272c34e5820"}, - {file = "pyzmq-26.1.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:d4fafc2eb5d83f4647331267808c7e0c5722c25a729a614dc2b90479cafa78bd"}, - {file = "pyzmq-26.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:58c33dc0e185dd97a9ac0288b3188d1be12b756eda67490e6ed6a75cf9491d79"}, - {file = "pyzmq-26.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:68a0a1d83d33d8367ddddb3e6bb4afbb0f92bd1dac2c72cd5e5ddc86bdafd3eb"}, - {file = "pyzmq-26.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2ae7c57e22ad881af78075e0cea10a4c778e67234adc65c404391b417a4dda83"}, - {file = "pyzmq-26.1.0-cp312-cp312-win32.whl", hash = "sha256:347e84fc88cc4cb646597f6d3a7ea0998f887ee8dc31c08587e9c3fd7b5ccef3"}, - {file = "pyzmq-26.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:9f136a6e964830230912f75b5a116a21fe8e34128dcfd82285aa0ef07cb2c7bd"}, - {file = "pyzmq-26.1.0-cp312-cp312-win_arm64.whl", hash = "sha256:a4b7a989c8f5a72ab1b2bbfa58105578753ae77b71ba33e7383a31ff75a504c4"}, - {file = "pyzmq-26.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d416f2088ac8f12daacffbc2e8918ef4d6be8568e9d7155c83b7cebed49d2322"}, - {file = "pyzmq-26.1.0-cp313-cp313-macosx_10_15_universal2.whl", hash = "sha256:ecb6c88d7946166d783a635efc89f9a1ff11c33d680a20df9657b6902a1d133b"}, - {file = "pyzmq-26.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:471312a7375571857a089342beccc1a63584315188560c7c0da7e0a23afd8a5c"}, - {file = "pyzmq-26.1.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e6cea102ffa16b737d11932c426f1dc14b5938cf7bc12e17269559c458ac334"}, - {file = "pyzmq-26.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec7248673ffc7104b54e4957cee38b2f3075a13442348c8d651777bf41aa45ee"}, - {file = "pyzmq-26.1.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:0614aed6f87d550b5cecb03d795f4ddbb1544b78d02a4bd5eecf644ec98a39f6"}, - {file = "pyzmq-26.1.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:e8746ce968be22a8a1801bf4a23e565f9687088580c3ed07af5846580dd97f76"}, - {file = "pyzmq-26.1.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:7688653574392d2eaeef75ddcd0b2de5b232d8730af29af56c5adf1df9ef8d6f"}, - 
{file = "pyzmq-26.1.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:8d4dac7d97f15c653a5fedcafa82626bd6cee1450ccdaf84ffed7ea14f2b07a4"}, - {file = "pyzmq-26.1.0-cp313-cp313-win32.whl", hash = "sha256:ccb42ca0a4a46232d716779421bbebbcad23c08d37c980f02cc3a6bd115ad277"}, - {file = "pyzmq-26.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:e1e5d0a25aea8b691a00d6b54b28ac514c8cc0d8646d05f7ca6cb64b97358250"}, - {file = "pyzmq-26.1.0-cp313-cp313-win_arm64.whl", hash = "sha256:fc82269d24860cfa859b676d18850cbb8e312dcd7eada09e7d5b007e2f3d9eb1"}, - {file = "pyzmq-26.1.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:416ac51cabd54f587995c2b05421324700b22e98d3d0aa2cfaec985524d16f1d"}, - {file = "pyzmq-26.1.0-cp313-cp313t-macosx_10_15_universal2.whl", hash = "sha256:ff832cce719edd11266ca32bc74a626b814fff236824aa1aeaad399b69fe6eae"}, - {file = "pyzmq-26.1.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:393daac1bcf81b2a23e696b7b638eedc965e9e3d2112961a072b6cd8179ad2eb"}, - {file = "pyzmq-26.1.0-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9869fa984c8670c8ab899a719eb7b516860a29bc26300a84d24d8c1b71eae3ec"}, - {file = "pyzmq-26.1.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b3b8e36fd4c32c0825b4461372949ecd1585d326802b1321f8b6dc1d7e9318c"}, - {file = "pyzmq-26.1.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:3ee647d84b83509b7271457bb428cc347037f437ead4b0b6e43b5eba35fec0aa"}, - {file = "pyzmq-26.1.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:45cb1a70eb00405ce3893041099655265fabcd9c4e1e50c330026e82257892c1"}, - {file = "pyzmq-26.1.0-cp313-cp313t-musllinux_1_1_i686.whl", hash = "sha256:5cca7b4adb86d7470e0fc96037771981d740f0b4cb99776d5cb59cd0e6684a73"}, - {file = "pyzmq-26.1.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:91d1a20bdaf3b25f3173ff44e54b1cfbc05f94c9e8133314eb2962a89e05d6e3"}, - {file = "pyzmq-26.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c0665d85535192098420428c779361b8823d3d7ec4848c6af3abb93bc5c915bf"}, - {file = "pyzmq-26.1.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:96d7c1d35ee4a495df56c50c83df7af1c9688cce2e9e0edffdbf50889c167595"}, - {file = "pyzmq-26.1.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b281b5ff5fcc9dcbfe941ac5c7fcd4b6c065adad12d850f95c9d6f23c2652384"}, - {file = "pyzmq-26.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5384c527a9a004445c5074f1e20db83086c8ff1682a626676229aafd9cf9f7d1"}, - {file = "pyzmq-26.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:754c99a9840839375ee251b38ac5964c0f369306eddb56804a073b6efdc0cd88"}, - {file = "pyzmq-26.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9bdfcb74b469b592972ed881bad57d22e2c0acc89f5e8c146782d0d90fb9f4bf"}, - {file = "pyzmq-26.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bd13f0231f4788db619347b971ca5f319c5b7ebee151afc7c14632068c6261d3"}, - {file = "pyzmq-26.1.0-cp37-cp37m-win32.whl", hash = "sha256:c5668dac86a869349828db5fc928ee3f58d450dce2c85607067d581f745e4fb1"}, - {file = "pyzmq-26.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:ad875277844cfaeca7fe299ddf8c8d8bfe271c3dc1caf14d454faa5cdbf2fa7a"}, - {file = "pyzmq-26.1.0-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:65c6e03cc0222eaf6aad57ff4ecc0a070451e23232bb48db4322cc45602cede0"}, - {file = "pyzmq-26.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:038ae4ffb63e3991f386e7fda85a9baab7d6617fe85b74a8f9cab190d73adb2b"}, - {file = "pyzmq-26.1.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:bdeb2c61611293f64ac1073f4bf6723b67d291905308a7de9bb2ca87464e3273"}, - {file = "pyzmq-26.1.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:61dfa5ee9d7df297c859ac82b1226d8fefaf9c5113dc25c2c00ecad6feeeb04f"}, - {file = "pyzmq-26.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3292d384537b9918010769b82ab3e79fca8b23d74f56fc69a679106a3e2c2cf"}, - {file = "pyzmq-26.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f9499c70c19ff0fbe1007043acb5ad15c1dec7d8e84ab429bca8c87138e8f85c"}, - {file = "pyzmq-26.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d3dd5523ed258ad58fed7e364c92a9360d1af8a9371e0822bd0146bdf017ef4c"}, - {file = "pyzmq-26.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:baba2fd199b098c5544ef2536b2499d2e2155392973ad32687024bd8572a7d1c"}, - {file = "pyzmq-26.1.0-cp38-cp38-win32.whl", hash = "sha256:ddbb2b386128d8eca92bd9ca74e80f73fe263bcca7aa419f5b4cbc1661e19741"}, - {file = "pyzmq-26.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:79e45a4096ec8388cdeb04a9fa5e9371583bcb826964d55b8b66cbffe7b33c86"}, - {file = "pyzmq-26.1.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:add52c78a12196bc0fda2de087ba6c876ea677cbda2e3eba63546b26e8bf177b"}, - {file = "pyzmq-26.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:98c03bd7f3339ff47de7ea9ac94a2b34580a8d4df69b50128bb6669e1191a895"}, - {file = "pyzmq-26.1.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:dcc37d9d708784726fafc9c5e1232de655a009dbf97946f117aefa38d5985a0f"}, - {file = "pyzmq-26.1.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5a6ed52f0b9bf8dcc64cc82cce0607a3dfed1dbb7e8c6f282adfccc7be9781de"}, - {file = "pyzmq-26.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:451e16ae8bea3d95649317b463c9f95cd9022641ec884e3d63fc67841ae86dfe"}, - {file = "pyzmq-26.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:906e532c814e1d579138177a00ae835cd6becbf104d45ed9093a3aaf658f6a6a"}, - {file = "pyzmq-26.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:05bacc4f94af468cc82808ae3293390278d5f3375bb20fef21e2034bb9a505b6"}, - {file = "pyzmq-26.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:57bb2acba798dc3740e913ffadd56b1fcef96f111e66f09e2a8db3050f1f12c8"}, - {file = "pyzmq-26.1.0-cp39-cp39-win32.whl", hash = "sha256:f774841bb0e8588505002962c02da420bcfb4c5056e87a139c6e45e745c0e2e2"}, - {file = "pyzmq-26.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:359c533bedc62c56415a1f5fcfd8279bc93453afdb0803307375ecf81c962402"}, - {file = "pyzmq-26.1.0-cp39-cp39-win_arm64.whl", hash = "sha256:7907419d150b19962138ecec81a17d4892ea440c184949dc29b358bc730caf69"}, - {file = "pyzmq-26.1.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:b24079a14c9596846bf7516fe75d1e2188d4a528364494859106a33d8b48be38"}, - {file = "pyzmq-26.1.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59d0acd2976e1064f1b398a00e2c3e77ed0a157529779e23087d4c2fb8aaa416"}, - {file = "pyzmq-26.1.0-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:911c43a4117915203c4cc8755e0f888e16c4676a82f61caee2f21b0c00e5b894"}, - {file = "pyzmq-26.1.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b10163e586cc609f5f85c9b233195554d77b1e9a0801388907441aaeb22841c5"}, - 
{file = "pyzmq-26.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:28a8b2abb76042f5fd7bd720f7fea48c0fd3e82e9de0a1bf2c0de3812ce44a42"}, - {file = "pyzmq-26.1.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:bef24d3e4ae2c985034439f449e3f9e06bf579974ce0e53d8a507a1577d5b2ab"}, - {file = "pyzmq-26.1.0-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2cd0f4d314f4a2518e8970b6f299ae18cff7c44d4a1fc06fc713f791c3a9e3ea"}, - {file = "pyzmq-26.1.0-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fa25a620eed2a419acc2cf10135b995f8f0ce78ad00534d729aa761e4adcef8a"}, - {file = "pyzmq-26.1.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef3b048822dca6d231d8a8ba21069844ae38f5d83889b9b690bf17d2acc7d099"}, - {file = "pyzmq-26.1.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:9a6847c92d9851b59b9f33f968c68e9e441f9a0f8fc972c5580c5cd7cbc6ee24"}, - {file = "pyzmq-26.1.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c9b9305004d7e4e6a824f4f19b6d8f32b3578aad6f19fc1122aaf320cbe3dc83"}, - {file = "pyzmq-26.1.0-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:63c1d3a65acb2f9c92dce03c4e1758cc552f1ae5c78d79a44e3bb88d2fa71f3a"}, - {file = "pyzmq-26.1.0-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d36b8fffe8b248a1b961c86fbdfa0129dfce878731d169ede7fa2631447331be"}, - {file = "pyzmq-26.1.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67976d12ebfd61a3bc7d77b71a9589b4d61d0422282596cf58c62c3866916544"}, - {file = "pyzmq-26.1.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:998444debc8816b5d8d15f966e42751032d0f4c55300c48cc337f2b3e4f17d03"}, - {file = "pyzmq-26.1.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:e5c88b2f13bcf55fee78ea83567b9fe079ba1a4bef8b35c376043440040f7edb"}, - {file = "pyzmq-26.1.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d906d43e1592be4b25a587b7d96527cb67277542a5611e8ea9e996182fae410"}, - {file = "pyzmq-26.1.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80b0c9942430d731c786545da6be96d824a41a51742e3e374fedd9018ea43106"}, - {file = "pyzmq-26.1.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:314d11564c00b77f6224d12eb3ddebe926c301e86b648a1835c5b28176c83eab"}, - {file = "pyzmq-26.1.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:093a1a3cae2496233f14b57f4b485da01b4ff764582c854c0f42c6dd2be37f3d"}, - {file = "pyzmq-26.1.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3c397b1b450f749a7e974d74c06d69bd22dd362142f370ef2bd32a684d6b480c"}, - {file = "pyzmq-26.1.0.tar.gz", hash = "sha256:6c5aeea71f018ebd3b9115c7cb13863dd850e98ca6b9258509de1246461a7e7f"}, -] - -[package.dependencies] -cffi = {version = "*", markers = "implementation_name == \"pypy\""} - -[[package]] -name = "qdrant-client" -version = "1.11.0" -description = "Client library for the Qdrant vector search engine" -optional = false -python-versions = ">=3.8" -files = [ - {file = "qdrant_client-1.11.0-py3-none-any.whl", hash = "sha256:1f574ccebb91c0bc8a620c9a41a5a010084fbc4d8c6f1cd0ab7b2eeb97336fc0"}, - {file = "qdrant_client-1.11.0.tar.gz", hash = "sha256:7c1d4d7a96cfd1ee0cde2a21c607e9df86bcca795ad8d1fd274d295ab64b8458"}, -] - -[package.dependencies] -grpcio = ">=1.41.0" -grpcio-tools = ">=1.41.0" -httpx = {version = ">=0.20.0", extras = ["http2"]} -numpy = [ - {version = ">=1.21", markers = "python_version 
>= \"3.8\" and python_version < \"3.12\""}, - {version = ">=1.26", markers = "python_version >= \"3.12\""}, -] -portalocker = ">=2.7.0,<3.0.0" -pydantic = ">=1.10.8" -urllib3 = ">=1.26.14,<3" - -[package.extras] -fastembed = ["fastembed (==0.3.4)"] -fastembed-gpu = ["fastembed-gpu (==0.3.4)"] - -[[package]] -name = "regex" -version = "2023.12.25" -description = "Alternative regular expression module, to replace re." -optional = false -python-versions = ">=3.7" -files = [ - {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0694219a1d54336fd0445ea382d49d36882415c0134ee1e8332afd1529f0baa5"}, - {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b014333bd0217ad3d54c143de9d4b9a3ca1c5a29a6d0d554952ea071cff0f1f8"}, - {file = "regex-2023.12.25-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d865984b3f71f6d0af64d0d88f5733521698f6c16f445bb09ce746c92c97c586"}, - {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e0eabac536b4cc7f57a5f3d095bfa557860ab912f25965e08fe1545e2ed8b4c"}, - {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c25a8ad70e716f96e13a637802813f65d8a6760ef48672aa3502f4c24ea8b400"}, - {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9b6d73353f777630626f403b0652055ebfe8ff142a44ec2cf18ae470395766e"}, - {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9cc99d6946d750eb75827cb53c4371b8b0fe89c733a94b1573c9dd16ea6c9e4"}, - {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88d1f7bef20c721359d8675f7d9f8e414ec5003d8f642fdfd8087777ff7f94b5"}, - {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cb3fe77aec8f1995611f966d0c656fdce398317f850d0e6e7aebdfe61f40e1cd"}, - {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7aa47c2e9ea33a4a2a05f40fcd3ea36d73853a2aae7b4feab6fc85f8bf2c9704"}, - {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:df26481f0c7a3f8739fecb3e81bc9da3fcfae34d6c094563b9d4670b047312e1"}, - {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c40281f7d70baf6e0db0c2f7472b31609f5bc2748fe7275ea65a0b4601d9b392"}, - {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:d94a1db462d5690ebf6ae86d11c5e420042b9898af5dcf278bd97d6bda065423"}, - {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ba1b30765a55acf15dce3f364e4928b80858fa8f979ad41f862358939bdd1f2f"}, - {file = "regex-2023.12.25-cp310-cp310-win32.whl", hash = "sha256:150c39f5b964e4d7dba46a7962a088fbc91f06e606f023ce57bb347a3b2d4630"}, - {file = "regex-2023.12.25-cp310-cp310-win_amd64.whl", hash = "sha256:09da66917262d9481c719599116c7dc0c321ffcec4b1f510c4f8a066f8768105"}, - {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1b9d811f72210fa9306aeb88385b8f8bcef0dfbf3873410413c00aa94c56c2b6"}, - {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d902a43085a308cef32c0d3aea962524b725403fd9373dea18110904003bac97"}, - {file = "regex-2023.12.25-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d166eafc19f4718df38887b2bbe1467a4f74a9830e8605089ea7a30dd4da8887"}, - {file = 
"regex-2023.12.25-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7ad32824b7f02bb3c9f80306d405a1d9b7bb89362d68b3c5a9be53836caebdb"}, - {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:636ba0a77de609d6510235b7f0e77ec494d2657108f777e8765efc060094c98c"}, - {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fda75704357805eb953a3ee15a2b240694a9a514548cd49b3c5124b4e2ad01b"}, - {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f72cbae7f6b01591f90814250e636065850c5926751af02bb48da94dfced7baa"}, - {file = "regex-2023.12.25-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db2a0b1857f18b11e3b0e54ddfefc96af46b0896fb678c85f63fb8c37518b3e7"}, - {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7502534e55c7c36c0978c91ba6f61703faf7ce733715ca48f499d3dbbd7657e0"}, - {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e8c7e08bb566de4faaf11984af13f6bcf6a08f327b13631d41d62592681d24fe"}, - {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:283fc8eed679758de38fe493b7d7d84a198b558942b03f017b1f94dda8efae80"}, - {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:f44dd4d68697559d007462b0a3a1d9acd61d97072b71f6d1968daef26bc744bd"}, - {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:67d3ccfc590e5e7197750fcb3a2915b416a53e2de847a728cfa60141054123d4"}, - {file = "regex-2023.12.25-cp311-cp311-win32.whl", hash = "sha256:68191f80a9bad283432385961d9efe09d783bcd36ed35a60fb1ff3f1ec2efe87"}, - {file = "regex-2023.12.25-cp311-cp311-win_amd64.whl", hash = "sha256:7d2af3f6b8419661a0c421584cfe8aaec1c0e435ce7e47ee2a97e344b98f794f"}, - {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8a0ccf52bb37d1a700375a6b395bff5dd15c50acb745f7db30415bae3c2b0715"}, - {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c3c4a78615b7762740531c27cf46e2f388d8d727d0c0c739e72048beb26c8a9d"}, - {file = "regex-2023.12.25-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ad83e7545b4ab69216cef4cc47e344d19622e28aabec61574b20257c65466d6a"}, - {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7a635871143661feccce3979e1727c4e094f2bdfd3ec4b90dfd4f16f571a87a"}, - {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d498eea3f581fbe1b34b59c697512a8baef88212f92e4c7830fcc1499f5b45a5"}, - {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:43f7cd5754d02a56ae4ebb91b33461dc67be8e3e0153f593c509e21d219c5060"}, - {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51f4b32f793812714fd5307222a7f77e739b9bc566dc94a18126aba3b92b98a3"}, - {file = "regex-2023.12.25-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba99d8077424501b9616b43a2d208095746fb1284fc5ba490139651f971d39d9"}, - {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4bfc2b16e3ba8850e0e262467275dd4d62f0d045e0e9eda2bc65078c0110a11f"}, - {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8c2c19dae8a3eb0ea45a8448356ed561be843b13cbc34b840922ddf565498c1c"}, - {file = 
"regex-2023.12.25-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:60080bb3d8617d96f0fb7e19796384cc2467447ef1c491694850ebd3670bc457"}, - {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b77e27b79448e34c2c51c09836033056a0547aa360c45eeeb67803da7b0eedaf"}, - {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:518440c991f514331f4850a63560321f833979d145d7d81186dbe2f19e27ae3d"}, - {file = "regex-2023.12.25-cp312-cp312-win32.whl", hash = "sha256:e2610e9406d3b0073636a3a2e80db05a02f0c3169b5632022b4e81c0364bcda5"}, - {file = "regex-2023.12.25-cp312-cp312-win_amd64.whl", hash = "sha256:cc37b9aeebab425f11f27e5e9e6cf580be7206c6582a64467a14dda211abc232"}, - {file = "regex-2023.12.25-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:da695d75ac97cb1cd725adac136d25ca687da4536154cdc2815f576e4da11c69"}, - {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d126361607b33c4eb7b36debc173bf25d7805847346dd4d99b5499e1fef52bc7"}, - {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4719bb05094d7d8563a450cf8738d2e1061420f79cfcc1fa7f0a44744c4d8f73"}, - {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5dd58946bce44b53b06d94aa95560d0b243eb2fe64227cba50017a8d8b3cd3e2"}, - {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22a86d9fff2009302c440b9d799ef2fe322416d2d58fc124b926aa89365ec482"}, - {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2aae8101919e8aa05ecfe6322b278f41ce2994c4a430303c4cd163fef746e04f"}, - {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e692296c4cc2873967771345a876bcfc1c547e8dd695c6b89342488b0ea55cd8"}, - {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:263ef5cc10979837f243950637fffb06e8daed7f1ac1e39d5910fd29929e489a"}, - {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d6f7e255e5fa94642a0724e35406e6cb7001c09d476ab5fce002f652b36d0c39"}, - {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:88ad44e220e22b63b0f8f81f007e8abbb92874d8ced66f32571ef8beb0643b2b"}, - {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:3a17d3ede18f9cedcbe23d2daa8a2cd6f59fe2bf082c567e43083bba3fb00347"}, - {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d15b274f9e15b1a0b7a45d2ac86d1f634d983ca40d6b886721626c47a400bf39"}, - {file = "regex-2023.12.25-cp37-cp37m-win32.whl", hash = "sha256:ed19b3a05ae0c97dd8f75a5d8f21f7723a8c33bbc555da6bbe1f96c470139d3c"}, - {file = "regex-2023.12.25-cp37-cp37m-win_amd64.whl", hash = "sha256:a6d1047952c0b8104a1d371f88f4ab62e6275567d4458c1e26e9627ad489b445"}, - {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b43523d7bc2abd757119dbfb38af91b5735eea45537ec6ec3a5ec3f9562a1c53"}, - {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:efb2d82f33b2212898f1659fb1c2e9ac30493ac41e4d53123da374c3b5541e64"}, - {file = "regex-2023.12.25-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b7fca9205b59c1a3d5031f7e64ed627a1074730a51c2a80e97653e3e9fa0d415"}, - {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:086dd15e9435b393ae06f96ab69ab2d333f5d65cbe65ca5a3ef0ec9564dfe770"}, - {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e81469f7d01efed9b53740aedd26085f20d49da65f9c1f41e822a33992cb1590"}, - {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:34e4af5b27232f68042aa40a91c3b9bb4da0eeb31b7632e0091afc4310afe6cb"}, - {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9852b76ab558e45b20bf1893b59af64a28bd3820b0c2efc80e0a70a4a3ea51c1"}, - {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff100b203092af77d1a5a7abe085b3506b7eaaf9abf65b73b7d6905b6cb76988"}, - {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cc038b2d8b1470364b1888a98fd22d616fba2b6309c5b5f181ad4483e0017861"}, - {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:094ba386bb5c01e54e14434d4caabf6583334090865b23ef58e0424a6286d3dc"}, - {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5cd05d0f57846d8ba4b71d9c00f6f37d6b97d5e5ef8b3c3840426a475c8f70f4"}, - {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:9aa1a67bbf0f957bbe096375887b2505f5d8ae16bf04488e8b0f334c36e31360"}, - {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:98a2636994f943b871786c9e82bfe7883ecdaba2ef5df54e1450fa9869d1f756"}, - {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:37f8e93a81fc5e5bd8db7e10e62dc64261bcd88f8d7e6640aaebe9bc180d9ce2"}, - {file = "regex-2023.12.25-cp38-cp38-win32.whl", hash = "sha256:d78bd484930c1da2b9679290a41cdb25cc127d783768a0369d6b449e72f88beb"}, - {file = "regex-2023.12.25-cp38-cp38-win_amd64.whl", hash = "sha256:b521dcecebc5b978b447f0f69b5b7f3840eac454862270406a39837ffae4e697"}, - {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f7bc09bc9c29ebead055bcba136a67378f03d66bf359e87d0f7c759d6d4ffa31"}, - {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e14b73607d6231f3cc4622809c196b540a6a44e903bcfad940779c80dffa7be7"}, - {file = "regex-2023.12.25-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9eda5f7a50141291beda3edd00abc2d4a5b16c29c92daf8d5bd76934150f3edc"}, - {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc6bb9aa69aacf0f6032c307da718f61a40cf970849e471254e0e91c56ffca95"}, - {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:298dc6354d414bc921581be85695d18912bea163a8b23cac9a2562bbcd5088b1"}, - {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f4e475a80ecbd15896a976aa0b386c5525d0ed34d5c600b6d3ebac0a67c7ddf"}, - {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531ac6cf22b53e0696f8e1d56ce2396311254eb806111ddd3922c9d937151dae"}, - {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22f3470f7524b6da61e2020672df2f3063676aff444db1daa283c2ea4ed259d6"}, - {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:89723d2112697feaa320c9d351e5f5e7b841e83f8b143dba8e2d2b5f04e10923"}, - {file = 
"regex-2023.12.25-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0ecf44ddf9171cd7566ef1768047f6e66975788258b1c6c6ca78098b95cf9a3d"}, - {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:905466ad1702ed4acfd67a902af50b8db1feeb9781436372261808df7a2a7bca"}, - {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:4558410b7a5607a645e9804a3e9dd509af12fb72b9825b13791a37cd417d73a5"}, - {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:7e316026cc1095f2a3e8cc012822c99f413b702eaa2ca5408a513609488cb62f"}, - {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3b1de218d5375cd6ac4b5493e0b9f3df2be331e86520f23382f216c137913d20"}, - {file = "regex-2023.12.25-cp39-cp39-win32.whl", hash = "sha256:11a963f8e25ab5c61348d090bf1b07f1953929c13bd2309a0662e9ff680763c9"}, - {file = "regex-2023.12.25-cp39-cp39-win_amd64.whl", hash = "sha256:e693e233ac92ba83a87024e1d32b5f9ab15ca55ddd916d878146f4e3406b5c91"}, - {file = "regex-2023.12.25.tar.gz", hash = "sha256:29171aa128da69afdf4bde412d5bedc335f2ca8fcfe4489038577d05f16181e5"}, -] - -[[package]] -name = "requests" -version = "2.32.3" -description = "Python HTTP for Humans." -optional = false -python-versions = ">=3.8" -files = [ - {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, - {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, -] - -[package.dependencies] -certifi = ">=2017.4.17" -charset-normalizer = ">=2,<4" -idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<3" - -[package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] - -[[package]] -name = "requests-oauthlib" -version = "2.0.0" -description = "OAuthlib authentication support for Requests." -optional = false -python-versions = ">=3.4" -files = [ - {file = "requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9"}, - {file = "requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36"}, -] - -[package.dependencies] -oauthlib = ">=3.0.0" -requests = ">=2.0.0" - -[package.extras] -rsa = ["oauthlib[signedtoken] (>=3.0.0)"] - -[[package]] -name = "respx" -version = "0.21.1" -description = "A utility for mocking out the Python HTTPX and HTTP Core libraries." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "respx-0.21.1-py2.py3-none-any.whl", hash = "sha256:05f45de23f0c785862a2c92a3e173916e8ca88e4caad715dd5f68584d6053c20"}, - {file = "respx-0.21.1.tar.gz", hash = "sha256:0bd7fe21bfaa52106caa1223ce61224cf30786985f17c63c5d71eff0307ee8af"}, -] - -[package.dependencies] -httpx = ">=0.21.0" - -[[package]] -name = "rich" -version = "13.7.1" -description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "rich-13.7.1-py3-none-any.whl", hash = "sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222"}, - {file = "rich-13.7.1.tar.gz", hash = "sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432"}, -] - -[package.dependencies] -markdown-it-py = ">=2.2.0" -pygments = ">=2.13.0,<3.0.0" - -[package.extras] -jupyter = ["ipywidgets (>=7.5.1,<9)"] - -[[package]] -name = "rsa" -version = "4.9" -description = "Pure-Python RSA implementation" -optional = false -python-versions = ">=3.6,<4" -files = [ - {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, - {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, -] - -[package.dependencies] -pyasn1 = ">=0.1.3" - -[[package]] -name = "ruff" -version = "0.4.10" -description = "An extremely fast Python linter and code formatter, written in Rust." -optional = false -python-versions = ">=3.7" -files = [ - {file = "ruff-0.4.10-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:5c2c4d0859305ac5a16310eec40e4e9a9dec5dcdfbe92697acd99624e8638dac"}, - {file = "ruff-0.4.10-py3-none-macosx_11_0_arm64.whl", hash = "sha256:a79489607d1495685cdd911a323a35871abfb7a95d4f98fc6f85e799227ac46e"}, - {file = "ruff-0.4.10-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1dd1681dfa90a41b8376a61af05cc4dc5ff32c8f14f5fe20dba9ff5deb80cd6"}, - {file = "ruff-0.4.10-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c75c53bb79d71310dc79fb69eb4902fba804a81f374bc86a9b117a8d077a1784"}, - {file = "ruff-0.4.10-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18238c80ee3d9100d3535d8eb15a59c4a0753b45cc55f8bf38f38d6a597b9739"}, - {file = "ruff-0.4.10-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:d8f71885bce242da344989cae08e263de29752f094233f932d4f5cfb4ef36a81"}, - {file = "ruff-0.4.10-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:330421543bd3222cdfec481e8ff3460e8702ed1e58b494cf9d9e4bf90db52b9d"}, - {file = "ruff-0.4.10-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9e9b6fb3a37b772628415b00c4fc892f97954275394ed611056a4b8a2631365e"}, - {file = "ruff-0.4.10-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f54c481b39a762d48f64d97351048e842861c6662d63ec599f67d515cb417f6"}, - {file = "ruff-0.4.10-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:67fe086b433b965c22de0b4259ddfe6fa541c95bf418499bedb9ad5fb8d1c631"}, - {file = "ruff-0.4.10-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:acfaaab59543382085f9eb51f8e87bac26bf96b164839955f244d07125a982ef"}, - {file = "ruff-0.4.10-py3-none-musllinux_1_2_i686.whl", hash = "sha256:3cea07079962b2941244191569cf3a05541477286f5cafea638cd3aa94b56815"}, - {file = "ruff-0.4.10-py3-none-musllinux_1_2_x86_64.whl", hash = 
"sha256:338a64ef0748f8c3a80d7f05785930f7965d71ca260904a9321d13be24b79695"}, - {file = "ruff-0.4.10-py3-none-win32.whl", hash = "sha256:ffe3cd2f89cb54561c62e5fa20e8f182c0a444934bf430515a4b422f1ab7b7ca"}, - {file = "ruff-0.4.10-py3-none-win_amd64.whl", hash = "sha256:67f67cef43c55ffc8cc59e8e0b97e9e60b4837c8f21e8ab5ffd5d66e196e25f7"}, - {file = "ruff-0.4.10-py3-none-win_arm64.whl", hash = "sha256:dd1fcee327c20addac7916ca4e2653fbbf2e8388d8a6477ce5b4e986b68ae6c0"}, - {file = "ruff-0.4.10.tar.gz", hash = "sha256:3aa4f2bc388a30d346c56524f7cacca85945ba124945fe489952aadb6b5cd804"}, -] - -[[package]] -name = "s3transfer" -version = "0.10.2" -description = "An Amazon S3 Transfer Manager" -optional = false -python-versions = ">=3.8" -files = [ - {file = "s3transfer-0.10.2-py3-none-any.whl", hash = "sha256:eca1c20de70a39daee580aef4986996620f365c4e0fda6a86100231d62f1bf69"}, - {file = "s3transfer-0.10.2.tar.gz", hash = "sha256:0711534e9356d3cc692fdde846b4a1e4b0cb6519971860796e6bc4c7aea00ef6"}, -] - -[package.dependencies] -botocore = ">=1.33.2,<2.0a.0" - -[package.extras] -crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"] - -[[package]] -name = "schema" -version = "0.7.7" -description = "Simple data validation library" -optional = false -python-versions = "*" -files = [ - {file = "schema-0.7.7-py2.py3-none-any.whl", hash = "sha256:5d976a5b50f36e74e2157b47097b60002bd4d42e65425fcc9c9befadb4255dde"}, - {file = "schema-0.7.7.tar.gz", hash = "sha256:7da553abd2958a19dc2547c388cde53398b39196175a9be59ea1caf5ab0a1807"}, -] - -[[package]] -name = "sentry-sdk" -version = "2.13.0" -description = "Python client for Sentry (https://sentry.io)" -optional = false -python-versions = ">=3.6" -files = [ - {file = "sentry_sdk-2.13.0-py2.py3-none-any.whl", hash = "sha256:6beede8fc2ab4043da7f69d95534e320944690680dd9a963178a49de71d726c6"}, - {file = "sentry_sdk-2.13.0.tar.gz", hash = "sha256:8d4a576f7a98eb2fdb40e13106e41f330e5c79d72a68be1316e7852cf4995260"}, -] - -[package.dependencies] -certifi = "*" -fastapi = {version = ">=0.79.0", optional = true, markers = "extra == \"fastapi\""} -loguru = {version = ">=0.5", optional = true, markers = "extra == \"loguru\""} -urllib3 = ">=1.26.11" - -[package.extras] -aiohttp = ["aiohttp (>=3.5)"] -anthropic = ["anthropic (>=0.16)"] -arq = ["arq (>=0.23)"] -asyncpg = ["asyncpg (>=0.23)"] -beam = ["apache-beam (>=2.12)"] -bottle = ["bottle (>=0.12.13)"] -celery = ["celery (>=3)"] -celery-redbeat = ["celery-redbeat (>=2)"] -chalice = ["chalice (>=1.16.0)"] -clickhouse-driver = ["clickhouse-driver (>=0.2.0)"] -django = ["django (>=1.8)"] -falcon = ["falcon (>=1.4)"] -fastapi = ["fastapi (>=0.79.0)"] -flask = ["blinker (>=1.1)", "flask (>=0.11)", "markupsafe"] -grpcio = ["grpcio (>=1.21.1)", "protobuf (>=3.8.0)"] -httpx = ["httpx (>=0.16.0)"] -huey = ["huey (>=2)"] -huggingface-hub = ["huggingface-hub (>=0.22)"] -langchain = ["langchain (>=0.0.210)"] -litestar = ["litestar (>=2.0.0)"] -loguru = ["loguru (>=0.5)"] -openai = ["openai (>=1.0.0)", "tiktoken (>=0.3.0)"] -opentelemetry = ["opentelemetry-distro (>=0.35b0)"] -opentelemetry-experimental = ["opentelemetry-distro"] -pure-eval = ["asttokens", "executing", "pure-eval"] -pymongo = ["pymongo (>=3.1)"] -pyspark = ["pyspark (>=2.4.4)"] -quart = ["blinker (>=1.1)", "quart (>=0.16.1)"] -rq = ["rq (>=0.6)"] -sanic = ["sanic (>=0.8)"] -sqlalchemy = ["sqlalchemy (>=1.2)"] -starlette = ["starlette (>=0.19.1)"] -starlite = ["starlite (>=1.48)"] -tornado = ["tornado (>=6)"] - -[[package]] -name = "setuptools" -version = "72.2.0" -description = "Easily 
download, build, install, upgrade, and uninstall Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "setuptools-72.2.0-py3-none-any.whl", hash = "sha256:f11dd94b7bae3a156a95ec151f24e4637fb4fa19c878e4d191bfb8b2d82728c4"}, - {file = "setuptools-72.2.0.tar.gz", hash = "sha256:80aacbf633704e9c8bfa1d99fa5dd4dc59573efcf9e4042c13d3bcef91ac2ef9"}, -] - -[package.extras] -core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.text (>=3.7)", "more-itertools (>=8.8)", "ordered-set (>=3.1.1)", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.11.*)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (<0.4)", "pytest-ruff (>=0.2.1)", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] - -[[package]] -name = "shapely" -version = "2.0.5" -description = "Manipulation and analysis of geometric objects" -optional = false -python-versions = ">=3.7" -files = [ - {file = "shapely-2.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:89d34787c44f77a7d37d55ae821f3a784fa33592b9d217a45053a93ade899375"}, - {file = "shapely-2.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:798090b426142df2c5258779c1d8d5734ec6942f778dab6c6c30cfe7f3bf64ff"}, - {file = "shapely-2.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45211276900c4790d6bfc6105cbf1030742da67594ea4161a9ce6812a6721e68"}, - {file = "shapely-2.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e119444bc27ca33e786772b81760f2028d930ac55dafe9bc50ef538b794a8e1"}, - {file = "shapely-2.0.5-cp310-cp310-win32.whl", hash = "sha256:9a4492a2b2ccbeaebf181e7310d2dfff4fdd505aef59d6cb0f217607cb042fb3"}, - {file = "shapely-2.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:1e5cb5ee72f1bc7ace737c9ecd30dc174a5295fae412972d3879bac2e82c8fae"}, - {file = "shapely-2.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5bbfb048a74cf273db9091ff3155d373020852805a37dfc846ab71dde4be93ec"}, - {file = "shapely-2.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93be600cbe2fbaa86c8eb70656369f2f7104cd231f0d6585c7d0aa555d6878b8"}, - {file = "shapely-2.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f8e71bb9a46814019f6644c4e2560a09d44b80100e46e371578f35eaaa9da1c"}, - {file = "shapely-2.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5251c28a29012e92de01d2e84f11637eb1d48184ee8f22e2df6c8c578d26760"}, - {file = "shapely-2.0.5-cp311-cp311-win32.whl", hash = "sha256:35110e80070d664781ec7955c7de557456b25727a0257b354830abb759bf8311"}, - {file = "shapely-2.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:6c6b78c0007a34ce7144f98b7418800e0a6a5d9a762f2244b00ea560525290c9"}, - 
{file = "shapely-2.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:03bd7b5fa5deb44795cc0a503999d10ae9d8a22df54ae8d4a4cd2e8a93466195"}, - {file = "shapely-2.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ff9521991ed9e201c2e923da014e766c1aa04771bc93e6fe97c27dcf0d40ace"}, - {file = "shapely-2.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b65365cfbf657604e50d15161ffcc68de5cdb22a601bbf7823540ab4918a98d"}, - {file = "shapely-2.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21f64e647a025b61b19585d2247137b3a38a35314ea68c66aaf507a1c03ef6fe"}, - {file = "shapely-2.0.5-cp312-cp312-win32.whl", hash = "sha256:3ac7dc1350700c139c956b03d9c3df49a5b34aaf91d024d1510a09717ea39199"}, - {file = "shapely-2.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:30e8737983c9d954cd17feb49eb169f02f1da49e24e5171122cf2c2b62d65c95"}, - {file = "shapely-2.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:ff7731fea5face9ec08a861ed351734a79475631b7540ceb0b66fb9732a5f529"}, - {file = "shapely-2.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff9e520af0c5a578e174bca3c18713cd47a6c6a15b6cf1f50ac17dc8bb8db6a2"}, - {file = "shapely-2.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49b299b91557b04acb75e9732645428470825061f871a2edc36b9417d66c1fc5"}, - {file = "shapely-2.0.5-cp37-cp37m-win32.whl", hash = "sha256:b5870633f8e684bf6d1ae4df527ddcb6f3895f7b12bced5c13266ac04f47d231"}, - {file = "shapely-2.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:401cb794c5067598f50518e5a997e270cd7642c4992645479b915c503866abed"}, - {file = "shapely-2.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e91ee179af539100eb520281ba5394919067c6b51824e6ab132ad4b3b3e76dd0"}, - {file = "shapely-2.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8af6f7260f809c0862741ad08b1b89cb60c130ae30efab62320bbf4ee9cc71fa"}, - {file = "shapely-2.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f5456dd522800306ba3faef77c5ba847ec30a0bd73ab087a25e0acdd4db2514f"}, - {file = "shapely-2.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b714a840402cde66fd7b663bb08cacb7211fa4412ea2a209688f671e0d0631fd"}, - {file = "shapely-2.0.5-cp38-cp38-win32.whl", hash = "sha256:7e8cf5c252fac1ea51b3162be2ec3faddedc82c256a1160fc0e8ddbec81b06d2"}, - {file = "shapely-2.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:4461509afdb15051e73ab178fae79974387f39c47ab635a7330d7fee02c68a3f"}, - {file = "shapely-2.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7545a39c55cad1562be302d74c74586f79e07b592df8ada56b79a209731c0219"}, - {file = "shapely-2.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4c83a36f12ec8dee2066946d98d4d841ab6512a6ed7eb742e026a64854019b5f"}, - {file = "shapely-2.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89e640c2cd37378480caf2eeda9a51be64201f01f786d127e78eaeff091ec897"}, - {file = "shapely-2.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06efe39beafde3a18a21dde169d32f315c57da962826a6d7d22630025200c5e6"}, - {file = "shapely-2.0.5-cp39-cp39-win32.whl", hash = "sha256:8203a8b2d44dcb366becbc8c3d553670320e4acf0616c39e218c9561dd738d92"}, - {file = "shapely-2.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:7fed9dbfbcfec2682d9a047b9699db8dcc890dfca857ecba872c42185fc9e64e"}, - {file = "shapely-2.0.5.tar.gz", hash = "sha256:bff2366bc786bfa6cb353d6b47d0443c570c32776612e527ee47b6df63fcfe32"}, -] - 
-[package.dependencies] -numpy = ">=1.14,<3" - -[package.extras] -docs = ["matplotlib", "numpydoc (==1.1.*)", "sphinx", "sphinx-book-theme", "sphinx-remove-toctrees"] -test = ["pytest", "pytest-cov"] - -[[package]] -name = "shellingham" -version = "1.5.4" -description = "Tool to Detect Surrounding Shell" -optional = false -python-versions = ">=3.7" -files = [ - {file = "shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686"}, - {file = "shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de"}, -] - -[[package]] -name = "six" -version = "1.16.0" -description = "Python 2 and 3 compatibility utilities" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] - -[[package]] -name = "sniffio" -version = "1.3.1" -description = "Sniff out which async library your code is running under" -optional = false -python-versions = ">=3.7" -files = [ - {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, - {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, -] - -[[package]] -name = "soupsieve" -version = "2.6" -description = "A modern CSS selector implementation for Beautiful Soup." -optional = false -python-versions = ">=3.8" -files = [ - {file = "soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9"}, - {file = "soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb"}, -] - -[[package]] -name = "spider-client" -version = "0.0.27" -description = "Python SDK for Spider Cloud API" -optional = false -python-versions = "*" -files = [ - {file = "spider-client-0.0.27.tar.gz", hash = "sha256:c3feaf5c491bd9a6c509efa0c8789452497073d9f68e70fc90e7626a6a8365aa"}, -] - -[package.dependencies] -requests = "*" - -[[package]] -name = "sqlalchemy" -version = "2.0.32" -description = "Database Abstraction Library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "SQLAlchemy-2.0.32-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0c9045ecc2e4db59bfc97b20516dfdf8e41d910ac6fb667ebd3a79ea54084619"}, - {file = "SQLAlchemy-2.0.32-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1467940318e4a860afd546ef61fefb98a14d935cd6817ed07a228c7f7c62f389"}, - {file = "SQLAlchemy-2.0.32-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5954463675cb15db8d4b521f3566a017c8789222b8316b1e6934c811018ee08b"}, - {file = "SQLAlchemy-2.0.32-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:167e7497035c303ae50651b351c28dc22a40bb98fbdb8468cdc971821b1ae533"}, - {file = "SQLAlchemy-2.0.32-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b27dfb676ac02529fb6e343b3a482303f16e6bc3a4d868b73935b8792edb52d0"}, - {file = "SQLAlchemy-2.0.32-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:bf2360a5e0f7bd75fa80431bf8ebcfb920c9f885e7956c7efde89031695cafb8"}, - {file = "SQLAlchemy-2.0.32-cp310-cp310-win32.whl", hash = "sha256:306fe44e754a91cd9d600a6b070c1f2fadbb4a1a257b8781ccf33c7067fd3e4d"}, - {file = "SQLAlchemy-2.0.32-cp310-cp310-win_amd64.whl", hash = 
"sha256:99db65e6f3ab42e06c318f15c98f59a436f1c78179e6a6f40f529c8cc7100b22"}, - {file = "SQLAlchemy-2.0.32-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:21b053be28a8a414f2ddd401f1be8361e41032d2ef5884b2f31d31cb723e559f"}, - {file = "SQLAlchemy-2.0.32-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b178e875a7a25b5938b53b006598ee7645172fccafe1c291a706e93f48499ff5"}, - {file = "SQLAlchemy-2.0.32-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723a40ee2cc7ea653645bd4cf024326dea2076673fc9d3d33f20f6c81db83e1d"}, - {file = "SQLAlchemy-2.0.32-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:295ff8689544f7ee7e819529633d058bd458c1fd7f7e3eebd0f9268ebc56c2a0"}, - {file = "SQLAlchemy-2.0.32-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:49496b68cd190a147118af585173ee624114dfb2e0297558c460ad7495f9dfe2"}, - {file = "SQLAlchemy-2.0.32-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:acd9b73c5c15f0ec5ce18128b1fe9157ddd0044abc373e6ecd5ba376a7e5d961"}, - {file = "SQLAlchemy-2.0.32-cp311-cp311-win32.whl", hash = "sha256:9365a3da32dabd3e69e06b972b1ffb0c89668994c7e8e75ce21d3e5e69ddef28"}, - {file = "SQLAlchemy-2.0.32-cp311-cp311-win_amd64.whl", hash = "sha256:8bd63d051f4f313b102a2af1cbc8b80f061bf78f3d5bd0843ff70b5859e27924"}, - {file = "SQLAlchemy-2.0.32-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6bab3db192a0c35e3c9d1560eb8332463e29e5507dbd822e29a0a3c48c0a8d92"}, - {file = "SQLAlchemy-2.0.32-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:19d98f4f58b13900d8dec4ed09dd09ef292208ee44cc9c2fe01c1f0a2fe440e9"}, - {file = "SQLAlchemy-2.0.32-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cd33c61513cb1b7371fd40cf221256456d26a56284e7d19d1f0b9f1eb7dd7e8"}, - {file = "SQLAlchemy-2.0.32-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d6ba0497c1d066dd004e0f02a92426ca2df20fac08728d03f67f6960271feec"}, - {file = "SQLAlchemy-2.0.32-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2b6be53e4fde0065524f1a0a7929b10e9280987b320716c1509478b712a7688c"}, - {file = "SQLAlchemy-2.0.32-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:916a798f62f410c0b80b63683c8061f5ebe237b0f4ad778739304253353bc1cb"}, - {file = "SQLAlchemy-2.0.32-cp312-cp312-win32.whl", hash = "sha256:31983018b74908ebc6c996a16ad3690301a23befb643093fcfe85efd292e384d"}, - {file = "SQLAlchemy-2.0.32-cp312-cp312-win_amd64.whl", hash = "sha256:4363ed245a6231f2e2957cccdda3c776265a75851f4753c60f3004b90e69bfeb"}, - {file = "SQLAlchemy-2.0.32-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b8afd5b26570bf41c35c0121801479958b4446751a3971fb9a480c1afd85558e"}, - {file = "SQLAlchemy-2.0.32-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c750987fc876813f27b60d619b987b057eb4896b81117f73bb8d9918c14f1cad"}, - {file = "SQLAlchemy-2.0.32-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ada0102afff4890f651ed91120c1120065663506b760da4e7823913ebd3258be"}, - {file = "SQLAlchemy-2.0.32-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:78c03d0f8a5ab4f3034c0e8482cfcc415a3ec6193491cfa1c643ed707d476f16"}, - {file = "SQLAlchemy-2.0.32-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:3bd1cae7519283ff525e64645ebd7a3e0283f3c038f461ecc1c7b040a0c932a1"}, - {file = "SQLAlchemy-2.0.32-cp37-cp37m-win32.whl", hash = "sha256:01438ebcdc566d58c93af0171c74ec28efe6a29184b773e378a385e6215389da"}, - {file = "SQLAlchemy-2.0.32-cp37-cp37m-win_amd64.whl", hash = 
"sha256:4979dc80fbbc9d2ef569e71e0896990bc94df2b9fdbd878290bd129b65ab579c"}, - {file = "SQLAlchemy-2.0.32-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c742be912f57586ac43af38b3848f7688863a403dfb220193a882ea60e1ec3a"}, - {file = "SQLAlchemy-2.0.32-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:62e23d0ac103bcf1c5555b6c88c114089587bc64d048fef5bbdb58dfd26f96da"}, - {file = "SQLAlchemy-2.0.32-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:251f0d1108aab8ea7b9aadbd07fb47fb8e3a5838dde34aa95a3349876b5a1f1d"}, - {file = "SQLAlchemy-2.0.32-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ef18a84e5116340e38eca3e7f9eeaaef62738891422e7c2a0b80feab165905f"}, - {file = "SQLAlchemy-2.0.32-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:3eb6a97a1d39976f360b10ff208c73afb6a4de86dd2a6212ddf65c4a6a2347d5"}, - {file = "SQLAlchemy-2.0.32-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0c1c9b673d21477cec17ab10bc4decb1322843ba35b481585facd88203754fc5"}, - {file = "SQLAlchemy-2.0.32-cp38-cp38-win32.whl", hash = "sha256:c41a2b9ca80ee555decc605bd3c4520cc6fef9abde8fd66b1cf65126a6922d65"}, - {file = "SQLAlchemy-2.0.32-cp38-cp38-win_amd64.whl", hash = "sha256:8a37e4d265033c897892279e8adf505c8b6b4075f2b40d77afb31f7185cd6ecd"}, - {file = "SQLAlchemy-2.0.32-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:52fec964fba2ef46476312a03ec8c425956b05c20220a1a03703537824b5e8e1"}, - {file = "SQLAlchemy-2.0.32-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:328429aecaba2aee3d71e11f2477c14eec5990fb6d0e884107935f7fb6001632"}, - {file = "SQLAlchemy-2.0.32-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85a01b5599e790e76ac3fe3aa2f26e1feba56270023d6afd5550ed63c68552b3"}, - {file = "SQLAlchemy-2.0.32-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aaf04784797dcdf4c0aa952c8d234fa01974c4729db55c45732520ce12dd95b4"}, - {file = "SQLAlchemy-2.0.32-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4488120becf9b71b3ac718f4138269a6be99a42fe023ec457896ba4f80749525"}, - {file = "SQLAlchemy-2.0.32-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:14e09e083a5796d513918a66f3d6aedbc131e39e80875afe81d98a03312889e6"}, - {file = "SQLAlchemy-2.0.32-cp39-cp39-win32.whl", hash = "sha256:0d322cc9c9b2154ba7e82f7bf25ecc7c36fbe2d82e2933b3642fc095a52cfc78"}, - {file = "SQLAlchemy-2.0.32-cp39-cp39-win_amd64.whl", hash = "sha256:7dd8583df2f98dea28b5cd53a1beac963f4f9d087888d75f22fcc93a07cf8d84"}, - {file = "SQLAlchemy-2.0.32-py3-none-any.whl", hash = "sha256:e567a8793a692451f706b363ccf3c45e056b67d90ead58c3bc9471af5d212202"}, - {file = "SQLAlchemy-2.0.32.tar.gz", hash = "sha256:c1b88cc8b02b6a5f0efb0345a03672d4c897dc7d92585176f88c67346f565ea8"}, -] - -[package.dependencies] -greenlet = {version = "!=0.4.17", markers = "python_version < \"3.13\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} -typing-extensions = ">=4.6.0" - -[package.extras] -aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] -aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] -aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] -asyncio = ["greenlet (!=0.4.17)"] -asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] -mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] -mssql = ["pyodbc"] -mssql-pymssql = ["pymssql"] -mssql-pyodbc 
= ["pyodbc"] -mypy = ["mypy (>=0.910)"] -mysql = ["mysqlclient (>=1.4.0)"] -mysql-connector = ["mysql-connector-python"] -oracle = ["cx_oracle (>=8)"] -oracle-oracledb = ["oracledb (>=1.0.1)"] -postgresql = ["psycopg2 (>=2.7)"] -postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] -postgresql-pg8000 = ["pg8000 (>=1.29.1)"] -postgresql-psycopg = ["psycopg (>=3.0.7)"] -postgresql-psycopg2binary = ["psycopg2-binary"] -postgresql-psycopg2cffi = ["psycopg2cffi"] -postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] -pymysql = ["pymysql"] -sqlcipher = ["sqlcipher3_binary"] - -[[package]] -name = "sqlmodel" -version = "0.0.18" -description = "SQLModel, SQL databases in Python, designed for simplicity, compatibility, and robustness." -optional = false -python-versions = ">=3.7" -files = [ - {file = "sqlmodel-0.0.18-py3-none-any.whl", hash = "sha256:d70fdf8fe595e30a918660cf4537b9c5fc2fffdbfcba851a0135de73c3ebcbb7"}, - {file = "sqlmodel-0.0.18.tar.gz", hash = "sha256:2e520efe03810ef2c268a1004cfc5ef8f8a936312232f38d6c8e62c11af2cac3"}, -] - -[package.dependencies] -pydantic = ">=1.10.13,<3.0.0" -SQLAlchemy = ">=2.0.0,<2.1.0" - -[[package]] -name = "stack-data" -version = "0.6.3" -description = "Extract data from python stack frames and tracebacks for informative displays" -optional = false -python-versions = "*" -files = [ - {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"}, - {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"}, -] - -[package.dependencies] -asttokens = ">=2.1.0" -executing = ">=1.2.0" -pure-eval = "*" - -[package.extras] -tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] - -[[package]] -name = "starlette" -version = "0.37.2" -description = "The little ASGI library that shines." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "starlette-0.37.2-py3-none-any.whl", hash = "sha256:6fe59f29268538e5d0d182f2791a479a0c64638e6935d1c6989e63fb2699c6ee"}, - {file = "starlette-0.37.2.tar.gz", hash = "sha256:9af890290133b79fc3db55474ade20f6220a364a0402e0b556e7cd5e1e093823"}, -] - -[package.dependencies] -anyio = ">=3.4.0,<5" - -[package.extras] -full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7)", "pyyaml"] - -[[package]] -name = "sympy" -version = "1.13.2" -description = "Computer algebra system (CAS) in Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "sympy-1.13.2-py3-none-any.whl", hash = "sha256:c51d75517712f1aed280d4ce58506a4a88d635d6b5dd48b39102a7ae1f3fcfe9"}, - {file = "sympy-1.13.2.tar.gz", hash = "sha256:401449d84d07be9d0c7a46a64bd54fe097667d5e7181bfe67ec777be9e01cb13"}, -] - -[package.dependencies] -mpmath = ">=1.1.0,<1.4" - -[package.extras] -dev = ["hypothesis (>=6.70.0)", "pytest (>=7.1.0)"] - -[[package]] -name = "tabulate" -version = "0.9.0" -description = "Pretty-print tabular data" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"}, - {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"}, -] - -[package.extras] -widechars = ["wcwidth"] - -[[package]] -name = "tenacity" -version = "8.5.0" -description = "Retry code until it succeeds" -optional = false -python-versions = ">=3.8" -files = [ - {file = "tenacity-8.5.0-py3-none-any.whl", hash = "sha256:b594c2a5945830c267ce6b79a166228323ed52718f30302c1359836112346687"}, - {file = "tenacity-8.5.0.tar.gz", hash = "sha256:8bc6c0c8a09b31e6cad13c47afbed1a567518250a9a171418582ed8d9c20ca78"}, -] - -[package.extras] -doc = ["reno", "sphinx"] -test = ["pytest", "tornado (>=4.5)", "typeguard"] - -[[package]] -name = "termcolor" -version = "2.4.0" -description = "ANSI color formatting for output in terminal" -optional = false -python-versions = ">=3.8" -files = [ - {file = "termcolor-2.4.0-py3-none-any.whl", hash = "sha256:9297c0df9c99445c2412e832e882a7884038a25617c60cea2ad69488d4040d63"}, - {file = "termcolor-2.4.0.tar.gz", hash = "sha256:aab9e56047c8ac41ed798fa36d892a37aca6b3e9159f3e0c24bc64a9b3ac7b7a"}, -] - -[package.extras] -tests = ["pytest", "pytest-cov"] - -[[package]] -name = "tiktoken" -version = "0.7.0" -description = "tiktoken is a fast BPE tokeniser for use with OpenAI's models" -optional = false -python-versions = ">=3.8" -files = [ - {file = "tiktoken-0.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:485f3cc6aba7c6b6ce388ba634fbba656d9ee27f766216f45146beb4ac18b25f"}, - {file = "tiktoken-0.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e54be9a2cd2f6d6ffa3517b064983fb695c9a9d8aa7d574d1ef3c3f931a99225"}, - {file = "tiktoken-0.7.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79383a6e2c654c6040e5f8506f3750db9ddd71b550c724e673203b4f6b4b4590"}, - {file = "tiktoken-0.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d4511c52caacf3c4981d1ae2df85908bd31853f33d30b345c8b6830763f769c"}, - {file = "tiktoken-0.7.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:13c94efacdd3de9aff824a788353aa5749c0faee1fbe3816df365ea450b82311"}, - {file = "tiktoken-0.7.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8e58c7eb29d2ab35a7a8929cbeea60216a4ccdf42efa8974d8e176d50c9a3df5"}, - {file 
= "tiktoken-0.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:21a20c3bd1dd3e55b91c1331bf25f4af522c525e771691adbc9a69336fa7f702"}, - {file = "tiktoken-0.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:10c7674f81e6e350fcbed7c09a65bca9356eaab27fb2dac65a1e440f2bcfe30f"}, - {file = "tiktoken-0.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:084cec29713bc9d4189a937f8a35dbdfa785bd1235a34c1124fe2323821ee93f"}, - {file = "tiktoken-0.7.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:811229fde1652fedcca7c6dfe76724d0908775b353556d8a71ed74d866f73f7b"}, - {file = "tiktoken-0.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86b6e7dc2e7ad1b3757e8a24597415bafcfb454cebf9a33a01f2e6ba2e663992"}, - {file = "tiktoken-0.7.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1063c5748be36344c7e18c7913c53e2cca116764c2080177e57d62c7ad4576d1"}, - {file = "tiktoken-0.7.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:20295d21419bfcca092644f7e2f2138ff947a6eb8cfc732c09cc7d76988d4a89"}, - {file = "tiktoken-0.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:959d993749b083acc57a317cbc643fb85c014d055b2119b739487288f4e5d1cb"}, - {file = "tiktoken-0.7.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:71c55d066388c55a9c00f61d2c456a6086673ab7dec22dd739c23f77195b1908"}, - {file = "tiktoken-0.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:09ed925bccaa8043e34c519fbb2f99110bd07c6fd67714793c21ac298e449410"}, - {file = "tiktoken-0.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03c6c40ff1db0f48a7b4d2dafeae73a5607aacb472fa11f125e7baf9dce73704"}, - {file = "tiktoken-0.7.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d20b5c6af30e621b4aca094ee61777a44118f52d886dbe4f02b70dfe05c15350"}, - {file = "tiktoken-0.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d427614c3e074004efa2f2411e16c826f9df427d3c70a54725cae860f09e4bf4"}, - {file = "tiktoken-0.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8c46d7af7b8c6987fac9b9f61041b452afe92eb087d29c9ce54951280f899a97"}, - {file = "tiktoken-0.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:0bc603c30b9e371e7c4c7935aba02af5994a909fc3c0fe66e7004070858d3f8f"}, - {file = "tiktoken-0.7.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2398fecd38c921bcd68418675a6d155fad5f5e14c2e92fcf5fe566fa5485a858"}, - {file = "tiktoken-0.7.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8f5f6afb52fb8a7ea1c811e435e4188f2bef81b5e0f7a8635cc79b0eef0193d6"}, - {file = "tiktoken-0.7.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:861f9ee616766d736be4147abac500732b505bf7013cfaf019b85892637f235e"}, - {file = "tiktoken-0.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54031f95c6939f6b78122c0aa03a93273a96365103793a22e1793ee86da31685"}, - {file = "tiktoken-0.7.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:fffdcb319b614cf14f04d02a52e26b1d1ae14a570f90e9b55461a72672f7b13d"}, - {file = "tiktoken-0.7.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c72baaeaefa03ff9ba9688624143c858d1f6b755bb85d456d59e529e17234769"}, - {file = "tiktoken-0.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:131b8aeb043a8f112aad9f46011dced25d62629091e51d9dc1adbf4a1cc6aa98"}, - {file = "tiktoken-0.7.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cabc6dc77460df44ec5b879e68692c63551ae4fae7460dd4ff17181df75f1db7"}, - {file = "tiktoken-0.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:8d57f29171255f74c0aeacd0651e29aa47dff6f070cb9f35ebc14c82278f3b25"}, - {file = "tiktoken-0.7.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ee92776fdbb3efa02a83f968c19d4997a55c8e9ce7be821ceee04a1d1ee149c"}, - {file = "tiktoken-0.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e215292e99cb41fbc96988ef62ea63bb0ce1e15f2c147a61acc319f8b4cbe5bf"}, - {file = "tiktoken-0.7.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8a81bac94769cab437dd3ab0b8a4bc4e0f9cf6835bcaa88de71f39af1791727a"}, - {file = "tiktoken-0.7.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:d6d73ea93e91d5ca771256dfc9d1d29f5a554b83821a1dc0891987636e0ae226"}, - {file = "tiktoken-0.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:2bcb28ddf79ffa424f171dfeef9a4daff61a94c631ca6813f43967cb263b83b9"}, - {file = "tiktoken-0.7.0.tar.gz", hash = "sha256:1077266e949c24e0291f6c350433c6f0971365ece2b173a23bc3b9f9defef6b6"}, -] - -[package.dependencies] -regex = ">=2022.1.18" -requests = ">=2.26.0" - -[package.extras] -blobfile = ["blobfile (>=2)"] - -[[package]] -name = "tokenizers" -version = "0.20.0" -description = "" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tokenizers-0.20.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:6cff5c5e37c41bc5faa519d6f3df0679e4b37da54ea1f42121719c5e2b4905c0"}, - {file = "tokenizers-0.20.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:62a56bf75c27443432456f4ca5ca055befa95e25be8a28141cc495cac8ae4d6d"}, - {file = "tokenizers-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68cc7de6a63f09c4a86909c2597b995aa66e19df852a23aea894929c74369929"}, - {file = "tokenizers-0.20.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:053c37ecee482cc958fdee53af3c6534286a86f5d35aac476f7c246830e53ae5"}, - {file = "tokenizers-0.20.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3d7074aaabc151a6363fa03db5493fc95b423b2a1874456783989e96d541c7b6"}, - {file = "tokenizers-0.20.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a11435780f2acd89e8fefe5e81cecf01776f6edb9b3ac95bcb76baee76b30b90"}, - {file = "tokenizers-0.20.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9a81cd2712973b007d84268d45fc3f6f90a79c31dfe7f1925e6732f8d2959987"}, - {file = "tokenizers-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7dfd796ab9d909f76fb93080e1c7c8309f196ecb316eb130718cd5e34231c69"}, - {file = "tokenizers-0.20.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8029ad2aa8cb00605c9374566034c1cc1b15130713e0eb5afcef6cface8255c9"}, - {file = "tokenizers-0.20.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ca4d54260ebe97d59dfa9a30baa20d0c4dd9137d99a8801700055c561145c24e"}, - {file = "tokenizers-0.20.0-cp310-none-win32.whl", hash = "sha256:95ee16b57cec11b86a7940174ec5197d506439b0f415ab3859f254b1dffe9df0"}, - {file = "tokenizers-0.20.0-cp310-none-win_amd64.whl", hash = "sha256:0a61a11e93eeadbf02aea082ffc75241c4198e0608bbbac4f65a9026851dcf37"}, - {file = "tokenizers-0.20.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6636b798b3c4d6c9b1af1a918bd07c867808e5a21c64324e95318a237e6366c3"}, - {file = "tokenizers-0.20.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5ec603e42eaf499ffd58b9258162add948717cf21372458132f14e13a6bc7172"}, - {file = "tokenizers-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:cce124264903a8ea6f8f48e1cc7669e5ef638c18bd4ab0a88769d5f92debdf7f"}, - {file = "tokenizers-0.20.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:07bbeba0231cf8de07aa6b9e33e9779ff103d47042eeeb859a8c432e3292fb98"}, - {file = "tokenizers-0.20.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:06c0ca8397b35d38b83a44a9c6929790c1692957d88541df061cb34d82ebbf08"}, - {file = "tokenizers-0.20.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ca6557ac3b83d912dfbb1f70ab56bd4b0594043916688e906ede09f42e192401"}, - {file = "tokenizers-0.20.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a5ad94c9e80ac6098328bee2e3264dbced4c6faa34429994d473f795ec58ef4"}, - {file = "tokenizers-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b5c7f906ee6bec30a9dc20268a8b80f3b9584de1c9f051671cb057dc6ce28f6"}, - {file = "tokenizers-0.20.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:31e087e9ee1b8f075b002bfee257e858dc695f955b43903e1bb4aa9f170e37fe"}, - {file = "tokenizers-0.20.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c3124fb6f3346cb3d8d775375d3b429bf4dcfc24f739822702009d20a4297990"}, - {file = "tokenizers-0.20.0-cp311-none-win32.whl", hash = "sha256:a4bb8b40ba9eefa621fdcabf04a74aa6038ae3be0c614c6458bd91a4697a452f"}, - {file = "tokenizers-0.20.0-cp311-none-win_amd64.whl", hash = "sha256:2b709d371f1fe60a28ef0c5c67815952d455ca7f34dbe7197eaaed3cc54b658e"}, - {file = "tokenizers-0.20.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:15c81a17d0d66f4987c6ca16f4bea7ec253b8c7ed1bb00fdc5d038b1bb56e714"}, - {file = "tokenizers-0.20.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6a531cdf1fb6dc41c984c785a3b299cb0586de0b35683842a3afbb1e5207f910"}, - {file = "tokenizers-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06caabeb4587f8404e0cd9d40f458e9cba3e815c8155a38e579a74ff3e2a4301"}, - {file = "tokenizers-0.20.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8768f964f23f5b9f50546c0369c75ab3262de926983888bbe8b98be05392a79c"}, - {file = "tokenizers-0.20.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:626403860152c816f97b649fd279bd622c3d417678c93b4b1a8909b6380b69a8"}, - {file = "tokenizers-0.20.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9c1b88fa9e5ff062326f4bf82681da5a96fca7104d921a6bd7b1e6fcf224af26"}, - {file = "tokenizers-0.20.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d7e559436a07dc547f22ce1101f26d8b2fad387e28ec8e7e1e3b11695d681d8"}, - {file = "tokenizers-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e48afb75e50449848964e4a67b0da01261dd3aa8df8daecf10db8fd7f5b076eb"}, - {file = "tokenizers-0.20.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:baf5d0e1ff44710a95eefc196dd87666ffc609fd447c5e5b68272a7c3d342a1d"}, - {file = "tokenizers-0.20.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e5e56df0e8ed23ba60ae3848c3f069a0710c4b197218fe4f89e27eba38510768"}, - {file = "tokenizers-0.20.0-cp312-none-win32.whl", hash = "sha256:ec53e5ecc142a82432f9c6c677dbbe5a2bfee92b8abf409a9ecb0d425ee0ce75"}, - {file = "tokenizers-0.20.0-cp312-none-win_amd64.whl", hash = "sha256:f18661ece72e39c0dfaa174d6223248a15b457dbd4b0fc07809b8e6d3ca1a234"}, - {file = "tokenizers-0.20.0-cp37-cp37m-macosx_10_12_x86_64.whl", hash = 
"sha256:f7065b1084d8d1a03dc89d9aad69bcbc8415d4bc123c367063eb32958cd85054"}, - {file = "tokenizers-0.20.0-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:e5d4069e4714e3f7ba0a4d3d44f9d84a432cd4e4aa85c3d7dd1f51440f12e4a1"}, - {file = "tokenizers-0.20.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:799b808529e54b7e1a36350bda2aeb470e8390e484d3e98c10395cee61d4e3c6"}, - {file = "tokenizers-0.20.0-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7f9baa027cc8a281ad5f7725a93c204d7a46986f88edbe8ef7357f40a23fb9c7"}, - {file = "tokenizers-0.20.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:010ec7f3f7a96adc4c2a34a3ada41fa14b4b936b5628b4ff7b33791258646c6b"}, - {file = "tokenizers-0.20.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98d88f06155335b14fd78e32ee28ca5b2eb30fced4614e06eb14ae5f7fba24ed"}, - {file = "tokenizers-0.20.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e13eb000ef540c2280758d1b9cfa5fe424b0424ae4458f440e6340a4f18b2638"}, - {file = "tokenizers-0.20.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fab3cf066ff426f7e6d70435dc28a9ff01b2747be83810e397cba106f39430b0"}, - {file = "tokenizers-0.20.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:39fa3761b30a89368f322e5daf4130dce8495b79ad831f370449cdacfb0c0d37"}, - {file = "tokenizers-0.20.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c8da0fba4d179ddf2607821575998df3c294aa59aa8df5a6646dc64bc7352bce"}, - {file = "tokenizers-0.20.0-cp37-none-win32.whl", hash = "sha256:fada996d6da8cf213f6e3c91c12297ad4f6cdf7a85c2fadcd05ec32fa6846fcd"}, - {file = "tokenizers-0.20.0-cp37-none-win_amd64.whl", hash = "sha256:7d29aad702279e0760c265fcae832e89349078e3418dd329732d4503259fd6bd"}, - {file = "tokenizers-0.20.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:099c68207f3ef0227ecb6f80ab98ea74de559f7b124adc7b17778af0250ee90a"}, - {file = "tokenizers-0.20.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:68012d8a8cddb2eab3880870d7e2086cb359c7f7a2b03f5795044f5abff4e850"}, - {file = "tokenizers-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9253bdd209c6aee168deca7d0e780581bf303e0058f268f9bb06859379de19b6"}, - {file = "tokenizers-0.20.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8f868600ddbcb0545905ed075eb7218a0756bf6c09dae7528ea2f8436ebd2c93"}, - {file = "tokenizers-0.20.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a9643d9c8c5f99b6aba43fd10034f77cc6c22c31f496d2f0ee183047d948fa0"}, - {file = "tokenizers-0.20.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c375c6a889aeab44734028bc65cc070acf93ccb0f9368be42b67a98e1063d3f6"}, - {file = "tokenizers-0.20.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e359f852328e254f070bbd09a19a568421d23388f04aad9f2fb7da7704c7228d"}, - {file = "tokenizers-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d98b01a309d4387f3b1c1dd68a8b8136af50376cf146c1b7e8d8ead217a5be4b"}, - {file = "tokenizers-0.20.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:459f7537119554c2899067dec1ac74a00d02beef6558f4ee2e99513bf6d568af"}, - {file = "tokenizers-0.20.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:392b87ec89452628c045c9f2a88bc2a827f4c79e7d84bc3b72752b74c2581f70"}, - {file = "tokenizers-0.20.0-cp38-none-win32.whl", hash = 
"sha256:55a393f893d2ed4dd95a1553c2e42d4d4086878266f437b03590d3f81984c4fe"}, - {file = "tokenizers-0.20.0-cp38-none-win_amd64.whl", hash = "sha256:30ffe33c5c2f2aab8e9a3340d0110dd9f7ace7eec7362e20a697802306bd8068"}, - {file = "tokenizers-0.20.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:aa2d4a6fed2a7e3f860c7fc9d48764bb30f2649d83915d66150d6340e06742b8"}, - {file = "tokenizers-0.20.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b5ef0f814084a897e9071fc4a868595f018c5c92889197bdc4bf19018769b148"}, - {file = "tokenizers-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc1e1b791e8c3bf4c4f265f180dadaff1c957bf27129e16fdd5e5d43c2d3762c"}, - {file = "tokenizers-0.20.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b69e55e481459c07885263743a0d3c18d52db19bae8226a19bcca4aaa213fff"}, - {file = "tokenizers-0.20.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4806b4d82e27a2512bc23057b2986bc8b85824914286975b84d8105ff40d03d9"}, - {file = "tokenizers-0.20.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9859e9ef13adf5a473ccab39d31bff9c550606ae3c784bf772b40f615742a24f"}, - {file = "tokenizers-0.20.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ef703efedf4c20488a8eb17637b55973745b27997ff87bad88ed499b397d1144"}, - {file = "tokenizers-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6eec0061bab94b1841ab87d10831fdf1b48ebaed60e6d66d66dbe1d873f92bf5"}, - {file = "tokenizers-0.20.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:980f3d0d7e73f845b69087f29a63c11c7eb924c4ad6b358da60f3db4cf24bdb4"}, - {file = "tokenizers-0.20.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7c157550a2f3851b29d7fdc9dc059fcf81ff0c0fc49a1e5173a89d533ed043fa"}, - {file = "tokenizers-0.20.0-cp39-none-win32.whl", hash = "sha256:8a3d2f4d08608ec4f9895ec25b4b36a97f05812543190a5f2c3cd19e8f041e5a"}, - {file = "tokenizers-0.20.0-cp39-none-win_amd64.whl", hash = "sha256:d90188d12afd0c75e537f9a1d92f9c7375650188ee4f48fdc76f9e38afbd2251"}, - {file = "tokenizers-0.20.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:d68e15f1815357b059ec266062340c343ea7f98f7f330602df81ffa3474b6122"}, - {file = "tokenizers-0.20.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:23f9ecec637b9bc80da5f703808d29ed5329e56b5aa8d791d1088014f48afadc"}, - {file = "tokenizers-0.20.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f830b318ee599e3d0665b3e325f85bc75ee2d2ca6285f52e439dc22b64691580"}, - {file = "tokenizers-0.20.0-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3dc750def789cb1de1b5a37657919545e1d9ffa667658b3fa9cb7862407a1b8"}, - {file = "tokenizers-0.20.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e26e6c755ae884c2ea6135cd215bdd0fccafe4ee62405014b8c3cd19954e3ab9"}, - {file = "tokenizers-0.20.0-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:a1158c7174f427182e08baa2a8ded2940f2b4a3e94969a85cc9cfd16004cbcea"}, - {file = "tokenizers-0.20.0-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:6324826287a3fc198898d3dcf758fe4a8479e42d6039f4c59e2cedd3cf92f64e"}, - {file = "tokenizers-0.20.0-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7d8653149405bb0c16feaf9cfee327fdb6aaef9dc2998349fec686f35e81c4e2"}, - {file = "tokenizers-0.20.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:b8a2dc1e402a155e97309287ca085c80eb1b7fab8ae91527d3b729181639fa51"}, - {file = "tokenizers-0.20.0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07bef67b20aa6e5f7868c42c7c5eae4d24f856274a464ae62e47a0f2cccec3da"}, - {file = "tokenizers-0.20.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da06e397182ff53789c506c7833220c192952c57e1581a53f503d8d953e2d67e"}, - {file = "tokenizers-0.20.0-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:302f7e11a14814028b7fc88c45a41f1bbe9b5b35fd76d6869558d1d1809baa43"}, - {file = "tokenizers-0.20.0-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:055ec46e807b875589dfbe3d9259f9a6ee43394fb553b03b3d1e9541662dbf25"}, - {file = "tokenizers-0.20.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e3144b8acebfa6ae062e8f45f7ed52e4b50fb6c62f93afc8871b525ab9fdcab3"}, - {file = "tokenizers-0.20.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:b52aa3fd14b2a07588c00a19f66511cff5cca8f7266ca3edcdd17f3512ad159f"}, - {file = "tokenizers-0.20.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b8cf52779ffc5d4d63a0170fbeb512372bad0dd014ce92bbb9149756c831124"}, - {file = "tokenizers-0.20.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:983a45dd11a876124378dae71d6d9761822199b68a4c73f32873d8cdaf326a5b"}, - {file = "tokenizers-0.20.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df6b819c9a19831ebec581e71a7686a54ab45d90faf3842269a10c11d746de0c"}, - {file = "tokenizers-0.20.0-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e738cfd80795fcafcef89c5731c84b05638a4ab3f412f97d5ed7765466576eb1"}, - {file = "tokenizers-0.20.0-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:c8842c7be2fadb9c9edcee233b1b7fe7ade406c99b0973f07439985c1c1d0683"}, - {file = "tokenizers-0.20.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e47a82355511c373a4a430c4909dc1e518e00031207b1fec536c49127388886b"}, - {file = "tokenizers-0.20.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:9afbf359004551179a5db19424180c81276682773cff2c5d002f6eaaffe17230"}, - {file = "tokenizers-0.20.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a07eaa8799a92e6af6f472c21a75bf71575de2af3c0284120b7a09297c0de2f3"}, - {file = "tokenizers-0.20.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0994b2e5fc53a301071806bc4303e4bc3bdc3f490e92a21338146a36746b0872"}, - {file = "tokenizers-0.20.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b6466e0355b603d10e3cc3d282d350b646341b601e50969464a54939f9848d0"}, - {file = "tokenizers-0.20.0-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:1e86594c2a433cb1ea09cfbe596454448c566e57ee8905bd557e489d93e89986"}, - {file = "tokenizers-0.20.0-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:3e14cdef1efa96ecead6ea64a891828432c3ebba128bdc0596e3059fea104ef3"}, - {file = "tokenizers-0.20.0.tar.gz", hash = "sha256:39d7acc43f564c274085cafcd1dae9d36f332456de1a31970296a6b8da4eac8d"}, -] - -[package.dependencies] -huggingface-hub = ">=0.16.4,<1.0" - -[package.extras] -dev = ["tokenizers[testing]"] -docs = ["setuptools-rust", "sphinx", "sphinx-rtd-theme"] -testing = ["black (==22.3)", "datasets", "numpy", "pytest", "requests", "ruff"] - -[[package]] -name = "tomli" -version = "2.0.1" -description = "A lil' TOML parser" -optional = false -python-versions = ">=3.7" 
-files = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, -] - -[[package]] -name = "tornado" -version = "6.4.1" -description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." -optional = false -python-versions = ">=3.8" -files = [ - {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:163b0aafc8e23d8cdc3c9dfb24c5368af84a81e3364745ccb4427669bf84aec8"}, - {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6d5ce3437e18a2b66fbadb183c1d3364fb03f2be71299e7d10dbeeb69f4b2a14"}, - {file = "tornado-6.4.1-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2e20b9113cd7293f164dc46fffb13535266e713cdb87bd2d15ddb336e96cfc4"}, - {file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ae50a504a740365267b2a8d1a90c9fbc86b780a39170feca9bcc1787ff80842"}, - {file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:613bf4ddf5c7a95509218b149b555621497a6cc0d46ac341b30bd9ec19eac7f3"}, - {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:25486eb223babe3eed4b8aecbac33b37e3dd6d776bc730ca14e1bf93888b979f"}, - {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:454db8a7ecfcf2ff6042dde58404164d969b6f5d58b926da15e6b23817950fc4"}, - {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a02a08cc7a9314b006f653ce40483b9b3c12cda222d6a46d4ac63bb6c9057698"}, - {file = "tornado-6.4.1-cp38-abi3-win32.whl", hash = "sha256:d9a566c40b89757c9aa8e6f032bcdb8ca8795d7c1a9762910c722b1635c9de4d"}, - {file = "tornado-6.4.1-cp38-abi3-win_amd64.whl", hash = "sha256:b24b8982ed444378d7f21d563f4180a2de31ced9d8d84443907a0a64da2072e7"}, - {file = "tornado-6.4.1.tar.gz", hash = "sha256:92d3ab53183d8c50f8204a51e6f91d18a15d5ef261e84d452800d4ff6fc504e9"}, -] - -[[package]] -name = "tqdm" -version = "4.66.5" -description = "Fast, Extensible Progress Meter" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tqdm-4.66.5-py3-none-any.whl", hash = "sha256:90279a3770753eafc9194a0364852159802111925aa30eb3f9d85b0e805ac7cd"}, - {file = "tqdm-4.66.5.tar.gz", hash = "sha256:e1020aef2e5096702d8a025ac7d16b1577279c9d63f8375b63083e9a5f0fcbad"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[package.extras] -dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] -notebook = ["ipywidgets (>=6)"] -slack = ["slack-sdk"] -telegram = ["requests"] - -[[package]] -name = "traitlets" -version = "5.14.3" -description = "Traitlets Python configuration system" -optional = false -python-versions = ">=3.8" -files = [ - {file = "traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f"}, - {file = "traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7"}, -] - -[package.extras] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] -test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<8.2)", "pytest-mock", "pytest-mypy-testing"] - -[[package]] -name = "typer" -version = "0.12.3" -description = "Typer, build great CLIs. 
Easy to code. Based on Python type hints." -optional = false -python-versions = ">=3.7" -files = [ - {file = "typer-0.12.3-py3-none-any.whl", hash = "sha256:070d7ca53f785acbccba8e7d28b08dcd88f79f1fbda035ade0aecec71ca5c914"}, - {file = "typer-0.12.3.tar.gz", hash = "sha256:49e73131481d804288ef62598d97a1ceef3058905aa536a1134f90891ba35482"}, -] - -[package.dependencies] -click = ">=8.0.0" -rich = ">=10.11.0" -shellingham = ">=1.3.0" -typing-extensions = ">=3.7.4.3" - -[[package]] -name = "types-cffi" -version = "1.16.0.20240331" -description = "Typing stubs for cffi" -optional = false -python-versions = ">=3.8" -files = [ - {file = "types-cffi-1.16.0.20240331.tar.gz", hash = "sha256:b8b20d23a2b89cfed5f8c5bc53b0cb8677c3aac6d970dbc771e28b9c698f5dee"}, - {file = "types_cffi-1.16.0.20240331-py3-none-any.whl", hash = "sha256:a363e5ea54a4eb6a4a105d800685fde596bc318089b025b27dee09849fe41ff0"}, -] - -[package.dependencies] -types-setuptools = "*" - -[[package]] -name = "types-google-cloud-ndb" -version = "2.3.0.20240813" -description = "Typing stubs for google-cloud-ndb" -optional = false -python-versions = ">=3.8" -files = [ - {file = "types-google-cloud-ndb-2.3.0.20240813.tar.gz", hash = "sha256:f69b4f1abc4a2c423b288ffc48d2994b59358bfc151824614abc1d3f7f19f18d"}, - {file = "types_google_cloud_ndb-2.3.0.20240813-py3-none-any.whl", hash = "sha256:79404e04e97324d0b6466f297e92e734a38fb9cd064c2f3816820311bc6c3f57"}, -] - -[[package]] -name = "types-passlib" -version = "1.7.7.20240327" -description = "Typing stubs for passlib" -optional = false -python-versions = ">=3.8" -files = [ - {file = "types-passlib-1.7.7.20240327.tar.gz", hash = "sha256:4cce6a1a3a6afee9fc4728b4d9784300764ac2be747f5bcc01646d904b85f4bb"}, - {file = "types_passlib-1.7.7.20240327-py3-none-any.whl", hash = "sha256:3a3b7f4258b71034d2e2f4f307d6810f9904f906cdf375514c8bdbdb28a4ad23"}, -] - -[[package]] -name = "types-pillow" -version = "10.2.0.20240520" -description = "Typing stubs for Pillow" -optional = false -python-versions = ">=3.8" -files = [ - {file = "types-Pillow-10.2.0.20240520.tar.gz", hash = "sha256:130b979195465fa1e1676d8e81c9c7c30319e8e95b12fae945e8f0d525213107"}, - {file = "types_Pillow-10.2.0.20240520-py3-none-any.whl", hash = "sha256:33c36494b380e2a269bb742181bea5d9b00820367822dbd3760f07210a1da23d"}, -] - -[[package]] -name = "types-pyasn1" -version = "0.6.0.20240402" -description = "Typing stubs for pyasn1" -optional = false -python-versions = ">=3.8" -files = [ - {file = "types-pyasn1-0.6.0.20240402.tar.gz", hash = "sha256:5d54dcb33f69dd269071ca098e923ac20c5f03c814631fa7f3ed9ee035a5da3a"}, - {file = "types_pyasn1-0.6.0.20240402-py3-none-any.whl", hash = "sha256:848d01e7313c200acc035a8b3d377fe7b2aecbe77f2be49eb160a7f82835aaaf"}, -] - -[[package]] -name = "types-pyopenssl" -version = "24.1.0.20240722" -description = "Typing stubs for pyOpenSSL" -optional = false -python-versions = ">=3.8" -files = [ - {file = "types-pyOpenSSL-24.1.0.20240722.tar.gz", hash = "sha256:47913b4678a01d879f503a12044468221ed8576263c1540dcb0484ca21b08c39"}, - {file = "types_pyOpenSSL-24.1.0.20240722-py3-none-any.whl", hash = "sha256:6a7a5d2ec042537934cfb4c9d4deb0e16c4c6250b09358df1f083682fe6fda54"}, -] - -[package.dependencies] -cryptography = ">=35.0.0" -types-cffi = "*" - -[[package]] -name = "types-python-jose" -version = "3.3.4.20240106" -description = "Typing stubs for python-jose" -optional = false -python-versions = ">=3.8" -files = [ - {file = "types-python-jose-3.3.4.20240106.tar.gz", hash = 
"sha256:b18cf8c5080bbfe1ef7c3b707986435d9efca3e90889acb6a06f65e06bc3405a"}, - {file = "types_python_jose-3.3.4.20240106-py3-none-any.whl", hash = "sha256:b515a6c0c61f5e2a53bc93e3a2b024cbd42563e2e19cbde9fd1c2cc2cfe77ccc"}, -] - -[package.dependencies] -types-pyasn1 = "*" - -[[package]] -name = "types-pytz" -version = "2024.1.0.20240417" -description = "Typing stubs for pytz" -optional = false -python-versions = ">=3.8" -files = [ - {file = "types-pytz-2024.1.0.20240417.tar.gz", hash = "sha256:6810c8a1f68f21fdf0f4f374a432487c77645a0ac0b31de4bf4690cf21ad3981"}, - {file = "types_pytz-2024.1.0.20240417-py3-none-any.whl", hash = "sha256:8335d443310e2db7b74e007414e74c4f53b67452c0cb0d228ca359ccfba59659"}, -] - -[[package]] -name = "types-pywin32" -version = "306.0.0.20240806" -description = "Typing stubs for pywin32" -optional = false -python-versions = ">=3.8" -files = [ - {file = "types-pywin32-306.0.0.20240806.tar.gz", hash = "sha256:aaac3c52f5ca7e9a2f79a838a51bc467babd09c0143cfcae62c1164a1ebb7964"}, - {file = "types_pywin32-306.0.0.20240806-py3-none-any.whl", hash = "sha256:70bcc3a950aa519881f6928bae6c05a2e3f46adaa5fa5110a4608dc6fb813d1e"}, -] - -[[package]] -name = "types-pyyaml" -version = "6.0.12.20240808" -description = "Typing stubs for PyYAML" -optional = false -python-versions = ">=3.8" -files = [ - {file = "types-PyYAML-6.0.12.20240808.tar.gz", hash = "sha256:b8f76ddbd7f65440a8bda5526a9607e4c7a322dc2f8e1a8c405644f9a6f4b9af"}, - {file = "types_PyYAML-6.0.12.20240808-py3-none-any.whl", hash = "sha256:deda34c5c655265fc517b546c902aa6eed2ef8d3e921e4765fe606fe2afe8d35"}, -] - -[[package]] -name = "types-redis" -version = "4.6.0.20240806" -description = "Typing stubs for redis" -optional = false -python-versions = ">=3.8" -files = [ - {file = "types-redis-4.6.0.20240806.tar.gz", hash = "sha256:60dd02c2b91ea2d42ad079ac58dedc31d71d6eedb1c21d3796811b02baac655d"}, - {file = "types_redis-4.6.0.20240806-py3-none-any.whl", hash = "sha256:9d8fbe0ce37e3660c0a06982db7812384295d10a93d637c7f8604a2f3c88b0e6"}, -] - -[package.dependencies] -cryptography = ">=35.0.0" -types-pyOpenSSL = "*" - -[[package]] -name = "types-requests" -version = "2.32.0.20240712" -description = "Typing stubs for requests" -optional = false -python-versions = ">=3.8" -files = [ - {file = "types-requests-2.32.0.20240712.tar.gz", hash = "sha256:90c079ff05e549f6bf50e02e910210b98b8ff1ebdd18e19c873cd237737c1358"}, - {file = "types_requests-2.32.0.20240712-py3-none-any.whl", hash = "sha256:f754283e152c752e46e70942fa2a146b5bc70393522257bb85bd1ef7e019dcc3"}, -] - -[package.dependencies] -urllib3 = ">=2" - -[[package]] -name = "types-setuptools" -version = "71.1.0.20240813" -description = "Typing stubs for setuptools" -optional = false -python-versions = ">=3.8" -files = [ - {file = "types-setuptools-71.1.0.20240813.tar.gz", hash = "sha256:94ff4f0af18c7c24ac88932bcb0f5655fb7187a001b7c61e53a1bfdaf9877b54"}, - {file = "types_setuptools-71.1.0.20240813-py3-none-any.whl", hash = "sha256:d9d9ba2936f5d3b47b59ae9bf65942a60063ac1d6bbee180a8a79fbb43f22ce5"}, -] - -[[package]] -name = "typing-extensions" -version = "4.12.2" -description = "Backported and Experimental Type Hints for Python 3.8+" -optional = false -python-versions = ">=3.8" -files = [ - {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, - {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, -] - -[[package]] -name = 
"typing-inspect" -version = "0.9.0" -description = "Runtime inspection utilities for typing module." -optional = false -python-versions = "*" -files = [ - {file = "typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f"}, - {file = "typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78"}, -] - -[package.dependencies] -mypy-extensions = ">=0.3.0" -typing-extensions = ">=3.7.4" - -[[package]] -name = "tzdata" -version = "2024.1" -description = "Provider of IANA time zone data" -optional = false -python-versions = ">=2" -files = [ - {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, - {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, -] - -[[package]] -name = "uncurl" -version = "0.0.11" -description = "A library to convert curl requests to python-requests." -optional = false -python-versions = "*" -files = [ - {file = "uncurl-0.0.11-py3-none-any.whl", hash = "sha256:5961e93f07a5c9f2ef8ae4245bd92b0a6ce503c851de980f5b70080ae74cdc59"}, - {file = "uncurl-0.0.11.tar.gz", hash = "sha256:530c9bbd4d118f4cde6194165ff484cc25b0661cd256f19e9d5fcb53fc077790"}, -] - -[package.dependencies] -pyperclip = "*" -six = "*" - -[[package]] -name = "urllib3" -version = "2.2.2" -description = "HTTP library with thread-safe connection pooling, file post, and more." -optional = false -python-versions = ">=3.8" -files = [ - {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, - {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, -] - -[package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] -h2 = ["h2 (>=4,<5)"] -socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] -zstd = ["zstandard (>=0.18.0)"] - -[[package]] -name = "uvicorn" -version = "0.30.6" -description = "The lightning-fast ASGI server." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "uvicorn-0.30.6-py3-none-any.whl", hash = "sha256:65fd46fe3fda5bdc1b03b94eb634923ff18cd35b2f084813ea79d1f103f711b5"}, - {file = "uvicorn-0.30.6.tar.gz", hash = "sha256:4b15decdda1e72be08209e860a1e10e92439ad5b97cf44cc945fcbee66fc5788"}, -] - -[package.dependencies] -click = ">=7.0" -colorama = {version = ">=0.4", optional = true, markers = "sys_platform == \"win32\" and extra == \"standard\""} -h11 = ">=0.8" -httptools = {version = ">=0.5.0", optional = true, markers = "extra == \"standard\""} -python-dotenv = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} -pyyaml = {version = ">=5.1", optional = true, markers = "extra == \"standard\""} -typing-extensions = {version = ">=4.0", markers = "python_version < \"3.11\""} -uvloop = {version = ">=0.14.0,<0.15.0 || >0.15.0,<0.15.1 || >0.15.1", optional = true, markers = "(sys_platform != \"win32\" and sys_platform != \"cygwin\") and platform_python_implementation != \"PyPy\" and extra == \"standard\""} -watchfiles = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} -websockets = {version = ">=10.4", optional = true, markers = "extra == \"standard\""} - -[package.extras] -standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"] - -[[package]] -name = "uvloop" -version = "0.19.0" -description = "Fast implementation of asyncio event loop on top of libuv" -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "uvloop-0.19.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:de4313d7f575474c8f5a12e163f6d89c0a878bc49219641d49e6f1444369a90e"}, - {file = "uvloop-0.19.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5588bd21cf1fcf06bded085f37e43ce0e00424197e7c10e77afd4bbefffef428"}, - {file = "uvloop-0.19.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b1fd71c3843327f3bbc3237bedcdb6504fd50368ab3e04d0410e52ec293f5b8"}, - {file = "uvloop-0.19.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a05128d315e2912791de6088c34136bfcdd0c7cbc1cf85fd6fd1bb321b7c849"}, - {file = "uvloop-0.19.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:cd81bdc2b8219cb4b2556eea39d2e36bfa375a2dd021404f90a62e44efaaf957"}, - {file = "uvloop-0.19.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5f17766fb6da94135526273080f3455a112f82570b2ee5daa64d682387fe0dcd"}, - {file = "uvloop-0.19.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4ce6b0af8f2729a02a5d1575feacb2a94fc7b2e983868b009d51c9a9d2149bef"}, - {file = "uvloop-0.19.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:31e672bb38b45abc4f26e273be83b72a0d28d074d5b370fc4dcf4c4eb15417d2"}, - {file = "uvloop-0.19.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:570fc0ed613883d8d30ee40397b79207eedd2624891692471808a95069a007c1"}, - {file = "uvloop-0.19.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5138821e40b0c3e6c9478643b4660bd44372ae1e16a322b8fc07478f92684e24"}, - {file = "uvloop-0.19.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:91ab01c6cd00e39cde50173ba4ec68a1e578fee9279ba64f5221810a9e786533"}, - {file = "uvloop-0.19.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:47bf3e9312f63684efe283f7342afb414eea4d3011542155c7e625cd799c3b12"}, - {file = "uvloop-0.19.0-cp312-cp312-macosx_10_9_universal2.whl", hash = 
"sha256:da8435a3bd498419ee8c13c34b89b5005130a476bda1d6ca8cfdde3de35cd650"}, - {file = "uvloop-0.19.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:02506dc23a5d90e04d4f65c7791e65cf44bd91b37f24cfc3ef6cf2aff05dc7ec"}, - {file = "uvloop-0.19.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2693049be9d36fef81741fddb3f441673ba12a34a704e7b4361efb75cf30befc"}, - {file = "uvloop-0.19.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7010271303961c6f0fe37731004335401eb9075a12680738731e9c92ddd96ad6"}, - {file = "uvloop-0.19.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5daa304d2161d2918fa9a17d5635099a2f78ae5b5960e742b2fcfbb7aefaa593"}, - {file = "uvloop-0.19.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:7207272c9520203fea9b93843bb775d03e1cf88a80a936ce760f60bb5add92f3"}, - {file = "uvloop-0.19.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:78ab247f0b5671cc887c31d33f9b3abfb88d2614b84e4303f1a63b46c046c8bd"}, - {file = "uvloop-0.19.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:472d61143059c84947aa8bb74eabbace30d577a03a1805b77933d6bd13ddebbd"}, - {file = "uvloop-0.19.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45bf4c24c19fb8a50902ae37c5de50da81de4922af65baf760f7c0c42e1088be"}, - {file = "uvloop-0.19.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:271718e26b3e17906b28b67314c45d19106112067205119dddbd834c2b7ce797"}, - {file = "uvloop-0.19.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:34175c9fd2a4bc3adc1380e1261f60306344e3407c20a4d684fd5f3be010fa3d"}, - {file = "uvloop-0.19.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e27f100e1ff17f6feeb1f33968bc185bf8ce41ca557deee9d9bbbffeb72030b7"}, - {file = "uvloop-0.19.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:13dfdf492af0aa0a0edf66807d2b465607d11c4fa48f4a1fd41cbea5b18e8e8b"}, - {file = "uvloop-0.19.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6e3d4e85ac060e2342ff85e90d0c04157acb210b9ce508e784a944f852a40e67"}, - {file = "uvloop-0.19.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8ca4956c9ab567d87d59d49fa3704cf29e37109ad348f2d5223c9bf761a332e7"}, - {file = "uvloop-0.19.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f467a5fd23b4fc43ed86342641f3936a68ded707f4627622fa3f82a120e18256"}, - {file = "uvloop-0.19.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:492e2c32c2af3f971473bc22f086513cedfc66a130756145a931a90c3958cb17"}, - {file = "uvloop-0.19.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2df95fca285a9f5bfe730e51945ffe2fa71ccbfdde3b0da5772b4ee4f2e770d5"}, - {file = "uvloop-0.19.0.tar.gz", hash = "sha256:0246f4fd1bf2bf702e06b0d45ee91677ee5c31242f39aab4ea6fe0c51aedd0fd"}, -] - -[package.extras] -docs = ["Sphinx (>=4.1.2,<4.2.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"] -test = ["Cython (>=0.29.36,<0.30.0)", "aiohttp (==3.9.0b0)", "aiohttp (>=3.8.1)", "flake8 (>=5.0,<6.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=23.0.0,<23.1.0)", "pycodestyle (>=2.9.0,<2.10.0)"] - -[[package]] -name = "virtualenv" -version = "20.26.3" -description = "Virtual Python Environment builder" -optional = false -python-versions = ">=3.7" -files = [ - {file = "virtualenv-20.26.3-py3-none-any.whl", hash = "sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589"}, - {file = "virtualenv-20.26.3.tar.gz", hash = 
"sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a"}, -] - -[package.dependencies] -distlib = ">=0.3.7,<1" -filelock = ">=3.12.2,<4" -platformdirs = ">=3.9.1,<5" - -[package.extras] -docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] -test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] - -[[package]] -name = "vulture" -version = "2.11" -description = "Find dead code" -optional = false -python-versions = ">=3.8" -files = [ - {file = "vulture-2.11-py2.py3-none-any.whl", hash = "sha256:12d745f7710ffbf6aeb8279ba9068a24d4e52e8ed333b8b044035c9d6b823aba"}, - {file = "vulture-2.11.tar.gz", hash = "sha256:f0fbb60bce6511aad87ee0736c502456737490a82d919a44e6d92262cb35f1c2"}, -] - -[package.dependencies] -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} - -[[package]] -name = "watchfiles" -version = "0.23.0" -description = "Simple, modern and high performance file watching and code reload in python." -optional = false -python-versions = ">=3.8" -files = [ - {file = "watchfiles-0.23.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:bee8ce357a05c20db04f46c22be2d1a2c6a8ed365b325d08af94358e0688eeb4"}, - {file = "watchfiles-0.23.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4ccd3011cc7ee2f789af9ebe04745436371d36afe610028921cab9f24bb2987b"}, - {file = "watchfiles-0.23.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb02d41c33be667e6135e6686f1bb76104c88a312a18faa0ef0262b5bf7f1a0f"}, - {file = "watchfiles-0.23.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7cf12ac34c444362f3261fb3ff548f0037ddd4c5bb85f66c4be30d2936beb3c5"}, - {file = "watchfiles-0.23.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0b2c25040a3c0ce0e66c7779cc045fdfbbb8d59e5aabfe033000b42fe44b53e"}, - {file = "watchfiles-0.23.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ecf2be4b9eece4f3da8ba5f244b9e51932ebc441c0867bd6af46a3d97eb068d6"}, - {file = "watchfiles-0.23.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:40cb8fa00028908211eb9f8d47744dca21a4be6766672e1ff3280bee320436f1"}, - {file = "watchfiles-0.23.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f48c917ffd36ff9a5212614c2d0d585fa8b064ca7e66206fb5c095015bc8207"}, - {file = "watchfiles-0.23.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9d183e3888ada88185ab17064079c0db8c17e32023f5c278d7bf8014713b1b5b"}, - {file = "watchfiles-0.23.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9837edf328b2805346f91209b7e660f65fb0e9ca18b7459d075d58db082bf981"}, - {file = "watchfiles-0.23.0-cp310-none-win32.whl", hash = "sha256:296e0b29ab0276ca59d82d2da22cbbdb39a23eed94cca69aed274595fb3dfe42"}, - {file = "watchfiles-0.23.0-cp310-none-win_amd64.whl", hash = "sha256:4ea756e425ab2dfc8ef2a0cb87af8aa7ef7dfc6fc46c6f89bcf382121d4fff75"}, - {file = "watchfiles-0.23.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:e397b64f7aaf26915bf2ad0f1190f75c855d11eb111cc00f12f97430153c2eab"}, - {file = "watchfiles-0.23.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:b4ac73b02ca1824ec0a7351588241fd3953748d3774694aa7ddb5e8e46aef3e3"}, - {file = "watchfiles-0.23.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:130a896d53b48a1cecccfa903f37a1d87dbb74295305f865a3e816452f6e49e4"}, - {file = "watchfiles-0.23.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c5e7803a65eb2d563c73230e9d693c6539e3c975ccfe62526cadde69f3fda0cf"}, - {file = "watchfiles-0.23.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1aa4cc85202956d1a65c88d18c7b687b8319dbe6b1aec8969784ef7a10e7d1a"}, - {file = "watchfiles-0.23.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87f889f6e58849ddb7c5d2cb19e2e074917ed1c6e3ceca50405775166492cca8"}, - {file = "watchfiles-0.23.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:37fd826dac84c6441615aa3f04077adcc5cac7194a021c9f0d69af20fb9fa788"}, - {file = "watchfiles-0.23.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee7db6e36e7a2c15923072e41ea24d9a0cf39658cb0637ecc9307b09d28827e1"}, - {file = "watchfiles-0.23.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2368c5371c17fdcb5a2ea71c5c9d49f9b128821bfee69503cc38eae00feb3220"}, - {file = "watchfiles-0.23.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:857af85d445b9ba9178db95658c219dbd77b71b8264e66836a6eba4fbf49c320"}, - {file = "watchfiles-0.23.0-cp311-none-win32.whl", hash = "sha256:1d636c8aeb28cdd04a4aa89030c4b48f8b2954d8483e5f989774fa441c0ed57b"}, - {file = "watchfiles-0.23.0-cp311-none-win_amd64.whl", hash = "sha256:46f1d8069a95885ca529645cdbb05aea5837d799965676e1b2b1f95a4206313e"}, - {file = "watchfiles-0.23.0-cp311-none-win_arm64.whl", hash = "sha256:e495ed2a7943503766c5d1ff05ae9212dc2ce1c0e30a80d4f0d84889298fa304"}, - {file = "watchfiles-0.23.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:1db691bad0243aed27c8354b12d60e8e266b75216ae99d33e927ff5238d270b5"}, - {file = "watchfiles-0.23.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:62d2b18cb1edaba311fbbfe83fb5e53a858ba37cacb01e69bc20553bb70911b8"}, - {file = "watchfiles-0.23.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e087e8fdf1270d000913c12e6eca44edd02aad3559b3e6b8ef00f0ce76e0636f"}, - {file = "watchfiles-0.23.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dd41d5c72417b87c00b1b635738f3c283e737d75c5fa5c3e1c60cd03eac3af77"}, - {file = "watchfiles-0.23.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e5f3ca0ff47940ce0a389457b35d6df601c317c1e1a9615981c474452f98de1"}, - {file = "watchfiles-0.23.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6991e3a78f642368b8b1b669327eb6751439f9f7eaaa625fae67dd6070ecfa0b"}, - {file = "watchfiles-0.23.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7f7252f52a09f8fa5435dc82b6af79483118ce6bd51eb74e6269f05ee22a7b9f"}, - {file = "watchfiles-0.23.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e01bcb8d767c58865207a6c2f2792ad763a0fe1119fb0a430f444f5b02a5ea0"}, - {file = "watchfiles-0.23.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:8e56fbcdd27fce061854ddec99e015dd779cae186eb36b14471fc9ae713b118c"}, - {file = "watchfiles-0.23.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bd3e2d64500a6cad28bcd710ee6269fbeb2e5320525acd0cfab5f269ade68581"}, - {file = "watchfiles-0.23.0-cp312-none-win32.whl", hash = 
"sha256:eb99c954291b2fad0eff98b490aa641e128fbc4a03b11c8a0086de8b7077fb75"}, - {file = "watchfiles-0.23.0-cp312-none-win_amd64.whl", hash = "sha256:dccc858372a56080332ea89b78cfb18efb945da858fabeb67f5a44fa0bcb4ebb"}, - {file = "watchfiles-0.23.0-cp312-none-win_arm64.whl", hash = "sha256:6c21a5467f35c61eafb4e394303720893066897fca937bade5b4f5877d350ff8"}, - {file = "watchfiles-0.23.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:ba31c32f6b4dceeb2be04f717811565159617e28d61a60bb616b6442027fd4b9"}, - {file = "watchfiles-0.23.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:85042ab91814fca99cec4678fc063fb46df4cbb57b4835a1cc2cb7a51e10250e"}, - {file = "watchfiles-0.23.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:24655e8c1c9c114005c3868a3d432c8aa595a786b8493500071e6a52f3d09217"}, - {file = "watchfiles-0.23.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6b1a950ab299a4a78fd6369a97b8763732bfb154fdb433356ec55a5bce9515c1"}, - {file = "watchfiles-0.23.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b8d3c5cd327dd6ce0edfc94374fb5883d254fe78a5e9d9dfc237a1897dc73cd1"}, - {file = "watchfiles-0.23.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ff785af8bacdf0be863ec0c428e3288b817e82f3d0c1d652cd9c6d509020dd0"}, - {file = "watchfiles-0.23.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:02b7ba9d4557149410747353e7325010d48edcfe9d609a85cb450f17fd50dc3d"}, - {file = "watchfiles-0.23.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48a1b05c0afb2cd2f48c1ed2ae5487b116e34b93b13074ed3c22ad5c743109f0"}, - {file = "watchfiles-0.23.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:109a61763e7318d9f821b878589e71229f97366fa6a5c7720687d367f3ab9eef"}, - {file = "watchfiles-0.23.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:9f8e6bb5ac007d4a4027b25f09827ed78cbbd5b9700fd6c54429278dacce05d1"}, - {file = "watchfiles-0.23.0-cp313-none-win32.whl", hash = "sha256:f46c6f0aec8d02a52d97a583782d9af38c19a29900747eb048af358a9c1d8e5b"}, - {file = "watchfiles-0.23.0-cp313-none-win_amd64.whl", hash = "sha256:f449afbb971df5c6faeb0a27bca0427d7b600dd8f4a068492faec18023f0dcff"}, - {file = "watchfiles-0.23.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:2dddc2487d33e92f8b6222b5fb74ae2cfde5e8e6c44e0248d24ec23befdc5366"}, - {file = "watchfiles-0.23.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e75695cc952e825fa3e0684a7f4a302f9128721f13eedd8dbd3af2ba450932b8"}, - {file = "watchfiles-0.23.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2537ef60596511df79b91613a5bb499b63f46f01a11a81b0a2b0dedf645d0a9c"}, - {file = "watchfiles-0.23.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:20b423b58f5fdde704a226b598a2d78165fe29eb5621358fe57ea63f16f165c4"}, - {file = "watchfiles-0.23.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b98732ec893975455708d6fc9a6daab527fc8bbe65be354a3861f8c450a632a4"}, - {file = "watchfiles-0.23.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee1f5fcbf5bc33acc0be9dd31130bcba35d6d2302e4eceafafd7d9018c7755ab"}, - {file = "watchfiles-0.23.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8f195338a5a7b50a058522b39517c50238358d9ad8284fd92943643144c0c03"}, - {file = "watchfiles-0.23.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:524fcb8d59b0dbee2c9b32207084b67b2420f6431ed02c18bd191e6c575f5c48"}, - {file = "watchfiles-0.23.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0eff099a4df36afaa0eea7a913aa64dcf2cbd4e7a4f319a73012210af4d23810"}, - {file = "watchfiles-0.23.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a8323daae27ea290ba3350c70c836c0d2b0fb47897fa3b0ca6a5375b952b90d3"}, - {file = "watchfiles-0.23.0-cp38-none-win32.whl", hash = "sha256:aafea64a3ae698695975251f4254df2225e2624185a69534e7fe70581066bc1b"}, - {file = "watchfiles-0.23.0-cp38-none-win_amd64.whl", hash = "sha256:c846884b2e690ba62a51048a097acb6b5cd263d8bd91062cd6137e2880578472"}, - {file = "watchfiles-0.23.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a753993635eccf1ecb185dedcc69d220dab41804272f45e4aef0a67e790c3eb3"}, - {file = "watchfiles-0.23.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6bb91fa4d0b392f0f7e27c40981e46dda9eb0fbc84162c7fb478fe115944f491"}, - {file = "watchfiles-0.23.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1f67312efa3902a8e8496bfa9824d3bec096ff83c4669ea555c6bdd213aa516"}, - {file = "watchfiles-0.23.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7ca6b71dcc50d320c88fb2d88ecd63924934a8abc1673683a242a7ca7d39e781"}, - {file = "watchfiles-0.23.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2aec5c29915caf08771d2507da3ac08e8de24a50f746eb1ed295584ba1820330"}, - {file = "watchfiles-0.23.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1733b9bc2c8098c6bdb0ff7a3d7cb211753fecb7bd99bdd6df995621ee1a574b"}, - {file = "watchfiles-0.23.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:02ff5d7bd066c6a7673b17c8879cd8ee903078d184802a7ee851449c43521bdd"}, - {file = "watchfiles-0.23.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18e2de19801b0eaa4c5292a223effb7cfb43904cb742c5317a0ac686ed604765"}, - {file = "watchfiles-0.23.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8ada449e22198c31fb013ae7e9add887e8d2bd2335401abd3cbc55f8c5083647"}, - {file = "watchfiles-0.23.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3af1b05361e1cc497bf1be654a664750ae61f5739e4bb094a2be86ec8c6db9b6"}, - {file = "watchfiles-0.23.0-cp39-none-win32.whl", hash = "sha256:486bda18be5d25ab5d932699ceed918f68eb91f45d018b0343e3502e52866e5e"}, - {file = "watchfiles-0.23.0-cp39-none-win_amd64.whl", hash = "sha256:d2d42254b189a346249424fb9bb39182a19289a2409051ee432fb2926bad966a"}, - {file = "watchfiles-0.23.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6a9265cf87a5b70147bfb2fec14770ed5b11a5bb83353f0eee1c25a81af5abfe"}, - {file = "watchfiles-0.23.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9f02a259fcbbb5fcfe7a0805b1097ead5ba7a043e318eef1db59f93067f0b49b"}, - {file = "watchfiles-0.23.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ebaebb53b34690da0936c256c1cdb0914f24fb0e03da76d185806df9328abed"}, - {file = "watchfiles-0.23.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd257f98cff9c6cb39eee1a83c7c3183970d8a8d23e8cf4f47d9a21329285cee"}, - {file = "watchfiles-0.23.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:aba037c1310dd108411d27b3d5815998ef0e83573e47d4219f45753c710f969f"}, - {file = "watchfiles-0.23.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:a96ac14e184aa86dc43b8a22bb53854760a58b2966c2b41580de938e9bf26ed0"}, - {file = 
"watchfiles-0.23.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:11698bb2ea5e991d10f1f4f83a39a02f91e44e4bd05f01b5c1ec04c9342bf63c"}, - {file = "watchfiles-0.23.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efadd40fca3a04063d40c4448c9303ce24dd6151dc162cfae4a2a060232ebdcb"}, - {file = "watchfiles-0.23.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:556347b0abb4224c5ec688fc58214162e92a500323f50182f994f3ad33385dcb"}, - {file = "watchfiles-0.23.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:1cf7f486169986c4b9d34087f08ce56a35126600b6fef3028f19ca16d5889071"}, - {file = "watchfiles-0.23.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f18de0f82c62c4197bea5ecf4389288ac755896aac734bd2cc44004c56e4ac47"}, - {file = "watchfiles-0.23.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:532e1f2c491274d1333a814e4c5c2e8b92345d41b12dc806cf07aaff786beb66"}, - {file = "watchfiles-0.23.0.tar.gz", hash = "sha256:9338ade39ff24f8086bb005d16c29f8e9f19e55b18dcb04dfa26fcbc09da497b"}, -] - -[package.dependencies] -anyio = ">=3.0.0" - -[[package]] -name = "wcwidth" -version = "0.2.13" -description = "Measures the displayed width of unicode strings in a terminal" -optional = false -python-versions = "*" -files = [ - {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, - {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, -] - -[[package]] -name = "websocket-client" -version = "1.8.0" -description = "WebSocket client for Python with low level API options" -optional = false -python-versions = ">=3.8" -files = [ - {file = "websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526"}, - {file = "websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da"}, -] - -[package.extras] -docs = ["Sphinx (>=6.0)", "myst-parser (>=2.0.0)", "sphinx-rtd-theme (>=1.1.0)"] -optional = ["python-socks", "wsaccel"] -test = ["websockets"] - -[[package]] -name = "websockets" -version = "12.0" -description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" -optional = false -python-versions = ">=3.8" -files = [ - {file = "websockets-12.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d554236b2a2006e0ce16315c16eaa0d628dab009c33b63ea03f41c6107958374"}, - {file = "websockets-12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2d225bb6886591b1746b17c0573e29804619c8f755b5598d875bb4235ea639be"}, - {file = "websockets-12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eb809e816916a3b210bed3c82fb88eaf16e8afcf9c115ebb2bacede1797d2547"}, - {file = "websockets-12.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c588f6abc13f78a67044c6b1273a99e1cf31038ad51815b3b016ce699f0d75c2"}, - {file = "websockets-12.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5aa9348186d79a5f232115ed3fa9020eab66d6c3437d72f9d2c8ac0c6858c558"}, - {file = "websockets-12.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6350b14a40c95ddd53e775dbdbbbc59b124a5c8ecd6fbb09c2e52029f7a9f480"}, - {file = "websockets-12.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:70ec754cc2a769bcd218ed8d7209055667b30860ffecb8633a834dde27d6307c"}, - {file = "websockets-12.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6e96f5ed1b83a8ddb07909b45bd94833b0710f738115751cdaa9da1fb0cb66e8"}, - {file = "websockets-12.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4d87be612cbef86f994178d5186add3d94e9f31cc3cb499a0482b866ec477603"}, - {file = "websockets-12.0-cp310-cp310-win32.whl", hash = "sha256:befe90632d66caaf72e8b2ed4d7f02b348913813c8b0a32fae1cc5fe3730902f"}, - {file = "websockets-12.0-cp310-cp310-win_amd64.whl", hash = "sha256:363f57ca8bc8576195d0540c648aa58ac18cf85b76ad5202b9f976918f4219cf"}, - {file = "websockets-12.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5d873c7de42dea355d73f170be0f23788cf3fa9f7bed718fd2830eefedce01b4"}, - {file = "websockets-12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3f61726cae9f65b872502ff3c1496abc93ffbe31b278455c418492016e2afc8f"}, - {file = "websockets-12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed2fcf7a07334c77fc8a230755c2209223a7cc44fc27597729b8ef5425aa61a3"}, - {file = "websockets-12.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e332c210b14b57904869ca9f9bf4ca32f5427a03eeb625da9b616c85a3a506c"}, - {file = "websockets-12.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5693ef74233122f8ebab026817b1b37fe25c411ecfca084b29bc7d6efc548f45"}, - {file = "websockets-12.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e9e7db18b4539a29cc5ad8c8b252738a30e2b13f033c2d6e9d0549b45841c04"}, - {file = "websockets-12.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6e2df67b8014767d0f785baa98393725739287684b9f8d8a1001eb2839031447"}, - {file = "websockets-12.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bea88d71630c5900690fcb03161ab18f8f244805c59e2e0dc4ffadae0a7ee0ca"}, - {file = "websockets-12.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dff6cdf35e31d1315790149fee351f9e52978130cef6c87c4b6c9b3baf78bc53"}, - {file = "websockets-12.0-cp311-cp311-win32.whl", hash = "sha256:3e3aa8c468af01d70332a382350ee95f6986db479ce7af14d5e81ec52aa2b402"}, - {file = "websockets-12.0-cp311-cp311-win_amd64.whl", hash = "sha256:25eb766c8ad27da0f79420b2af4b85d29914ba0edf69f547cc4f06ca6f1d403b"}, - {file = "websockets-12.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0e6e2711d5a8e6e482cacb927a49a3d432345dfe7dea8ace7b5790df5932e4df"}, - {file = "websockets-12.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:dbcf72a37f0b3316e993e13ecf32f10c0e1259c28ffd0a85cee26e8549595fbc"}, - {file = "websockets-12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12743ab88ab2af1d17dd4acb4645677cb7063ef4db93abffbf164218a5d54c6b"}, - {file = "websockets-12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b645f491f3c48d3f8a00d1fce07445fab7347fec54a3e65f0725d730d5b99cb"}, - {file = "websockets-12.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9893d1aa45a7f8b3bc4510f6ccf8db8c3b62120917af15e3de247f0780294b92"}, - {file = "websockets-12.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f38a7b376117ef7aff996e737583172bdf535932c9ca021746573bce40165ed"}, - {file = "websockets-12.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:f764ba54e33daf20e167915edc443b6f88956f37fb606449b4a5b10ba42235a5"}, - {file = "websockets-12.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:1e4b3f8ea6a9cfa8be8484c9221ec0257508e3a1ec43c36acdefb2a9c3b00aa2"}, - {file = "websockets-12.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9fdf06fd06c32205a07e47328ab49c40fc1407cdec801d698a7c41167ea45113"}, - {file = "websockets-12.0-cp312-cp312-win32.whl", hash = "sha256:baa386875b70cbd81798fa9f71be689c1bf484f65fd6fb08d051a0ee4e79924d"}, - {file = "websockets-12.0-cp312-cp312-win_amd64.whl", hash = "sha256:ae0a5da8f35a5be197f328d4727dbcfafa53d1824fac3d96cdd3a642fe09394f"}, - {file = "websockets-12.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5f6ffe2c6598f7f7207eef9a1228b6f5c818f9f4d53ee920aacd35cec8110438"}, - {file = "websockets-12.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9edf3fc590cc2ec20dc9d7a45108b5bbaf21c0d89f9fd3fd1685e223771dc0b2"}, - {file = "websockets-12.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8572132c7be52632201a35f5e08348137f658e5ffd21f51f94572ca6c05ea81d"}, - {file = "websockets-12.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:604428d1b87edbf02b233e2c207d7d528460fa978f9e391bd8aaf9c8311de137"}, - {file = "websockets-12.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1a9d160fd080c6285e202327aba140fc9a0d910b09e423afff4ae5cbbf1c7205"}, - {file = "websockets-12.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87b4aafed34653e465eb77b7c93ef058516cb5acf3eb21e42f33928616172def"}, - {file = "websockets-12.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b2ee7288b85959797970114deae81ab41b731f19ebcd3bd499ae9ca0e3f1d2c8"}, - {file = "websockets-12.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7fa3d25e81bfe6a89718e9791128398a50dec6d57faf23770787ff441d851967"}, - {file = "websockets-12.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a571f035a47212288e3b3519944f6bf4ac7bc7553243e41eac50dd48552b6df7"}, - {file = "websockets-12.0-cp38-cp38-win32.whl", hash = "sha256:3c6cc1360c10c17463aadd29dd3af332d4a1adaa8796f6b0e9f9df1fdb0bad62"}, - {file = "websockets-12.0-cp38-cp38-win_amd64.whl", hash = "sha256:1bf386089178ea69d720f8db6199a0504a406209a0fc23e603b27b300fdd6892"}, - {file = "websockets-12.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ab3d732ad50a4fbd04a4490ef08acd0517b6ae6b77eb967251f4c263011a990d"}, - {file = "websockets-12.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1d9697f3337a89691e3bd8dc56dea45a6f6d975f92e7d5f773bc715c15dde28"}, - {file = "websockets-12.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1df2fbd2c8a98d38a66f5238484405b8d1d16f929bb7a33ed73e4801222a6f53"}, - {file = "websockets-12.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23509452b3bc38e3a057382c2e941d5ac2e01e251acce7adc74011d7d8de434c"}, - {file = "websockets-12.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e5fc14ec6ea568200ea4ef46545073da81900a2b67b3e666f04adf53ad452ec"}, - {file = "websockets-12.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46e71dbbd12850224243f5d2aeec90f0aaa0f2dde5aeeb8fc8df21e04d99eff9"}, - {file = "websockets-12.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b81f90dcc6c85a9b7f29873beb56c94c85d6f0dac2ea8b60d995bd18bf3e2aae"}, - {file = 
"websockets-12.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a02413bc474feda2849c59ed2dfb2cddb4cd3d2f03a2fedec51d6e959d9b608b"}, - {file = "websockets-12.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bbe6013f9f791944ed31ca08b077e26249309639313fff132bfbf3ba105673b9"}, - {file = "websockets-12.0-cp39-cp39-win32.whl", hash = "sha256:cbe83a6bbdf207ff0541de01e11904827540aa069293696dd528a6640bd6a5f6"}, - {file = "websockets-12.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc4e7fa5414512b481a2483775a8e8be7803a35b30ca805afa4998a84f9fd9e8"}, - {file = "websockets-12.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:248d8e2446e13c1d4326e0a6a4e9629cb13a11195051a73acf414812700badbd"}, - {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f44069528d45a933997a6fef143030d8ca8042f0dfaad753e2906398290e2870"}, - {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c4e37d36f0d19f0a4413d3e18c0d03d0c268ada2061868c1e6f5ab1a6d575077"}, - {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d829f975fc2e527a3ef2f9c8f25e553eb7bc779c6665e8e1d52aa22800bb38b"}, - {file = "websockets-12.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2c71bd45a777433dd9113847af751aae36e448bc6b8c361a566cb043eda6ec30"}, - {file = "websockets-12.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0bee75f400895aef54157b36ed6d3b308fcab62e5260703add87f44cee9c82a6"}, - {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:423fc1ed29f7512fceb727e2d2aecb952c46aa34895e9ed96071821309951123"}, - {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27a5e9964ef509016759f2ef3f2c1e13f403725a5e6a1775555994966a66e931"}, - {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3181df4583c4d3994d31fb235dc681d2aaad744fbdbf94c4802485ececdecf2"}, - {file = "websockets-12.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:b067cb952ce8bf40115f6c19f478dc71c5e719b7fbaa511359795dfd9d1a6468"}, - {file = "websockets-12.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:00700340c6c7ab788f176d118775202aadea7602c5cc6be6ae127761c16d6b0b"}, - {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e469d01137942849cff40517c97a30a93ae79917752b34029f0ec72df6b46399"}, - {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffefa1374cd508d633646d51a8e9277763a9b78ae71324183693959cf94635a7"}, - {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba0cab91b3956dfa9f512147860783a1829a8d905ee218a9837c18f683239611"}, - {file = "websockets-12.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2cb388a5bfb56df4d9a406783b7f9dbefb888c09b71629351cc6b036e9259370"}, - {file = "websockets-12.0-py3-none-any.whl", hash = "sha256:dc284bbc8d7c78a6c69e0c7325ab46ee5e40bb4d50e494d8131a07ef47500e9e"}, - {file = "websockets-12.0.tar.gz", hash = "sha256:81df9cbcbb6c260de1e007e58c011bfebe2dafc8435107b0537f393dd38c8b1b"}, -] - -[[package]] -name = "win32-setctime" -version = "1.1.0" 
-description = "A small Python utility to set file creation time on Windows" -optional = false -python-versions = ">=3.5" -files = [ - {file = "win32_setctime-1.1.0-py3-none-any.whl", hash = "sha256:231db239e959c2fe7eb1d7dc129f11172354f98361c4fa2d6d2d7e278baa8aad"}, - {file = "win32_setctime-1.1.0.tar.gz", hash = "sha256:15cf5750465118d6929ae4de4eb46e8edae9a5634350c01ba582df868e932cb2"}, -] - -[package.extras] -dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] - -[[package]] -name = "wrapt" -version = "1.16.0" -description = "Module for decorators, wrappers and monkey patching." -optional = false -python-versions = ">=3.6" -files = [ - {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, - {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, - {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, - {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, - {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, - {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, - {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, - {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, - {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, - {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, - {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, - {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, - {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, - {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, - {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, - {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, - {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, - {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, - {file = "wrapt-1.16.0-cp311-cp311-win32.whl", 
hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, - {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, - {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, - {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, - {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, - {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, - {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, - {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, - {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, - {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, - {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, - {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, - {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, - {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, - {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, - {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, - {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, - {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, - {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, - {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, - {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, - {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, - {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, - {file = 
"wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, - {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, - {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, - {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, - {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, - {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, - {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, - {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, - {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, - {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, - {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, - {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, - {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, - {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, - {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, - {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, - {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, - {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, - {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, - {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, - {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, - {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, - {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", 
hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, - {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, - {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, - {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, - {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, - {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, - {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, -] - -[[package]] -name = "yarl" -version = "1.9.4" -description = "Yet another URL library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"}, - {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2"}, - {file = "yarl-1.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541"}, - {file = "yarl-1.9.4-cp310-cp310-win32.whl", hash = "sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d"}, - {file = "yarl-1.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98"}, - {file = "yarl-1.9.4-cp311-cp311-win32.whl", hash = "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31"}, - {file = "yarl-1.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51"}, - {file = 
"yarl-1.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10"}, - {file = "yarl-1.9.4-cp312-cp312-win32.whl", hash = "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7"}, - {file = "yarl-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984"}, - {file = "yarl-1.9.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63b20738b5aac74e239622d2fe30df4fca4942a86e31bf47a81a0e94c14df94f"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d7f7de27b8944f1fee2c26a88b4dabc2409d2fea7a9ed3df79b67277644e17"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c74018551e31269d56fab81a728f683667e7c28c04e807ba08f8c9e3bba32f14"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca06675212f94e7a610e85ca36948bb8fc023e458dd6c63ef71abfd482481aa5"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aef935237d60a51a62b86249839b51345f47564208c6ee615ed2a40878dccdd"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b134fd795e2322b7684155b7855cc99409d10b2e408056db2b93b51a52accc7"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d25039a474c4c72a5ad4b52495056f843a7ff07b632c1b92ea9043a3d9950f6e"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f7d6b36dd2e029b6bcb8a13cf19664c7b8e19ab3a58e0fefbb5b8461447ed5ec"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:957b4774373cf6f709359e5c8c4a0af9f6d7875db657adb0feaf8d6cb3c3964c"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d7eeb6d22331e2fd42fce928a81c697c9ee2d51400bd1a28803965883e13cead"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6a962e04b8f91f8c4e5917e518d17958e3bdee71fd1d8b88cdce74dd0ebbf434"}, - {file = "yarl-1.9.4-cp37-cp37m-win32.whl", hash = "sha256:f3bc6af6e2b8f92eced34ef6a96ffb248e863af20ef4fde9448cc8c9b858b749"}, - {file = "yarl-1.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4d7a90a92e528aadf4965d685c17dacff3df282db1121136c382dc0b6014d2"}, - {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ec61d826d80fc293ed46c9dd26995921e3a82146feacd952ef0757236fc137be"}, - {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8be9e837ea9113676e5754b43b940b50cce76d9ed7d2461df1af39a8ee674d9f"}, - {file = "yarl-1.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bef596fdaa8f26e3d66af846bbe77057237cb6e8efff8cd7cc8dff9a62278bbf"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d47552b6e52c3319fede1b60b3de120fe83bde9b7bddad11a69fb0af7db32f1"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84fc30f71689d7fc9168b92788abc977dc8cefa806909565fc2951d02f6b7d57"}, 
- {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4aa9741085f635934f3a2583e16fcf62ba835719a8b2b28fb2917bb0537c1dfa"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:206a55215e6d05dbc6c98ce598a59e6fbd0c493e2de4ea6cc2f4934d5a18d130"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07574b007ee20e5c375a8fe4a0789fad26db905f9813be0f9fef5a68080de559"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5a2e2433eb9344a163aced6a5f6c9222c0786e5a9e9cac2c89f0b28433f56e23"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6ad6d10ed9b67a382b45f29ea028f92d25bc0bc1daf6c5b801b90b5aa70fb9ec"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6fe79f998a4052d79e1c30eeb7d6c1c1056ad33300f682465e1b4e9b5a188b78"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a825ec844298c791fd28ed14ed1bffc56a98d15b8c58a20e0e08c1f5f2bea1be"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8619d6915b3b0b34420cf9b2bb6d81ef59d984cb0fde7544e9ece32b4b3043c3"}, - {file = "yarl-1.9.4-cp38-cp38-win32.whl", hash = "sha256:686a0c2f85f83463272ddffd4deb5e591c98aac1897d65e92319f729c320eece"}, - {file = "yarl-1.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:a00862fb23195b6b8322f7d781b0dc1d82cb3bcac346d1e38689370cc1cc398b"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:604f31d97fa493083ea21bd9b92c419012531c4e17ea6da0f65cacdcf5d0bd27"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8a854227cf581330ffa2c4824d96e52ee621dd571078a252c25e3a3b3d94a1b1"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ba6f52cbc7809cd8d74604cce9c14868306ae4aa0282016b641c661f981a6e91"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6327976c7c2f4ee6816eff196e25385ccc02cb81427952414a64811037bbc8b"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8397a3817d7dcdd14bb266283cd1d6fc7264a48c186b986f32e86d86d35fbac5"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0381b4ce23ff92f8170080c97678040fc5b08da85e9e292292aba67fdac6c34"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23d32a2594cb5d565d358a92e151315d1b2268bc10f4610d098f96b147370136"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ddb2a5c08a4eaaba605340fdee8fc08e406c56617566d9643ad8bf6852778fc7"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:26a1dc6285e03f3cc9e839a2da83bcbf31dcb0d004c72d0730e755b33466c30e"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:18580f672e44ce1238b82f7fb87d727c4a131f3a9d33a5e0e82b793362bf18b4"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:29e0f83f37610f173eb7e7b5562dd71467993495e568e708d99e9d1944f561ec"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:1f23e4fe1e8794f74b6027d7cf19dc25f8b63af1483d91d595d4a07eca1fb26c"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db8e58b9d79200c76956cefd14d5c90af54416ff5353c5bfd7cbe58818e26ef0"}, - {file = "yarl-1.9.4-cp39-cp39-win32.whl", hash = 
"sha256:c7224cab95645c7ab53791022ae77a4509472613e839dab722a72abe5a684575"}, - {file = "yarl-1.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:824d6c50492add5da9374875ce72db7a0733b29c2394890aef23d533106e2b15"}, - {file = "yarl-1.9.4-py3-none-any.whl", hash = "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad"}, - {file = "yarl-1.9.4.tar.gz", hash = "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf"}, -] - -[package.dependencies] -idna = ">=2.0" -multidict = ">=4.0" - -[[package]] -name = "zipp" -version = "3.20.0" -description = "Backport of pathlib-compatible object wrapper for zip files" -optional = false -python-versions = ">=3.8" -files = [ - {file = "zipp-3.20.0-py3-none-any.whl", hash = "sha256:58da6168be89f0be59beb194da1250516fdaa062ccebd30127ac65d30045e10d"}, - {file = "zipp-3.20.0.tar.gz", hash = "sha256:0145e43d89664cfe1a2e533adc75adafed82fe2da404b4bbb6b026c0157bdb31"}, -] - -[package.extras] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] - -[extras] -all = [] -deploy = [] -local = [] - -[metadata] -lock-version = "2.0" -python-versions = ">=3.10,<3.13" -content-hash = "6835a0ed4266b0aae88f40359616174b45cd79515c1d726935060bbb18a106ce" diff --git a/src/backend/base/pyproject.toml b/src/backend/base/pyproject.toml index 6eff4a01d70b..d676489eb2c2 100644 --- a/src/backend/base/pyproject.toml +++ b/src/backend/base/pyproject.toml @@ -1,124 +1,6 @@ -[tool.poetry] -name = "langflow-base" -version = "0.0.95" -description = "A Python package with a built-in web application" -authors = ["Langflow "] -maintainers = [ - "Carlos Coelho ", - "Cristhian Zanforlin ", - "Gabriel Almeida ", - "Igor Carvalho ", - "Lucas Eduoli ", - "Otávio Anovazzi ", - "Rodrigo Nader ", - "Italo dos Anjos ", -] -repository = "https://github.com/langflow-ai/langflow" -license = "MIT" -readme = "README.md" -keywords = ["nlp", "langchain", "openai", "gpt", "gui"] -packages = [{ include = "langflow" }, { include = "langflow/py.typed" }] -include = ["pyproject.toml", "README.md", "langflow/**/*"] -documentation = "https://docs.langflow.org" - -[tool.poetry.scripts] -langflow-base = "langflow.__main__:main" - -[tool.poetry.dependencies] -python = ">=3.10,<3.13" -fastapi = "^0.111.0" -httpx = "*" -uvicorn = "^0.30.0" -gunicorn = "^22.0.0" -langchain = "~0.2.0" -langchain-core = "^0.2.32" -langchainhub = "~0.1.15" -sqlmodel = "^0.0.18" -loguru = "^0.7.1" -rich = "^13.7.0" -langchain-experimental = "^0.0.61" -pydantic = "^2.7.0" -pydantic-settings = "^2.2.0" -websockets = "*" -typer = "^0.12.0" -cachetools = "^5.3.1" -platformdirs = "^4.2.0" -python-multipart = "^0.0.7" -orjson = "3.10.0" -alembic = "^1.13.0" -passlib = "^1.7.4" -bcrypt = "4.0.1" -pillow = "^10.2.0" -docstring-parser = "^0.16" -python-jose = "^3.3.0" -pandas = "2.2.2" -multiprocess = "^0.70.14" -duckdb = "^1.0.0" -python-docx = "^1.1.0" -jq = { version = "^1.7.0", markers = "sys_platform != 'win32'" } -pypdf = "^4.2.0" -nest-asyncio = "^1.6.0" -emoji = "^2.12.0" -cryptography = "^42.0.5" -asyncer = "^0.0.5" -pyperclip = "^1.8.2" -uncurl = "^0.0.11" -sentry-sdk = {extras = ["fastapi", "loguru"], version = "^2.5.1"} -chardet = "^5.2.0" -firecrawl-py = "^0.0.16" 
-opentelemetry-api = "^1.25.0"
-opentelemetry-sdk = "^1.25.0"
-opentelemetry-exporter-prometheus = "^0.46b0"
-opentelemetry-instrumentation-fastapi = "^0.46b0"
-prometheus-client = "^0.20.0"
-aiofiles = "^24.1.0"
-setuptools = ">=70"
-nanoid = "^2.0.0"
-filelock = "^3.15.4"
-grandalf = "^0.8.0"
-crewai = "^0.36.0"
-spider-client = "^0.0.27"
-diskcache = "^5.6.3"
-
-
-[tool.poetry.extras]
-deploy = ["celery", "redis", "flower"]
-local = ["llama-cpp-python", "sentence-transformers", "ctransformers"]
-all = ["deploy", "local"]
-
-
-
-[tool.poetry.group.dev.dependencies]
-types-redis = "^4.6.0.5"
-ipykernel = "^6.29.0"
-mypy = "^1.11.0"
-ruff = "^0.4.5"
-httpx = "*"
-pytest = "^8.2.0"
-types-requests = "^2.32.0"
-requests = "^2.32.0"
-pytest-cov = "^5.0.0"
-pandas-stubs = "^2.1.4.231227"
-types-pillow = "^10.2.0.20240213"
-types-pyyaml = "^6.0.12.8"
-types-python-jose = "^3.3.4.8"
-types-passlib = "^1.7.7.13"
-pytest-mock = "^3.14.0"
-pytest-xdist = "^3.6.0"
-types-pywin32 = "^306.0.0.4"
-types-google-cloud-ndb = "^2.2.0.0"
-pytest-sugar = "^1.0.0"
-respx = "^0.21.1"
-pytest-instafail = "^0.5.0"
-pytest-asyncio = "^0.23.0"
-pytest-profiling = "^1.7.0"
-pre-commit = "^3.7.0"
-vulture = "^2.11"
-dictdiffer = "^0.9.0"
-pytest-split = "^0.9.0"
-devtools = "^0.12.2"
-pytest-flakefinder = "^1.1.0"
+[tool.hatch.build.targets.wheel]
+packages = ["langflow"]
[tool.pytest.ini_options]
@@ -129,6 +11,8 @@
console_output_style = "progress"
filterwarnings = ["ignore::DeprecationWarning"]
log_cli = true
markers = ["async_test"]
+asyncio_mode = "auto"
+asyncio_default_fixture_loop_scope = "function"
[tool.mypy]
plugins = ["pydantic.mypy"]
@@ -139,9 +23,205 @@
mypy_path = "langflow"
ignore_missing_imports = true
[tool.ruff]
-exclude = ["src/backend/langflow/alembic/*"]
+target-version = "py310"
+exclude = ["langflow/alembic"]
line-length = 120
+[tool.ruff.lint]
+flake8-annotations.mypy-init-return = true
+flake8-bugbear.extend-immutable-calls = [
+    "fastapi.Depends",
+    "fastapi.File",
+    "fastapi.Query",
+    "typer.Option",
+]
+flake8-type-checking.runtime-evaluated-base-classes = [
+    "pydantic.BaseModel",
+    "typing.TypedDict", # Needed by fastapi
+    "typing_extensions.TypedDict", # Needed by fastapi
+]
+pydocstyle.convention = "google"
+select = ["ALL"]
+ignore = [
+    "C90", # McCabe complexity
+    "CPY", # Missing copyright
+    "COM812", # Messes with the formatter
+    "ERA", # Eradicate commented-out code
+    "FIX002", # Line contains TODO
+    "ISC001", # Messes with the formatter
+    "PERF203", # Rarely useful
+    "PLR09", # Too many of something (args, statements, etc.)
+    "RUF012", # Pydantic models are currently not well detected.
See https://github.com/astral-sh/ruff/issues/13630 + "TD002", # Missing author in TODO + "TD003", # Missing issue link in TODO + "TRY301", # A bit too harsh (Abstract `raise` to an inner function) + + # Rules that are TODOs + "ANN", # Missing type annotations + "D1", # Missing docstrings + "SLF001", # Using private attributes outside of class +] + +[tool.ruff.lint.per-file-ignores] +"langflow/api/v1/*" = [ + "TCH", # FastAPI needs to evaluate types at runtime +] +"langflow/{components/tools/python_code_structured_tool.py,custom/code_parser/code_parser.py,utils/validate.py}" = [ + "S102", # Use of exec +] +"langflow/services/cache/*" = [ + "S301", # Use of pickle +] + +[tool.uv] +dev-dependencies = [ + "asgi-lifespan>=2.1.0", + "pytest-codspeed>=3.0.0", + "pytest-github-actions-annotate-failures>=0.2.0", +] + [build-system] -requires = ["poetry-core"] -build-backend = "poetry.core.masonry.api" +requires = ["hatchling"] +build-backend = "hatchling.build" + +[project] +name = "langflow-base" +version = "0.1.0" +description = "A Python package with a built-in web application" +requires-python = ">=3.10,<3.13" +license = "MIT" +keywords = ["nlp", "langchain", "openai", "gpt", "gui"] +readme = "README.md" +maintainers = [ + { name = "Carlos Coelho", email = "carlos@langflow.org" }, + { name = "Cristhian Zanforlin", email = "cristhian.lousa@gmail.com" }, + { name = "Gabriel Almeida", email = "gabriel@langflow.org" }, + { name = "Igor Carvalho", email = "igorr.ackerman@gmail.com" }, + { name = "Lucas Eduoli", email = "lucaseduoli@gmail.com" }, + { name = "Otávio Anovazzi", email = "otavio2204@gmail.com" }, + { name = "Rodrigo Nader", email = "rodrigo@langflow.org" }, + { name = "Italo dos Anjos", email = "italojohnnydosanjos@gmail.com" }, +] + + + +dependencies = [ + "fastapi>=0.115.2", + "httpx[http2]>=0.27", + "uvicorn>=0.30.0", + "gunicorn>=22.0.0", + "langchain~=0.3.3", + "langchain-core~=0.3.15", + "langchainhub~=0.1.15", + "sqlmodel==0.0.18", + "loguru>=0.7.1", + "rich>=13.7.0", + "langchain-experimental>=0.0.61", + "pydantic>=2.7.0", + "pydantic-settings>=2.2.0", + "typer>=0.13.0", + "cachetools>=5.3.1", + "platformdirs>=4.2.0", + "python-multipart>=0.0.12", + "orjson==3.10.0", + "alembic>=1.13.0", + "passlib>=1.7.4", + "bcrypt==4.0.1", + "pillow>=10.2.0", + "docstring-parser>=0.16", + "python-jose>=3.3.0", + "pandas==2.2.2", + "multiprocess>=0.70.14", + "duckdb>=1.0.0", + "python-docx>=1.1.0", + "jq>=1.7.0; sys_platform != 'win32'", + "nest-asyncio>=1.6.0", + "emoji>=2.12.0", + "cryptography>=42.0.5,<44.0.0", + "asyncer>=0.0.5", + "pyperclip>=1.8.2", + "uncurl>=0.0.11", + "sentry-sdk[fastapi,loguru]>=2.5.1", + "chardet>=5.2.0", + "firecrawl-py>=0.0.16", + "opentelemetry-api>=1.25.0", + "opentelemetry-sdk>=1.25.0", + "opentelemetry-exporter-prometheus>=0.46b0", + "opentelemetry-instrumentation-fastapi>=0.46b0", + "prometheus-client>=0.20.0", + "aiofiles>=24.1.0", + "setuptools>=70", + "nanoid>=2.0.0", + "filelock>=3.15.4", + "grandalf>=0.8.0", + "crewai>=0.74.2", + "spider-client>=0.0.27", + "diskcache>=5.6.3", + "clickhouse-connect==0.7.19", + "assemblyai>=0.33.0", + "fastapi-pagination>=0.12.29", + "defusedxml>=0.7.1", + "pypdf~=5.1.0", +] + +[project.urls] +Repository = "https://github.com/langflow-ai/langflow" +Documentation = "https://docs.langflow.org" + +# Optional dependencies for uv +[project.optional-dependencies] +deploy = [ + "celery>=5.3.1", + "redis>=4.6.0", + "flower>=1.0.0" +] +local = [ + "llama-cpp-python>=0.2.0", + "sentence-transformers>=2.0.0", + "ctransformers>=0.2" +] 
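+# NOTE: the "all" extra below repeats the deploy and local pins verbatim
+# rather than referencing those groups, so the three lists have to be kept
+# in sync by hand when bumping versions.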
+all = [ + "celery>=5.3.1", + "redis>=4.6.0", + "flower>=1.0.0", + "llama-cpp-python>=0.2.0", + "sentence-transformers>=2.0.0", + "ctransformers>=0.2" +] + +# Development dependencies +dev = [ + "types-redis>=4.6.0.5", + "ipykernel>=6.29.0", + "mypy>=1.11.0", + "ruff>=0.6.2", + "httpx[http2]>=0.27", + "pytest>=8.2.0", + "types-requests>=2.32.0", + "requests>=2.32.0", + "pytest-cov>=5.0.0", + "pandas-stubs>=2.1.4.231227", + "types-pillow>=10.2.0.20240213", + "types-pyyaml>=6.0.12.8", + "types-python-jose>=3.3.4.8", + "types-passlib>=1.7.7.13", + "pytest-mock>=3.14.0", + "pytest-xdist>=3.6.0", + "types-pywin32>=306.0.0.4", + "types-google-cloud-ndb>=2.2.0.0", + "pytest-sugar>=1.0.0", + "respx>=0.21.1", + "pytest-instafail>=0.5.0", + "pytest-asyncio>=0.23.0", + "pytest-profiling>=1.7.0", + "pre-commit>=3.7.0", + "vulture>=2.11", + "dictdiffer>=0.9.0", + "pytest-split>=0.9.0", + "devtools>=0.12.2", + "pytest-flakefinder>=1.1.0", + "types-markdown>=3.7.0.20240822" +] + +[project.scripts] +langflow-base = "langflow.__main__:main" diff --git a/src/backend/base/uv.lock b/src/backend/base/uv.lock new file mode 100644 index 000000000000..49d7c8226468 --- /dev/null +++ b/src/backend/base/uv.lock @@ -0,0 +1,5699 @@ +version = 1 +requires-python = ">=3.10, <3.13" +resolution-markers = [ + "python_full_version < '3.11'", + "python_full_version == '3.11.*'", + "python_full_version >= '3.12' and python_full_version < '3.12.4'", + "python_full_version >= '3.12.4'", +] + +[[package]] +name = "aiofiles" +version = "24.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0b/03/a88171e277e8caa88a4c77808c20ebb04ba74cc4681bf1e9416c862de237/aiofiles-24.1.0.tar.gz", hash = "sha256:22a075c9e5a3810f0c2e48f3008c94d68c65d763b9b03857924c99e57355166c", size = 30247 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a5/45/30bb92d442636f570cb5651bc661f52b610e2eec3f891a5dc3a4c3667db0/aiofiles-24.1.0-py3-none-any.whl", hash = "sha256:b4ec55f4195e3eb5d7abd1bf7e061763e864dd4954231fb8539a0ef8bb8260e5", size = 15896 }, +] + +[[package]] +name = "aiohappyeyeballs" +version = "2.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2d/f7/22bba300a16fd1cad99da1a23793fe43963ee326d012fdf852d0b4035955/aiohappyeyeballs-2.4.0.tar.gz", hash = "sha256:55a1714f084e63d49639800f95716da97a1f173d46a16dfcfda0016abb93b6b2", size = 16786 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/b6/58ea188899950d759a837f9a58b2aee1d1a380ea4d6211ce9b1823748851/aiohappyeyeballs-2.4.0-py3-none-any.whl", hash = "sha256:7ce92076e249169a13c2f49320d1967425eaf1f407522d707d59cac7628d62bd", size = 12155 }, +] + +[[package]] +name = "aiohttp" +version = "3.10.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohappyeyeballs" }, + { name = "aiosignal" }, + { name = "async-timeout", marker = "python_full_version < '3.11'" }, + { name = "attrs" }, + { name = "frozenlist" }, + { name = "multidict" }, + { name = "yarl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ca/28/ca549838018140b92a19001a8628578b0f2a3b38c16826212cc6f706e6d4/aiohttp-3.10.5.tar.gz", hash = "sha256:f071854b47d39591ce9a17981c46790acb30518e2f83dfca8db2dfa091178691", size = 7524360 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c0/4a/b27dd9b88fe22dde88742b341fd10251746a6ffcfe1c0b8b15b4a8cbd7c1/aiohttp-3.10.5-cp310-cp310-macosx_10_9_universal2.whl", hash = 
"sha256:18a01eba2574fb9edd5f6e5fb25f66e6ce061da5dab5db75e13fe1558142e0a3", size = 587010 }, + { url = "https://files.pythonhosted.org/packages/de/a9/0f7e2b71549c9d641086c423526ae7a10de3b88d03ba104a3df153574d0d/aiohttp-3.10.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:94fac7c6e77ccb1ca91e9eb4cb0ac0270b9fb9b289738654120ba8cebb1189c6", size = 397698 }, + { url = "https://files.pythonhosted.org/packages/3b/52/26baa486e811c25b0cd16a494038260795459055568713f841e78f016481/aiohttp-3.10.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2f1f1c75c395991ce9c94d3e4aa96e5c59c8356a15b1c9231e783865e2772699", size = 389052 }, + { url = "https://files.pythonhosted.org/packages/33/df/71ba374a3e925539cb2f6e6d4f5326e7b6b200fabbe1b3cc5e6368f07ce7/aiohttp-3.10.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f7acae3cf1a2a2361ec4c8e787eaaa86a94171d2417aae53c0cca6ca3118ff6", size = 1248615 }, + { url = "https://files.pythonhosted.org/packages/67/02/bb89c1eba08a27fc844933bee505d63d480caf8e2816c06961d2941cd128/aiohttp-3.10.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:94c4381ffba9cc508b37d2e536b418d5ea9cfdc2848b9a7fea6aebad4ec6aac1", size = 1282930 }, + { url = "https://files.pythonhosted.org/packages/db/36/07d8cfcc37f39c039f93a4210cc71dadacca003609946c63af23659ba656/aiohttp-3.10.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c31ad0c0c507894e3eaa843415841995bf8de4d6b2d24c6e33099f4bc9fc0d4f", size = 1317250 }, + { url = "https://files.pythonhosted.org/packages/9a/44/cabeac994bef8ba521b552ae996928afc6ee1975a411385a07409811b01f/aiohttp-3.10.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0912b8a8fadeb32ff67a3ed44249448c20148397c1ed905d5dac185b4ca547bb", size = 1243212 }, + { url = "https://files.pythonhosted.org/packages/5a/11/23f1e31f5885ac72be52fd205981951dd2e4c87c5b1487cf82fde5bbd46c/aiohttp-3.10.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0d93400c18596b7dc4794d48a63fb361b01a0d8eb39f28800dc900c8fbdaca91", size = 1213401 }, + { url = "https://files.pythonhosted.org/packages/3f/e7/6e69a0b0d896fbaf1192d492db4c21688e6c0d327486da610b0e8195bcc9/aiohttp-3.10.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d00f3c5e0d764a5c9aa5a62d99728c56d455310bcc288a79cab10157b3af426f", size = 1212450 }, + { url = "https://files.pythonhosted.org/packages/a9/7f/a42f51074c723ea848254946aec118f1e59914a639dc8ba20b0c9247c195/aiohttp-3.10.5-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:d742c36ed44f2798c8d3f4bc511f479b9ceef2b93f348671184139e7d708042c", size = 1211324 }, + { url = "https://files.pythonhosted.org/packages/d5/43/c2f9d2f588ccef8f028f0a0c999b5ceafecbda50b943313faee7e91f3e03/aiohttp-3.10.5-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:814375093edae5f1cb31e3407997cf3eacefb9010f96df10d64829362ae2df69", size = 1266838 }, + { url = "https://files.pythonhosted.org/packages/c1/a7/ff9f067ecb06896d859e4f2661667aee4bd9c616689599ff034b63cbd9d7/aiohttp-3.10.5-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:8224f98be68a84b19f48e0bdc14224b5a71339aff3a27df69989fa47d01296f3", size = 1285301 }, + { url = "https://files.pythonhosted.org/packages/9a/e3/dd56bb4c67d216046ce61d98dec0f3023043f1de48f561df1bf93dd47aea/aiohttp-3.10.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d9a487ef090aea982d748b1b0d74fe7c3950b109df967630a20584f9a99c0683", size = 1235806 }, + { url = 
"https://files.pythonhosted.org/packages/a7/64/90dcd42ac21927a49ba4140b2e4d50e1847379427ef6c43eb338ef9960e3/aiohttp-3.10.5-cp310-cp310-win32.whl", hash = "sha256:d9ef084e3dc690ad50137cc05831c52b6ca428096e6deb3c43e95827f531d5ef", size = 360162 }, + { url = "https://files.pythonhosted.org/packages/f3/45/145d8b4853fc92c0c8509277642767e7726a085e390ce04353dc68b0f5b5/aiohttp-3.10.5-cp310-cp310-win_amd64.whl", hash = "sha256:66bf9234e08fe561dccd62083bf67400bdbf1c67ba9efdc3dac03650e97c6088", size = 379173 }, + { url = "https://files.pythonhosted.org/packages/f1/90/54ccb1e4eadfb6c95deff695582453f6208584431d69bf572782e9ae542b/aiohttp-3.10.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8c6a4e5e40156d72a40241a25cc226051c0a8d816610097a8e8f517aeacd59a2", size = 586455 }, + { url = "https://files.pythonhosted.org/packages/c3/7a/95e88c02756e7e718f054e1bb3ec6ad5d0ee4a2ca2bb1768c5844b3de30a/aiohttp-3.10.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2c634a3207a5445be65536d38c13791904fda0748b9eabf908d3fe86a52941cf", size = 397255 }, + { url = "https://files.pythonhosted.org/packages/07/4f/767387b39990e1ee9aba8ce642abcc286d84d06e068dc167dab983898f18/aiohttp-3.10.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4aff049b5e629ef9b3e9e617fa6e2dfeda1bf87e01bcfecaf3949af9e210105e", size = 388973 }, + { url = "https://files.pythonhosted.org/packages/61/46/0df41170a4d228c07b661b1ba9d87101d99a79339dc93b8b1183d8b20545/aiohttp-3.10.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1942244f00baaacaa8155eca94dbd9e8cc7017deb69b75ef67c78e89fdad3c77", size = 1326126 }, + { url = "https://files.pythonhosted.org/packages/af/20/da0d65e07ce49d79173fed41598f487a0a722e87cfbaa8bb7e078a7c1d39/aiohttp-3.10.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e04a1f2a65ad2f93aa20f9ff9f1b672bf912413e5547f60749fa2ef8a644e061", size = 1364538 }, + { url = "https://files.pythonhosted.org/packages/aa/20/b59728405114e57541ba9d5b96033e69d004e811ded299537f74237629ca/aiohttp-3.10.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7f2bfc0032a00405d4af2ba27f3c429e851d04fad1e5ceee4080a1c570476697", size = 1399896 }, + { url = "https://files.pythonhosted.org/packages/2a/92/006690c31b830acbae09d2618e41308fe4c81c0679b3b33a3af859e0b7bf/aiohttp-3.10.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:424ae21498790e12eb759040bbb504e5e280cab64693d14775c54269fd1d2bb7", size = 1312914 }, + { url = "https://files.pythonhosted.org/packages/d4/71/1a253ca215b6c867adbd503f1e142117527ea8775e65962bc09b2fad1d2c/aiohttp-3.10.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:975218eee0e6d24eb336d0328c768ebc5d617609affaca5dbbd6dd1984f16ed0", size = 1271301 }, + { url = "https://files.pythonhosted.org/packages/0a/ab/5d1d9ff9ce6cce8fa54774d0364e64a0f3cd50e512ff09082ced8e5217a1/aiohttp-3.10.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4120d7fefa1e2d8fb6f650b11489710091788de554e2b6f8347c7a20ceb003f5", size = 1291652 }, + { url = "https://files.pythonhosted.org/packages/75/5f/f90510ea954b9ae6e7a53d2995b97a3e5c181110fdcf469bc9238445871d/aiohttp-3.10.5-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:b90078989ef3fc45cf9221d3859acd1108af7560c52397ff4ace8ad7052a132e", size = 1286289 }, + { url = "https://files.pythonhosted.org/packages/be/9e/1f523414237798660921817c82b9225a363af436458caf584d2fa6a2eb4a/aiohttp-3.10.5-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = 
"sha256:ba5a8b74c2a8af7d862399cdedce1533642fa727def0b8c3e3e02fcb52dca1b1", size = 1341848 }, + { url = "https://files.pythonhosted.org/packages/f6/36/443472ddaa85d7d80321fda541d9535b23ecefe0bf5792cc3955ea635190/aiohttp-3.10.5-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:02594361128f780eecc2a29939d9dfc870e17b45178a867bf61a11b2a4367277", size = 1361619 }, + { url = "https://files.pythonhosted.org/packages/19/f6/3ecbac0bc4359c7d7ba9e85c6b10f57e20edaf1f97751ad2f892db231ad0/aiohttp-3.10.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:8fb4fc029e135859f533025bc82047334e24b0d489e75513144f25408ecaf058", size = 1320869 }, + { url = "https://files.pythonhosted.org/packages/34/7e/ed74ffb36e3a0cdec1b05d8fbaa29cb532371d5a20058b3a8052fc90fe7c/aiohttp-3.10.5-cp311-cp311-win32.whl", hash = "sha256:e1ca1ef5ba129718a8fc827b0867f6aa4e893c56eb00003b7367f8a733a9b072", size = 359271 }, + { url = "https://files.pythonhosted.org/packages/98/1b/718901f04bc8c886a742be9e83babb7b93facabf7c475cc95e2b3ab80b4d/aiohttp-3.10.5-cp311-cp311-win_amd64.whl", hash = "sha256:349ef8a73a7c5665cca65c88ab24abe75447e28aa3bc4c93ea5093474dfdf0ff", size = 379143 }, + { url = "https://files.pythonhosted.org/packages/d9/1c/74f9dad4a2fc4107e73456896283d915937f48177b99867b63381fadac6e/aiohttp-3.10.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:305be5ff2081fa1d283a76113b8df7a14c10d75602a38d9f012935df20731487", size = 583468 }, + { url = "https://files.pythonhosted.org/packages/12/29/68d090551f2b58ce76c2b436ced8dd2dfd32115d41299bf0b0c308a5483c/aiohttp-3.10.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3a1c32a19ee6bbde02f1cb189e13a71b321256cc1d431196a9f824050b160d5a", size = 394066 }, + { url = "https://files.pythonhosted.org/packages/8f/f7/971f88b4cdcaaa4622925ba7d86de47b48ec02a9040a143514b382f78da4/aiohttp-3.10.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:61645818edd40cc6f455b851277a21bf420ce347baa0b86eaa41d51ef58ba23d", size = 389098 }, + { url = "https://files.pythonhosted.org/packages/f1/5a/fe3742efdce551667b2ddf1158b27c5b8eb1edc13d5e14e996e52e301025/aiohttp-3.10.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c225286f2b13bab5987425558baa5cbdb2bc925b2998038fa028245ef421e75", size = 1332742 }, + { url = "https://files.pythonhosted.org/packages/1a/52/a25c0334a1845eb4967dff279151b67ca32a948145a5812ed660ed900868/aiohttp-3.10.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8ba01ebc6175e1e6b7275c907a3a36be48a2d487549b656aa90c8a910d9f3178", size = 1372134 }, + { url = "https://files.pythonhosted.org/packages/96/3d/33c1d8efc2d8ec36bff9a8eca2df9fdf8a45269c6e24a88e74f2aa4f16bd/aiohttp-3.10.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8eaf44ccbc4e35762683078b72bf293f476561d8b68ec8a64f98cf32811c323e", size = 1414413 }, + { url = "https://files.pythonhosted.org/packages/64/74/0f1ddaa5f0caba1d946f0dd0c31f5744116e4a029beec454ec3726d3311f/aiohttp-3.10.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1c43eb1ab7cbf411b8e387dc169acb31f0ca0d8c09ba63f9eac67829585b44f", size = 1328107 }, + { url = "https://files.pythonhosted.org/packages/0a/32/c10118f0ad50e4093227234f71fd0abec6982c29367f65f32ee74ed652c4/aiohttp-3.10.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:de7a5299827253023c55ea549444e058c0eb496931fa05d693b95140a947cb73", size = 1280126 }, + { url = 
"https://files.pythonhosted.org/packages/c6/c9/77e3d648d97c03a42acfe843d03e97be3c5ef1b4d9de52e5bd2d28eed8e7/aiohttp-3.10.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4790f0e15f00058f7599dab2b206d3049d7ac464dc2e5eae0e93fa18aee9e7bf", size = 1292660 }, + { url = "https://files.pythonhosted.org/packages/7e/5d/99c71f8e5c8b64295be421b4c42d472766b263a1fe32e91b64bf77005bf2/aiohttp-3.10.5-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:44b324a6b8376a23e6ba25d368726ee3bc281e6ab306db80b5819999c737d820", size = 1300988 }, + { url = "https://files.pythonhosted.org/packages/8f/2c/76d2377dd947f52fbe8afb19b18a3b816d66c7966755c04030f93b1f7b2d/aiohttp-3.10.5-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:0d277cfb304118079e7044aad0b76685d30ecb86f83a0711fc5fb257ffe832ca", size = 1339268 }, + { url = "https://files.pythonhosted.org/packages/fd/e6/3d9d935cc705d57ed524d82ec5d6b678a53ac1552720ae41282caa273584/aiohttp-3.10.5-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:54d9ddea424cd19d3ff6128601a4a4d23d54a421f9b4c0fff740505813739a91", size = 1366993 }, + { url = "https://files.pythonhosted.org/packages/fe/c2/f7eed4d602f3f224600d03ab2e1a7734999b0901b1c49b94dc5891340433/aiohttp-3.10.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4f1c9866ccf48a6df2b06823e6ae80573529f2af3a0992ec4fe75b1a510df8a6", size = 1329459 }, + { url = "https://files.pythonhosted.org/packages/ce/8f/27f205b76531fc592abe29e1ad265a16bf934a9f609509c02d765e6a8055/aiohttp-3.10.5-cp312-cp312-win32.whl", hash = "sha256:dc4826823121783dccc0871e3f405417ac116055bf184ac04c36f98b75aacd12", size = 356968 }, + { url = "https://files.pythonhosted.org/packages/39/8c/4f6c0b2b3629f6be6c81ab84d9d577590f74f01d4412bfc4067958eaa1e1/aiohttp-3.10.5-cp312-cp312-win_amd64.whl", hash = "sha256:22c0a23a3b3138a6bf76fc553789cb1a703836da86b0f306b6f0dc1617398abc", size = 377650 }, + { url = "https://files.pythonhosted.org/packages/7b/b9/03b4327897a5b5d29338fa9b514f1c2f66a3e4fc88a4e40fad478739314d/aiohttp-3.10.5-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7f6b639c36734eaa80a6c152a238242bedcee9b953f23bb887e9102976343092", size = 576994 }, + { url = "https://files.pythonhosted.org/packages/67/1b/20c2e159cd07b8ed6dde71c2258233902fdf415b2fe6174bd2364ba63107/aiohttp-3.10.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f29930bc2921cef955ba39a3ff87d2c4398a0394ae217f41cb02d5c26c8b1b77", size = 390684 }, + { url = "https://files.pythonhosted.org/packages/4d/6b/ff83b34f157e370431d8081c5d1741963f4fb12f9aaddb2cacbf50305225/aiohttp-3.10.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f489a2c9e6455d87eabf907ac0b7d230a9786be43fbe884ad184ddf9e9c1e385", size = 386176 }, + { url = "https://files.pythonhosted.org/packages/4d/a1/6e92817eb657de287560962df4959b7ddd22859c4b23a0309e2d3de12538/aiohttp-3.10.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:123dd5b16b75b2962d0fff566effb7a065e33cd4538c1692fb31c3bda2bfb972", size = 1303310 }, + { url = "https://files.pythonhosted.org/packages/04/29/200518dc7a39c30ae6d5bc232d7207446536e93d3d9299b8e95db6e79c54/aiohttp-3.10.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b98e698dc34966e5976e10bbca6d26d6724e6bdea853c7c10162a3235aba6e16", size = 1340445 }, + { url = "https://files.pythonhosted.org/packages/8e/20/53f7bba841ba7b5bb5dea580fea01c65524879ba39cb917d08c845524717/aiohttp-3.10.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3b9162bab7e42f21243effc822652dc5bb5e8ff42a4eb62fe7782bcbcdfacf6", size = 
1385121 }, + { url = "https://files.pythonhosted.org/packages/f1/b4/d99354ad614c48dd38fb1ee880a1a54bd9ab2c3bcad3013048d4a1797d3a/aiohttp-3.10.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1923a5c44061bffd5eebeef58cecf68096e35003907d8201a4d0d6f6e387ccaa", size = 1299669 }, + { url = "https://files.pythonhosted.org/packages/51/39/ca1de675f2a5729c71c327e52ac6344e63f036bd37281686ae5c3fb13bfb/aiohttp-3.10.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d55f011da0a843c3d3df2c2cf4e537b8070a419f891c930245f05d329c4b0689", size = 1252638 }, + { url = "https://files.pythonhosted.org/packages/54/cf/a3ae7ff43138422d477348e309ef8275779701bf305ff6054831ef98b782/aiohttp-3.10.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:afe16a84498441d05e9189a15900640a2d2b5e76cf4efe8cbb088ab4f112ee57", size = 1266889 }, + { url = "https://files.pythonhosted.org/packages/6e/7a/c6027ad70d9fb23cf254a26144de2723821dade1a624446aa22cd0b6d012/aiohttp-3.10.5-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:f8112fb501b1e0567a1251a2fd0747baae60a4ab325a871e975b7bb67e59221f", size = 1266249 }, + { url = "https://files.pythonhosted.org/packages/64/fd/ed136d46bc2c7e3342fed24662b4827771d55ceb5a7687847aae977bfc17/aiohttp-3.10.5-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:1e72589da4c90337837fdfe2026ae1952c0f4a6e793adbbfbdd40efed7c63599", size = 1311036 }, + { url = "https://files.pythonhosted.org/packages/76/9a/43eeb0166f1119256d6f43468f900db1aed7fbe32069d2a71c82f987db4d/aiohttp-3.10.5-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:4d46c7b4173415d8e583045fbc4daa48b40e31b19ce595b8d92cf639396c15d5", size = 1338756 }, + { url = "https://files.pythonhosted.org/packages/d5/bc/d01ff0810b3f5e26896f76d44225ed78b088ddd33079b85cd1a23514318b/aiohttp-3.10.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:33e6bc4bab477c772a541f76cd91e11ccb6d2efa2b8d7d7883591dfb523e5987", size = 1299976 }, + { url = "https://files.pythonhosted.org/packages/3e/c9/50a297c4f7ab57a949f4add2d3eafe5f3e68bb42f739e933f8b32a092bda/aiohttp-3.10.5-cp313-cp313-win32.whl", hash = "sha256:c58c6837a2c2a7cf3133983e64173aec11f9c2cd8e87ec2fdc16ce727bcf1a04", size = 355609 }, + { url = "https://files.pythonhosted.org/packages/65/28/aee9d04fb0b3b1f90622c338a08e54af5198e704a910e20947c473298fd0/aiohttp-3.10.5-cp313-cp313-win_amd64.whl", hash = "sha256:38172a70005252b6893088c0f5e8a47d173df7cc2b2bd88650957eb84fcf5022", size = 375697 }, +] + +[[package]] +name = "aiosignal" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "frozenlist" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ae/67/0952ed97a9793b4958e5736f6d2b346b414a2cd63e82d05940032f45b32f/aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc", size = 19422 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/ac/a7305707cb852b7e16ff80eaf5692309bde30e2b1100a1fcacdc8f731d97/aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17", size = 7617 }, +] + +[[package]] +name = "alembic" +version = "1.13.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mako" }, + { name = "sqlalchemy" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/66/e2/efa88e86029cada2da5941ec664d50d9a3b2a91f5066405c6f90e5016c16/alembic-1.13.2.tar.gz", hash = 
"sha256:1ff0ae32975f4fd96028c39ed9bb3c867fe3af956bd7bb37343b54c9fe7445ef", size = 1206463 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/df/ed/c884465c33c25451e4a5cd4acad154c29e5341e3214e220e7f3478aa4b0d/alembic-1.13.2-py3-none-any.whl", hash = "sha256:6b8733129a6224a9a711e17c99b08462dbf7cc9670ba8f2e2ae9af860ceb1953", size = 232990 }, +] + +[[package]] +name = "amqp" +version = "5.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "vine" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/32/2c/6eb09fbdeb3c060b37bd33f8873832897a83e7a428afe01aad333fc405ec/amqp-5.2.0.tar.gz", hash = "sha256:a1ecff425ad063ad42a486c902807d1482311481c8ad95a72694b2975e75f7fd", size = 128754 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/f0/8e5be5d5e0653d9e1d02b1144efa33ff7d2963dfad07049e02c0fa9b2e8d/amqp-5.2.0-py3-none-any.whl", hash = "sha256:827cb12fb0baa892aad844fd95258143bce4027fdac4fccddbc43330fd281637", size = 50917 }, +] + +[[package]] +name = "annotated-types" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643 }, +] + +[[package]] +name = "anyio" +version = "4.5.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "idna" }, + { name = "sniffio" }, + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a0/44/66874c5256e9fbc30103b31927fd9341c8da6ccafd4721b2b3e81e6ef176/anyio-4.5.0.tar.gz", hash = "sha256:c5a275fe5ca0afd788001f58fca1e69e29ce706d746e317d660e21f70c530ef9", size = 169376 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3b/68/f9e9bf6324c46e6b8396610aef90ad423ec3e18c9079547ceafea3dce0ec/anyio-4.5.0-py3-none-any.whl", hash = "sha256:fdeb095b7cc5a5563175eedd926ec4ae55413bb4be5770c424af0ba46ccb4a78", size = 89250 }, +] + +[[package]] +name = "appdirs" +version = "1.4.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d7/d8/05696357e0311f5b5c316d7b95f46c669dd9c15aaeecbb48c7d0aeb88c40/appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41", size = 13470 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3b/00/2344469e2084fb287c2e0b57b72910309874c3245463acd6cf5e3db69324/appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128", size = 9566 }, +] + +[[package]] +name = "appnope" +version = "0.1.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/35/5d/752690df9ef5b76e169e68d6a129fa6d08a7100ca7f754c89495db3c6019/appnope-0.1.4.tar.gz", hash = "sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee", size = 4170 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/81/29/5ecc3a15d5a33e31b26c11426c45c501e439cb865d0bff96315d86443b78/appnope-0.1.4-py2.py3-none-any.whl", hash = "sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c", size = 4321 }, +] + +[[package]] +name = "asgiref" +version = "3.8.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/29/38/b3395cc9ad1b56d2ddac9970bc8f4141312dbaec28bc7c218b0dfafd0f42/asgiref-3.8.1.tar.gz", hash = "sha256:c343bd80a0bec947a9860adb4c432ffa7db769836c64238fc34bdc3fec84d590", size = 35186 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/39/e3/893e8757be2612e6c266d9bb58ad2e3651524b5b40cf56761e985a28b13e/asgiref-3.8.1-py3-none-any.whl", hash = "sha256:3e1e3ecc849832fe52ccf2cb6686b7a55f82bb1d6aee72a58826471390335e47", size = 23828 }, +] + +[[package]] +name = "asttokens" +version = "2.4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/45/1d/f03bcb60c4a3212e15f99a56085d93093a497718adf828d050b9d675da81/asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0", size = 62284 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/45/86/4736ac618d82a20d87d2f92ae19441ebc7ac9e7a581d7e58bbe79233b24a/asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24", size = 27764 }, +] + +[[package]] +name = "async-timeout" +version = "4.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/87/d6/21b30a550dafea84b1b8eee21b5e23fa16d010ae006011221f33dcd8d7f8/async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f", size = 8345 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a7/fa/e01228c2938de91d47b307831c62ab9e4001e747789d0b05baf779a6488c/async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028", size = 5721 }, +] + +[[package]] +name = "asyncer" +version = "0.0.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/78/33/b2cd9b49aae307c57de12774724468cff6e8b1e9a50d837b0144cf7f6bf5/asyncer-0.0.5.tar.gz", hash = "sha256:2979f3e04cbedfe5cfeb79027dcf7d004fcc4430a0ca0066ae20490f218ec06e", size = 8260 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/61/3158b0c8d67a071a8146c1c8055fedf087ad598caaec100b10c245366940/asyncer-0.0.5-py3-none-any.whl", hash = "sha256:ba06d6de3c750763868dffacf89b18d40b667605b0241d31c2ee43f188e2ab74", size = 8437 }, +] + +[[package]] +name = "attrs" +version = "24.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fc/0f/aafca9af9315aee06a89ffde799a10a582fe8de76c563ee80bbcdc08b3fb/attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346", size = 792678 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6a/21/5b6702a7f963e95456c0de2d495f67bf5fd62840ac655dc451586d23d39a/attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2", size = 63001 }, +] + +[[package]] +name = "backoff" +version = 
"2.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/47/d7/5bbeb12c44d7c4f2fb5b56abce497eb5ed9f34d85701de869acedd602619/backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba", size = 17001 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/df/73/b6e24bd22e6720ca8ee9a85a0c4a2971af8497d8f3193fa05390cbd46e09/backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8", size = 15148 }, +] + +[[package]] +name = "bcrypt" +version = "4.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8c/ae/3af7d006aacf513975fd1948a6b4d6f8b4a307f8a244e1a3d3774b297aad/bcrypt-4.0.1.tar.gz", hash = "sha256:27d375903ac8261cfe4047f6709d16f7d18d39b1ec92aaf72af989552a650ebd", size = 25498 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/d4/3b2657bd58ef02b23a07729b0df26f21af97169dbd0b5797afa9e97ebb49/bcrypt-4.0.1-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:b1023030aec778185a6c16cf70f359cbb6e0c289fd564a7cfa29e727a1c38f8f", size = 473446 }, + { url = "https://files.pythonhosted.org/packages/ec/0a/1582790232fef6c2aa201f345577306b8bfe465c2c665dec04c86a016879/bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:08d2947c490093a11416df18043c27abe3921558d2c03e2076ccb28a116cb6d0", size = 583044 }, + { url = "https://files.pythonhosted.org/packages/41/16/49ff5146fb815742ad58cafb5034907aa7f166b1344d0ddd7fd1c818bd17/bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0eaa47d4661c326bfc9d08d16debbc4edf78778e6aaba29c1bc7ce67214d4410", size = 583189 }, + { url = "https://files.pythonhosted.org/packages/aa/48/fd2b197a9741fa790ba0b88a9b10b5e88e62ff5cf3e1bc96d8354d7ce613/bcrypt-4.0.1-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae88eca3024bb34bb3430f964beab71226e761f51b912de5133470b649d82344", size = 593473 }, + { url = "https://files.pythonhosted.org/packages/7d/50/e683d8418974a602ba40899c8a5c38b3decaf5a4d36c32fc65dce454d8a8/bcrypt-4.0.1-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:a522427293d77e1c29e303fc282e2d71864579527a04ddcfda6d4f8396c6c36a", size = 593249 }, + { url = "https://files.pythonhosted.org/packages/fb/a7/ee4561fd9b78ca23c8e5591c150cc58626a5dfb169345ab18e1c2c664ee0/bcrypt-4.0.1-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:fbdaec13c5105f0c4e5c52614d04f0bca5f5af007910daa8b6b12095edaa67b3", size = 583586 }, + { url = "https://files.pythonhosted.org/packages/64/fe/da28a5916128d541da0993328dc5cf4b43dfbf6655f2c7a2abe26ca2dc88/bcrypt-4.0.1-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:ca3204d00d3cb2dfed07f2d74a25f12fc12f73e606fcaa6975d1f7ae69cacbb2", size = 593659 }, + { url = "https://files.pythonhosted.org/packages/dd/4f/3632a69ce344c1551f7c9803196b191a8181c6a1ad2362c225581ef0d383/bcrypt-4.0.1-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:089098effa1bc35dc055366740a067a2fc76987e8ec75349eb9484061c54f535", size = 613116 }, + { url = "https://files.pythonhosted.org/packages/87/69/edacb37481d360d06fc947dab5734aaf511acb7d1a1f9e2849454376c0f8/bcrypt-4.0.1-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:e9a51bbfe7e9802b5f3508687758b564069ba937748ad7b9e890086290d2f79e", size = 624290 }, + { url = 
"https://files.pythonhosted.org/packages/aa/ca/6a534669890725cbb8c1fb4622019be31813c8edaa7b6d5b62fc9360a17e/bcrypt-4.0.1-cp36-abi3-win32.whl", hash = "sha256:2caffdae059e06ac23fce178d31b4a702f2a3264c20bfb5ff541b338194d8fab", size = 159428 }, + { url = "https://files.pythonhosted.org/packages/46/81/d8c22cd7e5e1c6a7d48e41a1d1d46c92f17dae70a54d9814f746e6027dec/bcrypt-4.0.1-cp36-abi3-win_amd64.whl", hash = "sha256:8a68f4341daf7522fe8d73874de8906f3a339048ba406be6ddc1b3ccb16fc0d9", size = 152930 }, +] + +[[package]] +name = "beautifulsoup4" +version = "4.12.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "soupsieve" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b3/ca/824b1195773ce6166d388573fc106ce56d4a805bd7427b624e063596ec58/beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051", size = 581181 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b1/fe/e8c672695b37eecc5cbf43e1d0638d88d66ba3a44c4d321c796f4e59167f/beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed", size = 147925 }, +] + +[[package]] +name = "billiard" +version = "4.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7c/58/1546c970afcd2a2428b1bfafecf2371d8951cc34b46701bea73f4280989e/billiard-4.2.1.tar.gz", hash = "sha256:12b641b0c539073fc8d3f5b8b7be998956665c4233c7c1fcd66a7e677c4fb36f", size = 155031 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/30/da/43b15f28fe5f9e027b41c539abc5469052e9d48fd75f8ff094ba2a0ae767/billiard-4.2.1-py3-none-any.whl", hash = "sha256:40b59a4ac8806ba2c2369ea98d876bc6108b051c227baffd928c644d15d8f3cb", size = 86766 }, +] + +[[package]] +name = "boto3" +version = "1.35.23" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "botocore" }, + { name = "jmespath" }, + { name = "s3transfer" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/09/31/aa8f565871e00264874bf220ab9913a168fe8acf8b19f7c1a344d1623104/boto3-1.35.23.tar.gz", hash = "sha256:3fbf1d5b749c92ed43aa190650979dff9f83790a42522e1e9eefa54c8e44bc4b", size = 108605 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2d/5f/94b0310a492dd97b70c927f67c189e339b5b09504bf251144eed913f766f/boto3-1.35.23-py3-none-any.whl", hash = "sha256:ecba4362f82e23ef775c72b3e6fdef3ef68443629b79e88886d5088302ffc050", size = 139156 }, +] + +[[package]] +name = "botocore" +version = "1.35.23" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "jmespath" }, + { name = "python-dateutil" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9a/7a/1c9a1b478c4cdafae166572d5dc2aff93cd34c04fdfbfb0772cf1fccfcfa/botocore-1.35.23.tar.gz", hash = "sha256:25b17a9ccba6ad32bb5bf7ba4f52656aa03c1cb29f6b4e438050ee4ad1967a3b", size = 12775312 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f8/81/90e1b82697d849e4a5e7e6dcf21ef7ba9fa902b98324849bd2956e6efac3/botocore-1.35.23-py3-none-any.whl", hash = "sha256:cab9ec4e0367b9f33f0bc02c5a29f587b0119ecffd6d125bacee085dcbc8817d", size = 12565311 }, +] + +[[package]] +name = "build" +version = "1.2.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "os_name == 'nt'" }, + { name = "importlib-metadata", marker = "python_full_version < '3.10.2'" }, + { name = "packaging" }, + { name = "pyproject-hooks" }, + { 
name = "tomli", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/dd/bb/4a1b7e3a7520e310cf7bfece43788071604e1ccf693a7f0c4638c59068d6/build-1.2.2.tar.gz", hash = "sha256:119b2fb462adef986483438377a13b2f42064a2a3a4161f24a0cca698a07ac8c", size = 46516 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/91/fd/e4bda6228637ecae5732162b5ac2a5a822e2ba8e546eb4997cde51b231a3/build-1.2.2-py3-none-any.whl", hash = "sha256:277ccc71619d98afdd841a0e96ac9fe1593b823af481d3b0cea748e8894e0613", size = 22823 }, +] + +[[package]] +name = "cachetools" +version = "5.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c3/38/a0f315319737ecf45b4319a8cd1f3a908e29d9277b46942263292115eee7/cachetools-5.5.0.tar.gz", hash = "sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a", size = 27661 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a4/07/14f8ad37f2d12a5ce41206c21820d8cb6561b728e51fad4530dff0552a67/cachetools-5.5.0-py3-none-any.whl", hash = "sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292", size = 9524 }, +] + +[[package]] +name = "celery" +version = "5.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "billiard" }, + { name = "click" }, + { name = "click-didyoumean" }, + { name = "click-plugins" }, + { name = "click-repl" }, + { name = "kombu" }, + { name = "python-dateutil" }, + { name = "tzdata" }, + { name = "vine" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8a/9c/cf0bce2cc1c8971bf56629d8f180e4ca35612c7e79e6e432e785261a8be4/celery-5.4.0.tar.gz", hash = "sha256:504a19140e8d3029d5acad88330c541d4c3f64c789d85f94756762d8bca7e706", size = 1575692 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/90/c4/6a4d3772e5407622feb93dd25c86ce3c0fee746fa822a777a627d56b4f2a/celery-5.4.0-py3-none-any.whl", hash = "sha256:369631eb580cf8c51a82721ec538684994f8277637edde2dfc0dacd73ed97f64", size = 425983 }, +] + +[[package]] +name = "certifi" +version = "2024.8.30" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b0/ee/9b19140fe824b367c04c5e1b369942dd754c4c5462d5674002f75c4dedc1/certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9", size = 168507 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/12/90/3c9ff0512038035f59d279fddeb79f5f1eccd8859f06d6163c58798b9487/certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8", size = 167321 }, +] + +[[package]] +name = "cffi" +version = "1.17.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pycparser" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fc/97/c783634659c2920c3fc70419e3af40972dbaf758daa229a7d6ea6135c90d/cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824", size = 516621 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/90/07/f44ca684db4e4f08a3fdc6eeb9a0d15dc6883efc7b8c90357fdbf74e186c/cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14", size = 182191 }, + { url = "https://files.pythonhosted.org/packages/08/fd/cc2fedbd887223f9f5d170c96e57cbf655df9831a6546c1727ae13fa977a/cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67", size = 178592 }, + { url = "https://files.pythonhosted.org/packages/de/cc/4635c320081c78d6ffc2cab0a76025b691a91204f4aa317d568ff9280a2d/cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382", size = 426024 }, + { url = "https://files.pythonhosted.org/packages/b6/7b/3b2b250f3aab91abe5f8a51ada1b717935fdaec53f790ad4100fe2ec64d1/cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702", size = 448188 }, + { url = "https://files.pythonhosted.org/packages/d3/48/1b9283ebbf0ec065148d8de05d647a986c5f22586b18120020452fff8f5d/cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3", size = 455571 }, + { url = "https://files.pythonhosted.org/packages/40/87/3b8452525437b40f39ca7ff70276679772ee7e8b394934ff60e63b7b090c/cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6", size = 436687 }, + { url = "https://files.pythonhosted.org/packages/8d/fb/4da72871d177d63649ac449aec2e8a29efe0274035880c7af59101ca2232/cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17", size = 446211 }, + { url = "https://files.pythonhosted.org/packages/ab/a0/62f00bcb411332106c02b663b26f3545a9ef136f80d5df746c05878f8c4b/cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8", size = 461325 }, + { url = "https://files.pythonhosted.org/packages/36/83/76127035ed2e7e27b0787604d99da630ac3123bfb02d8e80c633f218a11d/cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e", size = 438784 }, + { url = "https://files.pythonhosted.org/packages/21/81/a6cd025db2f08ac88b901b745c163d884641909641f9b826e8cb87645942/cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be", size = 461564 }, + { url = "https://files.pythonhosted.org/packages/f8/fe/4d41c2f200c4a457933dbd98d3cf4e911870877bd94d9656cc0fcb390681/cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c", size = 171804 }, + { url = "https://files.pythonhosted.org/packages/d1/b6/0b0f5ab93b0df4acc49cae758c81fe4e5ef26c3ae2e10cc69249dfd8b3ab/cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15", size = 181299 }, + { url = "https://files.pythonhosted.org/packages/6b/f4/927e3a8899e52a27fa57a48607ff7dc91a9ebe97399b357b85a0c7892e00/cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401", size = 182264 }, + { url = "https://files.pythonhosted.org/packages/6c/f5/6c3a8efe5f503175aaddcbea6ad0d2c96dad6f5abb205750d1b3df44ef29/cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf", size = 178651 }, + { url = 
"https://files.pythonhosted.org/packages/94/dd/a3f0118e688d1b1a57553da23b16bdade96d2f9bcda4d32e7d2838047ff7/cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4", size = 445259 }, + { url = "https://files.pythonhosted.org/packages/2e/ea/70ce63780f096e16ce8588efe039d3c4f91deb1dc01e9c73a287939c79a6/cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41", size = 469200 }, + { url = "https://files.pythonhosted.org/packages/1c/a0/a4fa9f4f781bda074c3ddd57a572b060fa0df7655d2a4247bbe277200146/cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1", size = 477235 }, + { url = "https://files.pythonhosted.org/packages/62/12/ce8710b5b8affbcdd5c6e367217c242524ad17a02fe5beec3ee339f69f85/cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6", size = 459721 }, + { url = "https://files.pythonhosted.org/packages/ff/6b/d45873c5e0242196f042d555526f92aa9e0c32355a1be1ff8c27f077fd37/cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d", size = 467242 }, + { url = "https://files.pythonhosted.org/packages/1a/52/d9a0e523a572fbccf2955f5abe883cfa8bcc570d7faeee06336fbd50c9fc/cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6", size = 477999 }, + { url = "https://files.pythonhosted.org/packages/44/74/f2a2460684a1a2d00ca799ad880d54652841a780c4c97b87754f660c7603/cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f", size = 454242 }, + { url = "https://files.pythonhosted.org/packages/f8/4a/34599cac7dfcd888ff54e801afe06a19c17787dfd94495ab0c8d35fe99fb/cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b", size = 478604 }, + { url = "https://files.pythonhosted.org/packages/34/33/e1b8a1ba29025adbdcda5fb3a36f94c03d771c1b7b12f726ff7fef2ebe36/cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655", size = 171727 }, + { url = "https://files.pythonhosted.org/packages/3d/97/50228be003bb2802627d28ec0627837ac0bf35c90cf769812056f235b2d1/cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0", size = 181400 }, + { url = "https://files.pythonhosted.org/packages/5a/84/e94227139ee5fb4d600a7a4927f322e1d4aea6fdc50bd3fca8493caba23f/cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4", size = 183178 }, + { url = "https://files.pythonhosted.org/packages/da/ee/fb72c2b48656111c4ef27f0f91da355e130a923473bf5ee75c5643d00cca/cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c", size = 178840 }, + { url = "https://files.pythonhosted.org/packages/cc/b6/db007700f67d151abadf508cbfd6a1884f57eab90b1bb985c4c8c02b0f28/cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36", size = 454803 }, + { url = "https://files.pythonhosted.org/packages/1a/df/f8d151540d8c200eb1c6fba8cd0dfd40904f1b0682ea705c36e6c2e97ab3/cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5", size = 478850 }, + { url = "https://files.pythonhosted.org/packages/28/c0/b31116332a547fd2677ae5b78a2ef662dfc8023d67f41b2a83f7c2aa78b1/cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff", size = 485729 }, + { url = "https://files.pythonhosted.org/packages/91/2b/9a1ddfa5c7f13cab007a2c9cc295b70fbbda7cb10a286aa6810338e60ea1/cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99", size = 471256 }, + { url = "https://files.pythonhosted.org/packages/b2/d5/da47df7004cb17e4955df6a43d14b3b4ae77737dff8bf7f8f333196717bf/cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93", size = 479424 }, + { url = "https://files.pythonhosted.org/packages/0b/ac/2a28bcf513e93a219c8a4e8e125534f4f6db03e3179ba1c45e949b76212c/cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3", size = 484568 }, + { url = "https://files.pythonhosted.org/packages/d4/38/ca8a4f639065f14ae0f1d9751e70447a261f1a30fa7547a828ae08142465/cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8", size = 488736 }, + { url = "https://files.pythonhosted.org/packages/86/c5/28b2d6f799ec0bdecf44dced2ec5ed43e0eb63097b0f58c293583b406582/cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65", size = 172448 }, + { url = "https://files.pythonhosted.org/packages/50/b9/db34c4755a7bd1cb2d1603ac3863f22bcecbd1ba29e5ee841a4bc510b294/cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903", size = 181976 }, + { url = "https://files.pythonhosted.org/packages/8d/f8/dd6c246b148639254dad4d6803eb6a54e8c85c6e11ec9df2cffa87571dbe/cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e", size = 182989 }, + { url = "https://files.pythonhosted.org/packages/8b/f1/672d303ddf17c24fc83afd712316fda78dc6fce1cd53011b839483e1ecc8/cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2", size = 178802 }, + { url = "https://files.pythonhosted.org/packages/0e/2d/eab2e858a91fdff70533cab61dcff4a1f55ec60425832ddfdc9cd36bc8af/cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3", size = 454792 }, + { url = "https://files.pythonhosted.org/packages/75/b2/fbaec7c4455c604e29388d55599b99ebcc250a60050610fadde58932b7ee/cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683", size = 478893 }, + { url = 
"https://files.pythonhosted.org/packages/4f/b7/6e4a2162178bf1935c336d4da8a9352cccab4d3a5d7914065490f08c0690/cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5", size = 485810 }, + { url = "https://files.pythonhosted.org/packages/c7/8a/1d0e4a9c26e54746dc08c2c6c037889124d4f59dffd853a659fa545f1b40/cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4", size = 471200 }, + { url = "https://files.pythonhosted.org/packages/26/9f/1aab65a6c0db35f43c4d1b4f580e8df53914310afc10ae0397d29d697af4/cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd", size = 479447 }, + { url = "https://files.pythonhosted.org/packages/5f/e4/fb8b3dd8dc0e98edf1135ff067ae070bb32ef9d509d6cb0f538cd6f7483f/cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed", size = 484358 }, + { url = "https://files.pythonhosted.org/packages/f1/47/d7145bf2dc04684935d57d67dff9d6d795b2ba2796806bb109864be3a151/cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9", size = 488469 }, + { url = "https://files.pythonhosted.org/packages/bf/ee/f94057fa6426481d663b88637a9a10e859e492c73d0384514a17d78ee205/cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d", size = 172475 }, + { url = "https://files.pythonhosted.org/packages/7c/fc/6a8cb64e5f0324877d503c854da15d76c1e50eb722e320b15345c4d0c6de/cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a", size = 182009 }, +] + +[[package]] +name = "cfgv" +version = "3.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/11/74/539e56497d9bd1d484fd863dd69cbbfa653cd2aa27abfe35653494d85e94/cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560", size = 7114 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c5/55/51844dd50c4fc7a33b653bfaba4c2456f06955289ca770a5dbd5fd267374/cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9", size = 7249 }, +] + +[[package]] +name = "chardet" +version = "5.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f3/0d/f7b6ab21ec75897ed80c17d79b15951a719226b9fababf1e40ea74d69079/chardet-5.2.0.tar.gz", hash = "sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7", size = 2069618 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/38/6f/f5fbc992a329ee4e0f288c1fe0e2ad9485ed064cac731ed2fe47dcc38cbf/chardet-5.2.0-py3-none-any.whl", hash = "sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970", size = 199385 }, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/63/09/c1bc53dab74b1816a00d8d030de5bf98f724c52c1635e07681d312f20be8/charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5", size = 104809 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/2b/61/095a0aa1a84d1481998b534177c8566fdc50bb1233ea9a0478cd3cc075bd/charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3", size = 194219 }, + { url = "https://files.pythonhosted.org/packages/cc/94/f7cf5e5134175de79ad2059edf2adce18e0685ebdb9227ff0139975d0e93/charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027", size = 122521 }, + { url = "https://files.pythonhosted.org/packages/46/6a/d5c26c41c49b546860cc1acabdddf48b0b3fb2685f4f5617ac59261b44ae/charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03", size = 120383 }, + { url = "https://files.pythonhosted.org/packages/b8/60/e2f67915a51be59d4539ed189eb0a2b0d292bf79270410746becb32bc2c3/charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d", size = 138223 }, + { url = "https://files.pythonhosted.org/packages/05/8c/eb854996d5fef5e4f33ad56927ad053d04dc820e4a3d39023f35cad72617/charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e", size = 148101 }, + { url = "https://files.pythonhosted.org/packages/f6/93/bb6cbeec3bf9da9b2eba458c15966658d1daa8b982c642f81c93ad9b40e1/charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6", size = 140699 }, + { url = "https://files.pythonhosted.org/packages/da/f1/3702ba2a7470666a62fd81c58a4c40be00670e5006a67f4d626e57f013ae/charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5", size = 142065 }, + { url = "https://files.pythonhosted.org/packages/3f/ba/3f5e7be00b215fa10e13d64b1f6237eb6ebea66676a41b2bcdd09fe74323/charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537", size = 144505 }, + { url = "https://files.pythonhosted.org/packages/33/c3/3b96a435c5109dd5b6adc8a59ba1d678b302a97938f032e3770cc84cd354/charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c", size = 139425 }, + { url = "https://files.pythonhosted.org/packages/43/05/3bf613e719efe68fb3a77f9c536a389f35b95d75424b96b426a47a45ef1d/charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12", size = 145287 }, + { url = "https://files.pythonhosted.org/packages/58/78/a0bc646900994df12e07b4ae5c713f2b3e5998f58b9d3720cce2aa45652f/charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f", size = 149929 }, + { url = "https://files.pythonhosted.org/packages/eb/5c/97d97248af4920bc68687d9c3b3c0f47c910e21a8ff80af4565a576bd2f0/charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269", size = 141605 }, + { url = 
"https://files.pythonhosted.org/packages/a8/31/47d018ef89f95b8aded95c589a77c072c55e94b50a41aa99c0a2008a45a4/charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519", size = 142646 }, + { url = "https://files.pythonhosted.org/packages/ae/d5/4fecf1d58bedb1340a50f165ba1c7ddc0400252d6832ff619c4568b36cc0/charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73", size = 92846 }, + { url = "https://files.pythonhosted.org/packages/a2/a0/4af29e22cb5942488cf45630cbdd7cefd908768e69bdd90280842e4e8529/charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09", size = 100343 }, + { url = "https://files.pythonhosted.org/packages/68/77/02839016f6fbbf808e8b38601df6e0e66c17bbab76dff4613f7511413597/charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db", size = 191647 }, + { url = "https://files.pythonhosted.org/packages/3e/33/21a875a61057165e92227466e54ee076b73af1e21fe1b31f1e292251aa1e/charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96", size = 121434 }, + { url = "https://files.pythonhosted.org/packages/dd/51/68b61b90b24ca35495956b718f35a9756ef7d3dd4b3c1508056fa98d1a1b/charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e", size = 118979 }, + { url = "https://files.pythonhosted.org/packages/e4/a6/7ee57823d46331ddc37dd00749c95b0edec2c79b15fc0d6e6efb532e89ac/charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f", size = 136582 }, + { url = "https://files.pythonhosted.org/packages/74/f1/0d9fe69ac441467b737ba7f48c68241487df2f4522dd7246d9426e7c690e/charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574", size = 146645 }, + { url = "https://files.pythonhosted.org/packages/05/31/e1f51c76db7be1d4aef220d29fbfa5dbb4a99165d9833dcbf166753b6dc0/charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4", size = 139398 }, + { url = "https://files.pythonhosted.org/packages/40/26/f35951c45070edc957ba40a5b1db3cf60a9dbb1b350c2d5bef03e01e61de/charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8", size = 140273 }, + { url = "https://files.pythonhosted.org/packages/07/07/7e554f2bbce3295e191f7e653ff15d55309a9ca40d0362fcdab36f01063c/charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc", size = 142577 }, + { url = "https://files.pythonhosted.org/packages/d8/b5/eb705c313100defa57da79277d9207dc8d8e45931035862fa64b625bfead/charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae", size = 137747 }, + { url = 
"https://files.pythonhosted.org/packages/19/28/573147271fd041d351b438a5665be8223f1dd92f273713cb882ddafe214c/charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887", size = 143375 }, + { url = "https://files.pythonhosted.org/packages/cf/7c/f3b682fa053cc21373c9a839e6beba7705857075686a05c72e0f8c4980ca/charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae", size = 148474 }, + { url = "https://files.pythonhosted.org/packages/1e/49/7ab74d4ac537ece3bc3334ee08645e231f39f7d6df6347b29a74b0537103/charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce", size = 140232 }, + { url = "https://files.pythonhosted.org/packages/2d/dc/9dacba68c9ac0ae781d40e1a0c0058e26302ea0660e574ddf6797a0347f7/charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f", size = 140859 }, + { url = "https://files.pythonhosted.org/packages/6c/c2/4a583f800c0708dd22096298e49f887b49d9746d0e78bfc1d7e29816614c/charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab", size = 92509 }, + { url = "https://files.pythonhosted.org/packages/57/ec/80c8d48ac8b1741d5b963797b7c0c869335619e13d4744ca2f67fc11c6fc/charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77", size = 99870 }, + { url = "https://files.pythonhosted.org/packages/d1/b2/fcedc8255ec42afee97f9e6f0145c734bbe104aac28300214593eb326f1d/charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8", size = 192892 }, + { url = "https://files.pythonhosted.org/packages/2e/7d/2259318c202f3d17f3fe6438149b3b9e706d1070fe3fcbb28049730bb25c/charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b", size = 122213 }, + { url = "https://files.pythonhosted.org/packages/3a/52/9f9d17c3b54dc238de384c4cb5a2ef0e27985b42a0e5cc8e8a31d918d48d/charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6", size = 119404 }, + { url = "https://files.pythonhosted.org/packages/99/b0/9c365f6d79a9f0f3c379ddb40a256a67aa69c59609608fe7feb6235896e1/charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a", size = 137275 }, + { url = "https://files.pythonhosted.org/packages/91/33/749df346e93d7a30cdcb90cbfdd41a06026317bfbfb62cd68307c1a3c543/charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389", size = 147518 }, + { url = "https://files.pythonhosted.org/packages/72/1a/641d5c9f59e6af4c7b53da463d07600a695b9824e20849cb6eea8a627761/charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa", size = 140182 }, + { url = 
"https://files.pythonhosted.org/packages/ee/fb/14d30eb4956408ee3ae09ad34299131fb383c47df355ddb428a7331cfa1e/charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b", size = 141869 }, + { url = "https://files.pythonhosted.org/packages/df/3e/a06b18788ca2eb6695c9b22325b6fde7dde0f1d1838b1792a0076f58fe9d/charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed", size = 144042 }, + { url = "https://files.pythonhosted.org/packages/45/59/3d27019d3b447a88fe7e7d004a1e04be220227760264cc41b405e863891b/charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26", size = 138275 }, + { url = "https://files.pythonhosted.org/packages/7b/ef/5eb105530b4da8ae37d506ccfa25057961b7b63d581def6f99165ea89c7e/charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d", size = 144819 }, + { url = "https://files.pythonhosted.org/packages/a2/51/e5023f937d7f307c948ed3e5c29c4b7a3e42ed2ee0b8cdf8f3a706089bf0/charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068", size = 149415 }, + { url = "https://files.pythonhosted.org/packages/24/9d/2e3ef673dfd5be0154b20363c5cdcc5606f35666544381bee15af3778239/charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143", size = 141212 }, + { url = "https://files.pythonhosted.org/packages/5b/ae/ce2c12fcac59cb3860b2e2d76dc405253a4475436b1861d95fe75bdea520/charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4", size = 142167 }, + { url = "https://files.pythonhosted.org/packages/ed/3a/a448bf035dce5da359daf9ae8a16b8a39623cc395a2ffb1620aa1bce62b0/charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7", size = 93041 }, + { url = "https://files.pythonhosted.org/packages/b6/7c/8debebb4f90174074b827c63242c23851bdf00a532489fba57fef3416e40/charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001", size = 100397 }, + { url = "https://files.pythonhosted.org/packages/28/76/e6222113b83e3622caa4bb41032d0b1bf785250607392e1b778aca0b8a7d/charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc", size = 48543 }, +] + +[[package]] +name = "chroma-hnswlib" +version = "0.7.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c0/59/1224cbae62c7b84c84088cdf6c106b9b2b893783c000d22c442a1672bc75/chroma-hnswlib-0.7.3.tar.gz", hash = "sha256:b6137bedde49fffda6af93b0297fe00429fc61e5a072b1ed9377f909ed95a932", size = 31876 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1a/36/d1069ffa520efcf93f6d81b527e3c7311e12363742fdc786cbdaea3ab02e/chroma_hnswlib-0.7.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:59d6a7c6f863c67aeb23e79a64001d537060b6995c3eca9a06e349ff7b0998ca", size = 219588 }, + { url = 
"https://files.pythonhosted.org/packages/c3/e8/263d331f5ce29367f6f8854cd7fa1f54fce72ab4f92ab957525ef9165a9c/chroma_hnswlib-0.7.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d71a3f4f232f537b6152947006bd32bc1629a8686df22fd97777b70f416c127a", size = 197094 }, + { url = "https://files.pythonhosted.org/packages/a9/72/a9b61ae00d490c26359a8e10f3974c0d38065b894e6a2573ec6a7597f8e3/chroma_hnswlib-0.7.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c92dc1ebe062188e53970ba13f6b07e0ae32e64c9770eb7f7ffa83f149d4210", size = 2315620 }, + { url = "https://files.pythonhosted.org/packages/2f/48/f7609a3cb15a24c5d8ec18911ce10ac94144e9a89584f0a86bf9871b024c/chroma_hnswlib-0.7.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49da700a6656fed8753f68d44b8cc8ae46efc99fc8a22a6d970dc1697f49b403", size = 2350956 }, + { url = "https://files.pythonhosted.org/packages/cc/3d/ca311b8f79744db3f4faad8fd9140af80d34c94829d3ed1726c98cf4a611/chroma_hnswlib-0.7.3-cp310-cp310-win_amd64.whl", hash = "sha256:108bc4c293d819b56476d8f7865803cb03afd6ca128a2a04d678fffc139af029", size = 150598 }, + { url = "https://files.pythonhosted.org/packages/94/3f/844393b0d2ea1072b7704d6eff5c595e05ae8b831b96340cdb76b2fe995c/chroma_hnswlib-0.7.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:11e7ca93fb8192214ac2b9c0943641ac0daf8f9d4591bb7b73be808a83835667", size = 221219 }, + { url = "https://files.pythonhosted.org/packages/11/7a/673ccb9bb2faf9cf655d9040e970c02a96645966e06837fde7d10edf242a/chroma_hnswlib-0.7.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6f552e4d23edc06cdeb553cdc757d2fe190cdeb10d43093d6a3319f8d4bf1c6b", size = 198652 }, + { url = "https://files.pythonhosted.org/packages/ba/f4/c81a40da5473d5d80fc9d0c5bd5b1cb64e530a6ea941c69f195fe81c488c/chroma_hnswlib-0.7.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f96f4d5699e486eb1fb95849fe35ab79ab0901265805be7e60f4eaa83ce263ec", size = 2332260 }, + { url = "https://files.pythonhosted.org/packages/48/0e/068b658a547d6090b969014146321e28dae1411da54b76d081e51a2af22b/chroma_hnswlib-0.7.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:368e57fe9ebae05ee5844840fa588028a023d1182b0cfdb1d13f607c9ea05756", size = 2367211 }, + { url = "https://files.pythonhosted.org/packages/d2/32/a91850c7aa8a34f61838913155103808fe90da6f1ea4302731b59e9ba6f2/chroma_hnswlib-0.7.3-cp311-cp311-win_amd64.whl", hash = "sha256:b7dca27b8896b494456db0fd705b689ac6b73af78e186eb6a42fea2de4f71c6f", size = 151574 }, +] + +[[package]] +name = "chromadb" +version = "0.4.24" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "bcrypt" }, + { name = "build" }, + { name = "chroma-hnswlib" }, + { name = "fastapi" }, + { name = "grpcio" }, + { name = "importlib-resources" }, + { name = "kubernetes" }, + { name = "mmh3" }, + { name = "numpy" }, + { name = "onnxruntime" }, + { name = "opentelemetry-api" }, + { name = "opentelemetry-exporter-otlp-proto-grpc" }, + { name = "opentelemetry-instrumentation-fastapi" }, + { name = "opentelemetry-sdk" }, + { name = "orjson" }, + { name = "overrides" }, + { name = "posthog" }, + { name = "pulsar-client" }, + { name = "pydantic" }, + { name = "pypika" }, + { name = "pyyaml" }, + { name = "requests" }, + { name = "tenacity" }, + { name = "tokenizers" }, + { name = "tqdm" }, + { name = "typer" }, + { name = "typing-extensions" }, + { name = "uvicorn", extra = ["standard"] }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/47/6b/a5465827d8017b658d18ad1e63d2dc31109dec717c6bd068e82485186f4b/chromadb-0.4.24.tar.gz", hash = "sha256:a5c80b4e4ad9b236ed2d4899a5b9e8002b489293f2881cb2cadab5b199ee1c72", size = 13667084 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cc/63/b7d76109331318423f9cfb89bd89c99e19f5d0b47a5105439a629224d297/chromadb-0.4.24-py3-none-any.whl", hash = "sha256:3a08e237a4ad28b5d176685bd22429a03717fe09d35022fb230d516108da01da", size = 525452 }, +] + +[[package]] +name = "click" +version = "8.1.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "platform_system == 'Windows'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/96/d3/f04c7bfcf5c1862a2a5b845c6b2b360488cf47af55dfa79c98f6a6bf98b5/click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de", size = 336121 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/00/2e/d53fa4befbf2cfa713304affc7ca780ce4fc1fd8710527771b58311a3229/click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28", size = 97941 }, +] + +[[package]] +name = "click-didyoumean" +version = "0.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/30/ce/217289b77c590ea1e7c24242d9ddd6e249e52c795ff10fac2c50062c48cb/click_didyoumean-0.3.1.tar.gz", hash = "sha256:4f82fdff0dbe64ef8ab2279bd6aa3f6a99c3b28c05aa09cbfc07c9d7fbb5a463", size = 3089 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1b/5b/974430b5ffdb7a4f1941d13d83c64a0395114503cc357c6b9ae4ce5047ed/click_didyoumean-0.3.1-py3-none-any.whl", hash = "sha256:5c4bb6007cfea5f2fd6583a2fb6701a22a41eb98957e63d0fac41c10e7c3117c", size = 3631 }, +] + +[[package]] +name = "click-plugins" +version = "1.1.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5f/1d/45434f64ed749540af821fd7e42b8e4d23ac04b1eda7c26613288d6cd8a8/click-plugins-1.1.1.tar.gz", hash = "sha256:46ab999744a9d831159c3411bb0c79346d94a444df9a3a3742e9ed63645f264b", size = 8164 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e9/da/824b92d9942f4e472702488857914bdd50f73021efea15b4cad9aca8ecef/click_plugins-1.1.1-py2.py3-none-any.whl", hash = "sha256:5d262006d3222f5057fd81e1623d4443e41dcda5dc815c06b442aa3c02889fc8", size = 7497 }, +] + +[[package]] +name = "click-repl" +version = "0.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "prompt-toolkit" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/cb/a2/57f4ac79838cfae6912f997b4d1a64a858fb0c86d7fcaae6f7b58d267fca/click-repl-0.3.0.tar.gz", hash = "sha256:17849c23dba3d667247dc4defe1757fff98694e90fe37474f3feebb69ced26a9", size = 10449 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/52/40/9d857001228658f0d59e97ebd4c346fe73e138c6de1bce61dc568a57c7f8/click_repl-0.3.0-py3-none-any.whl", hash = "sha256:fb7e06deb8da8de86180a33a9da97ac316751c094c6899382da7feeeeb51b812", size = 10289 }, +] + +[[package]] +name = "clickhouse-connect" +version = "0.7.19" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "lz4" }, + { name = "pytz" }, + { name = "urllib3" }, + { name = "zstandard" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/f4/8e/bf6012f7b45dbb74e19ad5c881a7bbcd1e7dd2b990f12cc434294d917800/clickhouse-connect-0.7.19.tar.gz", hash = "sha256:ce8f21f035781c5ef6ff57dc162e8150779c009b59f14030ba61f8c9c10c06d0", size = 84918 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/67/22/99f2b2e8995bb0fb7b23c62df090264332f19a32edba55c11dc13c28c6a6/clickhouse_connect-0.7.19-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6ac74eb9e8d6331bae0303d0fc6bdc2125aa4c421ef646348b588760b38c29e9", size = 253579 }, + { url = "https://files.pythonhosted.org/packages/35/84/b56a44d648871c4e1c27e9ca5880bf72e9ed087507a933aa31a5be501d0c/clickhouse_connect-0.7.19-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:300f3dea7dd48b2798533ed2486e4b0c3bb03c8d9df9aed3fac44161b92a30f9", size = 245769 }, + { url = "https://files.pythonhosted.org/packages/26/75/3029b2282d983d3113a6b96629cf29dace979d622ea87c3313ddfb568775/clickhouse_connect-0.7.19-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c72629f519105e21600680c791459d729889a290440bbdc61e43cd5eb61d928", size = 957813 }, + { url = "https://files.pythonhosted.org/packages/4d/66/23c768b471280771654c3ecb01aaddde59789b84970961b016553c0b1a2a/clickhouse_connect-0.7.19-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ece0fb202cd9267b3872210e8e0974e4c33c8f91ca9f1c4d92edea997189c72", size = 972916 }, + { url = "https://files.pythonhosted.org/packages/3b/79/328d44d3c7cef72958d8c754902290e2e287be6df225eddb9eb9ea0e17e3/clickhouse_connect-0.7.19-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a6e5adf0359043d4d21c9a668cc1b6323a1159b3e1a77aea6f82ce528b5e4c5b", size = 949279 }, + { url = "https://files.pythonhosted.org/packages/11/e3/d7d4fac683dc864ba91a77835c17372bbd9e2bcb76cdc5750e42a7051f0a/clickhouse_connect-0.7.19-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:63432180179e90f6f3c18861216f902d1693979e3c26a7f9ef9912c92ce00d14", size = 985868 }, + { url = "https://files.pythonhosted.org/packages/c6/dd/cac1b8f916bf62a04c15441a8f528c0f7440ab2d94e0d949c2846f7f767d/clickhouse_connect-0.7.19-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:754b9c58b032835caaa9177b69059dc88307485d2cf6d0d545b3dedb13cb512a", size = 963774 }, + { url = "https://files.pythonhosted.org/packages/39/89/44418f8941898e8abe71cead3161e33b0e9d3066e2a81c6e52e68fdac52e/clickhouse_connect-0.7.19-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:24e2694e89d12bba405a14b84c36318620dc50f90adbc93182418742d8f6d73f", size = 1000850 }, + { url = "https://files.pythonhosted.org/packages/06/44/40daf67c8e0d5db2050045488b89ab0d0478f16a5c4419c78759d2f29f54/clickhouse_connect-0.7.19-cp310-cp310-win32.whl", hash = "sha256:52929826b39b5b0f90f423b7a035930b8894b508768e620a5086248bcbad3707", size = 221622 }, + { url = "https://files.pythonhosted.org/packages/65/3d/3f07babc5c4c3f973dc20584a304abdf085d4c52e762f5fa9f936cc74ce2/clickhouse_connect-0.7.19-cp310-cp310-win_amd64.whl", hash = "sha256:5c301284c87d132963388b6e8e4a690c0776d25acc8657366eccab485e53738f", size = 238900 }, + { url = "https://files.pythonhosted.org/packages/68/6f/a78cad40dc0f1fee19094c40abd7d23ff04bb491732c3a65b3661d426c89/clickhouse_connect-0.7.19-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ee47af8926a7ec3a970e0ebf29a82cbbe3b1b7eae43336a81b3a0ca18091de5f", size = 253530 }, + { url = 
"https://files.pythonhosted.org/packages/40/82/419d110149900ace5eb0787c668d11e1657ac0eabb65c1404f039746f4ed/clickhouse_connect-0.7.19-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ce429233b2d21a8a149c8cd836a2555393cbcf23d61233520db332942ffb8964", size = 245691 }, + { url = "https://files.pythonhosted.org/packages/e3/9c/ad6708ced6cf9418334d2bf19bbba3c223511ed852eb85f79b1e7c20cdbd/clickhouse_connect-0.7.19-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:617c04f5c46eed3344a7861cd96fb05293e70d3b40d21541b1e459e7574efa96", size = 1055273 }, + { url = "https://files.pythonhosted.org/packages/ea/99/88c24542d6218100793cfb13af54d7ad4143d6515b0b3d621ba3b5a2d8af/clickhouse_connect-0.7.19-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f08e33b8cc2dc1873edc5ee4088d4fc3c0dbb69b00e057547bcdc7e9680b43e5", size = 1067030 }, + { url = "https://files.pythonhosted.org/packages/c8/84/19eb776b4e760317c21214c811f04f612cba7eee0f2818a7d6806898a994/clickhouse_connect-0.7.19-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:921886b887f762e5cc3eef57ef784d419a3f66df85fd86fa2e7fbbf464c4c54a", size = 1027207 }, + { url = "https://files.pythonhosted.org/packages/22/81/c2982a33b088b6c9af5d0bdc46413adc5fedceae063b1f8b56570bb28887/clickhouse_connect-0.7.19-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6ad0cf8552a9e985cfa6524b674ae7c8f5ba51df5bd3ecddbd86c82cdbef41a7", size = 1054850 }, + { url = "https://files.pythonhosted.org/packages/7b/a4/4a84ed3e92323d12700011cc8c4039f00a8c888079d65e75a4d4758ba288/clickhouse_connect-0.7.19-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:70f838ef0861cdf0e2e198171a1f3fd2ee05cf58e93495eeb9b17dfafb278186", size = 1022784 }, + { url = "https://files.pythonhosted.org/packages/5e/67/3f5cc6f78c9adbbd6a3183a3f9f3196a116be19e958d7eaa6e307b391fed/clickhouse_connect-0.7.19-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c5f0d207cb0dcc1adb28ced63f872d080924b7562b263a9d54d4693b670eb066", size = 1071084 }, + { url = "https://files.pythonhosted.org/packages/01/8d/a294e1cc752e22bc6ee08aa421ea31ed9559b09d46d35499449140a5c374/clickhouse_connect-0.7.19-cp311-cp311-win32.whl", hash = "sha256:8c96c4c242b98fcf8005e678a26dbd4361748721b6fa158c1fe84ad15c7edbbe", size = 221156 }, + { url = "https://files.pythonhosted.org/packages/68/69/09b3a4e53f5d3d770e9fa70f6f04642cdb37cc76d37279c55fd4e868f845/clickhouse_connect-0.7.19-cp311-cp311-win_amd64.whl", hash = "sha256:bda092bab224875ed7c7683707d63f8a2322df654c4716e6611893a18d83e908", size = 238826 }, + { url = "https://files.pythonhosted.org/packages/af/f8/1d48719728bac33c1a9815e0a7230940e078fd985b09af2371715de78a3c/clickhouse_connect-0.7.19-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:8f170d08166438d29f0dcfc8a91b672c783dc751945559e65eefff55096f9274", size = 256687 }, + { url = "https://files.pythonhosted.org/packages/ed/0d/3cbbbd204be045c4727f9007679ad97d3d1d559b43ba844373a79af54d16/clickhouse_connect-0.7.19-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:26b80cb8f66bde9149a9a2180e2cc4895c1b7d34f9dceba81630a9b9a9ae66b2", size = 247631 }, + { url = "https://files.pythonhosted.org/packages/b6/44/adb55285226d60e9c46331a9980c88dad8c8de12abb895c4e3149a088092/clickhouse_connect-0.7.19-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ba80e3598acf916c4d1b2515671f65d9efee612a783c17c56a5a646f4db59b9", size = 1053767 }, + { url = 
"https://files.pythonhosted.org/packages/6c/f3/a109c26a41153768be57374cb823cac5daf74c9098a5c61081ffabeb4e59/clickhouse_connect-0.7.19-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d38c30bd847af0ce7ff738152478f913854db356af4d5824096394d0eab873d", size = 1072014 }, + { url = "https://files.pythonhosted.org/packages/51/80/9c200e5e392a538f2444c9a6a93e1cf0e36588c7e8720882ac001e23b246/clickhouse_connect-0.7.19-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d41d4b159071c0e4f607563932d4fa5c2a8fc27d3ba1200d0929b361e5191864", size = 1027423 }, + { url = "https://files.pythonhosted.org/packages/33/a3/219fcd1572f1ce198dcef86da8c6c526b04f56e8b7a82e21119677f89379/clickhouse_connect-0.7.19-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3682c2426f5dbda574611210e3c7c951b9557293a49eb60a7438552435873889", size = 1053683 }, + { url = "https://files.pythonhosted.org/packages/5d/df/687d90fbc0fd8ce586c46400f3791deac120e4c080aa8b343c0f676dfb08/clickhouse_connect-0.7.19-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6d492064dca278eb61be3a2d70a5f082e2ebc8ceebd4f33752ae234116192020", size = 1021120 }, + { url = "https://files.pythonhosted.org/packages/c8/3b/39ba71b103275df8ec90d424dbaca2dba82b28398c3d2aeac5a0141b6aae/clickhouse_connect-0.7.19-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:62612da163b934c1ff35df6155a47cf17ac0e2d2f9f0f8f913641e5c02cdf39f", size = 1073652 }, + { url = "https://files.pythonhosted.org/packages/b3/92/06df8790a7d93d5d5f1098604fc7d79682784818030091966a3ce3f766a8/clickhouse_connect-0.7.19-cp312-cp312-win32.whl", hash = "sha256:196e48c977affc045794ec7281b4d711e169def00535ecab5f9fdeb8c177f149", size = 221589 }, + { url = "https://files.pythonhosted.org/packages/42/1f/935d0810b73184a1d306f92458cb0a2e9b0de2377f536da874e063b8e422/clickhouse_connect-0.7.19-cp312-cp312-win_amd64.whl", hash = "sha256:b771ca6a473d65103dcae82810d3a62475c5372fc38d8f211513c72b954fb020", size = 239584 }, + { url = "https://files.pythonhosted.org/packages/f0/07/0753e145f878a22a37be921bde763a1f831d1d1b18a1be5c60b61df7f827/clickhouse_connect-0.7.19-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6f31898e0281f820e35710b5c4ad1d40a6c01ffae5278afaef4a16877ac8cbfb", size = 223426 }, + { url = "https://files.pythonhosted.org/packages/e7/0a/adc9e05e6d38d9f755ac2fbfab8e1e2942bd050e8727238c0734c7e84ad3/clickhouse_connect-0.7.19-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:51c911b0b8281ab4a909320f41dd9c0662796bec157c8f2704de702c552104db", size = 246972 }, + { url = "https://files.pythonhosted.org/packages/a7/01/89dab7722809f2a4fbf77e4f3ad610bc60608abc2a4680167bf8a55d95cb/clickhouse_connect-0.7.19-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1088da11789c519f9bb8927a14b16892e3c65e2893abe2680eae68bf6c63835", size = 254362 }, + { url = "https://files.pythonhosted.org/packages/85/a3/a3ce0e66164fb6a25097e77a9140cac4bb798dd2053c397f142ba53c3bc3/clickhouse_connect-0.7.19-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:03953942cc073078b40619a735ebeaed9bf98efc71c6f43ce92a38540b1308ce", size = 260706 }, + { url = "https://files.pythonhosted.org/packages/ee/f5/817b4920915d6d24600d2b632098c1e7602b767ca9a4f14ae35047199966/clickhouse_connect-0.7.19-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:4ac0602fa305d097a0cd40cebbe10a808f6478c9f303d57a48a3a0ad09659544", size = 226072 }, +] + 
+[[package]] +name = "cohere" +version = "5.9.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "boto3" }, + { name = "fastavro" }, + { name = "httpx" }, + { name = "httpx-sse" }, + { name = "parameterized" }, + { name = "pydantic" }, + { name = "pydantic-core" }, + { name = "requests" }, + { name = "tokenizers" }, + { name = "types-requests" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/82/a8/f152cf18c6b2594d9bb73ce7dae381571e98e62fc935f1a90798b3e3775e/cohere-5.9.4.tar.gz", hash = "sha256:ed0fa256c51423175c208650dffcb534ae112dc3ab7703de352e2adaf99dd50b", size = 116061 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/08/d1/b2997a91a7690b90003619b24c95eb080b3c776a00eac1585a4b6396257c/cohere-5.9.4-py3-none-any.whl", hash = "sha256:d1b31d8ba32e338b3aa91737aa98dc74de8778ed8e397ab799739b5f060f44e7", size = 233061 }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 }, +] + +[[package]] +name = "coloredlogs" +version = "15.0.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "humanfriendly" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/cc/c7/eed8f27100517e8c0e6b923d5f0845d0cb99763da6fdee00478f91db7325/coloredlogs-15.0.1.tar.gz", hash = "sha256:7c991aa71a4577af2f82600d8f8f3a89f936baeaf9b50a9c197da014e5bf16b0", size = 278520 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a7/06/3d6badcf13db419e25b07041d9c7b4a2c331d3f4e7134445ec5df57714cd/coloredlogs-15.0.1-py2.py3-none-any.whl", hash = "sha256:612ee75c546f53e92e70049c9dbfcc18c935a2b9a53b66085ce9ef6a6e5c0934", size = 46018 }, +] + +[[package]] +name = "comm" +version = "0.2.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "traitlets" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e9/a8/fb783cb0abe2b5fded9f55e5703015cdf1c9c85b3669087c538dd15a6a86/comm-0.2.2.tar.gz", hash = "sha256:3fd7a84065306e07bea1773df6eb8282de51ba82f77c72f9c85716ab11fe980e", size = 6210 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e6/75/49e5bfe642f71f272236b5b2d2691cf915a7283cc0ceda56357b61daa538/comm-0.2.2-py3-none-any.whl", hash = "sha256:e6fb86cb70ff661ee8c9c14e7d36d6de3b4066f1441be4063df9c5009f0a64d3", size = 7180 }, +] + +[[package]] +name = "coverage" +version = "7.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f7/08/7e37f82e4d1aead42a7443ff06a1e406aabf7302c4f00a546e4b320b994c/coverage-7.6.1.tar.gz", hash = "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d", size = 798791 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/61/eb7ce5ed62bacf21beca4937a90fe32545c91a3c8a42a30c6616d48fc70d/coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16", size = 206690 }, + { url = 
"https://files.pythonhosted.org/packages/7d/73/041928e434442bd3afde5584bdc3f932fb4562b1597629f537387cec6f3d/coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36", size = 207127 }, + { url = "https://files.pythonhosted.org/packages/c7/c8/6ca52b5147828e45ad0242388477fdb90df2c6cbb9a441701a12b3c71bc8/coverage-7.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02", size = 235654 }, + { url = "https://files.pythonhosted.org/packages/d5/da/9ac2b62557f4340270942011d6efeab9833648380109e897d48ab7c1035d/coverage-7.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc", size = 233598 }, + { url = "https://files.pythonhosted.org/packages/53/23/9e2c114d0178abc42b6d8d5281f651a8e6519abfa0ef460a00a91f80879d/coverage-7.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23", size = 234732 }, + { url = "https://files.pythonhosted.org/packages/0f/7e/a0230756fb133343a52716e8b855045f13342b70e48e8ad41d8a0d60ab98/coverage-7.6.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34", size = 233816 }, + { url = "https://files.pythonhosted.org/packages/28/7c/3753c8b40d232b1e5eeaed798c875537cf3cb183fb5041017c1fdb7ec14e/coverage-7.6.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c", size = 232325 }, + { url = "https://files.pythonhosted.org/packages/57/e3/818a2b2af5b7573b4b82cf3e9f137ab158c90ea750a8f053716a32f20f06/coverage-7.6.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959", size = 233418 }, + { url = "https://files.pythonhosted.org/packages/c8/fb/4532b0b0cefb3f06d201648715e03b0feb822907edab3935112b61b885e2/coverage-7.6.1-cp310-cp310-win32.whl", hash = "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232", size = 209343 }, + { url = "https://files.pythonhosted.org/packages/5a/25/af337cc7421eca1c187cc9c315f0a755d48e755d2853715bfe8c418a45fa/coverage-7.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0", size = 210136 }, + { url = "https://files.pythonhosted.org/packages/ad/5f/67af7d60d7e8ce61a4e2ddcd1bd5fb787180c8d0ae0fbd073f903b3dd95d/coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93", size = 206796 }, + { url = "https://files.pythonhosted.org/packages/e1/0e/e52332389e057daa2e03be1fbfef25bb4d626b37d12ed42ae6281d0a274c/coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3", size = 207244 }, + { url = "https://files.pythonhosted.org/packages/aa/cd/766b45fb6e090f20f8927d9c7cb34237d41c73a939358bc881883fd3a40d/coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff", size = 239279 }, + { url = 
"https://files.pythonhosted.org/packages/70/6c/a9ccd6fe50ddaf13442a1e2dd519ca805cbe0f1fcd377fba6d8339b98ccb/coverage-7.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d", size = 236859 }, + { url = "https://files.pythonhosted.org/packages/14/6f/8351b465febb4dbc1ca9929505202db909c5a635c6fdf33e089bbc3d7d85/coverage-7.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6", size = 238549 }, + { url = "https://files.pythonhosted.org/packages/68/3c/289b81fa18ad72138e6d78c4c11a82b5378a312c0e467e2f6b495c260907/coverage-7.6.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56", size = 237477 }, + { url = "https://files.pythonhosted.org/packages/ed/1c/aa1efa6459d822bd72c4abc0b9418cf268de3f60eeccd65dc4988553bd8d/coverage-7.6.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234", size = 236134 }, + { url = "https://files.pythonhosted.org/packages/fb/c8/521c698f2d2796565fe9c789c2ee1ccdae610b3aa20b9b2ef980cc253640/coverage-7.6.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133", size = 236910 }, + { url = "https://files.pythonhosted.org/packages/7d/30/033e663399ff17dca90d793ee8a2ea2890e7fdf085da58d82468b4220bf7/coverage-7.6.1-cp311-cp311-win32.whl", hash = "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c", size = 209348 }, + { url = "https://files.pythonhosted.org/packages/20/05/0d1ccbb52727ccdadaa3ff37e4d2dc1cd4d47f0c3df9eb58d9ec8508ca88/coverage-7.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6", size = 210230 }, + { url = "https://files.pythonhosted.org/packages/7e/d4/300fc921dff243cd518c7db3a4c614b7e4b2431b0d1145c1e274fd99bd70/coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778", size = 206983 }, + { url = "https://files.pythonhosted.org/packages/e1/ab/6bf00de5327ecb8db205f9ae596885417a31535eeda6e7b99463108782e1/coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391", size = 207221 }, + { url = "https://files.pythonhosted.org/packages/92/8f/2ead05e735022d1a7f3a0a683ac7f737de14850395a826192f0288703472/coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8", size = 240342 }, + { url = "https://files.pythonhosted.org/packages/0f/ef/94043e478201ffa85b8ae2d2c79b4081e5a1b73438aafafccf3e9bafb6b5/coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d", size = 237371 }, + { url = "https://files.pythonhosted.org/packages/1f/0f/c890339dd605f3ebc269543247bdd43b703cce6825b5ed42ff5f2d6122c7/coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca", size = 239455 }, + { url = 
"https://files.pythonhosted.org/packages/d1/04/7fd7b39ec7372a04efb0f70c70e35857a99b6a9188b5205efb4c77d6a57a/coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163", size = 238924 }, + { url = "https://files.pythonhosted.org/packages/ed/bf/73ce346a9d32a09cf369f14d2a06651329c984e106f5992c89579d25b27e/coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a", size = 237252 }, + { url = "https://files.pythonhosted.org/packages/86/74/1dc7a20969725e917b1e07fe71a955eb34bc606b938316bcc799f228374b/coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d", size = 238897 }, + { url = "https://files.pythonhosted.org/packages/b6/e9/d9cc3deceb361c491b81005c668578b0dfa51eed02cd081620e9a62f24ec/coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5", size = 209606 }, + { url = "https://files.pythonhosted.org/packages/47/c8/5a2e41922ea6740f77d555c4d47544acd7dc3f251fe14199c09c0f5958d3/coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb", size = 210373 }, + { url = "https://files.pythonhosted.org/packages/8c/f9/9aa4dfb751cb01c949c990d136a0f92027fbcc5781c6e921df1cb1563f20/coverage-7.6.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106", size = 207007 }, + { url = "https://files.pythonhosted.org/packages/b9/67/e1413d5a8591622a46dd04ff80873b04c849268831ed5c304c16433e7e30/coverage-7.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9", size = 207269 }, + { url = "https://files.pythonhosted.org/packages/14/5b/9dec847b305e44a5634d0fb8498d135ab1d88330482b74065fcec0622224/coverage-7.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c", size = 239886 }, + { url = "https://files.pythonhosted.org/packages/7b/b7/35760a67c168e29f454928f51f970342d23cf75a2bb0323e0f07334c85f3/coverage-7.6.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a", size = 237037 }, + { url = "https://files.pythonhosted.org/packages/f7/95/d2fd31f1d638df806cae59d7daea5abf2b15b5234016a5ebb502c2f3f7ee/coverage-7.6.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060", size = 239038 }, + { url = "https://files.pythonhosted.org/packages/6e/bd/110689ff5752b67924efd5e2aedf5190cbbe245fc81b8dec1abaffba619d/coverage-7.6.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862", size = 238690 }, + { url = "https://files.pythonhosted.org/packages/d3/a8/08d7b38e6ff8df52331c83130d0ab92d9c9a8b5462f9e99c9f051a4ae206/coverage-7.6.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388", size = 236765 }, + { url = "https://files.pythonhosted.org/packages/d6/6a/9cf96839d3147d55ae713eb2d877f4d777e7dc5ba2bce227167d0118dfe8/coverage-7.6.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155", size = 238611 }, + { url = "https://files.pythonhosted.org/packages/74/e4/7ff20d6a0b59eeaab40b3140a71e38cf52547ba21dbcf1d79c5a32bba61b/coverage-7.6.1-cp313-cp313-win32.whl", hash = "sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a", size = 209671 }, + { url = "https://files.pythonhosted.org/packages/35/59/1812f08a85b57c9fdb6d0b383d779e47b6f643bc278ed682859512517e83/coverage-7.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129", size = 210368 }, + { url = "https://files.pythonhosted.org/packages/9c/15/08913be1c59d7562a3e39fce20661a98c0a3f59d5754312899acc6cb8a2d/coverage-7.6.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e", size = 207758 }, + { url = "https://files.pythonhosted.org/packages/c4/ae/b5d58dff26cade02ada6ca612a76447acd69dccdbb3a478e9e088eb3d4b9/coverage-7.6.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962", size = 208035 }, + { url = "https://files.pythonhosted.org/packages/b8/d7/62095e355ec0613b08dfb19206ce3033a0eedb6f4a67af5ed267a8800642/coverage-7.6.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb", size = 250839 }, + { url = "https://files.pythonhosted.org/packages/7c/1e/c2967cb7991b112ba3766df0d9c21de46b476d103e32bb401b1b2adf3380/coverage-7.6.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704", size = 246569 }, + { url = "https://files.pythonhosted.org/packages/8b/61/a7a6a55dd266007ed3b1df7a3386a0d760d014542d72f7c2c6938483b7bd/coverage-7.6.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b", size = 248927 }, + { url = "https://files.pythonhosted.org/packages/c8/fa/13a6f56d72b429f56ef612eb3bc5ce1b75b7ee12864b3bd12526ab794847/coverage-7.6.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f", size = 248401 }, + { url = "https://files.pythonhosted.org/packages/75/06/0429c652aa0fb761fc60e8c6b291338c9173c6aa0f4e40e1902345b42830/coverage-7.6.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223", size = 246301 }, + { url = "https://files.pythonhosted.org/packages/52/76/1766bb8b803a88f93c3a2d07e30ffa359467810e5cbc68e375ebe6906efb/coverage-7.6.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3", size = 247598 }, + { url = "https://files.pythonhosted.org/packages/66/8b/f54f8db2ae17188be9566e8166ac6df105c1c611e25da755738025708d54/coverage-7.6.1-cp313-cp313t-win32.whl", hash = "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f", size = 210307 }, + { url = "https://files.pythonhosted.org/packages/9f/b0/e0dca6da9170aefc07515cce067b97178cefafb512d00a87a1c717d2efd5/coverage-7.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657", size = 211453 }, + { url = 
"https://files.pythonhosted.org/packages/a5/2b/0354ed096bca64dc8e32a7cbcae28b34cb5ad0b1fe2125d6d99583313ac0/coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df", size = 198926 }, +] + +[package.optional-dependencies] +toml = [ + { name = "tomli", marker = "python_full_version <= '3.11'" }, +] + +[[package]] +name = "crewai" +version = "0.36.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "appdirs" }, + { name = "click" }, + { name = "embedchain" }, + { name = "instructor" }, + { name = "jsonref" }, + { name = "langchain" }, + { name = "openai" }, + { name = "opentelemetry-api" }, + { name = "opentelemetry-exporter-otlp-proto-http" }, + { name = "opentelemetry-sdk" }, + { name = "pydantic" }, + { name = "python-dotenv" }, + { name = "regex" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/7c/6a/dddf6cda7d5a27f77eeedde07ec2621df1e1e688b6a3a4a470b0437994c0/crewai-0.36.1.tar.gz", hash = "sha256:ea50ec5d3ef2df85e1b520efd9331bebb49ed7143e6cd1feec645da49217d2b0", size = 62205 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/77/97/1b7d184ed46a9968acbfd558ae14e65ac26306df16bcb6580cab5d6ba794/crewai-0.36.1-py3-none-any.whl", hash = "sha256:dbaa50d102542ea0c790bd62511b35234b2f5fa8d2333a6598beb84f407f0e00", size = 78077 }, +] + +[[package]] +name = "cryptography" +version = "43.0.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/de/ba/0664727028b37e249e73879348cc46d45c5c1a2a2e81e8166462953c5755/cryptography-43.0.1.tar.gz", hash = "sha256:203e92a75716d8cfb491dc47c79e17d0d9207ccffcbcb35f598fbe463ae3444d", size = 686927 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/58/28/b92c98a04ba762f8cdeb54eba5c4c84e63cac037a7c5e70117d337b15ad6/cryptography-43.0.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:8385d98f6a3bf8bb2d65a73e17ed87a3ba84f6991c155691c51112075f9ffc5d", size = 6223222 }, + { url = "https://files.pythonhosted.org/packages/33/13/1193774705783ba364121aa2a60132fa31a668b8ababd5edfa1662354ccd/cryptography-43.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27e613d7077ac613e399270253259d9d53872aaf657471473ebfc9a52935c062", size = 3794751 }, + { url = "https://files.pythonhosted.org/packages/5e/4b/39bb3c4c8cfb3e94e736b8d8859ce5c81536e91a1033b1d26770c4249000/cryptography-43.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68aaecc4178e90719e95298515979814bda0cbada1256a4485414860bd7ab962", size = 3981827 }, + { url = "https://files.pythonhosted.org/packages/ce/dc/1471d4d56608e1013237af334b8a4c35d53895694fbb73882d1c4fd3f55e/cryptography-43.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:de41fd81a41e53267cb020bb3a7212861da53a7d39f863585d13ea11049cf277", size = 3780034 }, + { url = "https://files.pythonhosted.org/packages/ad/43/7a9920135b0d5437cc2f8f529fa757431eb6a7736ddfadfdee1cc5890800/cryptography-43.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f98bf604c82c416bc829e490c700ca1553eafdf2912a91e23a79d97d9801372a", size = 3993407 }, + { url = "https://files.pythonhosted.org/packages/cc/42/9ab8467af6c0b76f3d9b8f01d1cf25b9c9f3f2151f4acfab888d21c55a72/cryptography-43.0.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:61ec41068b7b74268fa86e3e9e12b9f0c21fcf65434571dbb13d954bceb08042", size = 3886457 }, + { url = 
"https://files.pythonhosted.org/packages/a4/65/430509e31700286ec02868a2457d2111d03ccefc20349d24e58d171ae0a7/cryptography-43.0.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:014f58110f53237ace6a408b5beb6c427b64e084eb451ef25a28308270086494", size = 4081499 }, + { url = "https://files.pythonhosted.org/packages/bb/18/a04b6467e6e09df8c73b91dcee8878f4a438a43a3603dc3cd6f8003b92d8/cryptography-43.0.1-cp37-abi3-win32.whl", hash = "sha256:2bd51274dcd59f09dd952afb696bf9c61a7a49dfc764c04dd33ef7a6b502a1e2", size = 2616504 }, + { url = "https://files.pythonhosted.org/packages/cc/73/0eacbdc437202edcbdc07f3576ed8fb8b0ab79d27bf2c5d822d758a72faa/cryptography-43.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:666ae11966643886c2987b3b721899d250855718d6d9ce41b521252a17985f4d", size = 3067456 }, + { url = "https://files.pythonhosted.org/packages/8a/b6/bc54b371f02cffd35ff8dc6baba88304d7cf8e83632566b4b42e00383e03/cryptography-43.0.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:ac119bb76b9faa00f48128b7f5679e1d8d437365c5d26f1c2c3f0da4ce1b553d", size = 6225263 }, + { url = "https://files.pythonhosted.org/packages/00/0e/8217e348a1fa417ec4c78cd3cdf24154f5e76fd7597343a35bd403650dfd/cryptography-43.0.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bbcce1a551e262dfbafb6e6252f1ae36a248e615ca44ba302df077a846a8806", size = 3794368 }, + { url = "https://files.pythonhosted.org/packages/3d/ed/38b6be7254d8f7251fde8054af597ee8afa14f911da67a9410a45f602fc3/cryptography-43.0.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58d4e9129985185a06d849aa6df265bdd5a74ca6e1b736a77959b498e0505b85", size = 3981750 }, + { url = "https://files.pythonhosted.org/packages/64/f3/b7946c3887cf7436f002f4cbb1e6aec77b8d299b86be48eeadfefb937c4b/cryptography-43.0.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d03a475165f3134f773d1388aeb19c2d25ba88b6a9733c5c590b9ff7bbfa2e0c", size = 3778925 }, + { url = "https://files.pythonhosted.org/packages/ac/7e/ebda4dd4ae098a0990753efbb4b50954f1d03003846b943ea85070782da7/cryptography-43.0.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:511f4273808ab590912a93ddb4e3914dfd8a388fed883361b02dea3791f292e1", size = 3993152 }, + { url = "https://files.pythonhosted.org/packages/43/f6/feebbd78a3e341e3913846a3bb2c29d0b09b1b3af1573c6baabc2533e147/cryptography-43.0.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:80eda8b3e173f0f247f711eef62be51b599b5d425c429b5d4ca6a05e9e856baa", size = 3886392 }, + { url = "https://files.pythonhosted.org/packages/bd/4c/ab0b9407d5247576290b4fd8abd06b7f51bd414f04eef0f2800675512d61/cryptography-43.0.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:38926c50cff6f533f8a2dae3d7f19541432610d114a70808f0926d5aaa7121e4", size = 4082606 }, + { url = "https://files.pythonhosted.org/packages/05/36/e532a671998d6fcfdb9122da16434347a58a6bae9465e527e450e0bc60a5/cryptography-43.0.1-cp39-abi3-win32.whl", hash = "sha256:a575913fb06e05e6b4b814d7f7468c2c660e8bb16d8d5a1faf9b33ccc569dd47", size = 2617948 }, + { url = "https://files.pythonhosted.org/packages/b3/c6/c09cee6968add5ff868525c3815e5dccc0e3c6e89eec58dc9135d3c40e88/cryptography-43.0.1-cp39-abi3-win_amd64.whl", hash = "sha256:d75601ad10b059ec832e78823b348bfa1a59f6b8d545db3a24fd44362a1564cb", size = 3070445 }, + { url = "https://files.pythonhosted.org/packages/18/23/4175dcd935e1649865e1af7bd0b827cc9d9769a586dcc84f7cbe96839086/cryptography-43.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ea25acb556320250756e53f9e20a4177515f012c9eaea17eb7587a8c4d8ae034", 
size = 3152694 }, + { url = "https://files.pythonhosted.org/packages/ea/45/967da50269954b993d4484bf85026c7377bd551651ebdabba94905972556/cryptography-43.0.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c1332724be35d23a854994ff0b66530119500b6053d0bd3363265f7e5e77288d", size = 3713077 }, + { url = "https://files.pythonhosted.org/packages/df/e6/ccd29a1f9a6b71294e1e9f530c4d779d5dd37c8bb736c05d5fb6d98a971b/cryptography-43.0.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fba1007b3ef89946dbbb515aeeb41e30203b004f0b4b00e5e16078b518563289", size = 3915597 }, + { url = "https://files.pythonhosted.org/packages/a2/80/fb7d668f1be5e4443b7ac191f68390be24f7c2ebd36011741f62c7645eb2/cryptography-43.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5b43d1ea6b378b54a1dc99dd8a2b5be47658fe9a7ce0a58ff0b55f4b43ef2b84", size = 2989208 }, +] + +[[package]] +name = "ctransformers" +version = "0.2.27" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "huggingface-hub" }, + { name = "py-cpuinfo" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/40/5e/6ed7eaf8f54b5b078e2a609e90369c6999e67f915b9c1927c0d686c494f9/ctransformers-0.2.27.tar.gz", hash = "sha256:25653d4be8a5ed4e2d3756544c1e9881bf95404be5371c3ed506a256c28663d5", size = 376065 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/14/50/0b608e2abee4fc695b4e7ff5f569f5d32faf84a49e322034716fa157d1cf/ctransformers-0.2.27-py3-none-any.whl", hash = "sha256:6a3ba47556471850d95fdbc59299a82ab91c9dc8b40201c5e7e82d71360772d9", size = 9853506 }, +] + +[[package]] +name = "dataclasses-json" +version = "0.6.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "marshmallow" }, + { name = "typing-inspect" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/64/a4/f71d9cf3a5ac257c993b5ca3f93df5f7fb395c725e7f1e6479d2514173c3/dataclasses_json-0.6.7.tar.gz", hash = "sha256:b6b3e528266ea45b9535223bc53ca645f5208833c29229e847b3f26a1cc55fc0", size = 32227 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c3/be/d0d44e092656fe7a06b55e6103cbce807cdbdee17884a5367c68c9860853/dataclasses_json-0.6.7-py3-none-any.whl", hash = "sha256:0dbf33f26c8d5305befd61b39d2b3414e8a407bedc2834dea9b8d642666fb40a", size = 28686 }, +] + +[[package]] +name = "debugpy" +version = "1.8.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ea/f9/61c325a10ded8dc3ddc3e7cd2ed58c0b15b2ef4bf8b4bf2930ee98ed59ee/debugpy-1.8.5.zip", hash = "sha256:b2112cfeb34b4507399d298fe7023a16656fc553ed5246536060ca7bd0e668d0", size = 4612118 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f1/36/0b423f94097cc86555f9a2c8717511863b2a680c9b44b5419d8ac1ff7bf2/debugpy-1.8.5-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:7e4d594367d6407a120b76bdaa03886e9eb652c05ba7f87e37418426ad2079f7", size = 1711184 }, + { url = "https://files.pythonhosted.org/packages/57/0c/c2ec581541923a4d36cee4fd2419c1211c986849fc61097f87aa81fc6ad3/debugpy-1.8.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4413b7a3ede757dc33a273a17d685ea2b0c09dbd312cc03f5534a0fd4d40750a", size = 2997629 }, + { url = "https://files.pythonhosted.org/packages/a8/46/3072c2cd3b20f435968275d316f6aea7ddbb760386324e6578278bc2eb99/debugpy-1.8.5-cp310-cp310-win32.whl", hash = "sha256:dd3811bd63632bb25eda6bd73bea8e0521794cda02be41fa3160eb26fc29e7ed", size = 4764678 }, + { url = 
"https://files.pythonhosted.org/packages/38/25/e738d6f782beba924c0e10dfde2061152f1ea3608dff0e5a5bfb30c311e9/debugpy-1.8.5-cp310-cp310-win_amd64.whl", hash = "sha256:b78c1250441ce893cb5035dd6f5fc12db968cc07f91cc06996b2087f7cefdd8e", size = 4788002 }, + { url = "https://files.pythonhosted.org/packages/ad/72/fd138a10dda16775607316d60dd440fcd23e7560e9276da53c597b5917e9/debugpy-1.8.5-cp311-cp311-macosx_12_0_universal2.whl", hash = "sha256:606bccba19f7188b6ea9579c8a4f5a5364ecd0bf5a0659c8a5d0e10dcee3032a", size = 1786504 }, + { url = "https://files.pythonhosted.org/packages/e2/0e/d0e6af2d7bbf5ace847e4d3bd41f8f9d4a0764fcd8058f07a1c51618cbf2/debugpy-1.8.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db9fb642938a7a609a6c865c32ecd0d795d56c1aaa7a7a5722d77855d5e77f2b", size = 2642077 }, + { url = "https://files.pythonhosted.org/packages/f6/55/2a1dc192894ba9b368cdcce15315761a00f2d4cd7de4402179648840e480/debugpy-1.8.5-cp311-cp311-win32.whl", hash = "sha256:4fbb3b39ae1aa3e5ad578f37a48a7a303dad9a3d018d369bc9ec629c1cfa7408", size = 4702081 }, + { url = "https://files.pythonhosted.org/packages/7f/7f/942b23d64f4896e9f8776cf306dfd00feadc950a38d56398610a079b28b1/debugpy-1.8.5-cp311-cp311-win_amd64.whl", hash = "sha256:345d6a0206e81eb68b1493ce2fbffd57c3088e2ce4b46592077a943d2b968ca3", size = 4715571 }, + { url = "https://files.pythonhosted.org/packages/9a/82/7d9e1f75fb23c876ab379008c7cf484a1cfa5ed47ccaac8ba37c75e6814e/debugpy-1.8.5-cp312-cp312-macosx_12_0_universal2.whl", hash = "sha256:5b5c770977c8ec6c40c60d6f58cacc7f7fe5a45960363d6974ddb9b62dbee156", size = 1436398 }, + { url = "https://files.pythonhosted.org/packages/fd/b6/ee71d5e73712daf8307a9e85f5e39301abc8b66d13acd04dfff1702e672e/debugpy-1.8.5-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0a65b00b7cdd2ee0c2cf4c7335fef31e15f1b7056c7fdbce9e90193e1a8c8cb", size = 1437465 }, + { url = "https://files.pythonhosted.org/packages/6c/d8/8e32bf1f2e0142f7e8a2c354338b493e87f2c44e77e233b3a140fb5efa03/debugpy-1.8.5-cp312-cp312-win32.whl", hash = "sha256:c9f7c15ea1da18d2fcc2709e9f3d6de98b69a5b0fff1807fb80bc55f906691f7", size = 4581313 }, + { url = "https://files.pythonhosted.org/packages/f7/be/2fbaffecb063de228b2b3b6a1750b0b745e5dc645eddd52be8b329933c0b/debugpy-1.8.5-cp312-cp312-win_amd64.whl", hash = "sha256:28ced650c974aaf179231668a293ecd5c63c0a671ae6d56b8795ecc5d2f48d3c", size = 4581209 }, + { url = "https://files.pythonhosted.org/packages/02/49/b595c34d7bc690e8d225a6641618a5c111c7e13db5d9e2b756c15ce8f8c6/debugpy-1.8.5-py2.py3-none-any.whl", hash = "sha256:55919dce65b471eff25901acf82d328bbd5b833526b6c1364bd5133754777a44", size = 4824118 }, +] + +[[package]] +name = "decorator" +version = "5.1.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/66/0c/8d907af351aa16b42caae42f9d6aa37b900c67308052d10fdce809f8d952/decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330", size = 35016 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d5/50/83c593b07763e1161326b3b8c6686f0f4b0f24d5526546bee538c89837d6/decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186", size = 9073 }, +] + +[[package]] +name = "deprecated" +version = "1.2.14" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "wrapt" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/92/14/1e41f504a246fc224d2ac264c227975427a85caf37c3979979edb9b1b232/Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3", size = 2974416 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/8d/778b7d51b981a96554f29136cd59ca7880bf58094338085bcf2a979a0e6a/Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c", size = 9561 }, +] + +[[package]] +name = "devtools" +version = "0.12.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "asttokens" }, + { name = "executing" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/84/75/b78198620640d394bc435c17bb49db18419afdd6cfa3ed8bcfe14034ec80/devtools-0.12.2.tar.gz", hash = "sha256:efceab184cb35e3a11fa8e602cc4fadacaa2e859e920fc6f87bf130b69885507", size = 75005 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/ae/afb1487556e2dc827a17097aac8158a25b433a345386f0e249f6d2694ccb/devtools-0.12.2-py3-none-any.whl", hash = "sha256:c366e3de1df4cdd635f1ad8cbcd3af01a384d7abda71900e68d43b04eb6aaca7", size = 19411 }, +] + +[[package]] +name = "dictdiffer" +version = "0.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/61/7b/35cbccb7effc5d7e40f4c55e2b79399e1853041997fcda15c9ff160abba0/dictdiffer-0.9.0.tar.gz", hash = "sha256:17bacf5fbfe613ccf1b6d512bd766e6b21fb798822a133aa86098b8ac9997578", size = 31513 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/47/ef/4cb333825d10317a36a1154341ba37e6e9c087bac99c1990ef07ffdb376f/dictdiffer-0.9.0-py2.py3-none-any.whl", hash = "sha256:442bfc693cfcadaf46674575d2eba1c53b42f5e404218ca2c2ff549f2df56595", size = 16754 }, +] + +[[package]] +name = "dill" +version = "0.3.8" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/17/4d/ac7ffa80c69ea1df30a8aa11b3578692a5118e7cd1aa157e3ef73b092d15/dill-0.3.8.tar.gz", hash = "sha256:3ebe3c479ad625c4553aca177444d89b486b1d84982eeacded644afc0cf797ca", size = 184847 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c9/7a/cef76fd8438a42f96db64ddaa85280485a9c395e7df3db8158cfec1eee34/dill-0.3.8-py3-none-any.whl", hash = "sha256:c36ca9ffb54365bdd2f8eb3eff7d2a21237f8452b57ace88b1ac615b7e815bd7", size = 116252 }, +] + +[[package]] +name = "diskcache" +version = "5.6.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3f/21/1c1ffc1a039ddcc459db43cc108658f32c57d271d7289a2794e401d0fdb6/diskcache-5.6.3.tar.gz", hash = "sha256:2c3a3fa2743d8535d832ec61c2054a1641f41775aa7c556758a109941e33e4fc", size = 67916 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3f/27/4570e78fc0bf5ea0ca45eb1de3818a23787af9b390c0b0a0033a1b8236f9/diskcache-5.6.3-py3-none-any.whl", hash = "sha256:5e31b2d5fbad117cc363ebaf6b689474db18a1f6438bc82358b024abd4c2ca19", size = 45550 }, +] + +[[package]] +name = "distlib" +version = "0.3.8" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c4/91/e2df406fb4efacdf46871c25cde65d3c6ee5e173b7e5a4547a47bae91920/distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64", size = 609931 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/8e/41/9307e4f5f9976bc8b7fea0b66367734e8faf3ec84bc0d412d8cfabbb66cd/distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784", size = 468850 }, +] + +[[package]] +name = "distro" +version = "1.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fc/f8/98eea607f65de6527f8a2e8885fc8015d3e6f5775df186e443e0964a11c3/distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed", size = 60722 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/12/b3/231ffd4ab1fc9d679809f356cebee130ac7daa00d6d6f3206dd4fd137e9e/distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2", size = 20277 }, +] + +[[package]] +name = "dnspython" +version = "2.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/37/7d/c871f55054e403fdfd6b8f65fd6d1c4e147ed100d3e9f9ba1fe695403939/dnspython-2.6.1.tar.gz", hash = "sha256:e8f0f9c23a7b7cb99ded64e6c3a6f3e701d78f50c55e002b839dea7225cff7cc", size = 332727 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/87/a1/8c5287991ddb8d3e4662f71356d9656d91ab3a36618c3dd11b280df0d255/dnspython-2.6.1-py3-none-any.whl", hash = "sha256:5ef3b9680161f6fa89daf8ad451b5f1a33b18ae8a1c6778cdf4b43f08c0a6e50", size = 307696 }, +] + +[[package]] +name = "docstring-parser" +version = "0.16" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/08/12/9c22a58c0b1e29271051222d8906257616da84135af9ed167c9e28f85cb3/docstring_parser-0.16.tar.gz", hash = "sha256:538beabd0af1e2db0146b6bd3caa526c35a34d61af9fd2887f3a8a27a739aa6e", size = 26565 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d5/7c/e9fcff7623954d86bdc17782036cbf715ecab1bec4847c008557affe1ca8/docstring_parser-0.16-py3-none-any.whl", hash = "sha256:bf0a1387354d3691d102edef7ec124f219ef639982d096e26e3b60aeffa90637", size = 36533 }, +] + +[[package]] +name = "duckdb" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/65/b2/e16f0f0250d897396df2839390414fdb9f33d44651366889558aca19b682/duckdb-1.1.0.tar.gz", hash = "sha256:b4d4c12b1f98732151bd31377753e0da1a20f6423016d2d097d2e31953ec7c23", size = 12230980 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2b/d8/6d2b8dd06ae6d95a67e22de2a5d44cddabe4aed1042813c1a9ec48ce09b2/duckdb-1.1.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:5e4cbc408e6e41146dea89b9044dae7356e353db0c96b183e5583ee02bc6ae5d", size = 15432844 }, + { url = "https://files.pythonhosted.org/packages/c9/bb/e1d720200a45fc990868caff15e00e39d281afd1c5d57f846c8387fe655f/duckdb-1.1.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:6370ae27ec8167ccfbefb94f58ad9fdc7bac142399960549d6d367f233189868", size = 32245908 }, + { url = "https://files.pythonhosted.org/packages/03/95/d0bb3bc96968655d805c74a2f2a74a679cdd568386de98b26c16fdd60f75/duckdb-1.1.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:4e1c3414f7fd01f4810dc8b335deffc91933a159282d65fef11c1286bc0ded04", size = 16897500 }, + { url = "https://files.pythonhosted.org/packages/5f/b9/b7ac38a99000e4281022bd2edcf3ba90e8628370834a194f09aa507106b3/duckdb-1.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:c6bc2a58689adf5520303c5f68b065b9f980bd31f1366c541b8c7490abaf55cd", size = 18441800 }, + { url = "https://files.pythonhosted.org/packages/f6/ab/c7d5e79d2984001911d864af8ec74492da5dba558737b10774ce27587239/duckdb-1.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d02be208d2885ca085d4c852b911493b8cdac9d6eae893259da32bd72a437c25", size = 20097722 }, + { url = "https://files.pythonhosted.org/packages/d2/dd/37bd410c15751499e7b08f4ef7f7a8cf59725dc9c0f3a1c6358393400926/duckdb-1.1.0-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:655df442ceebfc6f3fd6c8766e04b60d44dddedfa90275d794f9fab2d3180879", size = 18248603 }, + { url = "https://files.pythonhosted.org/packages/a9/47/19f95ba03547bbe2e3dbcd5d7a2636db4564b95604a18b5bc6f0ff7f874a/duckdb-1.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:6e183729bb64be7798ccbfda6283ebf423c869268c25af2b56929e48f763be2f", size = 21558779 }, + { url = "https://files.pythonhosted.org/packages/4f/6a/2aefd78ebaa7a67081c00d4ba99045d33ad146eca389f83e2ac1d0971672/duckdb-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:61fb838da51e07ceb0222c4406b059b90e10efcc453c19a3650b73c0112138c4", size = 10933612 }, + { url = "https://files.pythonhosted.org/packages/4f/fe/2679fe8fb8ca4b1316a93650c1182ba39566e7f39a79ff2563dc73f5b502/duckdb-1.1.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:7807e2f0d3344668e433f0dc1f54bfaddd410589611393e9a7ed56f8dec9514f", size = 15436938 }, + { url = "https://files.pythonhosted.org/packages/8d/f4/1f68e4f4c417163c3f719dd0b7b958e6dda38c30cb70c4615e44c4355fd8/duckdb-1.1.0-cp311-cp311-macosx_12_0_universal2.whl", hash = "sha256:3da30b7b466f710d52caa1fdc3ef0bf4176ad7f115953cd9f8b0fbf0f723778f", size = 32250316 }, + { url = "https://files.pythonhosted.org/packages/12/24/ee847da231f9c7b500a27a8ef64e31351688fd7a0e70913f2f6cfb89bbc3/duckdb-1.1.0-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:b9b6a77ef0183f561b1fc2945fcc762a71570ffd33fea4e3a855d413ed596fe4", size = 16898024 }, + { url = "https://files.pythonhosted.org/packages/c0/4c/4ca6a3a3422c947275f6ca3cdccb3172e75351df846c9f6f1d32555b2e4a/duckdb-1.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16243e66a9fd0e64ee265f2634d137adc6593f54ddf3ef55cb8a29e1decf6e54", size = 18450253 }, + { url = "https://files.pythonhosted.org/packages/a4/87/c29ae8510f90a6169247ccdf72f2ce41ce7cfc198f32e69de21dac530fe8/duckdb-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42b910a149e00f40a1766dc74fa309d4255b912a5d2fdcc387287658048650f6", size = 20105066 }, + { url = "https://files.pythonhosted.org/packages/00/08/53d643d2a5cf1787f9dea9cb2838eaa0c6767a651e81f36b28cc9fa573cf/duckdb-1.1.0-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:47849d546dc4238c0f20e95fe53b621aa5b08684e68fff91fd84a7092be91a17", size = 18253861 }, + { url = "https://files.pythonhosted.org/packages/5f/fd/53dde5c1851c7386a02dff0c88a6b0b2e304f7107164a9b925a2aa0fbcef/duckdb-1.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:11ec967b67159361ceade34095796a8d19368ea5c30cad988f44896b082b0816", size = 21568219 }, + { url = "https://files.pythonhosted.org/packages/56/f0/02ed53651c9f59940da409fab8ce546120d31bdca722990a5bfbe1f7e8f2/duckdb-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:510b5885ed6c267b9c0e1e7c6138fdffc2dd6f934a5a95b76da85da127213338", size = 10934060 }, + { url = 
"https://files.pythonhosted.org/packages/e1/79/5437a9f2b1169cbcf733e41db7e4404fe6f248d6a8c57b74275ab822457f/duckdb-1.1.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:657bc7ac64d5faf069a782ae73afac51ef30ae2e5d0e09ce6a09d03db84ab35e", size = 15446208 }, + { url = "https://files.pythonhosted.org/packages/40/66/b4869b7ab1e6ee3e0615ee7296456a70b22d77c5bee9fb284122de889d9c/duckdb-1.1.0-cp312-cp312-macosx_12_0_universal2.whl", hash = "sha256:89f3de8cba57d19b41cd3c47dd06d979bd2a2ffead115480e37afbe72b02896d", size = 32282770 }, + { url = "https://files.pythonhosted.org/packages/66/22/0415dcb6b2b01f5b450d42bc7fd27e6f5a4834402607f01bbaf5150a4696/duckdb-1.1.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:f6486323ab20656d22ffa8f3c6e109dde30d0b327b7c831f22ebcfe747f97fb0", size = 16921818 }, + { url = "https://files.pythonhosted.org/packages/dd/44/067ea9a7005c79672118d504ca2ae665337142c6f3d805313e647b822ab2/duckdb-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78a4510f82431ee3f14db689fe8727a4a9062c8f2fbb3bcfe3bfad3c1a198004", size = 18443809 }, + { url = "https://files.pythonhosted.org/packages/4c/4a/831caae82688ac7713aa9b0795054e585b0d2bcc0415eafe548eae38518b/duckdb-1.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64bf2a6e23840d662bd2ac09206a9bd4fa657418884d69e5c352d4456dc70b3c", size = 20108366 }, + { url = "https://files.pythonhosted.org/packages/c7/8a/51e4b1498e009391f9bbd678f5a36565aa0963c4c6abf93684db6a4aa9ed/duckdb-1.1.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:23fc9aa0af74e3803ed90c8d98280fd5bcac8c940592bf6288e8fd60fb051d00", size = 18255111 }, + { url = "https://files.pythonhosted.org/packages/83/4f/d1c57db42a5c32f6a56d7c2c6f8c007f96cbe865bbf96759b038cb5f2117/duckdb-1.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1f3aea31341ce400640dd522e4399b941f66df17e39884f446638fe958d6117c", size = 21575999 }, + { url = "https://files.pythonhosted.org/packages/d3/18/6951780ed259b1fc95fcc9719d92a251253f8115b9483c73805ea6d05cea/duckdb-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:3db4ab31c20de4edaef152930836b38e7662cd71370748fdf2c38ba9cf854dc4", size = 10934750 }, +] + +[[package]] +name = "durationpy" +version = "0.7" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8b/b8/074abdcc251bec87da6c5b19b88d7898ec7996c6780d40c6ac5000d3dd47/durationpy-0.7.tar.gz", hash = "sha256:8447c43df4f1a0b434e70c15a38d77f5c9bd17284bfc1ff1d430f233d5083732", size = 3168 } + +[[package]] +name = "ecdsa" +version = "0.19.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5e/d0/ec8ac1de7accdcf18cfe468653ef00afd2f609faf67c423efbd02491051b/ecdsa-0.19.0.tar.gz", hash = "sha256:60eaad1199659900dd0af521ed462b793bbdf867432b3948e87416ae4caf6bf8", size = 197791 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/00/e7/ed3243b30d1bec41675b6394a1daae46349dc2b855cb83be846a5a918238/ecdsa-0.19.0-py2.py3-none-any.whl", hash = "sha256:2cea9b88407fdac7bbeca0833b189e4c9c53f2ef1e1eaa29f6224dbc809b707a", size = 149266 }, +] + +[[package]] +name = "email-validator" +version = "2.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "dnspython" }, + { name = "idna" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/48/ce/13508a1ec3f8bb981ae4ca79ea40384becc868bfae97fd1c942bb3a001b1/email_validator-2.2.0.tar.gz", hash = "sha256:cb690f344c617a714f22e66ae771445a1ceb46821152df8e165c5f9a364582b7", size = 48967 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d7/ee/bf0adb559ad3c786f12bcbc9296b3f5675f529199bef03e2df281fa1fadb/email_validator-2.2.0-py3-none-any.whl", hash = "sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631", size = 33521 }, +] + +[[package]] +name = "embedchain" +version = "0.1.121" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "alembic" }, + { name = "beautifulsoup4" }, + { name = "chromadb" }, + { name = "cohere" }, + { name = "google-cloud-aiplatform" }, + { name = "gptcache" }, + { name = "langchain" }, + { name = "langchain-cohere" }, + { name = "langchain-community" }, + { name = "langchain-openai" }, + { name = "mem0ai" }, + { name = "openai" }, + { name = "posthog" }, + { name = "pypdf" }, + { name = "pysbd" }, + { name = "python-dotenv" }, + { name = "rich" }, + { name = "schema" }, + { name = "sqlalchemy" }, + { name = "tiktoken" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0e/fa/6bb71c4d5eb4baaded3de2935ca9261611464d64b6d4d1db31f4f0e5260e/embedchain-0.1.121.tar.gz", hash = "sha256:1427a43fd92b0e5303d0d733ebcd5310df14da8bd8dba0b08818d0d3658e7c3e", size = 124646 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/49/54/37fa96129096ce64b64db3a23693a0df607a31c62e300dd9bd27d13e0688/embedchain-0.1.121-py3-none-any.whl", hash = "sha256:c756e8750fb9e3431b6d2a0b0dfbb0dfebeae2d7669d3dd6894311a632abfe77", size = 210908 }, +] + +[[package]] +name = "emoji" +version = "2.13.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/73/49/e456f5621ed86c8e77c343c9de2a3bfbcfb6dc88885d6e81feec030fd495/emoji-2.13.0.tar.gz", hash = "sha256:e32e46a1b4445dffbc37cc82ea2d0dacb9323b857f14297eba7decaadfb4890e", size = 562776 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9b/fa/f9ed99b289acddc28b79e41700db67abf48b91895c58e08520a304bbfb49/emoji-2.13.0-py3-none-any.whl", hash = "sha256:6c8027f02c448731fd5f13e38521e14594e48e9c5d2c3862f95d086d8aa84a7c", size = 553164 }, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/09/35/2495c4ac46b980e4ca1f6ad6db102322ef3ad2410b79fdde159a4b0f3b92/exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc", size = 28883 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/02/cc/b7e31358aac6ed1ef2bb790a9746ac2c69bcb3c8588b41616914eb106eaf/exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b", size = 16453 }, +] + +[[package]] +name = "execnet" +version = "2.1.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/bb/ff/b4c0dc78fbe20c3e59c0c7334de0c27eb4001a2b2017999af398bf730817/execnet-2.1.1.tar.gz", hash = "sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3", size = 166524 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/43/09/2aea36ff60d16dd8879bdb2f5b3ee0ba8d08cbbdcdfe870e695ce3784385/execnet-2.1.1-py3-none-any.whl", hash = 
"sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc", size = 40612 }, +] + +[[package]] +name = "executing" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8c/e3/7d45f492c2c4a0e8e0fad57d081a7c8a0286cdd86372b070cca1ec0caa1e/executing-2.1.0.tar.gz", hash = "sha256:8ea27ddd260da8150fa5a708269c4a10e76161e2496ec3e587da9e3c0fe4b9ab", size = 977485 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b5/fd/afcd0496feca3276f509df3dbd5dae726fcc756f1a08d9e25abe1733f962/executing-2.1.0-py2.py3-none-any.whl", hash = "sha256:8d63781349375b5ebccc3142f4b30350c0cd9c79f921cde38be2be4637e98eaf", size = 25805 }, +] + +[[package]] +name = "fastapi" +version = "0.111.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "email-validator" }, + { name = "fastapi-cli" }, + { name = "httpx" }, + { name = "jinja2" }, + { name = "pydantic" }, + { name = "python-multipart" }, + { name = "starlette" }, + { name = "typing-extensions" }, + { name = "uvicorn", extra = ["standard"] }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a0/ab/9f461ced846964e01c7a6ab25ea79ce2f07743285697b03c8e0e83d72e3e/fastapi-0.111.1.tar.gz", hash = "sha256:ddd1ac34cb1f76c2e2d7f8545a4bcb5463bce4834e81abf0b189e0c359ab2413", size = 288903 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a4/d4/eb78f7c2648a3585095623f207d7e4b85a1be30347e01e0fdcd1d7d167a9/fastapi-0.111.1-py3-none-any.whl", hash = "sha256:4f51cfa25d72f9fbc3280832e84b32494cf186f50158d364a8765aabf22587bf", size = 92210 }, +] + +[[package]] +name = "fastapi-cli" +version = "0.0.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typer" }, + { name = "uvicorn", extra = ["standard"] }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c5/f8/1ad5ce32d029aeb9117e9a5a9b3e314a8477525d60c12a9b7730a3c186ec/fastapi_cli-0.0.5.tar.gz", hash = "sha256:d30e1239c6f46fcb95e606f02cdda59a1e2fa778a54b64686b3ff27f6211ff9f", size = 15571 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/24/ea/4b5011012ac925fe2f83b19d0e09cee9d324141ec7bf5e78bb2817f96513/fastapi_cli-0.0.5-py3-none-any.whl", hash = "sha256:e94d847524648c748a5350673546bbf9bcaeb086b33c24f2e82e021436866a46", size = 9489 }, +] + +[[package]] +name = "fastavro" +version = "1.9.7" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/11/56/72dc3fa6985c7f27b392cd3991c466eb61208f3c6cb7fc2f12e6bfc6f774/fastavro-1.9.7.tar.gz", hash = "sha256:13e11c6cb28626da85290933027cd419ce3f9ab8e45410ef24ce6b89d20a1f6c", size = 987818 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/79/24/0e9940a19aea0599987807f261d9ae66a9c180e6f14464b2b738b06cc48f/fastavro-1.9.7-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cc811fb4f7b5ae95f969cda910241ceacf82e53014c7c7224df6f6e0ca97f52f", size = 1037248 }, + { url = "https://files.pythonhosted.org/packages/36/f8/854fa8c91c0e8a4f7aa26711e0a8e52d1eb408066a3c56fe0746402b06df/fastavro-1.9.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb8749e419a85f251bf1ac87d463311874972554d25d4a0b19f6bdc56036d7cf", size = 3024356 }, + { url = "https://files.pythonhosted.org/packages/3f/5c/e9d528770af9c1cb38611e6b9a8976dfb822a876cbe5d0c9801988d56d1c/fastavro-1.9.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b2f9bafa167cb4d1c3dd17565cb5bf3d8c0759e42620280d1760f1e778e07fc", size = 
3073783 }, + { url = "https://files.pythonhosted.org/packages/ed/49/d667623c67351cfd884f8643edcde8e75210988648b53253d082ef4e5bb9/fastavro-1.9.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e87d04b235b29f7774d226b120da2ca4e60b9e6fdf6747daef7f13f218b3517a", size = 2967851 }, + { url = "https://files.pythonhosted.org/packages/56/89/f37e824942867771027f1e2e297b3d1f0ee2e72f8faae610d5f863258df3/fastavro-1.9.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b525c363e267ed11810aaad8fbdbd1c3bd8837d05f7360977d72a65ab8c6e1fa", size = 3122284 }, + { url = "https://files.pythonhosted.org/packages/72/54/d73fd1e91385f45e04168c5660ee5f18222ed644d52f0271207d3e7807b5/fastavro-1.9.7-cp310-cp310-win_amd64.whl", hash = "sha256:6312fa99deecc319820216b5e1b1bd2d7ebb7d6f221373c74acfddaee64e8e60", size = 497169 }, + { url = "https://files.pythonhosted.org/packages/89/61/b8b18aebc01e5d5a77042f6d555fe091d3279242edd5639252c9fcb9a3b7/fastavro-1.9.7-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ec8499dc276c2d2ef0a68c0f1ad11782b2b956a921790a36bf4c18df2b8d4020", size = 1040249 }, + { url = "https://files.pythonhosted.org/packages/a0/a1/c6539ac9f6e068c1920f5d6a823113cd60088160050ed32ee4e7b960c1aa/fastavro-1.9.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76d9d96f98052615ab465c63ba8b76ed59baf2e3341b7b169058db104cbe2aa0", size = 3312219 }, + { url = "https://files.pythonhosted.org/packages/68/2b/0015355fb7dbf31dee0f3e69e6fa1ff43967500a8b1abb81de5a15f24b16/fastavro-1.9.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:919f3549e07a8a8645a2146f23905955c35264ac809f6c2ac18142bc5b9b6022", size = 3334160 }, + { url = "https://files.pythonhosted.org/packages/60/08/62707fe5bfb7c4dca99132c969b38270579bf96408552a0baf201e861e84/fastavro-1.9.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9de1fa832a4d9016724cd6facab8034dc90d820b71a5d57c7e9830ffe90f31e4", size = 3282829 }, + { url = "https://files.pythonhosted.org/packages/b2/7e/21b3066973c60309f8e58f3d0d63dfdad196354217416384577c1e8faee0/fastavro-1.9.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1d09227d1f48f13281bd5ceac958650805aef9a4ef4f95810128c1f9be1df736", size = 3419466 }, + { url = "https://files.pythonhosted.org/packages/43/b3/cac5151810a8c8b5ef318b488a61288fe07e623e9b342c3fc2f60cbfdede/fastavro-1.9.7-cp311-cp311-win_amd64.whl", hash = "sha256:2db993ae6cdc63e25eadf9f93c9e8036f9b097a3e61d19dca42536dcc5c4d8b3", size = 500131 }, + { url = "https://files.pythonhosted.org/packages/bb/30/e6f13d07ca6b2ba42719192a36233d660d75bbdc91026a20da0e08f8d5f3/fastavro-1.9.7-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:4e1289b731214a7315884c74b2ec058b6e84380ce9b18b8af5d387e64b18fc44", size = 1035760 }, + { url = "https://files.pythonhosted.org/packages/e0/29/dd2f5b2213be103a6b22cbf62e1e17a8423aa687c05f37510688d7ed5987/fastavro-1.9.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eac69666270a76a3a1d0444f39752061195e79e146271a568777048ffbd91a27", size = 3263393 }, + { url = "https://files.pythonhosted.org/packages/69/4c/011823812409d16c6785754c5332e3f551b8131ea14cf9dd14155a61baaf/fastavro-1.9.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9be089be8c00f68e343bbc64ca6d9a13e5e5b0ba8aa52bcb231a762484fb270e", size = 3328621 }, + { url = "https://files.pythonhosted.org/packages/85/1a/d388306a809ad3b4820f1bd67b2fdd9dd9d0af8782dea6524bdb7fd249ef/fastavro-1.9.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:d576eccfd60a18ffa028259500df67d338b93562c6700e10ef68bbd88e499731", size = 3256407 }, + { url = "https://files.pythonhosted.org/packages/68/dc/66cc5227809074beb61cf19bfd615b5b1c0bce0d833af69a2d02b4408316/fastavro-1.9.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ee9bf23c157bd7dcc91ea2c700fa3bd924d9ec198bb428ff0b47fa37fe160659", size = 3418234 }, + { url = "https://files.pythonhosted.org/packages/c8/0c/92b468e4649e61eaa2d93a92e19a5b57a0f6cecaa236c53a76f3f72a4696/fastavro-1.9.7-cp312-cp312-win_amd64.whl", hash = "sha256:b6b2ccdc78f6afc18c52e403ee68c00478da12142815c1bd8a00973138a166d0", size = 487778 }, +] + +[[package]] +name = "filelock" +version = "3.16.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9d/db/3ef5bb276dae18d6ec2124224403d1d67bccdbefc17af4cc8f553e341ab1/filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435", size = 18037 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b9/f8/feced7779d755758a52d1f6635d990b8d98dc0a29fa568bbe0625f18fdf3/filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0", size = 16163 }, +] + +[[package]] +name = "firecrawl-py" +version = "0.0.16" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e8/64/d480cdb6ab75ea205d187fcbbd726b0c8a60f00343beb49dfbd1be4f40c8/firecrawl_py-0.0.16.tar.gz", hash = "sha256:6c662fa0a549bc7f5c0acb704baba6731869ca0451094034264dfc1b4eb086e4", size = 11056 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/84/55/6e74b49b32ec95f11adb12961f1cb25e14199daa91742c781ee7665bd045/firecrawl_py-0.0.16-py3-none-any.whl", hash = "sha256:9024f483b501852a6b9c4e6cdfc9e8dde452d922afac357080bb278a0c9c2a26", size = 9464 }, +] + +[[package]] +name = "flatbuffers" +version = "24.3.25" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a9/74/2df95ef84b214d2bee0886d572775a6f38793f5ca6d7630c3239c91104ac/flatbuffers-24.3.25.tar.gz", hash = "sha256:de2ec5b203f21441716617f38443e0a8ebf3d25bf0d9c0bb0ce68fa00ad546a4", size = 22139 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/41/f0/7e988a019bc54b2dbd0ad4182ef2d53488bb02e58694cd79d61369e85900/flatbuffers-24.3.25-py2.py3-none-any.whl", hash = "sha256:8dbdec58f935f3765e4f7f3cf635ac3a77f83568138d6a2311f524ec96364812", size = 26784 }, +] + +[[package]] +name = "flower" +version = "2.0.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "celery" }, + { name = "humanize" }, + { name = "prometheus-client" }, + { name = "pytz" }, + { name = "tornado" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/09/a1/357f1b5d8946deafdcfdd604f51baae9de10aafa2908d0b7322597155f92/flower-2.0.1.tar.gz", hash = "sha256:5ab717b979530770c16afb48b50d2a98d23c3e9fe39851dcf6bc4d01845a02a0", size = 3220408 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a6/ff/ee2f67c0ff146ec98b5df1df637b2bc2d17beeb05df9f427a67bd7a7d79c/flower-2.0.1-py2.py3-none-any.whl", hash = "sha256:9db2c621eeefbc844c8dd88be64aef61e84e2deb29b271e02ab2b5b9f01068e2", size = 383553 }, +] + +[[package]] +name = "frozenlist" +version = "1.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/cf/3d/2102257e7acad73efc4a0c306ad3953f68c504c16982bbdfee3ad75d8085/frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b", size = 37820 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7a/35/1328c7b0f780d34f8afc1d87ebdc2bb065a123b24766a0b475f0d67da637/frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac", size = 94315 }, + { url = "https://files.pythonhosted.org/packages/f4/d6/ca016b0adcf8327714ccef969740688808c86e0287bf3a639ff582f24e82/frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868", size = 53805 }, + { url = "https://files.pythonhosted.org/packages/ae/83/bcdaa437a9bd693ba658a0310f8cdccff26bd78e45fccf8e49897904a5cd/frozenlist-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74fb4bee6880b529a0c6560885fce4dc95936920f9f20f53d99a213f7bf66776", size = 52163 }, + { url = "https://files.pythonhosted.org/packages/d4/e9/759043ab7d169b74fe05ebfbfa9ee5c881c303ebc838e308346204309cd0/frozenlist-1.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:590344787a90ae57d62511dd7c736ed56b428f04cd8c161fcc5e7232c130c69a", size = 238595 }, + { url = "https://files.pythonhosted.org/packages/f8/ce/b9de7dc61e753dc318cf0de862181b484178210c5361eae6eaf06792264d/frozenlist-1.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:068b63f23b17df8569b7fdca5517edef76171cf3897eb68beb01341131fbd2ad", size = 262428 }, + { url = "https://files.pythonhosted.org/packages/36/ce/dc6f29e0352fa34ebe45421960c8e7352ca63b31630a576e8ffb381e9c08/frozenlist-1.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c849d495bf5154cd8da18a9eb15db127d4dba2968d88831aff6f0331ea9bd4c", size = 258867 }, + { url = "https://files.pythonhosted.org/packages/51/47/159ac53faf8a11ae5ee8bb9db10327575557504e549cfd76f447b969aa91/frozenlist-1.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9750cc7fe1ae3b1611bb8cfc3f9ec11d532244235d75901fb6b8e42ce9229dfe", size = 229412 }, + { url = "https://files.pythonhosted.org/packages/ec/25/0c87df2e53c0c5d90f7517ca0ff7aca78d050a8ec4d32c4278e8c0e52e51/frozenlist-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9b2de4cf0cdd5bd2dee4c4f63a653c61d2408055ab77b151c1957f221cabf2a", size = 239539 }, + { url = "https://files.pythonhosted.org/packages/97/94/a1305fa4716726ae0abf3b1069c2d922fcfd442538cb850f1be543f58766/frozenlist-1.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0633c8d5337cb5c77acbccc6357ac49a1770b8c487e5b3505c57b949b4b82e98", size = 253379 }, + { url = "https://files.pythonhosted.org/packages/53/82/274e19f122e124aee6d113188615f63b0736b4242a875f482a81f91e07e2/frozenlist-1.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:27657df69e8801be6c3638054e202a135c7f299267f1a55ed3a598934f6c0d75", size = 245901 }, + { url = "https://files.pythonhosted.org/packages/b8/28/899931015b8cffbe155392fe9ca663f981a17e1adc69589ee0e1e7cdc9a2/frozenlist-1.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f9a3ea26252bd92f570600098783d1371354d89d5f6b7dfd87359d669f2109b5", size = 263797 }, + { url = 
"https://files.pythonhosted.org/packages/6e/4f/b8a5a2f10c4a58c52a52a40cf6cf1ffcdbf3a3b64f276f41dab989bf3ab5/frozenlist-1.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4f57dab5fe3407b6c0c1cc907ac98e8a189f9e418f3b6e54d65a718aaafe3950", size = 264415 }, + { url = "https://files.pythonhosted.org/packages/b0/2c/7be3bdc59dbae444864dbd9cde82790314390ec54636baf6b9ce212627ad/frozenlist-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e02a0e11cf6597299b9f3bbd3f93d79217cb90cfd1411aec33848b13f5c656cc", size = 253964 }, + { url = "https://files.pythonhosted.org/packages/2e/ec/4fb5a88f6b9a352aed45ab824dd7ce4801b7bcd379adcb927c17a8f0a1a8/frozenlist-1.4.1-cp310-cp310-win32.whl", hash = "sha256:a828c57f00f729620a442881cc60e57cfcec6842ba38e1b19fd3e47ac0ff8dc1", size = 44559 }, + { url = "https://files.pythonhosted.org/packages/61/15/2b5d644d81282f00b61e54f7b00a96f9c40224107282efe4cd9d2bf1433a/frozenlist-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:f56e2333dda1fe0f909e7cc59f021eba0d2307bc6f012a1ccf2beca6ba362439", size = 50434 }, + { url = "https://files.pythonhosted.org/packages/01/bc/8d33f2d84b9368da83e69e42720cff01c5e199b5a868ba4486189a4d8fa9/frozenlist-1.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0", size = 97060 }, + { url = "https://files.pythonhosted.org/packages/af/b2/904500d6a162b98a70e510e743e7ea992241b4f9add2c8063bf666ca21df/frozenlist-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49", size = 55347 }, + { url = "https://files.pythonhosted.org/packages/5b/9c/f12b69997d3891ddc0d7895999a00b0c6a67f66f79498c0e30f27876435d/frozenlist-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced", size = 53374 }, + { url = "https://files.pythonhosted.org/packages/ac/6e/e0322317b7c600ba21dec224498c0c5959b2bce3865277a7c0badae340a9/frozenlist-1.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0", size = 273288 }, + { url = "https://files.pythonhosted.org/packages/a7/76/180ee1b021568dad5b35b7678616c24519af130ed3fa1e0f1ed4014e0f93/frozenlist-1.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106", size = 284737 }, + { url = "https://files.pythonhosted.org/packages/05/08/40159d706a6ed983c8aca51922a93fc69f3c27909e82c537dd4054032674/frozenlist-1.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068", size = 280267 }, + { url = "https://files.pythonhosted.org/packages/e0/18/9f09f84934c2b2aa37d539a322267939770362d5495f37783440ca9c1b74/frozenlist-1.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2", size = 258778 }, + { url = "https://files.pythonhosted.org/packages/b3/c9/0bc5ee7e1f5cc7358ab67da0b7dfe60fbd05c254cea5c6108e7d1ae28c63/frozenlist-1.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19", size = 272276 }, + { url = 
"https://files.pythonhosted.org/packages/12/5d/147556b73a53ad4df6da8bbb50715a66ac75c491fdedac3eca8b0b915345/frozenlist-1.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82", size = 272424 }, + { url = "https://files.pythonhosted.org/packages/83/61/2087bbf24070b66090c0af922685f1d0596c24bb3f3b5223625bdeaf03ca/frozenlist-1.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec", size = 260881 }, + { url = "https://files.pythonhosted.org/packages/a8/be/a235bc937dd803258a370fe21b5aa2dd3e7bfe0287a186a4bec30c6cccd6/frozenlist-1.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a", size = 282327 }, + { url = "https://files.pythonhosted.org/packages/5d/e7/b2469e71f082948066b9382c7b908c22552cc705b960363c390d2e23f587/frozenlist-1.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74", size = 281502 }, + { url = "https://files.pythonhosted.org/packages/db/1b/6a5b970e55dffc1a7d0bb54f57b184b2a2a2ad0b7bca16a97ca26d73c5b5/frozenlist-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2", size = 272292 }, + { url = "https://files.pythonhosted.org/packages/1a/05/ebad68130e6b6eb9b287dacad08ea357c33849c74550c015b355b75cc714/frozenlist-1.4.1-cp311-cp311-win32.whl", hash = "sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17", size = 44446 }, + { url = "https://files.pythonhosted.org/packages/b3/21/c5aaffac47fd305d69df46cfbf118768cdf049a92ee6b0b5cb029d449dcf/frozenlist-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825", size = 50459 }, + { url = "https://files.pythonhosted.org/packages/b4/db/4cf37556a735bcdb2582f2c3fa286aefde2322f92d3141e087b8aeb27177/frozenlist-1.4.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1979bc0aeb89b33b588c51c54ab0161791149f2461ea7c7c946d95d5f93b56ae", size = 93937 }, + { url = "https://files.pythonhosted.org/packages/46/03/69eb64642ca8c05f30aa5931d6c55e50b43d0cd13256fdd01510a1f85221/frozenlist-1.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cc7b01b3754ea68a62bd77ce6020afaffb44a590c2289089289363472d13aedb", size = 53656 }, + { url = "https://files.pythonhosted.org/packages/3f/ab/c543c13824a615955f57e082c8a5ee122d2d5368e80084f2834e6f4feced/frozenlist-1.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9c92be9fd329ac801cc420e08452b70e7aeab94ea4233a4804f0915c14eba9b", size = 51868 }, + { url = "https://files.pythonhosted.org/packages/a9/b8/438cfd92be2a124da8259b13409224d9b19ef8f5a5b2507174fc7e7ea18f/frozenlist-1.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3894db91f5a489fc8fa6a9991820f368f0b3cbdb9cd8849547ccfab3392d86", size = 280652 }, + { url = "https://files.pythonhosted.org/packages/54/72/716a955521b97a25d48315c6c3653f981041ce7a17ff79f701298195bca3/frozenlist-1.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba60bb19387e13597fb059f32cd4d59445d7b18b69a745b8f8e5db0346f33480", size = 286739 }, + { url = "https://files.pythonhosted.org/packages/65/d8/934c08103637567084568e4d5b4219c1016c60b4d29353b1a5b3587827d6/frozenlist-1.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aefbba5f69d42246543407ed2461db31006b0f76c4e32dfd6f42215a2c41d09", 
size = 289447 }, + { url = "https://files.pythonhosted.org/packages/70/bb/d3b98d83ec6ef88f9bd63d77104a305d68a146fd63a683569ea44c3085f6/frozenlist-1.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780d3a35680ced9ce682fbcf4cb9c2bad3136eeff760ab33707b71db84664e3a", size = 265466 }, + { url = "https://files.pythonhosted.org/packages/0b/f2/b8158a0f06faefec33f4dff6345a575c18095a44e52d4f10c678c137d0e0/frozenlist-1.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9acbb16f06fe7f52f441bb6f413ebae6c37baa6ef9edd49cdd567216da8600cd", size = 281530 }, + { url = "https://files.pythonhosted.org/packages/ea/a2/20882c251e61be653764038ece62029bfb34bd5b842724fff32a5b7a2894/frozenlist-1.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:23b701e65c7b36e4bf15546a89279bd4d8675faabc287d06bbcfac7d3c33e1e6", size = 281295 }, + { url = "https://files.pythonhosted.org/packages/4c/f9/8894c05dc927af2a09663bdf31914d4fb5501653f240a5bbaf1e88cab1d3/frozenlist-1.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3e0153a805a98f5ada7e09826255ba99fb4f7524bb81bf6b47fb702666484ae1", size = 268054 }, + { url = "https://files.pythonhosted.org/packages/37/ff/a613e58452b60166507d731812f3be253eb1229808e59980f0405d1eafbf/frozenlist-1.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:dd9b1baec094d91bf36ec729445f7769d0d0cf6b64d04d86e45baf89e2b9059b", size = 286904 }, + { url = "https://files.pythonhosted.org/packages/cc/6e/0091d785187f4c2020d5245796d04213f2261ad097e0c1cf35c44317d517/frozenlist-1.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:1a4471094e146b6790f61b98616ab8e44f72661879cc63fa1049d13ef711e71e", size = 290754 }, + { url = "https://files.pythonhosted.org/packages/a5/c2/e42ad54bae8bcffee22d1e12a8ee6c7717f7d5b5019261a8c861854f4776/frozenlist-1.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5667ed53d68d91920defdf4035d1cdaa3c3121dc0b113255124bcfada1cfa1b8", size = 282602 }, + { url = "https://files.pythonhosted.org/packages/b6/61/56bad8cb94f0357c4bc134acc30822e90e203b5cb8ff82179947de90c17f/frozenlist-1.4.1-cp312-cp312-win32.whl", hash = "sha256:beee944ae828747fd7cb216a70f120767fc9f4f00bacae8543c14a6831673f89", size = 44063 }, + { url = "https://files.pythonhosted.org/packages/3e/dc/96647994a013bc72f3d453abab18340b7f5e222b7b7291e3697ca1fcfbd5/frozenlist-1.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:64536573d0a2cb6e625cf309984e2d873979709f2cf22839bf2d61790b448ad5", size = 50452 }, + { url = "https://files.pythonhosted.org/packages/83/10/466fe96dae1bff622021ee687f68e5524d6392b0a2f80d05001cd3a451ba/frozenlist-1.4.1-py3-none-any.whl", hash = "sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7", size = 11552 }, +] + +[[package]] +name = "fsspec" +version = "2024.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/62/7c/12b0943011daaaa9c35c2a2e22e5eb929ac90002f08f1259d69aedad84de/fsspec-2024.9.0.tar.gz", hash = "sha256:4b0afb90c2f21832df142f292649035d80b421f60a9e1c027802e5a0da2b04e8", size = 286206 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1d/a0/6aaea0c2fbea2f89bfd5db25fb1e3481896a423002ebe4e55288907a97a3/fsspec-2024.9.0-py3-none-any.whl", hash = "sha256:a0947d552d8a6efa72cc2c730b12c41d043509156966cca4fb157b0f2a0c574b", size = 179253 }, +] + +[[package]] +name = "google-api-core" +version = "2.20.0" +source = { registry = "https://pypi.org/simple" } 
+dependencies = [ + { name = "google-auth" }, + { name = "googleapis-common-protos" }, + { name = "proto-plus" }, + { name = "protobuf" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c8/5c/31c1742a53b79c8a0c4757b5fae2e8ab9c519cbd7b98c587d4294e1d2d16/google_api_core-2.20.0.tar.gz", hash = "sha256:f74dff1889ba291a4b76c5079df0711810e2d9da81abfdc99957bc961c1eb28f", size = 152583 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c8/dc/6143f67acf5f30717c9e1b1c48fc04c0f59b869be046e6639d3f171640ae/google_api_core-2.20.0-py3-none-any.whl", hash = "sha256:ef0591ef03c30bb83f79b3d0575c3f31219001fc9c5cf37024d08310aeffed8a", size = 142162 }, +] + +[package.optional-dependencies] +grpc = [ + { name = "grpcio" }, + { name = "grpcio-status" }, +] + +[[package]] +name = "google-auth" +version = "2.35.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cachetools" }, + { name = "pyasn1-modules" }, + { name = "rsa" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a1/37/c854a8b1b1020cf042db3d67577c6f84cd1e8ff6515e4f5498ae9e444ea5/google_auth-2.35.0.tar.gz", hash = "sha256:f4c64ed4e01e8e8b646ef34c018f8bf3338df0c8e37d8b3bba40e7f574a3278a", size = 267223 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/27/1f/3a72917afcb0d5cd842cbccb81bf7a8a7b45b4c66d8dc4556ccb3b016bfc/google_auth-2.35.0-py2.py3-none-any.whl", hash = "sha256:25df55f327ef021de8be50bad0dfd4a916ad0de96da86cd05661c9297723ad3f", size = 208968 }, +] + +[[package]] +name = "google-cloud-aiplatform" +version = "1.67.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "docstring-parser" }, + { name = "google-api-core", extra = ["grpc"] }, + { name = "google-auth" }, + { name = "google-cloud-bigquery" }, + { name = "google-cloud-resource-manager" }, + { name = "google-cloud-storage" }, + { name = "packaging" }, + { name = "proto-plus" }, + { name = "protobuf" }, + { name = "pydantic" }, + { name = "shapely" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b6/e2/ea8877e136e8b80272934f9ab52d00eb6721643784e5fb1170718f1e72f9/google-cloud-aiplatform-1.67.1.tar.gz", hash = "sha256:701a19061c8c670baa93464ca0b8a1a8720494f802187cef06bc9fcf952db315", size = 6282628 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/56/48/6d98ff7528805fa2d406f03fcffbf6e2dda7cc23a444cfaaa512930d0a00/google_cloud_aiplatform-1.67.1-py2.py3-none-any.whl", hash = "sha256:2ff0e1794839fcf74d644f3f54ff2de5d8099b3e388edecc48f6d620c1f3582c", size = 5245423 }, +] + +[[package]] +name = "google-cloud-bigquery" +version = "3.25.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-api-core", extra = ["grpc"] }, + { name = "google-auth" }, + { name = "google-cloud-core" }, + { name = "google-resumable-media" }, + { name = "packaging" }, + { name = "python-dateutil" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/39/07/d6f8c55f68d796a6a045cbb3c1783ed1c77ec641acbf9e6ff78b38b127a4/google-cloud-bigquery-3.25.0.tar.gz", hash = "sha256:5b2aff3205a854481117436836ae1403f11f2594e6810a98886afd57eda28509", size = 455186 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/81/98/2f931388614ea894640f84c1874d72d84d890c093e334a3990e363ff689e/google_cloud_bigquery-3.25.0-py2.py3-none-any.whl", hash = "sha256:7f0c371bc74d2a7fb74dacbc00ac0f90c8c2bec2289b51dd6685a275873b1ce9", size = 239012 }, +] + +[[package]] +name = "google-cloud-core" +version = 
"2.4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-api-core" }, + { name = "google-auth" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b8/1f/9d1e0ba6919668608570418a9a51e47070ac15aeff64261fb092d8be94c0/google-cloud-core-2.4.1.tar.gz", hash = "sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073", size = 35587 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5e/0f/2e2061e3fbcb9d535d5da3f58cc8de4947df1786fe6a1355960feb05a681/google_cloud_core-2.4.1-py2.py3-none-any.whl", hash = "sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61", size = 29233 }, +] + +[[package]] +name = "google-cloud-resource-manager" +version = "1.12.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-api-core", extra = ["grpc"] }, + { name = "google-auth" }, + { name = "grpc-google-iam-v1" }, + { name = "proto-plus" }, + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/62/32/14d345dee1f290a26bd639da8edbca30958865b7cc7207961e10d2f32282/google_cloud_resource_manager-1.12.5.tar.gz", hash = "sha256:b7af4254401ed4efa3aba3a929cb3ddb803fa6baf91a78485e45583597de5891", size = 394678 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6a/ab/63ab13fb060714b9d1708ca32e0ee41f9ffe42a62e524e7429cde45cfe61/google_cloud_resource_manager-1.12.5-py2.py3-none-any.whl", hash = "sha256:2708a718b45c79464b7b21559c701b5c92e6b0b1ab2146d0a256277a623dc175", size = 341861 }, +] + +[[package]] +name = "google-cloud-storage" +version = "2.18.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-api-core" }, + { name = "google-auth" }, + { name = "google-cloud-core" }, + { name = "google-crc32c" }, + { name = "google-resumable-media" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d6/b7/1554cdeb55d9626a4b8720746cba8119af35527b12e1780164f9ba0f659a/google_cloud_storage-2.18.2.tar.gz", hash = "sha256:aaf7acd70cdad9f274d29332673fcab98708d0e1f4dceb5a5356aaef06af4d99", size = 5532864 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fc/da/95db7bd4f0bd1644378ac1702c565c0210b004754d925a74f526a710c087/google_cloud_storage-2.18.2-py2.py3-none-any.whl", hash = "sha256:97a4d45c368b7d401ed48c4fdfe86e1e1cb96401c9e199e419d289e2c0370166", size = 130466 }, +] + +[[package]] +name = "google-crc32c" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/67/72/c3298da1a3773102359c5a78f20dae8925f5ea876e37354415f68594a6fb/google_crc32c-1.6.0.tar.gz", hash = "sha256:6eceb6ad197656a1ff49ebfbbfa870678c75be4344feb35ac1edf694309413dc", size = 14472 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1a/be/d7846cb50e17bf72a70ea2d8159478ac5de0f1170b10cac279f50079e78d/google_crc32c-1.6.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:5bcc90b34df28a4b38653c36bb5ada35671ad105c99cfe915fb5bed7ad6924aa", size = 30267 }, + { url = "https://files.pythonhosted.org/packages/84/3b/29cadae166132e4991087a49dc88906a1d3d5ec22b80f63bc4bc7b6e0431/google_crc32c-1.6.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:d9e9913f7bd69e093b81da4535ce27af842e7bf371cde42d1ae9e9bd382dc0e9", size = 30113 }, + { url = "https://files.pythonhosted.org/packages/18/a9/49a7b2c4b7cc69d15778a820734f9beb647b1b4cf1a629ca43e3d3a54c70/google_crc32c-1.6.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:a184243544811e4a50d345838a883733461e67578959ac59964e43cca2c791e7", size = 37702 }, + { url = "https://files.pythonhosted.org/packages/4b/aa/52538cceddefc7c2d66c6bd59dfe67a50f65a4952f441f91049e4188eb57/google_crc32c-1.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:236c87a46cdf06384f614e9092b82c05f81bd34b80248021f729396a78e55d7e", size = 32847 }, + { url = "https://files.pythonhosted.org/packages/b1/2c/1928413d3faae74ae0d7bdba648cf36ed6b03328c562b47046af016b7249/google_crc32c-1.6.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ebab974b1687509e5c973b5c4b8b146683e101e102e17a86bd196ecaa4d099fc", size = 37844 }, + { url = "https://files.pythonhosted.org/packages/d6/f4/f62fa405e442b37c5676973b759dd6e56cd8d58a5c78662912456526f716/google_crc32c-1.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:50cf2a96da226dcbff8671233ecf37bf6e95de98b2a2ebadbfdf455e6d05df42", size = 33444 }, + { url = "https://files.pythonhosted.org/packages/7d/14/ab47972ac79b6e7b03c8be3a7ef44b530a60e69555668dbbf08fc5692a98/google_crc32c-1.6.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:f7a1fc29803712f80879b0806cb83ab24ce62fc8daf0569f2204a0cfd7f68ed4", size = 30267 }, + { url = "https://files.pythonhosted.org/packages/54/7d/738cb0d25ee55629e7d07da686decf03864a366e5e863091a97b7bd2b8aa/google_crc32c-1.6.0-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:40b05ab32a5067525670880eb5d169529089a26fe35dce8891127aeddc1950e8", size = 30112 }, + { url = "https://files.pythonhosted.org/packages/3e/6d/33ca50cbdeec09c31bb5dac277c90994edee975662a4c890bda7ffac90ef/google_crc32c-1.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9e4b426c3702f3cd23b933436487eb34e01e00327fac20c9aebb68ccf34117d", size = 32861 }, + { url = "https://files.pythonhosted.org/packages/67/1e/4870896fc81ec77b1b5ebae7fdd680d5a4d40e19a4b6d724032f996ca77a/google_crc32c-1.6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51c4f54dd8c6dfeb58d1df5e4f7f97df8abf17a36626a217f169893d1d7f3e9f", size = 32490 }, + { url = "https://files.pythonhosted.org/packages/00/9c/f5f5af3ddaa7a639d915f8f58b09bbb8d1db90ecd0459b62cd430eb9a4b6/google_crc32c-1.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:bb8b3c75bd157010459b15222c3fd30577042a7060e29d42dabce449c087f2b3", size = 33446 }, + { url = "https://files.pythonhosted.org/packages/cf/41/65a91657d6a8123c6c12f9aac72127b6ac76dda9e2ba1834026a842eb77c/google_crc32c-1.6.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:ed767bf4ba90104c1216b68111613f0d5926fb3780660ea1198fc469af410e9d", size = 30268 }, + { url = "https://files.pythonhosted.org/packages/59/d0/ee743a267c7d5c4bb8bd865f7d4c039505f1c8a4b439df047fdc17be9769/google_crc32c-1.6.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:62f6d4a29fea082ac4a3c9be5e415218255cf11684ac6ef5488eea0c9132689b", size = 30113 }, + { url = "https://files.pythonhosted.org/packages/25/53/e5e449c368dd26ade5fb2bb209e046d4309ed0623be65b13f0ce026cb520/google_crc32c-1.6.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c87d98c7c4a69066fd31701c4e10d178a648c2cac3452e62c6b24dc51f9fcc00", size = 32995 }, + { url = "https://files.pythonhosted.org/packages/52/12/9bf6042d5b0ac8c25afed562fb78e51b0641474097e4139e858b45de40a5/google_crc32c-1.6.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd5e7d2445d1a958c266bfa5d04c39932dc54093fa391736dbfdb0f1929c1fb3", size = 32614 }, + { url = 
"https://files.pythonhosted.org/packages/76/29/fc20f5ec36eac1eea0d0b2de4118c774c5f59c513f2a8630d4db6991f3e0/google_crc32c-1.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:7aec8e88a3583515f9e0957fe4f5f6d8d4997e36d0f61624e70469771584c760", size = 33445 }, + { url = "https://files.pythonhosted.org/packages/e7/ff/ed48d136b65ddc61f5aef6261c58cd817c8cd60640b16680e5419fb17018/google_crc32c-1.6.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48abd62ca76a2cbe034542ed1b6aee851b6f28aaca4e6551b5599b6f3ef175cc", size = 28057 }, + { url = "https://files.pythonhosted.org/packages/14/fb/54deefe679b7d1c1cc81d83396fcf28ad1a66d213bddeb275a8d28665918/google_crc32c-1.6.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18e311c64008f1f1379158158bb3f0c8d72635b9eb4f9545f8cf990c5668e59d", size = 27866 }, +] + +[[package]] +name = "google-resumable-media" +version = "2.7.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-crc32c" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/58/5a/0efdc02665dca14e0837b62c8a1a93132c264bd02054a15abb2218afe0ae/google_resumable_media-2.7.2.tar.gz", hash = "sha256:5280aed4629f2b60b847b0d42f9857fd4935c11af266744df33d8074cae92fe0", size = 2163099 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/82/35/b8d3baf8c46695858cb9d8835a53baa1eeb9906ddaf2f728a5f5b640fd1e/google_resumable_media-2.7.2-py2.py3-none-any.whl", hash = "sha256:3ce7551e9fe6d99e9a126101d2536612bb73486721951e9562fee0f90c6ababa", size = 81251 }, +] + +[[package]] +name = "googleapis-common-protos" +version = "1.65.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/53/3b/1599ceafa875ffb951480c8c74f4b77646a6b80e80970698f2aa93c216ce/googleapis_common_protos-1.65.0.tar.gz", hash = "sha256:334a29d07cddc3aa01dee4988f9afd9b2916ee2ff49d6b757155dc0d197852c0", size = 113657 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/08/49bfe7cf737952cc1a9c43e80cc258ed45dad7f183c5b8276fc94cb3862d/googleapis_common_protos-1.65.0-py2.py3-none-any.whl", hash = "sha256:2972e6c496f435b92590fd54045060867f3fe9be2c82ab148fc8885035479a63", size = 220890 }, +] + +[package.optional-dependencies] +grpc = [ + { name = "grpcio" }, +] + +[[package]] +name = "gprof2dot" +version = "2024.6.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/32/11/16fc5b985741378812223f2c6213b0a95cda333b797def622ac702d28e81/gprof2dot-2024.6.6.tar.gz", hash = "sha256:fa1420c60025a9eb7734f65225b4da02a10fc6dd741b37fa129bc6b41951e5ab", size = 36536 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ae/27/15c4d20871a86281e2bacde9e9f634225d1c2ed0db072f98acf201022411/gprof2dot-2024.6.6-py2.py3-none-any.whl", hash = "sha256:45b14ad7ce64e299c8f526881007b9eb2c6b75505d5613e96e66ee4d5ab33696", size = 34763 }, +] + +[[package]] +name = "gptcache" +version = "0.1.44" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cachetools" }, + { name = "numpy" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/27/73/5cc20749e06017044106837550384f5d8ed00b8e9570689f17e7292e2d23/gptcache-0.1.44.tar.gz", hash = "sha256:d3d5e6a75c57594dc58212c2d6c53a7999c23ede30e0be66d213d885c0ad0be9", size = 95969 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/49/87/8dde0a3757bc207805f751b47878888b09db4a464ae48a55f386f091b488/gptcache-0.1.44-py3-none-any.whl", hash = "sha256:11ddd63b173dc3822b8c2eb7588ea947c825845ed0737b043038a238286bfec4", size = 131634 }, +] + +[[package]] +name = "grandalf" +version = "0.8" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyparsing" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/95/0e/4ac934b416857969f9135dec17ac80660634327e003a870835dd1f382659/grandalf-0.8.tar.gz", hash = "sha256:2813f7aab87f0d20f334a3162ccfbcbf085977134a17a5b516940a93a77ea974", size = 38128 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/61/30/44c7eb0a952478dbb5f2f67df806686d6a7e4b19f6204e091c4f49dc7c69/grandalf-0.8-py3-none-any.whl", hash = "sha256:793ca254442f4a79252ea9ff1ab998e852c1e071b863593e5383afee906b4185", size = 41802 }, +] + +[[package]] +name = "greenlet" +version = "3.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/65/1b/3d91623c3eff61c11799e7f3d6c01f6bfa9bd2d1f0181116fd0b9b108a40/greenlet-3.1.0.tar.gz", hash = "sha256:b395121e9bbe8d02a750886f108d540abe66075e61e22f7353d9acb0b81be0f0", size = 183954 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9b/a4/f2493536dad2539b84f61e60b6071e29bea05e8148cfa67237aeba550898/greenlet-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a814dc3100e8a046ff48faeaa909e80cdb358411a3d6dd5293158425c684eda8", size = 267948 }, + { url = "https://files.pythonhosted.org/packages/80/ae/108d1ed1a9e8472ff6a494121fd45ab5666e4c3009b3bfc595e3a0683570/greenlet-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a771dc64fa44ebe58d65768d869fcfb9060169d203446c1d446e844b62bdfdca", size = 652984 }, + { url = "https://files.pythonhosted.org/packages/16/be/4f5fd9ea44eb58e32ecfaf72839f842e2f343eaa0ff5c24cadbcfe22aad5/greenlet-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0e49a65d25d7350cca2da15aac31b6f67a43d867448babf997fe83c7505f57bc", size = 670521 }, + { url = "https://files.pythonhosted.org/packages/a0/ab/194c82e7c81a884057149641a55f6fd1755b396fd19a88ed4ca2472c2724/greenlet-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2cd8518eade968bc52262d8c46727cfc0826ff4d552cf0430b8d65aaf50bb91d", size = 661985 }, + { url = "https://files.pythonhosted.org/packages/b9/46/d97ad3d8ca6ab8c4f166493164b5461161a295887b6d9ca0bbd4ccdede78/greenlet-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76dc19e660baea5c38e949455c1181bc018893f25372d10ffe24b3ed7341fb25", size = 664007 }, + { url = "https://files.pythonhosted.org/packages/b2/f5/15440aaf5e0ccb7cb050fe8669b5f625ee6ed2e8ba82315b4bc2c0944b86/greenlet-3.1.0-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c0a5b1c22c82831f56f2f7ad9bbe4948879762fe0d59833a4a71f16e5fa0f682", size = 617086 }, + { url = "https://files.pythonhosted.org/packages/24/b5/24dc29e920a1f6b4e2f920fdd642a3364a5b082988931b7d5d1229d48340/greenlet-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2651dfb006f391bcb240635079a68a261b227a10a08af6349cba834a2141efa1", size = 1151877 }, + { url = "https://files.pythonhosted.org/packages/05/76/5902a38828f06b2bd964ffca36275439c3be993184b9540341585aadad3d/greenlet-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3e7e6ef1737a819819b1163116ad4b48d06cfdd40352d813bb14436024fcda99", size = 1177941 }, + { 
url = "https://files.pythonhosted.org/packages/ca/7d/7c348b13b67930c6d0ee1438ec4be64fc2c8f23f55bd50179db2a5303944/greenlet-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:ffb08f2a1e59d38c7b8b9ac8083c9c8b9875f0955b1e9b9b9a965607a51f8e54", size = 293302 }, + { url = "https://files.pythonhosted.org/packages/e7/1f/fe4c6f388c9a6736b5afc783979ba6d0fc9ee9c5edb5539184ac88aa8b8c/greenlet-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9730929375021ec90f6447bff4f7f5508faef1c02f399a1953870cdb78e0c345", size = 269249 }, + { url = "https://files.pythonhosted.org/packages/cc/7a/12e04050093151008ee768580c4fd701c4a4de7ecc01d96af73a130c04ed/greenlet-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:713d450cf8e61854de9420fb7eea8ad228df4e27e7d4ed465de98c955d2b3fa6", size = 659412 }, + { url = "https://files.pythonhosted.org/packages/2d/34/17f5623158ec1fff9326965d61705820aa498cdb5d179f6d42dbc2113c10/greenlet-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c3446937be153718250fe421da548f973124189f18fe4575a0510b5c928f0cc", size = 674309 }, + { url = "https://files.pythonhosted.org/packages/e8/30/22f6c2bc2e21b51ecf0b59f503f00041fe7fc44f5a9923dc701f686a0e47/greenlet-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1ddc7bcedeb47187be74208bc652d63d6b20cb24f4e596bd356092d8000da6d6", size = 667454 }, + { url = "https://files.pythonhosted.org/packages/3e/e8/5d522a89f890a4ffefd02c21a12be503c03071fb5eb586d216e4f263d9e7/greenlet-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44151d7b81b9391ed759a2f2865bbe623ef00d648fed59363be2bbbd5154656f", size = 668913 }, + { url = "https://files.pythonhosted.org/packages/ea/7d/d87885ed60a5bf9dbb4424386b84ab96a50b2f4eb2d00641788b73bdb2cd/greenlet-3.1.0-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6cea1cca3be76c9483282dc7760ea1cc08a6ecec1f0b6ca0a94ea0d17432da19", size = 622696 }, + { url = "https://files.pythonhosted.org/packages/56/fe/bc264a26bc7baeb619334385aac76dd19d0ec556429fb0e74443fd7974b6/greenlet-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:619935a44f414274a2c08c9e74611965650b730eb4efe4b2270f91df5e4adf9a", size = 1155330 }, + { url = "https://files.pythonhosted.org/packages/46/b3/cc9cff0bebd128836cf75a200b9e4b319abf4b72e983c4931775a4976ea4/greenlet-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:221169d31cada333a0c7fd087b957c8f431c1dba202c3a58cf5a3583ed973e9b", size = 1182436 }, + { url = "https://files.pythonhosted.org/packages/98/bb/208f0b192f6c22e5371d0fd6dfa50d429562af8d79a4045bad0f2d7df4ec/greenlet-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:01059afb9b178606b4b6e92c3e710ea1635597c3537e44da69f4531e111dd5e9", size = 293816 }, + { url = "https://files.pythonhosted.org/packages/58/a8/a54a8816187e55f42fa135419efe3a88a2749f75ed4169abc6bf300ce0a9/greenlet-3.1.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:24fc216ec7c8be9becba8b64a98a78f9cd057fd2dc75ae952ca94ed8a893bf27", size = 270018 }, + { url = "https://files.pythonhosted.org/packages/89/dc/d2eaaefca5e295ec9cc09c958f7c3086582a6e1d93de31b780e420cbf6dc/greenlet-3.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d07c28b85b350564bdff9f51c1c5007dfb2f389385d1bc23288de51134ca303", size = 662072 }, + { url = 
"https://files.pythonhosted.org/packages/e8/65/577971a48f06ebd2f759466b4c1c59cd4dc901ec43f1a775207430ad80b9/greenlet-3.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:243a223c96a4246f8a30ea470c440fe9db1f5e444941ee3c3cd79df119b8eebf", size = 675375 }, + { url = "https://files.pythonhosted.org/packages/77/d5/489ee9a7a9bace162d99c52f347edc14ffa570fdf5684e95d9dc146ba1be/greenlet-3.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:26811df4dc81271033a7836bc20d12cd30938e6bd2e9437f56fa03da81b0f8fc", size = 669947 }, + { url = "https://files.pythonhosted.org/packages/75/4a/c612e5688dbbce6873763642195d9902e04de43914fe415661fe3c435e1e/greenlet-3.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9d86401550b09a55410f32ceb5fe7efcd998bd2dad9e82521713cb148a4a15f", size = 671632 }, + { url = "https://files.pythonhosted.org/packages/aa/67/12f51aa488d8778e1b8e9fcaeb25678524eda29a7a133a9263d6449fe011/greenlet-3.1.0-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:26d9c1c4f1748ccac0bae1dbb465fb1a795a75aba8af8ca871503019f4285e2a", size = 626707 }, + { url = "https://files.pythonhosted.org/packages/fb/e8/9374e77fc204973d6d901c8bb2d7cb223e81513754874cbee6cc5c5fc0ba/greenlet-3.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:cd468ec62257bb4544989402b19d795d2305eccb06cde5da0eb739b63dc04665", size = 1154076 }, + { url = "https://files.pythonhosted.org/packages/a2/90/912a1227a841d5df57d6dbe5730e049d5fd38c902c3940e45222360ca336/greenlet-3.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a53dfe8f82b715319e9953330fa5c8708b610d48b5c59f1316337302af5c0811", size = 1182665 }, + { url = "https://files.pythonhosted.org/packages/0d/20/89674b7d62a19138b3352f6080f2ff3e1ee4a298b29bb793746423d0b908/greenlet-3.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:28fe80a3eb673b2d5cc3b12eea468a5e5f4603c26aa34d88bf61bba82ceb2f9b", size = 294647 }, + { url = "https://files.pythonhosted.org/packages/f9/5f/fb128714bbd96614d570fff1d91bbef7a49345bea183e9ea19bdcda1f235/greenlet-3.1.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:76b3e3976d2a452cba7aa9e453498ac72240d43030fdc6d538a72b87eaff52fd", size = 268913 }, + { url = "https://files.pythonhosted.org/packages/cc/d2/460d00a72720a8798815d29cc4281b72103910017ca2d560a12f801b2138/greenlet-3.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:655b21ffd37a96b1e78cc48bf254f5ea4b5b85efaf9e9e2a526b3c9309d660ca", size = 662715 }, + { url = "https://files.pythonhosted.org/packages/86/01/852b8c516b35ef2b16812655612092e02608ea79de7e79fde841cfcdbae4/greenlet-3.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c6f4c2027689093775fd58ca2388d58789009116844432d920e9147f91acbe64", size = 675985 }, + { url = "https://files.pythonhosted.org/packages/eb/9b/39930fdefa5dab2511ed813a6764458980e04e10c8c3560862fb2f340128/greenlet-3.1.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:76e5064fd8e94c3f74d9fd69b02d99e3cdb8fc286ed49a1f10b256e59d0d3a0b", size = 670880 }, + { url = "https://files.pythonhosted.org/packages/66/49/de46b2da577000044e7f5ab514021bbc48a0b0c6dd7af2da9732db36c584/greenlet-3.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a4bf607f690f7987ab3291406e012cd8591a4f77aa54f29b890f9c331e84989", size = 672944 }, + { url = 
"https://files.pythonhosted.org/packages/af/c1/abccddcb2ec07538b6ee1fa30999a239a1ec807109a8dc069e55288df636/greenlet-3.1.0-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:037d9ac99540ace9424cb9ea89f0accfaff4316f149520b4ae293eebc5bded17", size = 629493 }, + { url = "https://files.pythonhosted.org/packages/c1/e8/30c84a3c639691f6c00b04575abd474d94d404a9ad686e60ba0c17c797d0/greenlet-3.1.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:90b5bbf05fe3d3ef697103850c2ce3374558f6fe40fd57c9fac1bf14903f50a5", size = 1150524 }, + { url = "https://files.pythonhosted.org/packages/f7/ed/f25832e30a54a92fa13ab94a206f2ea296306acdf5f6a48f88bbb41a6e44/greenlet-3.1.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:726377bd60081172685c0ff46afbc600d064f01053190e4450857483c4d44484", size = 1180196 }, + { url = "https://files.pythonhosted.org/packages/87/b0/ac381b73c9b9e2cb55970b9a5842ff5b6bc83a7f23aedd3dded1589f0039/greenlet-3.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:d46d5069e2eeda111d6f71970e341f4bd9aeeee92074e649ae263b834286ecc0", size = 294593 }, +] + +[[package]] +name = "grpc-google-iam-v1" +version = "0.13.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "googleapis-common-protos", extra = ["grpc"] }, + { name = "grpcio" }, + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/63/41/f01bf46bac4034b4750575fe87c80c5a43a8912847307955e22f2125b60c/grpc-google-iam-v1-0.13.1.tar.gz", hash = "sha256:3ff4b2fd9d990965e410965253c0da6f66205d5a8291c4c31c6ebecca18a9001", size = 17664 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a8/7d/da3875b7728bc700eeb28b513754ce237c04ac7cbf8559d76b0464ee01cb/grpc_google_iam_v1-0.13.1-py2.py3-none-any.whl", hash = "sha256:c3e86151a981811f30d5e7330f271cee53e73bb87755e88cc3b6f0c7b5fe374e", size = 24866 }, +] + +[[package]] +name = "grpcio" +version = "1.66.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e7/42/94293200e40480d79fc876b2330e7dffb20f959b390afa77c0dbaa0c8372/grpcio-1.66.1.tar.gz", hash = "sha256:35334f9c9745add3e357e3372756fd32d925bd52c41da97f4dfdafbde0bf0ee2", size = 12326405 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9a/9a/95640ac9a29fdfb40bc6ad2f2a33710e55c1adbda831712254d96530e990/grpcio-1.66.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:4877ba180591acdf127afe21ec1c7ff8a5ecf0fe2600f0d3c50e8c4a1cbc6492", size = 4969896 }, + { url = "https://files.pythonhosted.org/packages/7a/63/c16a70596438de9bc62c2e7c61ad7a82d9290b7545e61278dae7a0db753f/grpcio-1.66.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:3750c5a00bd644c75f4507f77a804d0189d97a107eb1481945a0cf3af3e7a5ac", size = 10586680 }, + { url = "https://files.pythonhosted.org/packages/2a/d5/2a3567aace955a561e0eef9b79bd187acad8949a0fca5d1470f36c1e60c7/grpcio-1.66.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:a013c5fbb12bfb5f927444b477a26f1080755a931d5d362e6a9a720ca7dbae60", size = 5483551 }, + { url = "https://files.pythonhosted.org/packages/c1/c1/3ee60d4e425f36b47c6e0f559aa40d6d0012586cffda165346157542a504/grpcio-1.66.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b1b24c23d51a1e8790b25514157d43f0a4dce1ac12b3f0b8e9f66a5e2c4c132f", size = 6088989 }, + { url = "https://files.pythonhosted.org/packages/e4/6d/ee10b1bbe8b1744b6e8570e313ec873a13874494c1663a0b8260a6115913/grpcio-1.66.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:b7ffb8ea674d68de4cac6f57d2498fef477cef582f1fa849e9f844863af50083", size = 5733760 }, + { url = "https://files.pythonhosted.org/packages/79/31/9058f0f819828807e8503d17fb3ded21702b38b3e32a1552f76e0d59ff27/grpcio-1.66.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:307b1d538140f19ccbd3aed7a93d8f71103c5d525f3c96f8616111614b14bf2a", size = 6404001 }, + { url = "https://files.pythonhosted.org/packages/a6/0b/9b74b1ee3e18c9ba4751f02e30165c6f16dcc5665634bc0d689cbb5ed05f/grpcio-1.66.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1c17ebcec157cfb8dd445890a03e20caf6209a5bd4ac5b040ae9dbc59eef091d", size = 5990244 }, + { url = "https://files.pythonhosted.org/packages/52/0e/775f74401a5b6924e4976549a6f7d2ef4c0fa09340e2738ec46d0e4dff61/grpcio-1.66.1-cp310-cp310-win32.whl", hash = "sha256:ef82d361ed5849d34cf09105d00b94b6728d289d6b9235513cb2fcc79f7c432c", size = 3554349 }, + { url = "https://files.pythonhosted.org/packages/f3/72/6046088fa273d2c4fe72009d2411d5ccd053017014b1197c4881ead3ee70/grpcio-1.66.1-cp310-cp310-win_amd64.whl", hash = "sha256:292a846b92cdcd40ecca46e694997dd6b9be6c4c01a94a0dfb3fcb75d20da858", size = 4288627 }, + { url = "https://files.pythonhosted.org/packages/22/8a/15d758ce27c82ba4760b9e221e153db02e4a7acd71dcdd7d5f8d3ad83630/grpcio-1.66.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:c30aeceeaff11cd5ddbc348f37c58bcb96da8d5aa93fed78ab329de5f37a0d7a", size = 4978160 }, + { url = "https://files.pythonhosted.org/packages/0f/45/9d666f5b447cad3762cca22c8a805677a11976d8a2c82484659d96a31937/grpcio-1.66.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8a1e224ce6f740dbb6b24c58f885422deebd7eb724aff0671a847f8951857c26", size = 10620579 }, + { url = "https://files.pythonhosted.org/packages/12/3f/c5e30952a37e9ad266ff7f11c806be1c33253e9daa97e8265f53f10a0b15/grpcio-1.66.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:a66fe4dc35d2330c185cfbb42959f57ad36f257e0cc4557d11d9f0a3f14311df", size = 5487691 }, + { url = "https://files.pythonhosted.org/packages/09/9c/d4f0c4c7a9fcdc140701c1b2b4c45d0de293380461ecaa7df76d8b5d8b03/grpcio-1.66.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e3ba04659e4fce609de2658fe4dbf7d6ed21987a94460f5f92df7579fd5d0e22", size = 6089513 }, + { url = "https://files.pythonhosted.org/packages/ab/2d/02890f309feabe9255a406700096e08a0a2b9ed20ab43e86b6e633517b0d/grpcio-1.66.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4573608e23f7e091acfbe3e84ac2045680b69751d8d67685ffa193a4429fedb1", size = 5732315 }, + { url = "https://files.pythonhosted.org/packages/ea/3a/2040ce42a03b163768e43966e02d9c88b2dcb0f28cb611d976b8d68d3835/grpcio-1.66.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7e06aa1f764ec8265b19d8f00140b8c4b6ca179a6dc67aa9413867c47e1fb04e", size = 6407861 }, + { url = "https://files.pythonhosted.org/packages/e9/9a/fba2ed0ade08b4c8b5e2456269ddf73a5abbfe9407f9e6bd85d92efb4c9d/grpcio-1.66.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3885f037eb11f1cacc41f207b705f38a44b69478086f40608959bf5ad85826dd", size = 5990083 }, + { url = "https://files.pythonhosted.org/packages/02/7b/7aabc0cf5b8bbe74226cbf4f948d4aa66df0b29095eea44ea465a1b01f13/grpcio-1.66.1-cp311-cp311-win32.whl", hash = "sha256:97ae7edd3f3f91480e48ede5d3e7d431ad6005bfdbd65c1b56913799ec79e791", size = 3555811 }, + { url = "https://files.pythonhosted.org/packages/45/86/cc31ad1578abd322c403b7425e6b50eb8a48a8f96c2e558dacd0ef472dc1/grpcio-1.66.1-cp311-cp311-win_amd64.whl", hash = 
"sha256:cfd349de4158d797db2bd82d2020554a121674e98fbe6b15328456b3bf2495bb", size = 4290747 }, + { url = "https://files.pythonhosted.org/packages/25/31/fa15c10757a8703332d9f9eff2ab676e8f8807e8636c01b965a37f806ded/grpcio-1.66.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:a92c4f58c01c77205df6ff999faa008540475c39b835277fb8883b11cada127a", size = 4912090 }, + { url = "https://files.pythonhosted.org/packages/31/3f/09f796724b44b625e4d294f6df8ab0bb63b4709664bd574ea97a8c0d6159/grpcio-1.66.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:fdb14bad0835914f325349ed34a51940bc2ad965142eb3090081593c6e347be9", size = 10579733 }, + { url = "https://files.pythonhosted.org/packages/bc/d5/15c5934ac3550f682b04753e5481eca18a3710b220e16d3345a7e6f7c9f6/grpcio-1.66.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:f03a5884c56256e08fd9e262e11b5cfacf1af96e2ce78dc095d2c41ccae2c80d", size = 5421092 }, + { url = "https://files.pythonhosted.org/packages/fa/77/f8ab8d436373ad09e1f8f50bf759b4afc8ad1bc121e5cf7dedd8507a4b63/grpcio-1.66.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ca2559692d8e7e245d456877a85ee41525f3ed425aa97eb7a70fc9a79df91a0", size = 6028550 }, + { url = "https://files.pythonhosted.org/packages/1d/6c/a813a5b6d716cbc5cc3e496b186b6878816bf5f32aa7f7ae5f9d8dc5e669/grpcio-1.66.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84ca1be089fb4446490dd1135828bd42a7c7f8421e74fa581611f7afdf7ab761", size = 5672550 }, + { url = "https://files.pythonhosted.org/packages/a0/72/06962e2891fe3846a9dc61547d8ef35151b8976a083187e6647e65306c45/grpcio-1.66.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:d639c939ad7c440c7b2819a28d559179a4508783f7e5b991166f8d7a34b52815", size = 6354719 }, + { url = "https://files.pythonhosted.org/packages/43/10/4db87a953a3f3c73a8284e176556b6eca33496b8ffaa93107f37f772148e/grpcio-1.66.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b9feb4e5ec8dc2d15709f4d5fc367794d69277f5d680baf1910fc9915c633524", size = 5933351 }, + { url = "https://files.pythonhosted.org/packages/47/c7/f1fdd77bbe1c6459777a2d604228cc80124373e75d775c2a019755c29182/grpcio-1.66.1-cp312-cp312-win32.whl", hash = "sha256:7101db1bd4cd9b880294dec41a93fcdce465bdbb602cd8dc5bd2d6362b618759", size = 3538005 }, + { url = "https://files.pythonhosted.org/packages/66/2b/a6e68d7ea6f4fbc31cce20e354d6cef484da0a9891ee6a3eaf3aa9659d01/grpcio-1.66.1-cp312-cp312-win_amd64.whl", hash = "sha256:b0aa03d240b5539648d996cc60438f128c7f46050989e35b25f5c18286c86734", size = 4275565 }, +] + +[[package]] +name = "grpcio-status" +version = "1.62.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "googleapis-common-protos" }, + { name = "grpcio" }, + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/7c/d7/013ef01c5a1c2fd0932c27c904934162f69f41ca0f28396d3ffe4d386123/grpcio-status-1.62.3.tar.gz", hash = "sha256:289bdd7b2459794a12cf95dc0cb727bd4a1742c37bd823f760236c937e53a485", size = 13063 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/90/40/972271de05f9315c0d69f9f7ebbcadd83bc85322f538637d11bb8c67803d/grpcio_status-1.62.3-py3-none-any.whl", hash = "sha256:f9049b762ba8de6b1086789d8315846e094edac2c50beaf462338b301a8fd4b8", size = 14448 }, +] + +[[package]] +name = "grpcio-tools" +version = "1.62.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "grpcio" }, + { name = "protobuf" }, + { name = "setuptools" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/54/fa/b69bd8040eafc09b88bb0ec0fea59e8aacd1a801e688af087cead213b0d0/grpcio-tools-1.62.3.tar.gz", hash = "sha256:7c7136015c3d62c3eef493efabaf9e3380e3e66d24ee8e94c01cb71377f57833", size = 4538520 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ff/eb/eb0a3aa9480c3689d31fd2ad536df6a828e97a60f667c8a93d05bdf07150/grpcio_tools-1.62.3-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:2f968b049c2849540751ec2100ab05e8086c24bead769ca734fdab58698408c1", size = 5117556 }, + { url = "https://files.pythonhosted.org/packages/f3/fb/8be3dda485f7fab906bfa02db321c3ecef953a87cdb5f6572ca08b187bcb/grpcio_tools-1.62.3-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:0a8c0c4724ae9c2181b7dbc9b186df46e4f62cb18dc184e46d06c0ebeccf569e", size = 2719330 }, + { url = "https://files.pythonhosted.org/packages/63/de/6978f8d10066e240141cd63d1fbfc92818d96bb53427074f47a8eda921e1/grpcio_tools-1.62.3-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5782883a27d3fae8c425b29a9d3dcf5f47d992848a1b76970da3b5a28d424b26", size = 3070818 }, + { url = "https://files.pythonhosted.org/packages/74/34/bb8f816893fc73fd6d830e895e8638d65d13642bb7a434f9175c5ca7da11/grpcio_tools-1.62.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3d812daffd0c2d2794756bd45a353f89e55dc8f91eb2fc840c51b9f6be62667", size = 2804993 }, + { url = "https://files.pythonhosted.org/packages/78/60/b2198d7db83293cdb9760fc083f077c73e4c182da06433b3b157a1567d06/grpcio_tools-1.62.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b47d0dda1bdb0a0ba7a9a6de88e5a1ed61f07fad613964879954961e36d49193", size = 3684915 }, + { url = "https://files.pythonhosted.org/packages/61/20/56dbdc4ecb14d42a03cd164ff45e6e84572bbe61ee59c50c39f4d556a8d5/grpcio_tools-1.62.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ca246dffeca0498be9b4e1ee169b62e64694b0f92e6d0be2573e65522f39eea9", size = 3297482 }, + { url = "https://files.pythonhosted.org/packages/4a/dc/e417a313c905744ce8cedf1e1edd81c41dc45ff400ae1c45080e18f26712/grpcio_tools-1.62.3-cp310-cp310-win32.whl", hash = "sha256:6a56d344b0bab30bf342a67e33d386b0b3c4e65868ffe93c341c51e1a8853ca5", size = 909793 }, + { url = "https://files.pythonhosted.org/packages/d9/69/75e7ebfd8d755d3e7be5c6d1aa6d13220f5bba3a98965e4b50c329046777/grpcio_tools-1.62.3-cp310-cp310-win_amd64.whl", hash = "sha256:710fecf6a171dcbfa263a0a3e7070e0df65ba73158d4c539cec50978f11dad5d", size = 1052459 }, + { url = "https://files.pythonhosted.org/packages/23/52/2dfe0a46b63f5ebcd976570aa5fc62f793d5a8b169e211c6a5aede72b7ae/grpcio_tools-1.62.3-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:703f46e0012af83a36082b5f30341113474ed0d91e36640da713355cd0ea5d23", size = 5147623 }, + { url = "https://files.pythonhosted.org/packages/f0/2e/29fdc6c034e058482e054b4a3c2432f84ff2e2765c1342d4f0aa8a5c5b9a/grpcio_tools-1.62.3-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:7cc83023acd8bc72cf74c2edbe85b52098501d5b74d8377bfa06f3e929803492", size = 2719538 }, + { url = "https://files.pythonhosted.org/packages/f9/60/abe5deba32d9ec2c76cdf1a2f34e404c50787074a2fee6169568986273f1/grpcio_tools-1.62.3-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ff7d58a45b75df67d25f8f144936a3e44aabd91afec833ee06826bd02b7fbe7", size = 3070964 }, + { url = "https://files.pythonhosted.org/packages/bc/ad/e2b066684c75f8d9a48508cde080a3a36618064b9cadac16d019ca511444/grpcio_tools-1.62.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:7f2483ea232bd72d98a6dc6d7aefd97e5bc80b15cd909b9e356d6f3e326b6e43", size = 2805003 }, + { url = "https://files.pythonhosted.org/packages/9c/3f/59bf7af786eae3f9d24ee05ce75318b87f541d0950190ecb5ffb776a1a58/grpcio_tools-1.62.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:962c84b4da0f3b14b3cdb10bc3837ebc5f136b67d919aea8d7bb3fd3df39528a", size = 3685154 }, + { url = "https://files.pythonhosted.org/packages/f1/79/4dd62478b91e27084c67b35a2316ce8a967bd8b6cb8d6ed6c86c3a0df7cb/grpcio_tools-1.62.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8ad0473af5544f89fc5a1ece8676dd03bdf160fb3230f967e05d0f4bf89620e3", size = 3297942 }, + { url = "https://files.pythonhosted.org/packages/b8/cb/86449ecc58bea056b52c0b891f26977afc8c4464d88c738f9648da941a75/grpcio_tools-1.62.3-cp311-cp311-win32.whl", hash = "sha256:db3bc9fa39afc5e4e2767da4459df82b095ef0cab2f257707be06c44a1c2c3e5", size = 910231 }, + { url = "https://files.pythonhosted.org/packages/45/a4/9736215e3945c30ab6843280b0c6e1bff502910156ea2414cd77fbf1738c/grpcio_tools-1.62.3-cp311-cp311-win_amd64.whl", hash = "sha256:e0898d412a434e768a0c7e365acabe13ff1558b767e400936e26b5b6ed1ee51f", size = 1052496 }, + { url = "https://files.pythonhosted.org/packages/2a/a5/d6887eba415ce318ae5005e8dfac3fa74892400b54b6d37b79e8b4f14f5e/grpcio_tools-1.62.3-cp312-cp312-macosx_10_10_universal2.whl", hash = "sha256:d102b9b21c4e1e40af9a2ab3c6d41afba6bd29c0aa50ca013bf85c99cdc44ac5", size = 5147690 }, + { url = "https://files.pythonhosted.org/packages/8a/7c/3cde447a045e83ceb4b570af8afe67ffc86896a2fe7f59594dc8e5d0a645/grpcio_tools-1.62.3-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:0a52cc9444df978438b8d2332c0ca99000521895229934a59f94f37ed896b133", size = 2720538 }, + { url = "https://files.pythonhosted.org/packages/88/07/f83f2750d44ac4f06c07c37395b9c1383ef5c994745f73c6bfaf767f0944/grpcio_tools-1.62.3-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141d028bf5762d4a97f981c501da873589df3f7e02f4c1260e1921e565b376fa", size = 3071571 }, + { url = "https://files.pythonhosted.org/packages/37/74/40175897deb61e54aca716bc2e8919155b48f33aafec8043dda9592d8768/grpcio_tools-1.62.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47a5c093ab256dec5714a7a345f8cc89315cb57c298b276fa244f37a0ba507f0", size = 2806207 }, + { url = "https://files.pythonhosted.org/packages/ec/ee/d8de915105a217cbcb9084d684abdc032030dcd887277f2ef167372287fe/grpcio_tools-1.62.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:f6831fdec2b853c9daa3358535c55eed3694325889aa714070528cf8f92d7d6d", size = 3685815 }, + { url = "https://files.pythonhosted.org/packages/fd/d9/4360a6c12be3d7521b0b8c39e5d3801d622fbb81cc2721dbd3eee31e28c8/grpcio_tools-1.62.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e02d7c1a02e3814c94ba0cfe43d93e872c758bd8fd5c2797f894d0c49b4a1dfc", size = 3298378 }, + { url = "https://files.pythonhosted.org/packages/29/3b/7cdf4a9e5a3e0a35a528b48b111355cd14da601413a4f887aa99b6da468f/grpcio_tools-1.62.3-cp312-cp312-win32.whl", hash = "sha256:b881fd9505a84457e9f7e99362eeedd86497b659030cf57c6f0070df6d9c2b9b", size = 910416 }, + { url = "https://files.pythonhosted.org/packages/6c/66/dd3ec249e44c1cc15e902e783747819ed41ead1336fcba72bf841f72c6e9/grpcio_tools-1.62.3-cp312-cp312-win_amd64.whl", hash = "sha256:11c625eebefd1fd40a228fc8bae385e448c7e32a6ae134e43cf13bbc23f902b7", size = 1052856 }, +] + +[[package]] +name = "gunicorn" +version = "22.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "packaging" 
}, +] +sdist = { url = "https://files.pythonhosted.org/packages/1e/88/e2f93c5738a4c1f56a458fc7a5b1676fc31dcdbb182bef6b40a141c17d66/gunicorn-22.0.0.tar.gz", hash = "sha256:4a0b436239ff76fb33f11c07a16482c521a7e09c1ce3cc293c2330afe01bec63", size = 3639760 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/29/97/6d610ae77b5633d24b69c2ff1ac3044e0e565ecbd1ec188f02c45073054c/gunicorn-22.0.0-py3-none-any.whl", hash = "sha256:350679f91b24062c86e386e198a15438d53a7a8207235a78ba1b53df4c4378d9", size = 84443 }, +] + +[[package]] +name = "h11" +version = "0.14.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f5/38/3af3d3633a34a3316095b39c8e8fb4853a28a536e55d347bd8d8e9a14b03/h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d", size = 100418 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/95/04/ff642e65ad6b90db43e668d70ffb6736436c7ce41fcc549f4e9472234127/h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761", size = 58259 }, +] + +[[package]] +name = "h2" +version = "4.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "hpack" }, + { name = "hyperframe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2a/32/fec683ddd10629ea4ea46d206752a95a2d8a48c22521edd70b142488efe1/h2-4.1.0.tar.gz", hash = "sha256:a83aca08fbe7aacb79fec788c9c0bac936343560ed9ec18b82a13a12c28d2abb", size = 2145593 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/e5/db6d438da759efbb488c4f3fbdab7764492ff3c3f953132efa6b9f0e9e53/h2-4.1.0-py3-none-any.whl", hash = "sha256:03a46bcf682256c95b5fd9e9a99c1323584c3eec6440d379b9903d709476bc6d", size = 57488 }, +] + +[[package]] +name = "hpack" +version = "4.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3e/9b/fda93fb4d957db19b0f6b370e79d586b3e8528b20252c729c476a2c02954/hpack-4.0.0.tar.gz", hash = "sha256:fc41de0c63e687ebffde81187a948221294896f6bdc0ae2312708df339430095", size = 49117 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d5/34/e8b383f35b77c402d28563d2b8f83159319b509bc5f760b15d60b0abf165/hpack-4.0.0-py3-none-any.whl", hash = "sha256:84a076fad3dc9a9f8063ccb8041ef100867b1878b25ef0ee63847a5d53818a6c", size = 32611 }, +] + +[[package]] +name = "httpcore" +version = "1.0.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/17/b0/5e8b8674f8d203335a62fdfcfa0d11ebe09e23613c3391033cbba35f7926/httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61", size = 83234 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/d4/e5d7e4f2174f8a4d63c8897d79eb8fe2503f7ecc03282fee1fa2719c2704/httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5", size = 77926 }, +] + +[[package]] +name = "httptools" +version = "0.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/67/1d/d77686502fced061b3ead1c35a2d70f6b281b5f723c4eff7a2277c04e4a2/httptools-0.6.1.tar.gz", hash = "sha256:c6e26c30455600b95d94b1b836085138e82f177351454ee841c148f93a9bad5a", size = 191228 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/a9/6a/80bce0216b63babf51cdc34814c3f0f10489e13ab89fb6bc91202736a8a2/httptools-0.6.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d2f6c3c4cb1948d912538217838f6e9960bc4a521d7f9b323b3da579cd14532f", size = 149778 }, + { url = "https://files.pythonhosted.org/packages/bd/7d/4cd75356dfe0ed0b40ca6873646bf9ff7b5138236c72338dc569dc57d509/httptools-0.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:00d5d4b68a717765b1fabfd9ca755bd12bf44105eeb806c03d1962acd9b8e563", size = 77604 }, + { url = "https://files.pythonhosted.org/packages/4e/74/6348ce41fb5c1484f35184c172efb8854a288e6090bb54e2210598268369/httptools-0.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:639dc4f381a870c9ec860ce5c45921db50205a37cc3334e756269736ff0aac58", size = 346717 }, + { url = "https://files.pythonhosted.org/packages/65/e7/dd5ba95c84047118a363f0755ad78e639e0529be92424bb020496578aa3b/httptools-0.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e57997ac7fb7ee43140cc03664de5f268813a481dff6245e0075925adc6aa185", size = 341442 }, + { url = "https://files.pythonhosted.org/packages/d8/97/b37d596bc32be291477a8912bf9d1508d7e8553aa11a30cd871fd89cbae4/httptools-0.6.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0ac5a0ae3d9f4fe004318d64b8a854edd85ab76cffbf7ef5e32920faef62f142", size = 354531 }, + { url = "https://files.pythonhosted.org/packages/99/c9/53ed7176583ec4b4364d941a08624288f2ae55b4ff58b392cdb68db1e1ed/httptools-0.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3f30d3ce413088a98b9db71c60a6ada2001a08945cb42dd65a9a9fe228627658", size = 347754 }, + { url = "https://files.pythonhosted.org/packages/1e/fc/8a26c2adcd3f141e4729897633f03832b71ebea6f4c31cce67a92ded1961/httptools-0.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:1ed99a373e327f0107cb513b61820102ee4f3675656a37a50083eda05dc9541b", size = 58165 }, + { url = "https://files.pythonhosted.org/packages/f5/d1/53283b96ed823d5e4d89ee9aa0f29df5a1bdf67f148e061549a595d534e4/httptools-0.6.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7a7ea483c1a4485c71cb5f38be9db078f8b0e8b4c4dc0210f531cdd2ddac1ef1", size = 145855 }, + { url = "https://files.pythonhosted.org/packages/80/dd/cebc9d4b1d4b70e9f3d40d1db0829a28d57ca139d0b04197713816a11996/httptools-0.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:85ed077c995e942b6f1b07583e4eb0a8d324d418954fc6af913d36db7c05a5a0", size = 75604 }, + { url = "https://files.pythonhosted.org/packages/76/7a/45c5a9a2e9d21f7381866eb7b6ead5a84d8fe7e54e35208eeb18320a29b4/httptools-0.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b0bb634338334385351a1600a73e558ce619af390c2b38386206ac6a27fecfc", size = 324784 }, + { url = "https://files.pythonhosted.org/packages/59/23/047a89e66045232fb82c50ae57699e40f70e073ae5ccd53f54e532fbd2a2/httptools-0.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d9ceb2c957320def533671fc9c715a80c47025139c8d1f3797477decbc6edd2", size = 318547 }, + { url = "https://files.pythonhosted.org/packages/82/f5/50708abc7965d7d93c0ee14a148ccc6d078a508f47fe9357c79d5360f252/httptools-0.6.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4f0f8271c0a4db459f9dc807acd0eadd4839934a4b9b892f6f160e94da309837", size = 330211 }, + { url = 
"https://files.pythonhosted.org/packages/e3/1e/9823ca7aab323c0e0e9dd82ce835a6e93b69f69aedffbc94d31e327f4283/httptools-0.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6a4f5ccead6d18ec072ac0b84420e95d27c1cdf5c9f1bc8fbd8daf86bd94f43d", size = 322174 }, + { url = "https://files.pythonhosted.org/packages/14/e4/20d28dfe7f5b5603b6b04c33bb88662ad749de51f0c539a561f235f42666/httptools-0.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:5cceac09f164bcba55c0500a18fe3c47df29b62353198e4f37bbcc5d591172c3", size = 55434 }, + { url = "https://files.pythonhosted.org/packages/60/13/b62e086b650752adf9094b7e62dab97f4cb7701005664544494b7956a51e/httptools-0.6.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:75c8022dca7935cba14741a42744eee13ba05db00b27a4b940f0d646bd4d56d0", size = 146354 }, + { url = "https://files.pythonhosted.org/packages/f8/5d/9ad32b79b6c24524087e78aa3f0a2dfcf58c11c90e090e4593b35def8a86/httptools-0.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:48ed8129cd9a0d62cf4d1575fcf90fb37e3ff7d5654d3a5814eb3d55f36478c2", size = 75785 }, + { url = "https://files.pythonhosted.org/packages/d0/a4/b503851c40f20bcbd453db24ed35d961f62abdae0dccc8f672cd5d350d87/httptools-0.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f58e335a1402fb5a650e271e8c2d03cfa7cea46ae124649346d17bd30d59c90", size = 345396 }, + { url = "https://files.pythonhosted.org/packages/a2/9a/aa406864f3108e06f7320425a528ff8267124dead1fd72a3e9da2067f893/httptools-0.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93ad80d7176aa5788902f207a4e79885f0576134695dfb0fefc15b7a4648d503", size = 344741 }, + { url = "https://files.pythonhosted.org/packages/cf/3a/3fd8dfb987c4247651baf2ac6f28e8e9f889d484ca1a41a9ad0f04dfe300/httptools-0.6.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9bb68d3a085c2174c2477eb3ffe84ae9fb4fde8792edb7bcd09a1d8467e30a84", size = 345096 }, + { url = "https://files.pythonhosted.org/packages/80/01/379f6466d8e2edb861c1f44ccac255ed1f8a0d4c5c666a1ceb34caad7555/httptools-0.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b512aa728bc02354e5ac086ce76c3ce635b62f5fbc32ab7082b5e582d27867bb", size = 343535 }, + { url = "https://files.pythonhosted.org/packages/d3/97/60860e9ee87a7d4712b98f7e1411730520053b9d69e9e42b0b9751809c17/httptools-0.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:97662ce7fb196c785344d00d638fc9ad69e18ee4bfb4000b35a52efe5adcc949", size = 55660 }, +] + +[[package]] +name = "httpx" +version = "0.27.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "certifi" }, + { name = "httpcore" }, + { name = "idna" }, + { name = "sniffio" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/78/82/08f8c936781f67d9e6b9eeb8a0c8b4e406136ea4c3d1f89a5db71d42e0e6/httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2", size = 144189 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/56/95/9377bcb415797e44274b51d46e3249eba641711cf3348050f76ee7b15ffc/httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0", size = 76395 }, +] + +[package.optional-dependencies] +http2 = [ + { name = "h2" }, +] + +[[package]] +name = "httpx-sse" +version = "0.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/4c/60/8f4281fa9bbf3c8034fd54c0e7412e66edbab6bc74c4996bd616f8d0406e/httpx-sse-0.4.0.tar.gz", hash = "sha256:1e81a3a3070ce322add1d3529ed42eb5f70817f45ed6ec915ab753f961139721", size = 12624 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e1/9b/a181f281f65d776426002f330c31849b86b31fc9d848db62e16f03ff739f/httpx_sse-0.4.0-py3-none-any.whl", hash = "sha256:f329af6eae57eaa2bdfd962b42524764af68075ea87370a2de920af5341e318f", size = 7819 }, +] + +[[package]] +name = "huggingface-hub" +version = "0.25.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "filelock" }, + { name = "fsspec" }, + { name = "packaging" }, + { name = "pyyaml" }, + { name = "requests" }, + { name = "tqdm" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/81/18/52812091169325bf609feac958d5612e9c49788155a170352c7d55b6a74c/huggingface_hub-0.25.0.tar.gz", hash = "sha256:fb5fbe6c12fcd99d187ec7db95db9110fb1a20505f23040a5449a717c1a0db4d", size = 365666 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d5/ce/1f8e61cd63175cc2e79233b954b1c4e85363c788fb3a1fa23c87a25c9b81/huggingface_hub-0.25.0-py3-none-any.whl", hash = "sha256:e2f357b35d72d5012cfd127108c4e14abcd61ba4ebc90a5a374dc2456cb34e12", size = 436429 }, +] + +[[package]] +name = "humanfriendly" +version = "10.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyreadline3", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/cc/3f/2c29224acb2e2df4d2046e4c73ee2662023c58ff5b113c4c1adac0886c43/humanfriendly-10.0.tar.gz", hash = "sha256:6b0b831ce8f15f7300721aa49829fc4e83921a9a301cc7f606be6686a2288ddc", size = 360702 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f0/0f/310fb31e39e2d734ccaa2c0fb981ee41f7bd5056ce9bc29b2248bd569169/humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477", size = 86794 }, +] + +[[package]] +name = "humanize" +version = "4.10.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5d/b1/c8f05d5dc8f64030d8cc71e91307c1daadf6ec0d70bcd6eabdfd9b6f153f/humanize-4.10.0.tar.gz", hash = "sha256:06b6eb0293e4b85e8d385397c5868926820db32b9b654b932f57fa41c23c9978", size = 79192 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8f/49/a29c79bea335e52fb512a43faf84998c184c87fef82c65f568f8c56f2642/humanize-4.10.0-py3-none-any.whl", hash = "sha256:39e7ccb96923e732b5c2e27aeaa3b10a8dfeeba3eb965ba7b74a3eb0e30040a6", size = 126957 }, +] + +[[package]] +name = "hyperframe" +version = "6.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5a/2a/4747bff0a17f7281abe73e955d60d80aae537a5d203f417fa1c2e7578ebb/hyperframe-6.0.1.tar.gz", hash = "sha256:ae510046231dc8e9ecb1a6586f63d2347bf4c8905914aa84ba585ae85f28a914", size = 25008 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d7/de/85a784bcc4a3779d1753a7ec2dee5de90e18c7bcf402e71b51fcf150b129/hyperframe-6.0.1-py3-none-any.whl", hash = "sha256:0ec6bafd80d8ad2195c4f03aacba3a8265e57bc4cff261e802bf39970ed02a15", size = 12389 }, +] + +[[package]] +name = "identify" +version = "2.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/29/bb/25024dbcc93516c492b75919e76f389bac754a3e4248682fba32b250c880/identify-2.6.1.tar.gz", hash = 
"sha256:91478c5fb7c3aac5ff7bf9b4344f803843dc586832d5f110d672b19aa1984c98", size = 99097 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7d/0c/4ef72754c050979fdcc06c744715ae70ea37e734816bb6514f79df77a42f/identify-2.6.1-py2.py3-none-any.whl", hash = "sha256:53863bcac7caf8d2ed85bd20312ea5dcfc22226800f6d6881f232d861db5a8f0", size = 98972 }, +] + +[[package]] +name = "idna" +version = "3.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442 }, +] + +[[package]] +name = "importlib-metadata" +version = "7.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "zipp" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a0/fc/c4e6078d21fc4fa56300a241b87eae76766aa380a23fc450fc85bb7bf547/importlib_metadata-7.1.0.tar.gz", hash = "sha256:b78938b926ee8d5f020fc4772d487045805a55ddbad2ecf21c6d60938dc7fcd2", size = 52120 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2d/0a/679461c511447ffaf176567d5c496d1de27cbe34a87df6677d7171b2fbd4/importlib_metadata-7.1.0-py3-none-any.whl", hash = "sha256:30962b96c0c223483ed6cc7280e7f0199feb01a0e40cfae4d4450fc6fab1f570", size = 24409 }, +] + +[[package]] +name = "importlib-resources" +version = "6.4.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/98/be/f3e8c6081b684f176b761e6a2fef02a0be939740ed6f54109a2951d806f3/importlib_resources-6.4.5.tar.gz", hash = "sha256:980862a1d16c9e147a59603677fa2aa5fd82b87f223b6cb870695bcfce830065", size = 43372 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e1/6a/4604f9ae2fa62ef47b9de2fa5ad599589d28c9fd1d335f32759813dfa91e/importlib_resources-6.4.5-py3-none-any.whl", hash = "sha256:ac29d5f956f01d5e4bb63102a5a19957f1b9175e45649977264a1416783bb717", size = 36115 }, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", size = 4646 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374", size = 5892 }, +] + +[[package]] +name = "instructor" +version = "1.3.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohttp" }, + { name = "docstring-parser" }, + { name = "jiter" }, + { name = "openai" }, + { name = "pydantic" }, + { name = "pydantic-core" }, + { name = "rich" }, + { name = "tenacity" }, + { name = "typer" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3d/d1/b8c0d24466d653d371ee13de63644db607b32372e799e354bc4e76941e79/instructor-1.3.3.tar.gz", hash = "sha256:e27bf3c1187b0b2130ea38ecde7c2b4f571d6a5ce1397fb15c27490988b45441", size = 40020 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/44/57/10ec9e31dd67e578d1a1f6ed1cf49c00dbbf9b3d06cd233fd7b35d94872f/instructor-1.3.3-py3-none-any.whl", hash = "sha256:94b114b39a1181fa348d162e6e4ff5c4d985324736020c0233fed5d4db444dbd", size = 50204 }, +] + +[[package]] +name = "ipykernel" +version = "6.29.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "appnope", marker = "platform_system == 'Darwin'" }, + { name = "comm" }, + { name = "debugpy" }, + { name = "ipython" }, + { name = "jupyter-client" }, + { name = "jupyter-core" }, + { name = "matplotlib-inline" }, + { name = "nest-asyncio" }, + { name = "packaging" }, + { name = "psutil" }, + { name = "pyzmq" }, + { name = "tornado" }, + { name = "traitlets" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e9/5c/67594cb0c7055dc50814b21731c22a601101ea3b1b50a9a1b090e11f5d0f/ipykernel-6.29.5.tar.gz", hash = "sha256:f093a22c4a40f8828f8e330a9c297cb93dcab13bd9678ded6de8e5cf81c56215", size = 163367 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/94/5c/368ae6c01c7628438358e6d337c19b05425727fbb221d2a3c4303c372f42/ipykernel-6.29.5-py3-none-any.whl", hash = "sha256:afdb66ba5aa354b09b91379bac28ae4afebbb30e8b39510c9690afb7a10421b5", size = 117173 }, +] + +[[package]] +name = "ipython" +version = "8.27.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "decorator" }, + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "jedi" }, + { name = "matplotlib-inline" }, + { name = "pexpect", marker = "sys_platform != 'emscripten' and sys_platform != 'win32'" }, + { name = "prompt-toolkit" }, + { name = "pygments" }, + { name = "stack-data" }, + { name = "traitlets" }, + { name = "typing-extensions", marker = "python_full_version < '3.12'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/57/24/d4fabaca03c8804bf0b8d994c8ae3a20e57e9330d277fb43d83e558dec5e/ipython-8.27.0.tar.gz", hash = "sha256:0b99a2dc9f15fd68692e898e5568725c6d49c527d36a9fb5960ffbdeaa82ff7e", size = 5494984 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a8/a2/6c725958e6f135d8e5de081e69841bb2c1d84b3fc259d02eb092b8fc203a/ipython-8.27.0-py3-none-any.whl", hash = "sha256:f68b3cb8bde357a5d7adc9598d57e22a45dfbea19eb6b98286fa3b288c9cd55c", size = 818986 }, +] + +[[package]] +name = "jedi" +version = "0.19.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "parso" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d6/99/99b493cec4bf43176b678de30f81ed003fd6a647a301b9c927280c600f0a/jedi-0.19.1.tar.gz", hash = "sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd", size = 1227821 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/9f/bc63f0f0737ad7a60800bfd472a4836661adae21f9c2535f3957b1e54ceb/jedi-0.19.1-py2.py3-none-any.whl", hash = "sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0", size = 1569361 }, +] + +[[package]] +name = "jinja2" +version = "3.1.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ed/55/39036716d19cab0747a5020fc7e907f362fbf48c984b14e62127f7e68e5d/jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369", size = 240245 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/31/80/3a54838c3fb461f6fec263ebf3a3a41771bd05190238de3486aae8540c36/jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d", size = 133271 }, +] + +[[package]] +name = "jiter" +version = "0.4.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3e/7c/030735d07ef00562ae0a6cde7ccebdbc38f26d1d861ed8f5d6364600727f/jiter-0.4.2.tar.gz", hash = "sha256:29b9d44f23f0c05f46d482f4ebf03213ee290d77999525d0975a17f875bf1eea", size = 159226 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/39/f7/2f9818e31a96c48ef7a5ae177dc973ce00c1f83c54df151c394f4e21da3b/jiter-0.4.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:c2b003ff58d14f5e182b875acd5177b2367245c19a03be9a2230535d296f7550", size = 305280 }, + { url = "https://files.pythonhosted.org/packages/06/15/643fafdaaadc15698846ef371dc1fb81389dc75fb1ad855b90afebfc19b5/jiter-0.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b48c77c25f094707731cd5bad6b776046846b60a27ee20efc8fadfb10a89415f", size = 314150 }, + { url = "https://files.pythonhosted.org/packages/05/ba/c4bf4ba4e1bad7ff92d4116832cd066eb2bacd6a5ebc4dd8c0a1db380983/jiter-0.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f50ad6b172bde4d45f4d4ea10c49282a337b8bb735afc99763dfa55ea84a743", size = 1083312 }, + { url = "https://files.pythonhosted.org/packages/b9/bb/aaab54db1c57d046d88a105edefd84f248a638caa25c21af6725a2183e26/jiter-0.4.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:95f6001e86f525fbbc9706db2078dc22be078b0950de55b92d37041930f5f940", size = 1105006 }, + { url = "https://files.pythonhosted.org/packages/18/b1/1c7ecb09b74c33127659b39247aec32976a76d639e75868a2f5a2d653ed9/jiter-0.4.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:16646ef23b62b007de80460d303ebb2d81e355dac9389c787cec87cdd7ffef2f", size = 1118801 }, + { url = "https://files.pythonhosted.org/packages/79/ba/5255c3bfbed0851e6633c760cdf48eaa04ba8c74a9f0e3d5f35e5f2d061d/jiter-0.4.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b4e847c13b0bf1255c711a92330e7a8cb8b5cdd1e37d7db309627bcdd3367ff", size = 1250179 }, + { url = "https://files.pythonhosted.org/packages/0a/9c/a1b58666553e19dfc206c7f6a9babb25085c019c086475611c44c16f360f/jiter-0.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c536589be60e4c5f2b20fadc4db7e9f55d4c9df3551f29ddf1c4a18dcc9dd54", size = 327554 }, + { url = "https://files.pythonhosted.org/packages/4f/78/f5c4631b9f3185e21604b04a5aa04117fc808c49c8039caf9361f5cc02f1/jiter-0.4.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b3b2763996167830889a854b4ded30bb90897f9b76be78069c50c3ec4540950e", size = 1126361 }, + { url = "https://files.pythonhosted.org/packages/a2/54/67e972fc7f8f8c6c11bd4bb9390e23b5d366bfa0a519b81a478585fb6f0b/jiter-0.4.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:675e8ab98c99495091af6b6e9bf2b6353bcf81f25ab6ce27d36127e315b4505d", size = 1246472 }, + { url = "https://files.pythonhosted.org/packages/c0/06/da4784c04da2efe625566fc754fa4afd1b8eb4fe15202c8be5e26ac5901e/jiter-0.4.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e48e43d9d999aaf55f53406b8846ff8cbe3e47ee4b9dc37e5a10a65ce760809f", size = 1250393 }, + { url = 
"https://files.pythonhosted.org/packages/f3/18/9864c69a7747eeb561680d77e83fcbd6978fbbd788246c4988c22980b8fd/jiter-0.4.2-cp310-none-win32.whl", hash = "sha256:881b6e67c50bc36acb3570eda693763c8cd77d590940e06fa6d325d0da52ec1b", size = 207384 }, + { url = "https://files.pythonhosted.org/packages/d5/9a/7ca923facab92c3274e8617542487103d48d3fab40406e175602679ff5a0/jiter-0.4.2-cp310-none-win_amd64.whl", hash = "sha256:bb8f7b43259efc6add0d721ade2953e064b24e2026d26d979bc09ec080844cef", size = 203863 }, + { url = "https://files.pythonhosted.org/packages/7d/23/90d8045fb8a082e06d6a290f1add325209025b16f681c4e5f779f2d19e95/jiter-0.4.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:24ad336ac47f274fa83f6fbedcabff9d3387c80f67c66b992688e6a8ba2c47e9", size = 304382 }, + { url = "https://files.pythonhosted.org/packages/5e/db/aed8e4fb6961eb60f13c9efb99d681cf308dd2cb651670e167daef87979b/jiter-0.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fc392a220095730afe365ce1516f2f88bb085a2fd29ea191be9c6e3c71713d9a", size = 313278 }, + { url = "https://files.pythonhosted.org/packages/4b/06/87213927c2be78c1343e16413f96f75d810ade9c9d0fe29b40ef26757585/jiter-0.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1fdc408de36c81460896de0176f2f7b9f3574dcd35693a0b2c00f4ca34c98e4", size = 1082767 }, + { url = "https://files.pythonhosted.org/packages/6a/9a/e277e9415e8a86de7ab780a671e0e8bc08d897a71a2cff1ceb37186c6915/jiter-0.4.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c10ad76722ee6a8c820b0db06a793c08b7d679e5201b9563015bd1e06c959a09", size = 1104911 }, + { url = "https://files.pythonhosted.org/packages/e4/bb/5b9755f2055b2f534a5a753e6060b07aa783e031a77086da74b5106f03b2/jiter-0.4.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dbb46d1e9c82bba87f0cbda38413e49448a7df35b1e55917124bff9f38974a23", size = 1118984 }, + { url = "https://files.pythonhosted.org/packages/42/b0/78f5434279e9fdadc604332fb5cb061f7d0c483771ff57e602b5e053be3c/jiter-0.4.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:194e28ef4b5f3b61408cb2ee6b6dcbcdb0c9063d01b92b01345b7605692849f5", size = 1250373 }, + { url = "https://files.pythonhosted.org/packages/de/b3/eaf649816fe36ef7eece53c2ebbf12b9acad3f798e32c9895a3f9ffd5d5e/jiter-0.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f0a447533eccd62748a727e058efa10a8d7cf1de8ffe1a4d705ecb41dad9090", size = 327465 }, + { url = "https://files.pythonhosted.org/packages/85/85/9a5415c3e0dbaf9063142c532c78f5dcf116aa9df5f205a0bd0d010a54ba/jiter-0.4.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5f7704d7260bbb88cca3453951af739589132b26e896a3144fa2dae2263716d7", size = 1126402 }, + { url = "https://files.pythonhosted.org/packages/22/db/c44176ef6b976ab9018cb4fdf2ab0b6f263864ce19ded02d9dee1b5a47c5/jiter-0.4.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:01427458bc9550f2eda09d425755330e7d0eb09adce099577433bebf05d28d59", size = 1247009 }, + { url = "https://files.pythonhosted.org/packages/b1/5f/4f702e03dc8569a21fb859abba2595798dad7dcd57e87036c7db98da6148/jiter-0.4.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:159b8416879c0053b17c352f70b67b749ef5b2924c6154318ecf71918aab0905", size = 1250364 }, + { url = "https://files.pythonhosted.org/packages/1d/b1/85a340bbdd72f936a4824d6d5abdc16940bae2eb2d37e9d8302451a04714/jiter-0.4.2-cp311-none-win32.whl", hash = "sha256:f2445234acfb79048ce1a0d5d0e181abb9afd9e4a29d8d9988fe26cc5773a81a", size = 
207001 }, + { url = "https://files.pythonhosted.org/packages/da/f1/6f4c976b4354fe15125814717a7718701135a5e07058aaec43f0e972a94d/jiter-0.4.2-cp311-none-win_amd64.whl", hash = "sha256:e15a65f233b6b0e5ac10ddf3b97ceb18aa9ffba096259961641d78b4ee321bd5", size = 201695 }, + { url = "https://files.pythonhosted.org/packages/ab/6f/20865d95b8f0159426ff55d125a2dcd920f04aead696458922d226096241/jiter-0.4.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:d61d59521aea9745447ce50f74d39a16ef74ec9d6477d9350d77e75a3d774ad2", size = 303435 }, + { url = "https://files.pythonhosted.org/packages/13/e9/d1eae3fe386a7f8f9a210d19e18bbd4afe44eee73a9e08a424c260c0a365/jiter-0.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eef607dc0acc251923427808dbd017f1998ae3c1a0430a261527aa5cbb3a942", size = 312045 }, + { url = "https://files.pythonhosted.org/packages/ce/57/49be4b56f4ad7ff7547f378aa9268fc2d250f029f77f286a5131f010a0a5/jiter-0.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:af6bf39954646e374fc47429c656372ac731a6a26b644158a5a84bcdbed33a47", size = 1080203 }, + { url = "https://files.pythonhosted.org/packages/42/44/583fdcdb8d8e0e73d5b7d129119cda67f92bde72f8f51518f4883a53e6e3/jiter-0.4.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8f509d23606e476852ee46a2b65b5c4ad3905f17424d9cc19c1dffa1c94ba3c6", size = 1103879 }, + { url = "https://files.pythonhosted.org/packages/31/79/f1c0bae2a14a8f27d31121b6e725ca0d6e73066c4bc4dde98b0b06bde44a/jiter-0.4.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59672774daa44ee140aada0c781c82bee4d9ac5e522966186cfb6b3c217d8a51", size = 1116790 }, + { url = "https://files.pythonhosted.org/packages/67/2f/33e51effdd869eec6cdd7270ca52b5edc2e156fef94a6752750c619127a2/jiter-0.4.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:24a0458efac5afeca254cf557b8a654e17013075a69905c78f88d557f129d871", size = 1240021 }, + { url = "https://files.pythonhosted.org/packages/cc/82/a000e84840d6483fe2afdbb6fd78122ba589a9df387b79c3207d76a5c2da/jiter-0.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8860766d1c293e75c1bb4e25b74fa987e3adf199cac3f5f9e6e49c2bebf092f", size = 326243 }, + { url = "https://files.pythonhosted.org/packages/85/3f/7801819d7f15a931993aecdc94289ca141846cb6ececac8d70a78da86d02/jiter-0.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a109f3281b72bbf4921fe43db1005c004a38559ca0b6c4985add81777dfe0a44", size = 1125380 }, + { url = "https://files.pythonhosted.org/packages/26/4a/d77d4be6aa2ac992e5f6cc53b123346ad7a2f756a3d9af981116a6dc42da/jiter-0.4.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:faa7e667454b77ad2f0ef87db39f4944de759617aadf210ea2b73f26bb24755f", size = 1245803 }, + { url = "https://files.pythonhosted.org/packages/df/af/e28da40333afdee984d43420fdb5b35bdd00beb9efd4e31696a893372a2b/jiter-0.4.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3512f8b00cafb6780b427cb6282800d2bf8277161d9c917830661bd4ed1d3528", size = 1249226 }, + { url = "https://files.pythonhosted.org/packages/31/18/442533b5b1d33473cfe69ca93136f6c109cff4c8c5a0b710b9a8f0606fe9/jiter-0.4.2-cp312-none-win32.whl", hash = "sha256:853b35d508ee5b66d06630473c1c0b7bb5e29bf4785c9d2202437116c94f7e21", size = 208283 }, + { url = "https://files.pythonhosted.org/packages/a3/9e/97cfe748420a5290893113b44d82f6415870269d61a3469bf96eac53886e/jiter-0.4.2-cp312-none-win_amd64.whl", hash = 
"sha256:4a3a8197784278eb8b24cb02c45e1cad67c2ce5b5b758adfb19b87f74bbdff9c", size = 200023 }, +] + +[[package]] +name = "jmespath" +version = "1.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/00/2a/e867e8531cf3e36b41201936b7fa7ba7b5702dbef42922193f05c8976cd6/jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe", size = 25843 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/31/b4/b9b800c45527aadd64d5b442f9b932b00648617eb5d63d2c7a6587b7cafc/jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980", size = 20256 }, +] + +[[package]] +name = "joblib" +version = "1.4.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/64/33/60135848598c076ce4b231e1b1895170f45fbcaeaa2c9d5e38b04db70c35/joblib-1.4.2.tar.gz", hash = "sha256:2382c5816b2636fbd20a09e0f4e9dad4736765fdfb7dca582943b9c1366b3f0e", size = 2116621 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/91/29/df4b9b42f2be0b623cbd5e2140cafcaa2bef0759a00b7b70104dcfe2fb51/joblib-1.4.2-py3-none-any.whl", hash = "sha256:06d478d5674cbc267e7496a410ee875abd68e4340feff4490bcb7afb88060ae6", size = 301817 }, +] + +[[package]] +name = "jq" +version = "1.8.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ba/32/3eaca3ac81c804d6849da2e9f536ac200f4ad46a696890854c1f73b2f749/jq-1.8.0.tar.gz", hash = "sha256:53141eebca4bf8b4f2da5e44271a8a3694220dfd22d2b4b2cfb4816b2b6c9057", size = 2058265 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8d/ec/f72f8b0272b2d92c99cb33af70833e51af1bf673db39214948aa85699b48/jq-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:628848f92a0f24f5ca50c879d271555a63bf28746c1efd3571ee49e9a357b602", size = 416542 }, + { url = "https://files.pythonhosted.org/packages/d5/3c/3b781ae9f4f0dd24e75c0005d3a886b0ae55a684562206a4fd33fdc318c3/jq-1.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d375b0f372df24087fd0688ef85fef43a44a3e382a82afcc0cdfdfe59e59d313", size = 422189 }, + { url = "https://files.pythonhosted.org/packages/ad/b8/2ea11152a3546803bfad5a8ef78b6f4cbfbfe75a7455c6f662728167c09f/jq-1.8.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd0c30af5257ae0dccd27c5140726e24108a472e56dce8767b918905adfd9c99", size = 719303 }, + { url = "https://files.pythonhosted.org/packages/71/5d/3d252898f6163143b8def254b53e626b3f8cfb12c3dddcfacb796a7e396b/jq-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59bda8b62453967a32f418562309d0ffe0da73227e8c5800334ee0b515c5d2e2", size = 737355 }, + { url = "https://files.pythonhosted.org/packages/74/6c/85c477f133ee96de376070ee12991a81e7f83300d607203724633dd5ae69/jq-1.8.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:05e2c0a8944a3ff93de6353d60ed69fa85b155c08d6776ab20d4429197f50050", size = 727894 }, + { url = "https://files.pythonhosted.org/packages/07/c2/f0d8b7c9669ff17a57e54da469515e6d2badc6ed2b038792162b449aa168/jq-1.8.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2526368e5658eaeb47984b551e7178a0216cc8c5fdd6dd343964574cae513c89", size = 697960 }, + { url = "https://files.pythonhosted.org/packages/26/16/28b277d52125cbb2681063c875a178a1d11d8f0b7884f5f54b0418219587/jq-1.8.0-cp310-cp310-musllinux_1_2_i686.whl", hash = 
"sha256:881be44d8f804a97a1e37dc6360bf2deab43768d7fbb31cfb22ca8050dd6aed3", size = 722986 }, + { url = "https://files.pythonhosted.org/packages/fe/eb/62b9f6e3bbc4f2a05b392b1d1a4603fc927746d9e33f5c8d24edcfd7d429/jq-1.8.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f057322a572fe2cf0cb9ea068dd4eec237bc15490e0944cd979aeb23b20db3ac", size = 725489 }, + { url = "https://files.pythonhosted.org/packages/da/95/dcbef114d8b71d52def6f5ea7a04f892f18803d52e0aaf3d4e6393dcb7d4/jq-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f8441fe181af789a05b742930d095ee61fc251fdd2b975c68e359ac7e85a4c2d", size = 416862 }, + { url = "https://files.pythonhosted.org/packages/3b/c9/06f04189aa5265827228a31ab531712c5b6345c177988d7e1397b0cb18f7/jq-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8e687ef4b360e7436c3b5f15ee25f2570bcbcadccb940ebbc80ebe4b05b91ee2", size = 422413 }, + { url = "https://files.pythonhosted.org/packages/0c/77/6a55ae6d41f6298245dc45271a10b319c91eb3176a5fe0b6edd74e4031fb/jq-1.8.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf862d1bc1d0095aef0efc76f8cef0da7ab996f2b9d34c5067e48427a069ea3", size = 731532 }, + { url = "https://files.pythonhosted.org/packages/d3/fe/b7786c4cbf8ff4fd0a9b5273a30ee65a91c6f1bf38414e989a117ccd5c71/jq-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:190fd2bf92b7abec3090a1f68db40cd001178e84c42754f75253ee1f9c17dfdf", size = 746597 }, + { url = "https://files.pythonhosted.org/packages/43/1b/a2ce5bed9984eb98953184f8b4fea99798996631166f06e60cd5a9db8c51/jq-1.8.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ecba9f181e7810a336a520f32df998e6ecc9fdebac80c6a636e402baa939e79", size = 739586 }, + { url = "https://files.pythonhosted.org/packages/13/e4/4b0cff04095fb40ba279beb10746a445fa55755784a2546017e6975e1280/jq-1.8.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8b6322f647f9e1d7be7f6e8203106f4ff1b7c0e07c9023607c7414e1dc098b67", size = 722756 }, + { url = "https://files.pythonhosted.org/packages/63/63/e93d730108fc0651fbe47ed7f3a52ba134292523ae5f162cfb30e3020b74/jq-1.8.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:7bed3b9cc53d72383fc558cfe03345735e7532d1733a5ed3c2196f1eec1c26d7", size = 746574 }, + { url = "https://files.pythonhosted.org/packages/05/bc/bc890164f63371dcf90ac1d3383d0f11eefc8ec1ff649407cbd3393f530d/jq-1.8.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1a01261e4df11d3a0fe42fece73bb458d2e4a33b481d67e5e817acec8b0e923d", size = 749311 }, + { url = "https://files.pythonhosted.org/packages/45/b3/dd0d41cecb0d8712bc792b3c40b42a36c355d814d61f6bda4d61cbb188e5/jq-1.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:14f5988ae3604ebfdba2da398f9bd941bb3a72144a2831cfec2bc22bd23d5563", size = 415943 }, + { url = "https://files.pythonhosted.org/packages/9b/2c/39df803632c7222e9cd6922101966ddbec05d1c4213e7923c95e4e442666/jq-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f8903b66fac9f46de72b3a2f69bfa3c638a7a8d52610d1894df87ef0a9e4d2d3", size = 422267 }, + { url = "https://files.pythonhosted.org/packages/3a/b3/ddc1e691b832c6aa0f5142935099c1f05a89ff2f337201e2dcfafc726ec9/jq-1.8.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cccda466f5722fa9be789099ce253bfc177e49f9a981cb7f5b6369ea37041104", size = 729142 }, + { url = 
"https://files.pythonhosted.org/packages/c5/b9/42a55d08397d25b4b1f6580f58c59ba3e3e120270db2e75923644ccc0d29/jq-1.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95f57649e84a09b334eeb80d22ecc96ff7b31701f3f818ef14cb8bb162c84863", size = 748871 }, + { url = "https://files.pythonhosted.org/packages/90/4f/83639fdae641b7e8095b4a51d87a3da46737e70570d9df14d99ea15a0b16/jq-1.8.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7453731008eb7671725222781eb7bc5ed96e80fc9a652d177cb982276d3e08b4", size = 735908 }, + { url = "https://files.pythonhosted.org/packages/f7/9f/f54c2050b21490201613a7328534d2cb0c34e5a547167849a1464d89ae3e/jq-1.8.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:917812663613fc0542117bbe7ec43c8733b0c6bb174db6be06a15fc612de3b70", size = 721970 }, + { url = "https://files.pythonhosted.org/packages/24/b0/6c9a14ef103df4208e032bce25e66293201dacac18689d2ec4c0e68c8b77/jq-1.8.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:ec9e4db978237470e9d65f747eb459f4ffee576c9c9f8ca92ab32d5687a46e4a", size = 746825 }, + { url = "https://files.pythonhosted.org/packages/f4/67/4eb836a9eac5f02983ed7caf76c4d0cad32fdd6ae08176be892b3a6b3d17/jq-1.8.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f9f2548c83473bbe88a32a0735cb949a5d01804f8d411efae5342b5d23be8a2f", size = 751186 }, + { url = "https://files.pythonhosted.org/packages/9c/25/c73afa16aedee3ae87b2e8ffb2d12bdb9c7a34a8c9ab5038318cb0b431fe/jq-1.8.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:76aea6161c4d975230e85735c0214c386e66035e96cfc4fd69159e87f46c09d4", size = 415000 }, + { url = "https://files.pythonhosted.org/packages/06/97/d09338697ea0eb7386a3df0c6ca2a77ab090c19420a85acdc6f36971c6b8/jq-1.8.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0c24a5f9e3807e277e19f305c8bcd0665b8b89251b053903f611969657680722", size = 421253 }, + { url = "https://files.pythonhosted.org/packages/b8/c3/d020c19eca167b5085e74d2277bc3d9e35d1b4ee5bcb9076f1e26882514d/jq-1.8.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb484525dd801583ebd695d02f9165445a4d1b2fb560b187e6fc654911f0600e", size = 725885 }, + { url = "https://files.pythonhosted.org/packages/78/b8/8f6b886856f52f3277663d2d7a199663c6ede589dd0714aac9491b82ba6e/jq-1.8.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ddd9abdf0c1b30be1bf853d8c52187c96a51b2cbc05f40c43a37bf6a9b956807", size = 746334 }, + { url = "https://files.pythonhosted.org/packages/76/c2/2fa34e480068863ab372ec91c59b10214e9f8f3ae8b6e2de61456e93bae1/jq-1.8.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2c7464d9b88c74a7119b53f4bbf88028d07a9de9a1a279e45209b763b89d6582", size = 733716 }, + { url = "https://files.pythonhosted.org/packages/2e/db/59cb84ec59247af7f7bedd2b5c88b3a4ca17253fd2cc0d40f08573f7ff72/jq-1.8.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b99761e8ec2cedb9906df4ceae33f467a377621019ef40a9a275689ac3577456", size = 720978 }, + { url = "https://files.pythonhosted.org/packages/e0/6f/d04bdcc037ced716e2522ebf7a677541b8654d7855cd1404d894f1ecd144/jq-1.8.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:1be1638f9d5f38c83440fb9626d8f78905ed5d70e926e3a664d3de1198e1ef79", size = 746431 }, + { url = "https://files.pythonhosted.org/packages/84/52/f100fb2ccd467c17a2ecc186334aa7b512e49ca1a678ecc53dd4defd6e22/jq-1.8.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:2d7e82d58bf3afe373afb3a01f866e473bbd34f38377a2f216c6222ec028eeea", size = 750404 }, + { url = "https://files.pythonhosted.org/packages/10/3a/d8350a87cf73e66d7252020c31e50e0a5fedc00b343676e0ec1075399312/jq-1.8.0-pp310-pypy310_pp73-macosx_10_13_x86_64.whl", hash = "sha256:e14aa012606470d1a21fdc39835b8eef395f7ea143c720940a48156de94752e9", size = 401438 }, + { url = "https://files.pythonhosted.org/packages/95/3f/9f840980d6390b7eacb2a1d3e17c1edf9b0757571c93f801c48f5f494c58/jq-1.8.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:353db01bbb964eff9e39c8966e7c123cbdad1ff59cc3bee773a7a2034e2b843b", size = 410079 }, + { url = "https://files.pythonhosted.org/packages/9f/2e/70c61f02fc6307bcb2e079c8aa950eba9caf654c52473955d541261cf091/jq-1.8.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:325480cba94f504b282f42912a16b32d94dd1e6347cf3a367ec3c97fe1dd1b3a", size = 409938 }, + { url = "https://files.pythonhosted.org/packages/ae/75/04cb177d21afdbe5e31e2e2e1ae9ef6df651dd5668187090121ca179d147/jq-1.8.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4a79e94c83ebde789ff54e609f19b1923b2f57b2bd17ccb4953713577d4c3dc", size = 424088 }, + { url = "https://files.pythonhosted.org/packages/1f/b6/07b8ca4cd626eca4491c9f055f406d9a45375d7fcb75a877cb25bc88f023/jq-1.8.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc7ebcc1037c8a82db30aff9177f17379bcc91734def09548e939326717fd82d", size = 435591 }, +] + +[[package]] +name = "jsonpatch" +version = "1.33" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "jsonpointer" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/42/78/18813351fe5d63acad16aec57f94ec2b70a09e53ca98145589e185423873/jsonpatch-1.33.tar.gz", hash = "sha256:9fcd4009c41e6d12348b4a0ff2563ba56a2923a7dfee731d004e212e1ee5030c", size = 21699 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/73/07/02e16ed01e04a374e644b575638ec7987ae846d25ad97bcc9945a3ee4b0e/jsonpatch-1.33-py2.py3-none-any.whl", hash = "sha256:0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade", size = 12898 }, +] + +[[package]] +name = "jsonpointer" +version = "3.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6a/0a/eebeb1fa92507ea94016a2a790b93c2ae41a7e18778f85471dc54475ed25/jsonpointer-3.0.0.tar.gz", hash = "sha256:2b2d729f2091522d61c3b31f82e11870f60b68f43fbc705cb76bf4b832af59ef", size = 9114 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/71/92/5e77f98553e9e75130c78900d000368476aed74276eb8ae8796f65f00918/jsonpointer-3.0.0-py2.py3-none-any.whl", hash = "sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942", size = 7595 }, +] + +[[package]] +name = "jsonref" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/aa/0d/c1f3277e90ccdb50d33ed5ba1ec5b3f0a242ed8c1b1a85d3afeb68464dca/jsonref-1.1.0.tar.gz", hash = "sha256:32fe8e1d85af0fdefbebce950af85590b22b60f9e95443176adbde4e1ecea552", size = 8814 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0c/ec/e1db9922bceb168197a558a2b8c03a7963f1afe93517ddd3cf99f202f996/jsonref-1.1.0-py3-none-any.whl", hash = "sha256:590dc7773df6c21cbf948b5dac07a72a251db28b0238ceecce0a2abfa8ec30a9", size = 9425 }, +] + +[[package]] +name = "jupyter-client" +version = "8.6.3" +source = { registry = 
"https://pypi.org/simple" } +dependencies = [ + { name = "jupyter-core" }, + { name = "python-dateutil" }, + { name = "pyzmq" }, + { name = "tornado" }, + { name = "traitlets" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/71/22/bf9f12fdaeae18019a468b68952a60fe6dbab5d67cd2a103cac7659b41ca/jupyter_client-8.6.3.tar.gz", hash = "sha256:35b3a0947c4a6e9d589eb97d7d4cd5e90f910ee73101611f01283732bd6d9419", size = 342019 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/11/85/b0394e0b6fcccd2c1eeefc230978a6f8cb0c5df1e4cd3e7625735a0d7d1e/jupyter_client-8.6.3-py3-none-any.whl", hash = "sha256:e8a19cc986cc45905ac3362915f410f3af85424b4c0905e94fa5f2cb08e8f23f", size = 106105 }, +] + +[[package]] +name = "jupyter-core" +version = "5.7.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "platformdirs" }, + { name = "pywin32", marker = "platform_python_implementation != 'PyPy' and sys_platform == 'win32'" }, + { name = "traitlets" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/00/11/b56381fa6c3f4cc5d2cf54a7dbf98ad9aa0b339ef7a601d6053538b079a7/jupyter_core-5.7.2.tar.gz", hash = "sha256:aa5f8d32bbf6b431ac830496da7392035d6f61b4f54872f15c4bd2a9c3f536d9", size = 87629 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c9/fb/108ecd1fe961941959ad0ee4e12ee7b8b1477247f30b1fdfd83ceaf017f0/jupyter_core-5.7.2-py3-none-any.whl", hash = "sha256:4f7315d2f6b4bcf2e3e7cb6e46772eba760ae459cd1f59d29eb57b0a01bd7409", size = 28965 }, +] + +[[package]] +name = "kombu" +version = "5.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "amqp" }, + { name = "tzdata" }, + { name = "vine" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/38/4d/b93fcb353d279839cc35d0012bee805ed0cf61c07587916bfc35dbfddaf1/kombu-5.4.2.tar.gz", hash = "sha256:eef572dd2fd9fc614b37580e3caeafdd5af46c1eff31e7fba89138cdb406f2cf", size = 442858 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/87/ec/7811a3cf9fdfee3ee88e54d08fcbc3fabe7c1b6e4059826c59d7b795651c/kombu-5.4.2-py3-none-any.whl", hash = "sha256:14212f5ccf022fc0a70453bb025a1dcc32782a588c49ea866884047d66e14763", size = 201349 }, +] + +[[package]] +name = "kubernetes" +version = "31.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "durationpy" }, + { name = "google-auth" }, + { name = "oauthlib" }, + { name = "python-dateutil" }, + { name = "pyyaml" }, + { name = "requests" }, + { name = "requests-oauthlib" }, + { name = "six" }, + { name = "urllib3" }, + { name = "websocket-client" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/7e/bd/ffcd3104155b467347cd9b3a64eb24182e459579845196b3a200569c8912/kubernetes-31.0.0.tar.gz", hash = "sha256:28945de906c8c259c1ebe62703b56a03b714049372196f854105afe4e6d014c0", size = 916096 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fb/a8/17f5e28cecdbd6d48127c22abdb794740803491f422a11905c4569d8e139/kubernetes-31.0.0-py2.py3-none-any.whl", hash = "sha256:bf141e2d380c8520eada8b351f4e319ffee9636328c137aa432bc486ca1200e1", size = 1857013 }, +] + +[[package]] +name = "langchain" +version = "0.2.16" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohttp" }, + { name = "async-timeout", marker = "python_full_version < '3.11'" }, + { name = "langchain-core" }, + { name = "langchain-text-splitters" }, + { name = "langsmith" }, + { name = "numpy" }, + { name = "pydantic" }, + { name = "pyyaml" }, + { name = 
"requests" }, + { name = "sqlalchemy" }, + { name = "tenacity" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fd/53/8ebf21de8d17e7e0f0998f28d689f60d7ed420acb7ab2fba59ca04e80e54/langchain-0.2.16.tar.gz", hash = "sha256:ffb426a76a703b73ac69abad77cd16eaf03dda76b42cff55572f592d74944166", size = 414668 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0d/29/635343c0d155997569b544d26da5a2a9ebade2423baffc9cd6066b01a386/langchain-0.2.16-py3-none-any.whl", hash = "sha256:8f59ee8b45f268df4b924ea3b9c63e49286efa756d16b3f6a9de5c6e502c36e1", size = 1001195 }, +] + +[[package]] +name = "langchain-cohere" +version = "0.1.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cohere" }, + { name = "langchain-core" }, + { name = "langchain-experimental" }, + { name = "pandas" }, + { name = "tabulate" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a4/a9/30462b68f8c15da886078fe5c96fab3085241168ea03d968eee1182e00a9/langchain_cohere-0.1.9.tar.gz", hash = "sha256:549620d23bc3d77f62d1045787095fe2c1cfa233dba69455139f9a2f65f952fa", size = 29987 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/52/b1/ee8d44898cfe43703f05a0ffd95294d3ebe4c61879f19c6357c860131312/langchain_cohere-0.1.9-py3-none-any.whl", hash = "sha256:96d6a15125797319474ac84b54024e5024f3f5fc45032ebf228d95d6998c9b13", size = 35218 }, +] + +[[package]] +name = "langchain-community" +version = "0.2.17" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohttp" }, + { name = "dataclasses-json" }, + { name = "langchain" }, + { name = "langchain-core" }, + { name = "langsmith" }, + { name = "numpy" }, + { name = "pyyaml" }, + { name = "requests" }, + { name = "sqlalchemy" }, + { name = "tenacity" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1f/54/be928e3962d24b40c31899f5c5ed99b0c7ef7c3bb7601eb2fe7a6ce75dc4/langchain_community-0.2.17.tar.gz", hash = "sha256:b0745c1fcf1bd532ed4388f90b47139d6a6c6ba48a87aa68aa32d4d6bb97259d", size = 1589425 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ac/33/c6ee472412f751062311075bb391a7870ab57cdb8da5d47f359895b2d3c2/langchain_community-0.2.17-py3-none-any.whl", hash = "sha256:d07c31b641e425fb8c3e7148ad6a62e1b54a9adac6e1173021a7dd3148266063", size = 2339964 }, +] + +[[package]] +name = "langchain-core" +version = "0.2.41" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "jsonpatch" }, + { name = "langsmith" }, + { name = "packaging" }, + { name = "pydantic" }, + { name = "pyyaml" }, + { name = "tenacity" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2a/92/2ad97f0c23b5ee5043df1a93d97edd4404136003e7d22b641de081738408/langchain_core-0.2.41.tar.gz", hash = "sha256:bc12032c5a298d85be754ccb129bc13ea21ccb1d6e22f8d7ba18b8da64315bb5", size = 316952 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bc/02/2b2cf9550cee1a7ffa42fe60c55e2d0e7d397535609b42562611fb40e78d/langchain_core-0.2.41-py3-none-any.whl", hash = "sha256:3278fda5ba9a05defae8bb19f1226032add6aab21917db7b3bc74e750e263e84", size = 397013 }, +] + +[[package]] +name = "langchain-experimental" +version = "0.0.61" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "langchain-community" }, + { name = "langchain-core" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/02/1e/437941420dbf5230344da376961ad4497ae91bea1f7ce46ee389310529e3/langchain_experimental-0.0.61.tar.gz", hash = 
"sha256:e9538efb994be5db3045cc582cddb9787c8299c86ffeee9d3779b7f58eef2226", size = 135958 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/61/9b/1bc4df69a1f92e3e96020d22c2a0850e7df566a6770bb365255f5355cd26/langchain_experimental-0.0.61-py3-none-any.whl", hash = "sha256:f9c516f528f55919743bd56fe1689a53bf74ae7f8902d64b9d8aebc61249cbe2", size = 202515 }, +] + +[[package]] +name = "langchain-openai" +version = "0.1.25" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "langchain-core" }, + { name = "openai" }, + { name = "tiktoken" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2f/cb/98fe365f2e5eee39d0130279959a84182ab414879b666ffc2b9d69b95633/langchain_openai-0.1.25.tar.gz", hash = "sha256:eb116f744f820247a72f54313fb7c01524fba0927120d4e899e5e4ab41ad3928", size = 45224 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7f/2e/a4430cad7a98e29e9612648f8b12d7449ab635a742c19bf1d62f8713ecaa/langchain_openai-0.1.25-py3-none-any.whl", hash = "sha256:f0b34a233d0d9cb8fce6006c903e57085c493c4f0e32862b99063b96eaedb109", size = 51550 }, +] + +[[package]] +name = "langchain-text-splitters" +version = "0.2.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "langchain-core" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/83/b3/b1ccde47c86c5fe2585dc012555cff7949c556bd6993dd9c09e49a356190/langchain_text_splitters-0.2.4.tar.gz", hash = "sha256:f7daa7a3b0aa8309ce248e2e2b6fc8115be01118d336c7f7f7dfacda0e89bf29", size = 20236 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8f/f3/d01591229e9d0eec1e8106ed6f9b670f299beb1c94fed4aa335afa78acb0/langchain_text_splitters-0.2.4-py3-none-any.whl", hash = "sha256:2702dee5b7cbdd595ccbe43b8d38d01a34aa8583f4d6a5a68ad2305ae3e7b645", size = 25552 }, +] + +[[package]] +name = "langchainhub" +version = "0.1.21" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "packaging" }, + { name = "requests" }, + { name = "types-requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/71/50/76719d356d80b00761d9680e3eb5df0f5ca8587e79b60ae6dcb678828cdd/langchainhub-0.1.21.tar.gz", hash = "sha256:723383b3964a47dbaea6ad5d0ef728accefbc9d2c07480e800bdec43510a8c10", size = 4481 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/35/63/40328157ddee807991f2f1992c2ad88f479b2472dc9e40d08ccf10700735/langchainhub-0.1.21-py3-none-any.whl", hash = "sha256:1cc002dc31e0d132a776afd044361e2b698743df5202618cf2bad399246b895f", size = 5203 }, +] + +[[package]] +name = "langflow-base" +version = "0.0.96" +source = { editable = "." 
} +dependencies = [ + { name = "aiofiles" }, + { name = "alembic" }, + { name = "asyncer" }, + { name = "bcrypt" }, + { name = "cachetools" }, + { name = "chardet" }, + { name = "clickhouse-connect" }, + { name = "crewai" }, + { name = "cryptography" }, + { name = "diskcache" }, + { name = "docstring-parser" }, + { name = "duckdb" }, + { name = "emoji" }, + { name = "fastapi" }, + { name = "filelock" }, + { name = "firecrawl-py" }, + { name = "grandalf" }, + { name = "gunicorn" }, + { name = "httpx" }, + { name = "jq", marker = "sys_platform != 'win32'" }, + { name = "langchain" }, + { name = "langchain-core" }, + { name = "langchain-experimental" }, + { name = "langchainhub" }, + { name = "loguru" }, + { name = "multiprocess" }, + { name = "nanoid" }, + { name = "nest-asyncio" }, + { name = "opentelemetry-api" }, + { name = "opentelemetry-exporter-prometheus" }, + { name = "opentelemetry-instrumentation-fastapi" }, + { name = "opentelemetry-sdk" }, + { name = "orjson" }, + { name = "pandas" }, + { name = "passlib" }, + { name = "pillow" }, + { name = "platformdirs" }, + { name = "prometheus-client" }, + { name = "pydantic" }, + { name = "pydantic-settings" }, + { name = "pypdf" }, + { name = "pyperclip" }, + { name = "python-docx" }, + { name = "python-jose" }, + { name = "python-multipart" }, + { name = "rich" }, + { name = "sentry-sdk", extra = ["fastapi", "loguru"] }, + { name = "setuptools" }, + { name = "spider-client" }, + { name = "sqlmodel" }, + { name = "typer" }, + { name = "uncurl" }, + { name = "uvicorn" }, + { name = "websockets" }, +] + +[package.optional-dependencies] +all = [ + { name = "celery" }, + { name = "ctransformers" }, + { name = "flower" }, + { name = "llama-cpp-python" }, + { name = "redis" }, + { name = "sentence-transformers" }, +] +deploy = [ + { name = "celery" }, + { name = "flower" }, + { name = "redis" }, +] +dev = [ + { name = "devtools" }, + { name = "dictdiffer" }, + { name = "httpx" }, + { name = "ipykernel" }, + { name = "mypy" }, + { name = "pandas-stubs" }, + { name = "pre-commit" }, + { name = "pytest" }, + { name = "pytest-asyncio" }, + { name = "pytest-cov" }, + { name = "pytest-flakefinder" }, + { name = "pytest-instafail" }, + { name = "pytest-mock" }, + { name = "pytest-profiling" }, + { name = "pytest-split" }, + { name = "pytest-sugar" }, + { name = "pytest-xdist" }, + { name = "requests" }, + { name = "respx" }, + { name = "ruff" }, + { name = "types-google-cloud-ndb" }, + { name = "types-markdown" }, + { name = "types-passlib" }, + { name = "types-pillow" }, + { name = "types-python-jose" }, + { name = "types-pywin32" }, + { name = "types-pyyaml" }, + { name = "types-redis" }, + { name = "types-requests" }, + { name = "vulture" }, +] +local = [ + { name = "ctransformers" }, + { name = "llama-cpp-python" }, + { name = "sentence-transformers" }, +] + +[package.metadata] +requires-dist = [ + { name = "aiofiles", specifier = ">=24.1.0" }, + { name = "alembic", specifier = ">=1.13.0" }, + { name = "asyncer", specifier = ">=0.0.5" }, + { name = "bcrypt", specifier = "==4.0.1" }, + { name = "cachetools", specifier = ">=5.3.1" }, + { name = "celery", marker = "extra == 'all'" }, + { name = "celery", marker = "extra == 'deploy'" }, + { name = "chardet", specifier = ">=5.2.0" }, + { name = "clickhouse-connect", specifier = "==0.7.19" }, + { name = "crewai", specifier = ">=0.36.0" }, + { name = "cryptography", specifier = ">=42.0.5,<44.0.0" }, + { name = "ctransformers", marker = "extra == 'all'" }, + { name = "ctransformers", marker = "extra == 
'local'" }, + { name = "devtools", marker = "extra == 'dev'", specifier = ">=0.12.2" }, + { name = "dictdiffer", marker = "extra == 'dev'", specifier = ">=0.9.0" }, + { name = "diskcache", specifier = ">=5.6.3" }, + { name = "docstring-parser", specifier = ">=0.16" }, + { name = "duckdb", specifier = ">=1.0.0" }, + { name = "emoji", specifier = ">=2.12.0" }, + { name = "fastapi", specifier = ">=0.111.0" }, + { name = "filelock", specifier = ">=3.15.4" }, + { name = "firecrawl-py", specifier = ">=0.0.16" }, + { name = "flower", marker = "extra == 'all'" }, + { name = "flower", marker = "extra == 'deploy'" }, + { name = "grandalf", specifier = ">=0.8.0" }, + { name = "gunicorn", specifier = ">=22.0.0" }, + { name = "httpx" }, + { name = "httpx", marker = "extra == 'dev'" }, + { name = "ipykernel", marker = "extra == 'dev'", specifier = ">=6.29.0" }, + { name = "jq", marker = "sys_platform != 'win32'", specifier = ">=1.7.0" }, + { name = "langchain", specifier = "~=0.2.0" }, + { name = "langchain-core", specifier = ">=0.2.32" }, + { name = "langchain-experimental", specifier = ">=0.0.61" }, + { name = "langchainhub", specifier = "~=0.1.15" }, + { name = "llama-cpp-python", marker = "extra == 'all'" }, + { name = "llama-cpp-python", marker = "extra == 'local'" }, + { name = "loguru", specifier = ">=0.7.1" }, + { name = "multiprocess", specifier = ">=0.70.14" }, + { name = "mypy", marker = "extra == 'dev'", specifier = ">=1.11.0" }, + { name = "nanoid", specifier = ">=2.0.0" }, + { name = "nest-asyncio", specifier = ">=1.6.0" }, + { name = "opentelemetry-api", specifier = ">=1.25.0" }, + { name = "opentelemetry-exporter-prometheus", specifier = ">=0.46b0" }, + { name = "opentelemetry-instrumentation-fastapi", specifier = ">=0.46b0" }, + { name = "opentelemetry-sdk", specifier = ">=1.25.0" }, + { name = "orjson", specifier = "==3.10.0" }, + { name = "pandas", specifier = "==2.2.2" }, + { name = "pandas-stubs", marker = "extra == 'dev'", specifier = ">=2.1.4.231227" }, + { name = "passlib", specifier = ">=1.7.4" }, + { name = "pillow", specifier = ">=10.2.0" }, + { name = "platformdirs", specifier = ">=4.2.0" }, + { name = "pre-commit", marker = "extra == 'dev'", specifier = ">=3.7.0" }, + { name = "prometheus-client", specifier = ">=0.20.0" }, + { name = "pydantic", specifier = ">=2.7.0" }, + { name = "pydantic-settings", specifier = ">=2.2.0" }, + { name = "pypdf", specifier = ">=4.2.0" }, + { name = "pyperclip", specifier = ">=1.8.2" }, + { name = "pytest", marker = "extra == 'dev'", specifier = ">=8.2.0" }, + { name = "pytest-asyncio", marker = "extra == 'dev'", specifier = ">=0.23.0" }, + { name = "pytest-cov", marker = "extra == 'dev'", specifier = ">=5.0.0" }, + { name = "pytest-flakefinder", marker = "extra == 'dev'", specifier = ">=1.1.0" }, + { name = "pytest-instafail", marker = "extra == 'dev'", specifier = ">=0.5.0" }, + { name = "pytest-mock", marker = "extra == 'dev'", specifier = ">=3.14.0" }, + { name = "pytest-profiling", marker = "extra == 'dev'", specifier = ">=1.7.0" }, + { name = "pytest-split", marker = "extra == 'dev'", specifier = ">=0.9.0" }, + { name = "pytest-sugar", marker = "extra == 'dev'", specifier = ">=1.0.0" }, + { name = "pytest-xdist", marker = "extra == 'dev'", specifier = ">=3.6.0" }, + { name = "python-docx", specifier = ">=1.1.0" }, + { name = "python-jose", specifier = ">=3.3.0" }, + { name = "python-multipart", specifier = ">=0.0.7" }, + { name = "redis", marker = "extra == 'all'" }, + { name = "redis", marker = "extra == 'deploy'" }, + { name = 
"requests", marker = "extra == 'dev'", specifier = ">=2.32.0" }, + { name = "respx", marker = "extra == 'dev'", specifier = ">=0.21.1" }, + { name = "rich", specifier = ">=13.7.0" }, + { name = "ruff", marker = "extra == 'dev'", specifier = ">=0.4.5" }, + { name = "sentence-transformers", marker = "extra == 'all'" }, + { name = "sentence-transformers", marker = "extra == 'local'" }, + { name = "sentry-sdk", extras = ["fastapi", "loguru"], specifier = ">=2.5.1" }, + { name = "setuptools", specifier = ">=70" }, + { name = "spider-client", specifier = ">=0.0.27" }, + { name = "sqlmodel", specifier = "==0.0.18" }, + { name = "typer", specifier = ">=0.12.0" }, + { name = "types-google-cloud-ndb", marker = "extra == 'dev'", specifier = ">=2.2.0.0" }, + { name = "types-markdown", marker = "extra == 'dev'", specifier = ">=3.7.0.20240822" }, + { name = "types-passlib", marker = "extra == 'dev'", specifier = ">=1.7.7.13" }, + { name = "types-pillow", marker = "extra == 'dev'", specifier = ">=10.2.0.20240213" }, + { name = "types-python-jose", marker = "extra == 'dev'", specifier = ">=3.3.4.8" }, + { name = "types-pywin32", marker = "extra == 'dev'", specifier = ">=306.0.0.4" }, + { name = "types-pyyaml", marker = "extra == 'dev'", specifier = ">=6.0.12.8" }, + { name = "types-redis", marker = "extra == 'dev'", specifier = ">=4.6.0.5" }, + { name = "types-requests", marker = "extra == 'dev'", specifier = ">=2.32.0" }, + { name = "uncurl", specifier = ">=0.0.11" }, + { name = "uvicorn", specifier = ">=0.30.0" }, + { name = "vulture", marker = "extra == 'dev'", specifier = ">=2.11" }, + { name = "websockets" }, +] + +[[package]] +name = "langsmith" +version = "0.1.125" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "httpx" }, + { name = "orjson" }, + { name = "pydantic" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e0/f6/fd8335b6702d7da0b22c555d34af1feedf44725d60e209a09bcb15a8ec50/langsmith-0.1.125.tar.gz", hash = "sha256:2c0eb0c3cbf22cff55bf519b8e889041f9a591bcf97af5152c8e130333c5940e", size = 281537 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8d/42/afd1d1b5946d36746c0d3445259e9cb59ab8aa7775fddca5a7179d9be8d4/langsmith-0.1.125-py3-none-any.whl", hash = "sha256:74ce8eb2663e1ed20bfcfc88d41e0712879306956c9938d1cdbab7d60458bdca", size = 290193 }, +] + +[[package]] +name = "llama-cpp-python" +version = "0.2.90" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "diskcache" }, + { name = "jinja2" }, + { name = "numpy" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1f/19/89836022affc1bf470e2485e28872b489254a66fe587155edba731a07112/llama_cpp_python-0.2.90.tar.gz", hash = "sha256:419b041c62dbdb9f7e67883a6ef2f247d583d08417058776be0bff05b4ec9e3d", size = 63762953 } + +[[package]] +name = "loguru" +version = "0.7.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "win32-setctime", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9e/30/d87a423766b24db416a46e9335b9602b054a72b96a88a241f2b09b560fa8/loguru-0.7.2.tar.gz", hash = "sha256:e671a53522515f34fd406340ee968cb9ecafbc4b36c679da03c18fd8d0bd51ac", size = 145103 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/03/0a/4f6fed21aa246c6b49b561ca55facacc2a44b87d65b8b92362a8e99ba202/loguru-0.7.2-py3-none-any.whl", hash = 
"sha256:003d71e3d3ed35f0f8984898359d65b79e5b21943f78af86aa5491210429b8eb", size = 62549 }, +] + +[[package]] +name = "lxml" +version = "5.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e7/6b/20c3a4b24751377aaa6307eb230b66701024012c29dd374999cc92983269/lxml-5.3.0.tar.gz", hash = "sha256:4e109ca30d1edec1ac60cdbe341905dc3b8f55b16855e03a54aaf59e51ec8c6f", size = 3679318 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a1/ce/2789e39eddf2b13fac29878bfa465f0910eb6b0096e29090e5176bc8cf43/lxml-5.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:dd36439be765e2dde7660212b5275641edbc813e7b24668831a5c8ac91180656", size = 8124570 }, + { url = "https://files.pythonhosted.org/packages/24/a8/f4010166a25d41715527129af2675981a50d3bbf7df09c5d9ab8ca24fbf9/lxml-5.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ae5fe5c4b525aa82b8076c1a59d642c17b6e8739ecf852522c6321852178119d", size = 4413042 }, + { url = "https://files.pythonhosted.org/packages/41/a4/7e45756cecdd7577ddf67a68b69c1db0f5ddbf0c9f65021ee769165ffc5a/lxml-5.3.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:501d0d7e26b4d261fca8132854d845e4988097611ba2531408ec91cf3fd9d20a", size = 5139213 }, + { url = "https://files.pythonhosted.org/packages/02/e2/ecf845b12323c92748077e1818b64e8b4dba509a4cb12920b3762ebe7552/lxml-5.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb66442c2546446944437df74379e9cf9e9db353e61301d1a0e26482f43f0dd8", size = 4838814 }, + { url = "https://files.pythonhosted.org/packages/12/91/619f9fb72cf75e9ceb8700706f7276f23995f6ad757e6d400fbe35ca4990/lxml-5.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9e41506fec7a7f9405b14aa2d5c8abbb4dbbd09d88f9496958b6d00cb4d45330", size = 5425084 }, + { url = "https://files.pythonhosted.org/packages/25/3b/162a85a8f0fd2a3032ec3f936636911c6e9523a8e263fffcfd581ce98b54/lxml-5.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f7d4a670107d75dfe5ad080bed6c341d18c4442f9378c9f58e5851e86eb79965", size = 4875993 }, + { url = "https://files.pythonhosted.org/packages/43/af/dd3f58cc7d946da6ae42909629a2b1d5dd2d1b583334d4af9396697d6863/lxml-5.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41ce1f1e2c7755abfc7e759dc34d7d05fd221723ff822947132dc934d122fe22", size = 5012462 }, + { url = "https://files.pythonhosted.org/packages/69/c1/5ea46b2d4c98f5bf5c83fffab8a0ad293c9bc74df9ecfbafef10f77f7201/lxml-5.3.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:44264ecae91b30e5633013fb66f6ddd05c006d3e0e884f75ce0b4755b3e3847b", size = 4815288 }, + { url = "https://files.pythonhosted.org/packages/1d/51/a0acca077ad35da458f4d3f729ef98effd2b90f003440d35fc36323f8ae6/lxml-5.3.0-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:3c174dc350d3ec52deb77f2faf05c439331d6ed5e702fc247ccb4e6b62d884b7", size = 5472435 }, + { url = "https://files.pythonhosted.org/packages/4d/6b/0989c9368986961a6b0f55b46c80404c4b758417acdb6d87bfc3bd5f4967/lxml-5.3.0-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:2dfab5fa6a28a0b60a20638dc48e6343c02ea9933e3279ccb132f555a62323d8", size = 4976354 }, + { url = "https://files.pythonhosted.org/packages/05/9e/87492d03ff604fbf656ed2bf3e2e8d28f5d58ea1f00ff27ac27b06509079/lxml-5.3.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:b1c8c20847b9f34e98080da785bb2336ea982e7f913eed5809e5a3c872900f32", size = 5029973 }, + { 
url = "https://files.pythonhosted.org/packages/f9/cc/9ae1baf5472af88e19e2c454b3710c1be9ecafb20eb474eeabcd88a055d2/lxml-5.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2c86bf781b12ba417f64f3422cfc302523ac9cd1d8ae8c0f92a1c66e56ef2e86", size = 4888837 }, + { url = "https://files.pythonhosted.org/packages/d2/10/5594ffaec8c120d75b17e3ad23439b740a51549a9b5fd7484b2179adfe8f/lxml-5.3.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:c162b216070f280fa7da844531169be0baf9ccb17263cf5a8bf876fcd3117fa5", size = 5530555 }, + { url = "https://files.pythonhosted.org/packages/ea/9b/de17f05377c8833343b629905571fb06cff2028f15a6f58ae2267662e341/lxml-5.3.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:36aef61a1678cb778097b4a6eeae96a69875d51d1e8f4d4b491ab3cfb54b5a03", size = 5405314 }, + { url = "https://files.pythonhosted.org/packages/8a/b4/227be0f1f3cca8255925985164c3838b8b36e441ff0cc10c1d3c6bdba031/lxml-5.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f65e5120863c2b266dbcc927b306c5b78e502c71edf3295dfcb9501ec96e5fc7", size = 5079303 }, + { url = "https://files.pythonhosted.org/packages/5c/ee/19abcebb7fc40319bb71cd6adefa1ad94d09b5660228715854d6cc420713/lxml-5.3.0-cp310-cp310-win32.whl", hash = "sha256:ef0c1fe22171dd7c7c27147f2e9c3e86f8bdf473fed75f16b0c2e84a5030ce80", size = 3475126 }, + { url = "https://files.pythonhosted.org/packages/a1/35/183d32551447e280032b2331738cd850da435a42f850b71ebeaab42c1313/lxml-5.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:052d99051e77a4f3e8482c65014cf6372e61b0a6f4fe9edb98503bb5364cfee3", size = 3805065 }, + { url = "https://files.pythonhosted.org/packages/5c/a8/449faa2a3cbe6a99f8d38dcd51a3ee8844c17862841a6f769ea7c2a9cd0f/lxml-5.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:74bcb423462233bc5d6066e4e98b0264e7c1bed7541fff2f4e34fe6b21563c8b", size = 8141056 }, + { url = "https://files.pythonhosted.org/packages/ac/8a/ae6325e994e2052de92f894363b038351c50ee38749d30cc6b6d96aaf90f/lxml-5.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a3d819eb6f9b8677f57f9664265d0a10dd6551d227afb4af2b9cd7bdc2ccbf18", size = 4425238 }, + { url = "https://files.pythonhosted.org/packages/f8/fb/128dddb7f9086236bce0eeae2bfb316d138b49b159f50bc681d56c1bdd19/lxml-5.3.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b8f5db71b28b8c404956ddf79575ea77aa8b1538e8b2ef9ec877945b3f46442", size = 5095197 }, + { url = "https://files.pythonhosted.org/packages/b4/f9/a181a8ef106e41e3086629c8bdb2d21a942f14c84a0e77452c22d6b22091/lxml-5.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c3406b63232fc7e9b8783ab0b765d7c59e7c59ff96759d8ef9632fca27c7ee4", size = 4809809 }, + { url = "https://files.pythonhosted.org/packages/25/2f/b20565e808f7f6868aacea48ddcdd7e9e9fb4c799287f21f1a6c7c2e8b71/lxml-5.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ecdd78ab768f844c7a1d4a03595038c166b609f6395e25af9b0f3f26ae1230f", size = 5407593 }, + { url = "https://files.pythonhosted.org/packages/23/0e/caac672ec246d3189a16c4d364ed4f7d6bf856c080215382c06764058c08/lxml-5.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:168f2dfcfdedf611eb285efac1516c8454c8c99caf271dccda8943576b67552e", size = 4866657 }, + { url = "https://files.pythonhosted.org/packages/67/a4/1f5fbd3f58d4069000522196b0b776a014f3feec1796da03e495cf23532d/lxml-5.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:aa617107a410245b8660028a7483b68e7914304a6d4882b5ff3d2d3eb5948d8c", size = 4967017 }, + { url = "https://files.pythonhosted.org/packages/ee/73/623ecea6ca3c530dd0a4ed0d00d9702e0e85cd5624e2d5b93b005fe00abd/lxml-5.3.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:69959bd3167b993e6e710b99051265654133a98f20cec1d9b493b931942e9c16", size = 4810730 }, + { url = "https://files.pythonhosted.org/packages/1d/ce/fb84fb8e3c298f3a245ae3ea6221c2426f1bbaa82d10a88787412a498145/lxml-5.3.0-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:bd96517ef76c8654446fc3db9242d019a1bb5fe8b751ba414765d59f99210b79", size = 5455154 }, + { url = "https://files.pythonhosted.org/packages/b1/72/4d1ad363748a72c7c0411c28be2b0dc7150d91e823eadad3b91a4514cbea/lxml-5.3.0-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:ab6dd83b970dc97c2d10bc71aa925b84788c7c05de30241b9e96f9b6d9ea3080", size = 4969416 }, + { url = "https://files.pythonhosted.org/packages/42/07/b29571a58a3a80681722ea8ed0ba569211d9bb8531ad49b5cacf6d409185/lxml-5.3.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:eec1bb8cdbba2925bedc887bc0609a80e599c75b12d87ae42ac23fd199445654", size = 5013672 }, + { url = "https://files.pythonhosted.org/packages/b9/93/bde740d5a58cf04cbd38e3dd93ad1e36c2f95553bbf7d57807bc6815d926/lxml-5.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6a7095eeec6f89111d03dabfe5883a1fd54da319c94e0fb104ee8f23616b572d", size = 4878644 }, + { url = "https://files.pythonhosted.org/packages/56/b5/645c8c02721d49927c93181de4017164ec0e141413577687c3df8ff0800f/lxml-5.3.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:6f651ebd0b21ec65dfca93aa629610a0dbc13dbc13554f19b0113da2e61a4763", size = 5511531 }, + { url = "https://files.pythonhosted.org/packages/85/3f/6a99a12d9438316f4fc86ef88c5d4c8fb674247b17f3173ecadd8346b671/lxml-5.3.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:f422a209d2455c56849442ae42f25dbaaba1c6c3f501d58761c619c7836642ec", size = 5402065 }, + { url = "https://files.pythonhosted.org/packages/80/8a/df47bff6ad5ac57335bf552babfb2408f9eb680c074ec1ba412a1a6af2c5/lxml-5.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:62f7fdb0d1ed2065451f086519865b4c90aa19aed51081979ecd05a21eb4d1be", size = 5069775 }, + { url = "https://files.pythonhosted.org/packages/08/ae/e7ad0f0fbe4b6368c5ee1e3ef0c3365098d806d42379c46c1ba2802a52f7/lxml-5.3.0-cp311-cp311-win32.whl", hash = "sha256:c6379f35350b655fd817cd0d6cbeef7f265f3ae5fedb1caae2eb442bbeae9ab9", size = 3474226 }, + { url = "https://files.pythonhosted.org/packages/c3/b5/91c2249bfac02ee514ab135e9304b89d55967be7e53e94a879b74eec7a5c/lxml-5.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:9c52100e2c2dbb0649b90467935c4b0de5528833c76a35ea1a2691ec9f1ee7a1", size = 3814971 }, + { url = "https://files.pythonhosted.org/packages/eb/6d/d1f1c5e40c64bf62afd7a3f9b34ce18a586a1cccbf71e783cd0a6d8e8971/lxml-5.3.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:e99f5507401436fdcc85036a2e7dc2e28d962550afe1cbfc07c40e454256a859", size = 8171753 }, + { url = "https://files.pythonhosted.org/packages/bd/83/26b1864921869784355459f374896dcf8b44d4af3b15d7697e9156cb2de9/lxml-5.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:384aacddf2e5813a36495233b64cb96b1949da72bef933918ba5c84e06af8f0e", size = 4441955 }, + { url = "https://files.pythonhosted.org/packages/e0/d2/e9bff9fb359226c25cda3538f664f54f2804f4b37b0d7c944639e1a51f69/lxml-5.3.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:874a216bf6afaf97c263b56371434e47e2c652d215788396f60477540298218f", size = 5050778 }, + { url = "https://files.pythonhosted.org/packages/88/69/6972bfafa8cd3ddc8562b126dd607011e218e17be313a8b1b9cc5a0ee876/lxml-5.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65ab5685d56914b9a2a34d67dd5488b83213d680b0c5d10b47f81da5a16b0b0e", size = 4748628 }, + { url = "https://files.pythonhosted.org/packages/5d/ea/a6523c7c7f6dc755a6eed3d2f6d6646617cad4d3d6d8ce4ed71bfd2362c8/lxml-5.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aac0bbd3e8dd2d9c45ceb82249e8bdd3ac99131a32b4d35c8af3cc9db1657179", size = 5322215 }, + { url = "https://files.pythonhosted.org/packages/99/37/396fbd24a70f62b31d988e4500f2068c7f3fd399d2fd45257d13eab51a6f/lxml-5.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b369d3db3c22ed14c75ccd5af429086f166a19627e84a8fdade3f8f31426e52a", size = 4813963 }, + { url = "https://files.pythonhosted.org/packages/09/91/e6136f17459a11ce1757df864b213efbeab7adcb2efa63efb1b846ab6723/lxml-5.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c24037349665434f375645fa9d1f5304800cec574d0310f618490c871fd902b3", size = 4923353 }, + { url = "https://files.pythonhosted.org/packages/1d/7c/2eeecf87c9a1fca4f84f991067c693e67340f2b7127fc3eca8fa29d75ee3/lxml-5.3.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:62d172f358f33a26d6b41b28c170c63886742f5b6772a42b59b4f0fa10526cb1", size = 4740541 }, + { url = "https://files.pythonhosted.org/packages/3b/ed/4c38ba58defca84f5f0d0ac2480fdcd99fc7ae4b28fc417c93640a6949ae/lxml-5.3.0-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:c1f794c02903c2824fccce5b20c339a1a14b114e83b306ff11b597c5f71a1c8d", size = 5346504 }, + { url = "https://files.pythonhosted.org/packages/a5/22/bbd3995437e5745cb4c2b5d89088d70ab19d4feabf8a27a24cecb9745464/lxml-5.3.0-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:5d6a6972b93c426ace71e0be9a6f4b2cfae9b1baed2eed2006076a746692288c", size = 4898077 }, + { url = "https://files.pythonhosted.org/packages/0a/6e/94537acfb5b8f18235d13186d247bca478fea5e87d224644e0fe907df976/lxml-5.3.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:3879cc6ce938ff4eb4900d901ed63555c778731a96365e53fadb36437a131a99", size = 4946543 }, + { url = "https://files.pythonhosted.org/packages/8d/e8/4b15df533fe8e8d53363b23a41df9be907330e1fa28c7ca36893fad338ee/lxml-5.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:74068c601baff6ff021c70f0935b0c7bc528baa8ea210c202e03757c68c5a4ff", size = 4816841 }, + { url = "https://files.pythonhosted.org/packages/1a/e7/03f390ea37d1acda50bc538feb5b2bda6745b25731e4e76ab48fae7106bf/lxml-5.3.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ecd4ad8453ac17bc7ba3868371bffb46f628161ad0eefbd0a855d2c8c32dd81a", size = 5417341 }, + { url = "https://files.pythonhosted.org/packages/ea/99/d1133ab4c250da85a883c3b60249d3d3e7c64f24faff494cf0fd23f91e80/lxml-5.3.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:7e2f58095acc211eb9d8b5771bf04df9ff37d6b87618d1cbf85f92399c98dae8", size = 5327539 }, + { url = "https://files.pythonhosted.org/packages/7d/ed/e6276c8d9668028213df01f598f385b05b55a4e1b4662ee12ef05dab35aa/lxml-5.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e63601ad5cd8f860aa99d109889b5ac34de571c7ee902d6812d5d9ddcc77fa7d", size = 5012542 }, + { url = 
"https://files.pythonhosted.org/packages/36/88/684d4e800f5aa28df2a991a6a622783fb73cf0e46235cfa690f9776f032e/lxml-5.3.0-cp312-cp312-win32.whl", hash = "sha256:17e8d968d04a37c50ad9c456a286b525d78c4a1c15dd53aa46c1d8e06bf6fa30", size = 3486454 }, + { url = "https://files.pythonhosted.org/packages/fc/82/ace5a5676051e60355bd8fb945df7b1ba4f4fb8447f2010fb816bfd57724/lxml-5.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:c1a69e58a6bb2de65902051d57fde951febad631a20a64572677a1052690482f", size = 3816857 }, + { url = "https://files.pythonhosted.org/packages/94/6a/42141e4d373903bfea6f8e94b2f554d05506dfda522ada5343c651410dc8/lxml-5.3.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8c72e9563347c7395910de6a3100a4840a75a6f60e05af5e58566868d5eb2d6a", size = 8156284 }, + { url = "https://files.pythonhosted.org/packages/91/5e/fa097f0f7d8b3d113fb7312c6308af702f2667f22644441715be961f2c7e/lxml-5.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e92ce66cd919d18d14b3856906a61d3f6b6a8500e0794142338da644260595cd", size = 4432407 }, + { url = "https://files.pythonhosted.org/packages/2d/a1/b901988aa6d4ff937f2e5cfc114e4ec561901ff00660c3e56713642728da/lxml-5.3.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d04f064bebdfef9240478f7a779e8c5dc32b8b7b0b2fc6a62e39b928d428e51", size = 5048331 }, + { url = "https://files.pythonhosted.org/packages/30/0f/b2a54f48e52de578b71bbe2a2f8160672a8a5e103df3a78da53907e8c7ed/lxml-5.3.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c2fb570d7823c2bbaf8b419ba6e5662137f8166e364a8b2b91051a1fb40ab8b", size = 4744835 }, + { url = "https://files.pythonhosted.org/packages/82/9d/b000c15538b60934589e83826ecbc437a1586488d7c13f8ee5ff1f79a9b8/lxml-5.3.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0c120f43553ec759f8de1fee2f4794452b0946773299d44c36bfe18e83caf002", size = 5316649 }, + { url = "https://files.pythonhosted.org/packages/e3/ee/ffbb9eaff5e541922611d2c56b175c45893d1c0b8b11e5a497708a6a3b3b/lxml-5.3.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:562e7494778a69086f0312ec9689f6b6ac1c6b65670ed7d0267e49f57ffa08c4", size = 4812046 }, + { url = "https://files.pythonhosted.org/packages/15/ff/7ff89d567485c7b943cdac316087f16b2399a8b997007ed352a1248397e5/lxml-5.3.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:423b121f7e6fa514ba0c7918e56955a1d4470ed35faa03e3d9f0e3baa4c7e492", size = 4918597 }, + { url = "https://files.pythonhosted.org/packages/c6/a3/535b6ed8c048412ff51268bdf4bf1cf052a37aa7e31d2e6518038a883b29/lxml-5.3.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:c00f323cc00576df6165cc9d21a4c21285fa6b9989c5c39830c3903dc4303ef3", size = 4738071 }, + { url = "https://files.pythonhosted.org/packages/7a/8f/cbbfa59cb4d4fd677fe183725a76d8c956495d7a3c7f111ab8f5e13d2e83/lxml-5.3.0-cp313-cp313-manylinux_2_28_ppc64le.whl", hash = "sha256:1fdc9fae8dd4c763e8a31e7630afef517eab9f5d5d31a278df087f307bf601f4", size = 5342213 }, + { url = "https://files.pythonhosted.org/packages/5c/fb/db4c10dd9958d4b52e34d1d1f7c1f434422aeaf6ae2bbaaff2264351d944/lxml-5.3.0-cp313-cp313-manylinux_2_28_s390x.whl", hash = "sha256:658f2aa69d31e09699705949b5fc4719cbecbd4a97f9656a232e7d6c7be1a367", size = 4893749 }, + { url = "https://files.pythonhosted.org/packages/f2/38/bb4581c143957c47740de18a3281a0cab7722390a77cc6e610e8ebf2d736/lxml-5.3.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = 
"sha256:1473427aff3d66a3fa2199004c3e601e6c4500ab86696edffdbc84954c72d832", size = 4945901 }, + { url = "https://files.pythonhosted.org/packages/fc/d5/18b7de4960c731e98037bd48fa9f8e6e8f2558e6fbca4303d9b14d21ef3b/lxml-5.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a87de7dd873bf9a792bf1e58b1c3887b9264036629a5bf2d2e6579fe8e73edff", size = 4815447 }, + { url = "https://files.pythonhosted.org/packages/97/a8/cd51ceaad6eb849246559a8ef60ae55065a3df550fc5fcd27014361c1bab/lxml-5.3.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:0d7b36afa46c97875303a94e8f3ad932bf78bace9e18e603f2085b652422edcd", size = 5411186 }, + { url = "https://files.pythonhosted.org/packages/89/c3/1e3dabab519481ed7b1fdcba21dcfb8832f57000733ef0e71cf6d09a5e03/lxml-5.3.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:cf120cce539453ae086eacc0130a324e7026113510efa83ab42ef3fcfccac7fb", size = 5324481 }, + { url = "https://files.pythonhosted.org/packages/b6/17/71e9984cf0570cd202ac0a1c9ed5c1b8889b0fc8dc736f5ef0ffb181c284/lxml-5.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:df5c7333167b9674aa8ae1d4008fa4bc17a313cc490b2cca27838bbdcc6bb15b", size = 5011053 }, + { url = "https://files.pythonhosted.org/packages/69/68/9f7e6d3312a91e30829368c2b3217e750adef12a6f8eb10498249f4e8d72/lxml-5.3.0-cp313-cp313-win32.whl", hash = "sha256:c802e1c2ed9f0c06a65bc4ed0189d000ada8049312cfeab6ca635e39c9608957", size = 3485634 }, + { url = "https://files.pythonhosted.org/packages/7d/db/214290d58ad68c587bd5d6af3d34e56830438733d0d0856c0275fde43652/lxml-5.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:406246b96d552e0503e17a1006fd27edac678b3fcc9f1be71a2f94b4ff61528d", size = 3814417 }, + { url = "https://files.pythonhosted.org/packages/99/f7/b73a431c8500565aa500e99e60b448d305eaf7c0b4c893c7c5a8a69cc595/lxml-5.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7b1cd427cb0d5f7393c31b7496419da594fe600e6fdc4b105a54f82405e6626c", size = 3925431 }, + { url = "https://files.pythonhosted.org/packages/db/48/4a206623c0d093d0e3b15f415ffb4345b0bdf661a3d0b15a112948c033c7/lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:51806cfe0279e06ed8500ce19479d757db42a30fd509940b1701be9c86a5ff9a", size = 4216683 }, + { url = "https://files.pythonhosted.org/packages/54/47/577820c45dd954523ae8453b632d91e76da94ca6d9ee40d8c98dd86f916b/lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee70d08fd60c9565ba8190f41a46a54096afa0eeb8f76bd66f2c25d3b1b83005", size = 4326732 }, + { url = "https://files.pythonhosted.org/packages/68/de/96cb6d3269bc994b4f5ede8ca7bf0840f5de0a278bc6e50cb317ff71cafa/lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:8dc2c0395bea8254d8daebc76dcf8eb3a95ec2a46fa6fae5eaccee366bfe02ce", size = 4218377 }, + { url = "https://files.pythonhosted.org/packages/a5/43/19b1ef6cbffa4244a217f95cc5f41a6cb4720fed33510a49670b03c5f1a0/lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6ba0d3dcac281aad8a0e5b14c7ed6f9fa89c8612b47939fc94f80b16e2e9bc83", size = 4351237 }, + { url = "https://files.pythonhosted.org/packages/ba/b2/6a22fb5c0885da3b00e116aee81f0b829ec9ac8f736cd414b4a09413fc7d/lxml-5.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:6e91cf736959057f7aac7adfc83481e03615a8e8dd5758aa1d95ea69e8931dba", size = 3487557 }, +] + +[[package]] +name = "lz4" +version = "4.3.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/a4/31/ec1259ca8ad11568abaf090a7da719616ca96b60d097ccc5799cd0ff599c/lz4-4.3.3.tar.gz", hash = "sha256:01fe674ef2889dbb9899d8a67361e0c4a2c833af5aeb37dd505727cf5d2a131e", size = 171509 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ff/53/61258b5effac76dea5768b07042b2c3c56e15a91194cef92284a0dc0f5e7/lz4-4.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b891880c187e96339474af2a3b2bfb11a8e4732ff5034be919aa9029484cd201", size = 254266 }, + { url = "https://files.pythonhosted.org/packages/92/84/c243a5515950d72ff04220fd49903801825e4ac23691e19e7082d9d9f94b/lz4-4.3.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:222a7e35137d7539c9c33bb53fcbb26510c5748779364014235afc62b0ec797f", size = 212359 }, + { url = "https://files.pythonhosted.org/packages/10/26/5287564a909d069fdd6c25f2f420c58c5758993fa3ad2e064a7b610e6e5f/lz4-4.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f76176492ff082657ada0d0f10c794b6da5800249ef1692b35cf49b1e93e8ef7", size = 1237799 }, + { url = "https://files.pythonhosted.org/packages/cf/50/75c8f966dbcc524e7253f99b8e04c6cad7328f517eb0323abf8b4068f5bb/lz4-4.3.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1d18718f9d78182c6b60f568c9a9cec8a7204d7cb6fad4e511a2ef279e4cb05", size = 1263957 }, + { url = "https://files.pythonhosted.org/packages/91/54/0f61c77a9599beb14ac5b828e8da20a04c6eaadb4f3fdbd79a817c66eb74/lz4-4.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6cdc60e21ec70266947a48839b437d46025076eb4b12c76bd47f8e5eb8a75dcc", size = 1184035 }, + { url = "https://files.pythonhosted.org/packages/8e/84/3be7fad87d84b67cd43174d67fc567e0aa3be154f8b0a1c2c0ff8df30854/lz4-4.3.3-cp310-cp310-win32.whl", hash = "sha256:c81703b12475da73a5d66618856d04b1307e43428a7e59d98cfe5a5d608a74c6", size = 87235 }, + { url = "https://files.pythonhosted.org/packages/21/08/dc4714eb771b502deec8a714e40e5fbd2242bacd5fe55dcd29a0cb35c567/lz4-4.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:43cf03059c0f941b772c8aeb42a0813d68d7081c009542301637e5782f8a33e2", size = 99781 }, + { url = "https://files.pythonhosted.org/packages/f9/f7/cfb942edd53c8a6aba168720ccf3d6a0cac3e891a7feba97d5823b5dd047/lz4-4.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:30e8c20b8857adef7be045c65f47ab1e2c4fabba86a9fa9a997d7674a31ea6b6", size = 254267 }, + { url = "https://files.pythonhosted.org/packages/71/ca/046bd7e7e1ed4639eb398192374bc3fbf5010d3c168361fec161b63e8bfa/lz4-4.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2f7b1839f795315e480fb87d9bc60b186a98e3e5d17203c6e757611ef7dcef61", size = 212353 }, + { url = "https://files.pythonhosted.org/packages/0c/c2/5beb6a7bb7fd27cd5fe5bb93c15636d30987794b161e4609fbf20dc3b5c7/lz4-4.3.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edfd858985c23523f4e5a7526ca6ee65ff930207a7ec8a8f57a01eae506aaee7", size = 1239095 }, + { url = "https://files.pythonhosted.org/packages/cf/d4/12915eb3083dfd1746d50b71b73334030b129cd25abbed9133dd2d413c21/lz4-4.3.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e9c410b11a31dbdc94c05ac3c480cb4b222460faf9231f12538d0074e56c563", size = 1265760 }, + { url = "https://files.pythonhosted.org/packages/94/7b/5e72b7504d7675b484812bfc65fe958f7649a64e0d6fe35c11812511f0b5/lz4-4.3.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:d2507ee9c99dbddd191c86f0e0c8b724c76d26b0602db9ea23232304382e1f21", size = 1185451 }, + { url = "https://files.pythonhosted.org/packages/2f/b5/3726a678b3a0c64d24e71179e35e7ff8e3553da9d32c2fddce879d042b63/lz4-4.3.3-cp311-cp311-win32.whl", hash = "sha256:f180904f33bdd1e92967923a43c22899e303906d19b2cf8bb547db6653ea6e7d", size = 87232 }, + { url = "https://files.pythonhosted.org/packages/55/f9/69ed96043dae4d982286a4dda2feb473f49e95e4c90a928ec583d93769a2/lz4-4.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:b14d948e6dce389f9a7afc666d60dd1e35fa2138a8ec5306d30cd2e30d36b40c", size = 99794 }, + { url = "https://files.pythonhosted.org/packages/4d/6f/081811b17ccaec5f06b3030756af2737841447849118a6e1078481a78c6c/lz4-4.3.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e36cd7b9d4d920d3bfc2369840da506fa68258f7bb176b8743189793c055e43d", size = 254213 }, + { url = "https://files.pythonhosted.org/packages/53/4d/8e04ef75feff8848ba3c624ce81c7732bdcea5f8f994758afa88cd3d7764/lz4-4.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:31ea4be9d0059c00b2572d700bf2c1bc82f241f2c3282034a759c9a4d6ca4dc2", size = 212354 }, + { url = "https://files.pythonhosted.org/packages/a3/04/257a72d6a879dbc8c669018989f776fcdd5b4bf3c2c51c09a54f1ca31721/lz4-4.3.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33c9a6fd20767ccaf70649982f8f3eeb0884035c150c0b818ea660152cf3c809", size = 1238643 }, + { url = "https://files.pythonhosted.org/packages/d9/93/4a7e489156fa7ded03ba9cde4a8ca7f373672b5787cac9a0391befa752a1/lz4-4.3.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bca8fccc15e3add173da91be8f34121578dc777711ffd98d399be35487c934bf", size = 1265014 }, + { url = "https://files.pythonhosted.org/packages/fd/a4/f84ebc23bc7602623b1b003b4e1120cbf86fb03a35c595c226be1985449b/lz4-4.3.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7d84b479ddf39fe3ea05387f10b779155fc0990125f4fb35d636114e1c63a2e", size = 1184881 }, + { url = "https://files.pythonhosted.org/packages/de/3d/8ba48305378e84908221de143a21ba0c0ce52778893865cf85b66b1068da/lz4-4.3.3-cp312-cp312-win32.whl", hash = "sha256:337cb94488a1b060ef1685187d6ad4ba8bc61d26d631d7ba909ee984ea736be1", size = 87241 }, + { url = "https://files.pythonhosted.org/packages/c4/5d/7b70965a0692de29af2af1007fe837f46fd456bbe2aa8f838a8543a3b5cb/lz4-4.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:5d35533bf2cee56f38ced91f766cd0038b6abf46f438a80d50c52750088be93f", size = 99776 }, +] + +[[package]] +name = "mako" +version = "1.3.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/67/03/fb5ba97ff65ce64f6d35b582aacffc26b693a98053fa831ab43a437cbddb/Mako-1.3.5.tar.gz", hash = "sha256:48dbc20568c1d276a2698b36d968fa76161bf127194907ea6fc594fa81f943bc", size = 392738 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/03/62/70f5a0c2dd208f9f3f2f9afd103aec42ee4d9ad2401d78342f75e9b8da36/Mako-1.3.5-py3-none-any.whl", hash = "sha256:260f1dbc3a519453a9c856dedfe4beb4e50bd5a26d96386cb6c80856556bb91a", size = 78565 }, +] + +[[package]] +name = "markdown-it-py" +version = "3.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mdurl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/38/71/3b932df36c1a044d397a1f92d1cf91ee0a503d91e470cbd670aa66b07ed0/markdown-it-py-3.0.0.tar.gz", hash = 
"sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb", size = 74596 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528 }, +] + +[[package]] +name = "markupsafe" +version = "2.1.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/87/5b/aae44c6655f3801e81aa3eef09dbbf012431987ba564d7231722f68df02d/MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b", size = 19384 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e4/54/ad5eb37bf9d51800010a74e4665425831a9db4e7c4e0fde4352e391e808e/MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc", size = 18206 }, + { url = "https://files.pythonhosted.org/packages/6a/4a/a4d49415e600bacae038c67f9fecc1d5433b9d3c71a4de6f33537b89654c/MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5", size = 14079 }, + { url = "https://files.pythonhosted.org/packages/0a/7b/85681ae3c33c385b10ac0f8dd025c30af83c78cec1c37a6aa3b55e67f5ec/MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46", size = 26620 }, + { url = "https://files.pythonhosted.org/packages/7c/52/2b1b570f6b8b803cef5ac28fdf78c0da318916c7d2fe9402a84d591b394c/MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f", size = 25818 }, + { url = "https://files.pythonhosted.org/packages/29/fe/a36ba8c7ca55621620b2d7c585313efd10729e63ef81e4e61f52330da781/MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900", size = 25493 }, + { url = "https://files.pythonhosted.org/packages/60/ae/9c60231cdfda003434e8bd27282b1f4e197ad5a710c14bee8bea8a9ca4f0/MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff", size = 30630 }, + { url = "https://files.pythonhosted.org/packages/65/dc/1510be4d179869f5dafe071aecb3f1f41b45d37c02329dfba01ff59e5ac5/MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad", size = 29745 }, + { url = "https://files.pythonhosted.org/packages/30/39/8d845dd7d0b0613d86e0ef89549bfb5f61ed781f59af45fc96496e897f3a/MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd", size = 30021 }, + { url = "https://files.pythonhosted.org/packages/c7/5c/356a6f62e4f3c5fbf2602b4771376af22a3b16efa74eb8716fb4e328e01e/MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4", size = 16659 }, + { url = "https://files.pythonhosted.org/packages/69/48/acbf292615c65f0604a0c6fc402ce6d8c991276e16c80c46a8f758fbd30c/MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5", size = 17213 }, + { url = 
"https://files.pythonhosted.org/packages/11/e7/291e55127bb2ae67c64d66cef01432b5933859dfb7d6949daa721b89d0b3/MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f", size = 18219 }, + { url = "https://files.pythonhosted.org/packages/6b/cb/aed7a284c00dfa7c0682d14df85ad4955a350a21d2e3b06d8240497359bf/MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2", size = 14098 }, + { url = "https://files.pythonhosted.org/packages/1c/cf/35fe557e53709e93feb65575c93927942087e9b97213eabc3fe9d5b25a55/MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced", size = 29014 }, + { url = "https://files.pythonhosted.org/packages/97/18/c30da5e7a0e7f4603abfc6780574131221d9148f323752c2755d48abad30/MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5", size = 28220 }, + { url = "https://files.pythonhosted.org/packages/0c/40/2e73e7d532d030b1e41180807a80d564eda53babaf04d65e15c1cf897e40/MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c", size = 27756 }, + { url = "https://files.pythonhosted.org/packages/18/46/5dca760547e8c59c5311b332f70605d24c99d1303dd9a6e1fc3ed0d73561/MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f", size = 33988 }, + { url = "https://files.pythonhosted.org/packages/6d/c5/27febe918ac36397919cd4a67d5579cbbfa8da027fa1238af6285bb368ea/MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a", size = 32718 }, + { url = "https://files.pythonhosted.org/packages/f8/81/56e567126a2c2bc2684d6391332e357589a96a76cb9f8e5052d85cb0ead8/MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f", size = 33317 }, + { url = "https://files.pythonhosted.org/packages/00/0b/23f4b2470accb53285c613a3ab9ec19dc944eaf53592cb6d9e2af8aa24cc/MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906", size = 16670 }, + { url = "https://files.pythonhosted.org/packages/b7/a2/c78a06a9ec6d04b3445a949615c4c7ed86a0b2eb68e44e7541b9d57067cc/MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617", size = 17224 }, + { url = "https://files.pythonhosted.org/packages/53/bd/583bf3e4c8d6a321938c13f49d44024dbe5ed63e0a7ba127e454a66da974/MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1", size = 18215 }, + { url = "https://files.pythonhosted.org/packages/48/d6/e7cd795fc710292c3af3a06d80868ce4b02bfbbf370b7cee11d282815a2a/MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4", size = 14069 }, + { url = "https://files.pythonhosted.org/packages/51/b5/5d8ec796e2a08fc814a2c7d2584b55f889a55cf17dd1a90f2beb70744e5c/MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee", size = 29452 }, + { url = "https://files.pythonhosted.org/packages/0a/0d/2454f072fae3b5a137c119abf15465d1771319dfe9e4acbb31722a0fff91/MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5", size = 28462 }, + { url = "https://files.pythonhosted.org/packages/2d/75/fd6cb2e68780f72d47e6671840ca517bda5ef663d30ada7616b0462ad1e3/MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b", size = 27869 }, + { url = "https://files.pythonhosted.org/packages/b0/81/147c477391c2750e8fc7705829f7351cf1cd3be64406edcf900dc633feb2/MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a", size = 33906 }, + { url = "https://files.pythonhosted.org/packages/8b/ff/9a52b71839d7a256b563e85d11050e307121000dcebc97df120176b3ad93/MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f", size = 32296 }, + { url = "https://files.pythonhosted.org/packages/88/07/2dc76aa51b481eb96a4c3198894f38b480490e834479611a4053fbf08623/MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169", size = 33038 }, + { url = "https://files.pythonhosted.org/packages/96/0c/620c1fb3661858c0e37eb3cbffd8c6f732a67cd97296f725789679801b31/MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad", size = 16572 }, + { url = "https://files.pythonhosted.org/packages/3f/14/c3554d512d5f9100a95e737502f4a2323a1959f6d0d01e0d0997b35f7b10/MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb", size = 17127 }, +] + +[[package]] +name = "marshmallow" +version = "3.22.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "packaging" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/70/40/faa10dc4500bca85f41ca9d8cefab282dd23d0fcc7a9b5fab40691e72e76/marshmallow-3.22.0.tar.gz", hash = "sha256:4972f529104a220bb8637d595aa4c9762afbe7f7a77d82dc58c1615d70c5823e", size = 176836 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3c/78/c1de55eb3311f2c200a8b91724414b8d6f5ae78891c15d9d936ea43c3dba/marshmallow-3.22.0-py3-none-any.whl", hash = "sha256:71a2dce49ef901c3f97ed296ae5051135fd3febd2bf43afe0ae9a82143a494d9", size = 49334 }, +] + +[[package]] +name = "matplotlib-inline" +version = "0.1.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "traitlets" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/99/5b/a36a337438a14116b16480db471ad061c36c3694df7c2084a0da7ba538b7/matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90", size = 8159 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8f/8e/9ad090d3553c280a8060fbf6e24dc1c0c29704ee7d1c372f0c174aa59285/matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca", size = 9899 }, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979 }, +] + +[[package]] +name = "mem0ai" +version = "0.0.20" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "openai" }, + { name = "posthog" }, + { name = "pydantic" }, + { name = "pytz" }, + { name = "qdrant-client" }, + { name = "sqlalchemy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6d/9d/59233178b9852d0b652bae83902bb2a39798250064157e6f822b976216d0/mem0ai-0.0.20.tar.gz", hash = "sha256:459b96850156c8e51e321e3ab4e5f86fb00d75532c16ad41a3eb09578e0ce00a", size = 35391 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/62/dd/8fdc8d1e1a7f87ad9f2a594b6f1691ecff589ec158d614a6089c6b6f1062/mem0ai-0.0.20-py3-none-any.whl", hash = "sha256:c19b2082173c818f3516279f0924bfd763e2d18175560332c94e415e5131fd3b", size = 50820 }, +] + +[[package]] +name = "mmh3" +version = "5.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9c/e2/40dc31be41d4913c488d55f1e75a276a5d1a840f5b79c2652d29eb7bf5ed/mmh3-5.0.0.tar.gz", hash = "sha256:60d1713457789c70292f1f04ca984e3175fc66e6c3545582fd2b4af7f5a61c73", size = 28684 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/99/de/19c97460492496ed5e21d8d7cffb86e26e27424e594992da7e111abf289d/mmh3-5.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e65e16c9de793bfa334355c60fccc859faf1c0b707d847252841cee72b5309df", size = 48553 }, + { url = "https://files.pythonhosted.org/packages/fa/89/d52316cf1565374a33318009edccc92d0d81d2e8ac141924d4827e58fc2d/mmh3-5.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:66d5423c65aebe94f244e8204cc8b39a4b970e342fb619621376af90c5f9a421", size = 34034 }, + { url = "https://files.pythonhosted.org/packages/bc/05/35e048b5e77c989a16a225e915c6ad28db231ffcc43e6562774ec5e49195/mmh3-5.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5bfc15d11bcc5ce96254f5399582104c566a1eeeb91072ff0a0de92b24f704ff", size = 33896 }, + { url = "https://files.pythonhosted.org/packages/04/e6/22b1a7f1ecae2fab2ad58cfe0980521db6f26d6f0bc4e12149dde8128fed/mmh3-5.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1616cb621cab3bc5b58dba9605311c346b45e732818f12f943a803b9a641f09c", size = 89190 }, + { url = "https://files.pythonhosted.org/packages/ad/3e/9e14a3c742dbc9c584aef304c1c563754f18293369e615a2e8f724f13e23/mmh3-5.0.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d3e3d94553b321b26952d507dace15509463604ead98fece710237ca8cb5983d", size = 94082 }, + { url = "https://files.pythonhosted.org/packages/cb/f7/94f1988fef130ce1ac4aad243c7952fb2620c312613a0ca7178a148451e0/mmh3-5.0.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b46b6d5c0fcd2620ebc699099fed6fda0101805d7d4c274fa2b2c384e8b9e8b9", size = 93741 }, + { url = "https://files.pythonhosted.org/packages/72/ba/7ad763b950a68c070fe10e02a6a7d5e4e7d23f6802fb950bb26203a731e7/mmh3-5.0.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:5a2a9cd368f2a15f06f2749db764e410b7dc260822319f169a52c649da078bf6", size = 82005 }, + { url = "https://files.pythonhosted.org/packages/94/64/436925da3a70fed754e4636d84d4c0251e7d3751108f3f91c88aaa81beb8/mmh3-5.0.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:903cdb66f8d8de26a0a6c737a13664388b5ed14813f84793bccbba44ffa09fa2", size = 88921 }, + { url = "https://files.pythonhosted.org/packages/d4/2c/c6a4adba7edf8a4c8f3af1d0bea9f882bb290fd74f7d601eb702b33e2840/mmh3-5.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7653d8177e4241a5f7913a675636ccc701722741b153b0a43c82464a4a865752", size = 89282 }, + { url = "https://files.pythonhosted.org/packages/8a/66/0e9ecf5f860864a3524ddb6bbe8b317f7593340e5e0cc24c9d93c985ef7d/mmh3-5.0.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:a9154a0a32de54b812d178541468cce4c73b112fbd8b5b0a4add92cfda69c390", size = 84123 }, + { url = "https://files.pythonhosted.org/packages/3e/04/f73fe2bbde831e78b4d6f5d9905aa195de7fb8115d8bc8a4f22c0e5e1145/mmh3-5.0.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:5f814a621fd0e567a121ace724ad57f28fb9972acfd54c854749ee91d5c4905c", size = 95029 }, + { url = "https://files.pythonhosted.org/packages/22/06/bc22a7c776c2f19349dc6675d2da33d24d62c2bbcfab90947aad0e615322/mmh3-5.0.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:333057bcf12804a8cae12adb2a5cef3bc50fc8de044ad6a01ae1918a342d6f0e", size = 89633 }, + { url = "https://files.pythonhosted.org/packages/d6/04/c20c2b285b0f0de6cdad691153fb634b17b192bee1e3dbd92cc0f3dd4a28/mmh3-5.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e713afe276481af7ea0330f40214c71c11486b28754aa2d1cb286b5ee9571dee", size = 88484 }, + { url = "https://files.pythonhosted.org/packages/1c/7b/c3de7359af41670a181fae919a6e1fee095aada20d3173942464f12174d4/mmh3-5.0.0-cp310-cp310-win32.whl", hash = "sha256:917b72bc8b238286d7e2c586c19d604b3b3e5a93f054020cd15530324b868e6e", size = 34836 }, + { url = "https://files.pythonhosted.org/packages/8e/50/4e629064bc48c17a970b6897bb799b2fd1774cc136231ab34ab9471c0e2d/mmh3-5.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:9a8bcaa5c0fd9c4679639c830b46e52fcc3b84502faa2aa5f3ca1dacb7cdbb1f", size = 35421 }, + { url = "https://files.pythonhosted.org/packages/b9/59/1e767bbea3b68205e78c963107932fb7e87046f78f4a74f5394aaea131cd/mmh3-5.0.0-cp310-cp310-win_arm64.whl", hash = "sha256:341ad4f902ebb7fc14751fab67fb4eedf800a1744bc9dc214a56e11b62d0bfdd", size = 32190 }, + { url = "https://files.pythonhosted.org/packages/28/b8/27d06956add5f882c7120ade726f6e772325a267a4bb0f1589397d75163d/mmh3-5.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:690cb4b36ed7c863680029c2370b4f3a0c3dc8900320eb5ce79a867eb8b37151", size = 48554 }, + { url = "https://files.pythonhosted.org/packages/60/da/135b43806e59b1fcc7a6c27a029922c60785353c84c4bd4915b603725b4f/mmh3-5.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5580c7bf9570458d8096a016f7012453306a14d2e3e4ef1267b9c9c9f506d8a4", size = 34036 }, + { url = "https://files.pythonhosted.org/packages/29/99/98d47eb4d18556a7fd13bfa4d7a358faef1ba08981c1d7cdc21642fd1bfe/mmh3-5.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f271a043545c86cce51c35e5bb6f52dceb535dcd2d46c2129a9c869b80ec2eaa", size = 33897 }, + { url = "https://files.pythonhosted.org/packages/85/7e/1da6f2c77a2db61fe73c5832f51ed604f07dd989b03027308030e09f8714/mmh3-5.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:2400e805018e04718c9367e85d694c036d21a41b37024d5b0dc8ef80cb984cf0", size = 90792 }, + { url = "https://files.pythonhosted.org/packages/a1/7e/ee067c17df65f3b30f1dbfe75b9f87ff114a04125286f55912fb5b1c5691/mmh3-5.0.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b3d75183b7d5df178bf01f0d9ac366525fd54e828d799fe3892882b83c13454b", size = 95739 }, + { url = "https://files.pythonhosted.org/packages/94/d1/d197e47e9a28e6d4faaad7ee4cab0c2c79a246a0995a0609e9a8db9fe2e4/mmh3-5.0.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ec24039151e67c5fc026ce5be8788e3e8093d9ce3e114d7a6175f453521bacc9", size = 95361 }, + { url = "https://files.pythonhosted.org/packages/4d/0d/2849b96caf2bad20f9af4a27d11a1cd6a62018b28e7f333875b15d468cce/mmh3-5.0.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88513eb686937b26d33fe7ded7ed1a68ede490d326eea2344447c0cc42fb7f97", size = 83235 }, + { url = "https://files.pythonhosted.org/packages/a7/3c/756620970d575c66677c9c13ca871d52b8b23f8f13cb6c81cc85caeb0658/mmh3-5.0.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a015a90951b31d45a01c8f92703c028d8d759f71dc31727581c916f31cacbade", size = 90467 }, + { url = "https://files.pythonhosted.org/packages/2e/63/b6a45319059855fbcb20a02bc5803deb10327842846d98d559cf1c35e89f/mmh3-5.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:337f747eaf6ab78d0be9cbbb763781ae76eb2e4c0e6523e74582877fe827ec51", size = 86122 }, + { url = "https://files.pythonhosted.org/packages/53/55/0269256d61783a783a82c189e165a00dcc5499f0670049e5c408e2162f6a/mmh3-5.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:01636d74df6371f192dc5a112f4afc43cb4119a9ea6de704469161fd4ab9e86b", size = 85142 }, + { url = "https://files.pythonhosted.org/packages/01/a7/9f68185843ba171c52b04db3ab5a39f991fbeedb89da85c2a0d533995c00/mmh3-5.0.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:3d969f1813b27bd4caac359e0ea0f6b711d252e872c0723124b58d7ac275cb0e", size = 90393 }, + { url = "https://files.pythonhosted.org/packages/8c/61/04d196fb38d79c11f4858122844916d55fd061b6650902c74f3268ce8016/mmh3-5.0.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:9f699f2232efe3907a6a05d9661edd2f4045a0e05161dc1087f72957d752a47a", size = 86743 }, + { url = "https://files.pythonhosted.org/packages/51/43/564b427dc2a3c9d90ae4946a25a14926ff3bf71625ebd944967ed0a7005d/mmh3-5.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:16f46dc1774bf3e8305fa33207a502317cd7a592cb79b0d65c84f0fbcf9d9ffa", size = 85260 }, + { url = "https://files.pythonhosted.org/packages/b8/74/f7201c1e5ef5cd84f210ba0f6a5810e2726dfdc22c1ddc5c81363b286997/mmh3-5.0.0-cp311-cp311-win32.whl", hash = "sha256:2d039b32f4194ac345c0f52441b7ff0a55735a896303a3eb9054e5f8512d84c8", size = 34838 }, + { url = "https://files.pythonhosted.org/packages/77/c1/dac0e65e482c9f6205020c06d5db20397745211d162be7dcd98e0f2cec30/mmh3-5.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:47b3823a88c5f7859580688016bff13437fd866f6132e4770b306f0c6edb01a7", size = 35417 }, + { url = "https://files.pythonhosted.org/packages/6f/29/35110041e065bacbdd6440eb29e6d9b0bb6b396d221515ffed14a10c3fd0/mmh3-5.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:2ee87816b838709bd0fc623e3277736e9465a941d27b14f35d0df1c2006e4438", size = 32190 }, + { url = 
"https://files.pythonhosted.org/packages/4a/46/e3bdf55ddadb6174414df98c64d0045d2142efc8657c9bb0686aaba3ba88/mmh3-5.0.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ed23a89a6daeb9b0d4db9fa865b125300516e8d6961f771b2dd97965bf888bce", size = 48598 }, + { url = "https://files.pythonhosted.org/packages/8f/50/a2ab4155fa338acfdc020b752057c164a3350495747f02cde4d73127d8e8/mmh3-5.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b020012e30ba0e4b002a150ca1c1d590a7387fac50dfd9b6b5dc66d9d0e61257", size = 34076 }, + { url = "https://files.pythonhosted.org/packages/57/64/6682b31df93c46b4a55ea3de1b328d6a2d651e0b01a0709b2f92ae05d26d/mmh3-5.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e86246d036da05d54e833cdc342ad9c62f38f2507b14f2c58ce2c271f22d7251", size = 33894 }, + { url = "https://files.pythonhosted.org/packages/fd/9e/6866f4cb9be5908eff335369f8d214a04718fda6ab3821f13e1069344dfb/mmh3-5.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f50535009c1aaa44f9702ad14551d12d1755b537fc25a5bd7d46c493ec4bcfaa", size = 90779 }, + { url = "https://files.pythonhosted.org/packages/0d/2c/00fded897ff8a05f6241b2e3f396094c122868d9cd6508d59ffd35faa2b6/mmh3-5.0.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a567a87026f103778b70f2b19637fb490d9c29dc7d3af9cd46185d48efbd49dc", size = 95742 }, + { url = "https://files.pythonhosted.org/packages/ee/f2/0bfc0434845034e3afe9d38775cbd91d9d8b1e0858ac1b3cdd49ff522f4e/mmh3-5.0.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ff4405f231032b8f8ae93d9a558ddc04b9aa94a59c309cb265ebe1e79ced920e", size = 95453 }, + { url = "https://files.pythonhosted.org/packages/44/42/e3fa4bd3222a87e7f8fe6444d903024efd6cc901f4ba30a02913a64824a5/mmh3-5.0.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d630fe75ef7d4ab1a95eb824d81dee15ed73020703bf030515f03283cb8f086f", size = 83332 }, + { url = "https://files.pythonhosted.org/packages/d4/d3/82a119f7a079cac3e07ae50756d85df89ed9d915856168035d1f0572f647/mmh3-5.0.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:361f9d611debc10992888cbace09dbc47391ae8b84b50c995a6d97e6314bb422", size = 90665 }, + { url = "https://files.pythonhosted.org/packages/92/3a/743c53d584acf07a8e9a78f2fc2815996d01f63acd05f57b66b8428475f0/mmh3-5.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:11e29156edb1990600cb4b91bcb371a2adf20a0fcb818837fb78408be19e9117", size = 86172 }, + { url = "https://files.pythonhosted.org/packages/6a/34/27cf51e7a4697b4a0ac2fde1cbea3ebe5ed0899645cdcfdd741d1183f0a2/mmh3-5.0.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e70c4fc340509f6554a1748fe6f539a846fbacf147255048d1702def820a1520", size = 85242 }, + { url = "https://files.pythonhosted.org/packages/b5/26/b6c6b713341674200d10b6b39d9523a54afbad305c226f14019e1505410a/mmh3-5.0.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:aa790a46370eeedb474c462820be78597452f54c5fffe1f810fc8e847faf42a1", size = 90558 }, + { url = "https://files.pythonhosted.org/packages/6e/2c/916c881e5ec5920ea1f26ea749329b195a850628e2bdc5e5dd76b74b9714/mmh3-5.0.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:ce678ac7d31e83fecc817ab630cc7254c7826de11fd2b3e8c31a8a5b0b762884", size = 87016 }, + { url = "https://files.pythonhosted.org/packages/03/8c/3f2c216f16f05da7f48893144a63f88466f8c7dbb8f6b955d3714e8e833f/mmh3-5.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:ee22cd4cad4fc7d4e5282b2729899460b03c388cde0290d411e877249f78a373", size = 85496 }, + { url = "https://files.pythonhosted.org/packages/8e/0c/f30776ad91410d35148fcd7595ca2489e6f9c8cead8350ac976fc2ccf162/mmh3-5.0.0-cp312-cp312-win32.whl", hash = "sha256:cb1a96488dc8fccf843ccdbdd10faf1939e6e18cd928c2beff17e70c2ab09ec1", size = 34857 }, + { url = "https://files.pythonhosted.org/packages/86/6b/24c9c618993a5d5612a12c156aa30b6a58e9123d9798642ed8d9608ef6f2/mmh3-5.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:897ebafa83bbbbb1958ee30cda78c7ad4b12f2b9360f96b22e488eb127b2cb4f", size = 35456 }, + { url = "https://files.pythonhosted.org/packages/88/a1/e8f896aa6a61c8655e5ba708004700c142f2c34d02222d3e1afdcf2aaead/mmh3-5.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:e6e3334985865ec3bdfcc4ae8df4a1939be048c5ae3ce1c8c309744400f8e4de", size = 32189 }, + { url = "https://files.pythonhosted.org/packages/1f/1d/621df14c109a53148717a6e9aceef93b1a65bb46938cfc176eb5fb8664b4/mmh3-5.0.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:05b09d98cbdb6ad03eb9c701e87cea005ececd4fd7d2968ff0f5a86af1ef340d", size = 48594 }, + { url = "https://files.pythonhosted.org/packages/1a/90/f3f4fe3fea68d949fbe554841e036c33d571756ae4a9921d27e8fdb5e3e8/mmh3-5.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d4ac0b8d48ce1e7561cf330ec73b9582f6773e40aaf8a771dd51a0bb483ec94f", size = 34084 }, + { url = "https://files.pythonhosted.org/packages/77/eb/f6f734766113017a9d76bf25f83a8bc7b09a3798b92d357ad0418ed18bbc/mmh3-5.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5a1c056b65be3077496ed655731eff1f65ee67f2b31f40a027f3171ba0ac20d1", size = 33893 }, + { url = "https://files.pythonhosted.org/packages/65/24/e52443fb243a479513b7c1811798dd29d285b4d5edd2ef412aa77d0637bb/mmh3-5.0.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a572adb41cf831d79b3b38c7994e5a5bc1a8ea8d7b574ce0c24272fc5abb52df", size = 90675 }, + { url = "https://files.pythonhosted.org/packages/99/ad/9f1e0d11d4ed095b0c02a424b414b51596a18dacfb8f06912d042d597bb6/mmh3-5.0.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0be4d14ab3a690bac6c127733490493b8698f84eadb9d667be7eb952281c51e4", size = 95656 }, + { url = "https://files.pythonhosted.org/packages/f2/12/a7a3b84645106ad5c64e16557a97ab305673bbb6fe14ab55c1fc23083939/mmh3-5.0.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b65b6eabd9e78e2b8eee2b8d4db34d4d2f5b540f2ac06ec0a76a1f1566f0ff7", size = 95353 }, + { url = "https://files.pythonhosted.org/packages/68/66/6ccf44dc2adcde5bfd30a642880a51cf78712fd74195d88e103b8d10d084/mmh3-5.0.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8b433656f371af5edf44cf00862e288e08327bb9e90c8aaa5e4e60dfebc62039", size = 83291 }, + { url = "https://files.pythonhosted.org/packages/43/55/a924e81867406bb0d1bf59039732e927a05a5c56b08e7b894687fda71881/mmh3-5.0.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1b12073a58be5e6408c6bd8366bbf6253defe042cdec25ee51f123c944e5a8f", size = 90524 }, + { url = "https://files.pythonhosted.org/packages/a0/c4/aa8f8527dfeff955e7be85901fde840df407a26b7f52e2a114e094c08422/mmh3-5.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:29b2c8eb7a473f6f235c2d9905912a76350dd11b42058364de984264fa4f74ca", size = 86180 }, + { url = 
"https://files.pythonhosted.org/packages/96/84/16a2a0a196c151d5d3d5513c5a5042b9c1cef428f68f2668337924b04fad/mmh3-5.0.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b178b744685de956ff84b3b2531271824a2e4372aff199ab805e1fdd7f996f5c", size = 85283 }, + { url = "https://files.pythonhosted.org/packages/e4/84/07584c6fbf82981359e3f702ed56a1e42657f4d670ae8e505c62df55a0cf/mmh3-5.0.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:fcac7a75846aec7bd168c05576dc8c9448a9705165dfa0986d0f48952eca62a4", size = 90621 }, + { url = "https://files.pythonhosted.org/packages/c9/1b/cefb28385f8bfa24c058caf106a5cf7a28a238d06a7ce27a41b50a7b06bf/mmh3-5.0.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:cc0caa0d2800d54384cc048e49e6c2b4a90cba1afff0597d7c2a5006c42b5536", size = 87006 }, + { url = "https://files.pythonhosted.org/packages/7b/cb/9da22a15b73ae5346fb51c7fab665adb9b4cf79038299c6eaea9b68b4f55/mmh3-5.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:05b10476fd5cfb0fd63ceebf7430612423e7f79e9732e0b344225aa642e12be4", size = 85546 }, + { url = "https://files.pythonhosted.org/packages/e4/03/249dc33217095088c9519ddda809e728d18a8cf1eefa2619c2586224eb2c/mmh3-5.0.0-cp313-cp313-win32.whl", hash = "sha256:7101a12a2a4b39d7748d2d83310d5e0415950ccf6b9ec8da1d35af3f50b3ef0e", size = 34852 }, + { url = "https://files.pythonhosted.org/packages/a2/b5/cb5b2fa2ceb34a65367afef283126c79f15e8f864bea50742f7184f6acf2/mmh3-5.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:47d9a9c1c48accaf78ddb77669c40c837e90be2ecddd39bf7ef2f8dacff85ca6", size = 35462 }, + { url = "https://files.pythonhosted.org/packages/8c/df/c070cf4dd4425f1c2abbff9b922ffb7a5161667a39cf32c618422406d78b/mmh3-5.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:9126155ad1a9418920360497a0b44906dce32f0732cb44378ace08c62751dd1e", size = 32193 }, +] + +[[package]] +name = "monotonic" +version = "1.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ea/ca/8e91948b782ddfbd194f323e7e7d9ba12e5877addf04fb2bf8fca38e86ac/monotonic-1.6.tar.gz", hash = "sha256:3a55207bcfed53ddd5c5bae174524062935efed17792e9de2ad0205ce9ad63f7", size = 7615 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9a/67/7e8406a29b6c45be7af7740456f7f37025f0506ae2e05fb9009a53946860/monotonic-1.6-py2.py3-none-any.whl", hash = "sha256:68687e19a14f11f26d140dd5c86f3dba4bf5df58003000ed467e0e2a69bca96c", size = 8154 }, +] + +[[package]] +name = "mpmath" +version = "1.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e0/47/dd32fa426cc72114383ac549964eecb20ecfd886d1e5ccf5340b55b02f57/mpmath-1.3.0.tar.gz", hash = "sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f", size = 508106 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/43/e3/7d92a15f894aa0c9c4b49b8ee9ac9850d6e63b03c9c32c0367a13ae62209/mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c", size = 536198 }, +] + +[[package]] +name = "multidict" +version = "6.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d6/be/504b89a5e9ca731cd47487e91c469064f8ae5af93b7259758dcfc2b9c848/multidict-6.1.0.tar.gz", hash = "sha256:22ae2ebf9b0c69d206c003e2f6a914ea33f0a932d4aa16f236afc049d9958f4a", size = 64002 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/29/68/259dee7fd14cf56a17c554125e534f6274c2860159692a414d0b402b9a6d/multidict-6.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3380252550e372e8511d49481bd836264c009adb826b23fefcc5dd3c69692f60", size = 48628 }, + { url = "https://files.pythonhosted.org/packages/50/79/53ba256069fe5386a4a9e80d4e12857ced9de295baf3e20c68cdda746e04/multidict-6.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:99f826cbf970077383d7de805c0681799491cb939c25450b9b5b3ced03ca99f1", size = 29327 }, + { url = "https://files.pythonhosted.org/packages/ff/10/71f1379b05b196dae749b5ac062e87273e3f11634f447ebac12a571d90ae/multidict-6.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a114d03b938376557927ab23f1e950827c3b893ccb94b62fd95d430fd0e5cf53", size = 29689 }, + { url = "https://files.pythonhosted.org/packages/71/45/70bac4f87438ded36ad4793793c0095de6572d433d98575a5752629ef549/multidict-6.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1c416351ee6271b2f49b56ad7f308072f6f44b37118d69c2cad94f3fa8a40d5", size = 126639 }, + { url = "https://files.pythonhosted.org/packages/80/cf/17f35b3b9509b4959303c05379c4bfb0d7dd05c3306039fc79cf035bbac0/multidict-6.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b5d83030255983181005e6cfbac1617ce9746b219bc2aad52201ad121226581", size = 134315 }, + { url = "https://files.pythonhosted.org/packages/ef/1f/652d70ab5effb33c031510a3503d4d6efc5ec93153562f1ee0acdc895a57/multidict-6.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3e97b5e938051226dc025ec80980c285b053ffb1e25a3db2a3aa3bc046bf7f56", size = 129471 }, + { url = "https://files.pythonhosted.org/packages/a6/64/2dd6c4c681688c0165dea3975a6a4eab4944ea30f35000f8b8af1df3148c/multidict-6.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d618649d4e70ac6efcbba75be98b26ef5078faad23592f9b51ca492953012429", size = 124585 }, + { url = "https://files.pythonhosted.org/packages/87/56/e6ee5459894c7e554b57ba88f7257dc3c3d2d379cb15baaa1e265b8c6165/multidict-6.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10524ebd769727ac77ef2278390fb0068d83f3acb7773792a5080f2b0abf7748", size = 116957 }, + { url = "https://files.pythonhosted.org/packages/36/9e/616ce5e8d375c24b84f14fc263c7ef1d8d5e8ef529dbc0f1df8ce71bb5b8/multidict-6.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ff3827aef427c89a25cc96ded1759271a93603aba9fb977a6d264648ebf989db", size = 128609 }, + { url = "https://files.pythonhosted.org/packages/8c/4f/4783e48a38495d000f2124020dc96bacc806a4340345211b1ab6175a6cb4/multidict-6.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:06809f4f0f7ab7ea2cabf9caca7d79c22c0758b58a71f9d32943ae13c7ace056", size = 123016 }, + { url = "https://files.pythonhosted.org/packages/3e/b3/4950551ab8fc39862ba5e9907dc821f896aa829b4524b4deefd3e12945ab/multidict-6.1.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:f179dee3b863ab1c59580ff60f9d99f632f34ccb38bf67a33ec6b3ecadd0fd76", size = 133542 }, + { url = "https://files.pythonhosted.org/packages/96/4d/f0ce6ac9914168a2a71df117935bb1f1781916acdecbb43285e225b484b8/multidict-6.1.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:aaed8b0562be4a0876ee3b6946f6869b7bcdb571a5d1496683505944e268b160", size = 130163 }, + { url = 
"https://files.pythonhosted.org/packages/be/72/17c9f67e7542a49dd252c5ae50248607dfb780bcc03035907dafefb067e3/multidict-6.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3c8b88a2ccf5493b6c8da9076fb151ba106960a2df90c2633f342f120751a9e7", size = 126832 }, + { url = "https://files.pythonhosted.org/packages/71/9f/72d719e248cbd755c8736c6d14780533a1606ffb3fbb0fbd77da9f0372da/multidict-6.1.0-cp310-cp310-win32.whl", hash = "sha256:4a9cb68166a34117d6646c0023c7b759bf197bee5ad4272f420a0141d7eb03a0", size = 26402 }, + { url = "https://files.pythonhosted.org/packages/04/5a/d88cd5d00a184e1ddffc82aa2e6e915164a6d2641ed3606e766b5d2f275a/multidict-6.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:20b9b5fbe0b88d0bdef2012ef7dee867f874b72528cf1d08f1d59b0e3850129d", size = 28800 }, + { url = "https://files.pythonhosted.org/packages/93/13/df3505a46d0cd08428e4c8169a196131d1b0c4b515c3649829258843dde6/multidict-6.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3efe2c2cb5763f2f1b275ad2bf7a287d3f7ebbef35648a9726e3b69284a4f3d6", size = 48570 }, + { url = "https://files.pythonhosted.org/packages/f0/e1/a215908bfae1343cdb72f805366592bdd60487b4232d039c437fe8f5013d/multidict-6.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c7053d3b0353a8b9de430a4f4b4268ac9a4fb3481af37dfe49825bf45ca24156", size = 29316 }, + { url = "https://files.pythonhosted.org/packages/70/0f/6dc70ddf5d442702ed74f298d69977f904960b82368532c88e854b79f72b/multidict-6.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:27e5fc84ccef8dfaabb09d82b7d179c7cf1a3fbc8a966f8274fcb4ab2eb4cadb", size = 29640 }, + { url = "https://files.pythonhosted.org/packages/d8/6d/9c87b73a13d1cdea30b321ef4b3824449866bd7f7127eceed066ccb9b9ff/multidict-6.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e2b90b43e696f25c62656389d32236e049568b39320e2735d51f08fd362761b", size = 131067 }, + { url = "https://files.pythonhosted.org/packages/cc/1e/1b34154fef373371fd6c65125b3d42ff5f56c7ccc6bfff91b9b3c60ae9e0/multidict-6.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d83a047959d38a7ff552ff94be767b7fd79b831ad1cd9920662db05fec24fe72", size = 138507 }, + { url = "https://files.pythonhosted.org/packages/fb/e0/0bc6b2bac6e461822b5f575eae85da6aae76d0e2a79b6665d6206b8e2e48/multidict-6.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d1a9dd711d0877a1ece3d2e4fea11a8e75741ca21954c919406b44e7cf971304", size = 133905 }, + { url = "https://files.pythonhosted.org/packages/ba/af/73d13b918071ff9b2205fcf773d316e0f8fefb4ec65354bbcf0b10908cc6/multidict-6.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec2abea24d98246b94913b76a125e855eb5c434f7c46546046372fe60f666351", size = 129004 }, + { url = "https://files.pythonhosted.org/packages/74/21/23960627b00ed39643302d81bcda44c9444ebcdc04ee5bedd0757513f259/multidict-6.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4867cafcbc6585e4b678876c489b9273b13e9fff9f6d6d66add5e15d11d926cb", size = 121308 }, + { url = "https://files.pythonhosted.org/packages/8b/5c/cf282263ffce4a596ed0bb2aa1a1dddfe1996d6a62d08842a8d4b33dca13/multidict-6.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5b48204e8d955c47c55b72779802b219a39acc3ee3d0116d5080c388970b76e3", size = 132608 }, + { url = "https://files.pythonhosted.org/packages/d7/3e/97e778c041c72063f42b290888daff008d3ab1427f5b09b714f5a8eff294/multidict-6.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = 
"sha256:d8fff389528cad1618fb4b26b95550327495462cd745d879a8c7c2115248e399", size = 127029 }, + { url = "https://files.pythonhosted.org/packages/47/ac/3efb7bfe2f3aefcf8d103e9a7162572f01936155ab2f7ebcc7c255a23212/multidict-6.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a7a9541cd308eed5e30318430a9c74d2132e9a8cb46b901326272d780bf2d423", size = 137594 }, + { url = "https://files.pythonhosted.org/packages/42/9b/6c6e9e8dc4f915fc90a9b7798c44a30773dea2995fdcb619870e705afe2b/multidict-6.1.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:da1758c76f50c39a2efd5e9859ce7d776317eb1dd34317c8152ac9251fc574a3", size = 134556 }, + { url = "https://files.pythonhosted.org/packages/1d/10/8e881743b26aaf718379a14ac58572a240e8293a1c9d68e1418fb11c0f90/multidict-6.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c943a53e9186688b45b323602298ab727d8865d8c9ee0b17f8d62d14b56f0753", size = 130993 }, + { url = "https://files.pythonhosted.org/packages/45/84/3eb91b4b557442802d058a7579e864b329968c8d0ea57d907e7023c677f2/multidict-6.1.0-cp311-cp311-win32.whl", hash = "sha256:90f8717cb649eea3504091e640a1b8568faad18bd4b9fcd692853a04475a4b80", size = 26405 }, + { url = "https://files.pythonhosted.org/packages/9f/0b/ad879847ecbf6d27e90a6eabb7eff6b62c129eefe617ea45eae7c1f0aead/multidict-6.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:82176036e65644a6cc5bd619f65f6f19781e8ec2e5330f51aa9ada7504cc1926", size = 28795 }, + { url = "https://files.pythonhosted.org/packages/fd/16/92057c74ba3b96d5e211b553895cd6dc7cc4d1e43d9ab8fafc727681ef71/multidict-6.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b04772ed465fa3cc947db808fa306d79b43e896beb677a56fb2347ca1a49c1fa", size = 48713 }, + { url = "https://files.pythonhosted.org/packages/94/3d/37d1b8893ae79716179540b89fc6a0ee56b4a65fcc0d63535c6f5d96f217/multidict-6.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6180c0ae073bddeb5a97a38c03f30c233e0a4d39cd86166251617d1bbd0af436", size = 29516 }, + { url = "https://files.pythonhosted.org/packages/a2/12/adb6b3200c363062f805275b4c1e656be2b3681aada66c80129932ff0bae/multidict-6.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:071120490b47aa997cca00666923a83f02c7fbb44f71cf7f136df753f7fa8761", size = 29557 }, + { url = "https://files.pythonhosted.org/packages/47/e9/604bb05e6e5bce1e6a5cf80a474e0f072e80d8ac105f1b994a53e0b28c42/multidict-6.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50b3a2710631848991d0bf7de077502e8994c804bb805aeb2925a981de58ec2e", size = 130170 }, + { url = "https://files.pythonhosted.org/packages/7e/13/9efa50801785eccbf7086b3c83b71a4fb501a4d43549c2f2f80b8787d69f/multidict-6.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b58c621844d55e71c1b7f7c498ce5aa6985d743a1a59034c57a905b3f153c1ef", size = 134836 }, + { url = "https://files.pythonhosted.org/packages/bf/0f/93808b765192780d117814a6dfcc2e75de6dcc610009ad408b8814dca3ba/multidict-6.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55b6d90641869892caa9ca42ff913f7ff1c5ece06474fbd32fb2cf6834726c95", size = 133475 }, + { url = "https://files.pythonhosted.org/packages/d3/c8/529101d7176fe7dfe1d99604e48d69c5dfdcadb4f06561f465c8ef12b4df/multidict-6.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b820514bfc0b98a30e3d85462084779900347e4d49267f747ff54060cc33925", size = 131049 }, + { url = 
"https://files.pythonhosted.org/packages/ca/0c/fc85b439014d5a58063e19c3a158a889deec399d47b5269a0f3b6a2e28bc/multidict-6.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10a9b09aba0c5b48c53761b7c720aaaf7cf236d5fe394cd399c7ba662d5f9966", size = 120370 }, + { url = "https://files.pythonhosted.org/packages/db/46/d4416eb20176492d2258fbd47b4abe729ff3b6e9c829ea4236f93c865089/multidict-6.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1e16bf3e5fc9f44632affb159d30a437bfe286ce9e02754759be5536b169b305", size = 125178 }, + { url = "https://files.pythonhosted.org/packages/5b/46/73697ad7ec521df7de5531a32780bbfd908ded0643cbe457f981a701457c/multidict-6.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:76f364861c3bfc98cbbcbd402d83454ed9e01a5224bb3a28bf70002a230f73e2", size = 119567 }, + { url = "https://files.pythonhosted.org/packages/cd/ed/51f060e2cb0e7635329fa6ff930aa5cffa17f4c7f5c6c3ddc3500708e2f2/multidict-6.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:820c661588bd01a0aa62a1283f20d2be4281b086f80dad9e955e690c75fb54a2", size = 129822 }, + { url = "https://files.pythonhosted.org/packages/df/9e/ee7d1954b1331da3eddea0c4e08d9142da5f14b1321c7301f5014f49d492/multidict-6.1.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:0e5f362e895bc5b9e67fe6e4ded2492d8124bdf817827f33c5b46c2fe3ffaca6", size = 128656 }, + { url = "https://files.pythonhosted.org/packages/77/00/8538f11e3356b5d95fa4b024aa566cde7a38aa7a5f08f4912b32a037c5dc/multidict-6.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3ec660d19bbc671e3a6443325f07263be452c453ac9e512f5eb935e7d4ac28b3", size = 125360 }, + { url = "https://files.pythonhosted.org/packages/be/05/5d334c1f2462d43fec2363cd00b1c44c93a78c3925d952e9a71caf662e96/multidict-6.1.0-cp312-cp312-win32.whl", hash = "sha256:58130ecf8f7b8112cdb841486404f1282b9c86ccb30d3519faf301b2e5659133", size = 26382 }, + { url = "https://files.pythonhosted.org/packages/a3/bf/f332a13486b1ed0496d624bcc7e8357bb8053823e8cd4b9a18edc1d97e73/multidict-6.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:188215fc0aafb8e03341995e7c4797860181562380f81ed0a87ff455b70bf1f1", size = 28529 }, + { url = "https://files.pythonhosted.org/packages/22/67/1c7c0f39fe069aa4e5d794f323be24bf4d33d62d2a348acdb7991f8f30db/multidict-6.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:d569388c381b24671589335a3be6e1d45546c2988c2ebe30fdcada8457a31008", size = 48771 }, + { url = "https://files.pythonhosted.org/packages/3c/25/c186ee7b212bdf0df2519eacfb1981a017bda34392c67542c274651daf23/multidict-6.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:052e10d2d37810b99cc170b785945421141bf7bb7d2f8799d431e7db229c385f", size = 29533 }, + { url = "https://files.pythonhosted.org/packages/67/5e/04575fd837e0958e324ca035b339cea174554f6f641d3fb2b4f2e7ff44a2/multidict-6.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f90c822a402cb865e396a504f9fc8173ef34212a342d92e362ca498cad308e28", size = 29595 }, + { url = "https://files.pythonhosted.org/packages/d3/b2/e56388f86663810c07cfe4a3c3d87227f3811eeb2d08450b9e5d19d78876/multidict-6.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b225d95519a5bf73860323e633a664b0d85ad3d5bede6d30d95b35d4dfe8805b", size = 130094 }, + { url = "https://files.pythonhosted.org/packages/6c/ee/30ae9b4186a644d284543d55d491fbd4239b015d36b23fea43b4c94f7052/multidict-6.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:23bfd518810af7de1116313ebd9092cb9aa629beb12f6ed631ad53356ed6b86c", size = 134876 }, + { url = "https://files.pythonhosted.org/packages/84/c7/70461c13ba8ce3c779503c70ec9d0345ae84de04521c1f45a04d5f48943d/multidict-6.1.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c09fcfdccdd0b57867577b719c69e347a436b86cd83747f179dbf0cc0d4c1f3", size = 133500 }, + { url = "https://files.pythonhosted.org/packages/4a/9f/002af221253f10f99959561123fae676148dd730e2daa2cd053846a58507/multidict-6.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf6bea52ec97e95560af5ae576bdac3aa3aae0b6758c6efa115236d9e07dae44", size = 131099 }, + { url = "https://files.pythonhosted.org/packages/82/42/d1c7a7301d52af79d88548a97e297f9d99c961ad76bbe6f67442bb77f097/multidict-6.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57feec87371dbb3520da6192213c7d6fc892d5589a93db548331954de8248fd2", size = 120403 }, + { url = "https://files.pythonhosted.org/packages/68/f3/471985c2c7ac707547553e8f37cff5158030d36bdec4414cb825fbaa5327/multidict-6.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0c3f390dc53279cbc8ba976e5f8035eab997829066756d811616b652b00a23a3", size = 125348 }, + { url = "https://files.pythonhosted.org/packages/67/2c/e6df05c77e0e433c214ec1d21ddd203d9a4770a1f2866a8ca40a545869a0/multidict-6.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:59bfeae4b25ec05b34f1956eaa1cb38032282cd4dfabc5056d0a1ec4d696d3aa", size = 119673 }, + { url = "https://files.pythonhosted.org/packages/c5/cd/bc8608fff06239c9fb333f9db7743a1b2eafe98c2666c9a196e867a3a0a4/multidict-6.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b2f59caeaf7632cc633b5cf6fc449372b83bbdf0da4ae04d5be36118e46cc0aa", size = 129927 }, + { url = "https://files.pythonhosted.org/packages/44/8e/281b69b7bc84fc963a44dc6e0bbcc7150e517b91df368a27834299a526ac/multidict-6.1.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:37bb93b2178e02b7b618893990941900fd25b6b9ac0fa49931a40aecdf083fe4", size = 128711 }, + { url = "https://files.pythonhosted.org/packages/12/a4/63e7cd38ed29dd9f1881d5119f272c898ca92536cdb53ffe0843197f6c85/multidict-6.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4e9f48f58c2c523d5a06faea47866cd35b32655c46b443f163d08c6d0ddb17d6", size = 125519 }, + { url = "https://files.pythonhosted.org/packages/38/e0/4f5855037a72cd8a7a2f60a3952d9aa45feedb37ae7831642102604e8a37/multidict-6.1.0-cp313-cp313-win32.whl", hash = "sha256:3a37ffb35399029b45c6cc33640a92bef403c9fd388acce75cdc88f58bd19a81", size = 26426 }, + { url = "https://files.pythonhosted.org/packages/7e/a5/17ee3a4db1e310b7405f5d25834460073a8ccd86198ce044dfaf69eac073/multidict-6.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:e9aa71e15d9d9beaad2c6b9319edcdc0a49a43ef5c0a4c8265ca9ee7d6c67774", size = 28531 }, + { url = "https://files.pythonhosted.org/packages/99/b7/b9e70fde2c0f0c9af4cc5277782a89b66d35948ea3369ec9f598358c3ac5/multidict-6.1.0-py3-none-any.whl", hash = "sha256:48e171e52d1c4d33888e529b999e5900356b9ae588c2f09a52dcefb158b27506", size = 10051 }, +] + +[[package]] +name = "multiprocess" +version = "0.70.16" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "dill" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b5/ae/04f39c5d0d0def03247c2893d6f2b83c136bf3320a2154d7b8858f2ba72d/multiprocess-0.70.16.tar.gz", hash = "sha256:161af703d4652a0e1410be6abccecde4a7ddffd19341be0a7011b94aeb171ac1", size = 1772603 } +wheels = [ + { 
url = "https://files.pythonhosted.org/packages/ef/76/6e712a2623d146d314f17598df5de7224c85c0060ef63fd95cc15a25b3fa/multiprocess-0.70.16-pp310-pypy310_pp73-macosx_10_13_x86_64.whl", hash = "sha256:476887be10e2f59ff183c006af746cb6f1fd0eadcfd4ef49e605cbe2659920ee", size = 134980 }, + { url = "https://files.pythonhosted.org/packages/0f/ab/1e6e8009e380e22254ff539ebe117861e5bdb3bff1fc977920972237c6c7/multiprocess-0.70.16-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d951bed82c8f73929ac82c61f01a7b5ce8f3e5ef40f5b52553b4f547ce2b08ec", size = 134982 }, + { url = "https://files.pythonhosted.org/packages/bc/f7/7ec7fddc92e50714ea3745631f79bd9c96424cb2702632521028e57d3a36/multiprocess-0.70.16-py310-none-any.whl", hash = "sha256:c4a9944c67bd49f823687463660a2d6daae94c289adff97e0f9d696ba6371d02", size = 134824 }, + { url = "https://files.pythonhosted.org/packages/50/15/b56e50e8debaf439f44befec5b2af11db85f6e0f344c3113ae0be0593a91/multiprocess-0.70.16-py311-none-any.whl", hash = "sha256:af4cabb0dac72abfb1e794fa7855c325fd2b55a10a44628a3c1ad3311c04127a", size = 143519 }, + { url = "https://files.pythonhosted.org/packages/0a/7d/a988f258104dcd2ccf1ed40fdc97e26c4ac351eeaf81d76e266c52d84e2f/multiprocess-0.70.16-py312-none-any.whl", hash = "sha256:fc0544c531920dde3b00c29863377f87e1632601092ea2daca74e4beb40faa2e", size = 146741 }, +] + +[[package]] +name = "mypy" +version = "1.11.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mypy-extensions" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5c/86/5d7cbc4974fd564550b80fbb8103c05501ea11aa7835edf3351d90095896/mypy-1.11.2.tar.gz", hash = "sha256:7f9993ad3e0ffdc95c2a14b66dee63729f021968bff8ad911867579c65d13a79", size = 3078806 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/cd/815368cd83c3a31873e5e55b317551500b12f2d1d7549720632f32630333/mypy-1.11.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d42a6dd818ffce7be66cce644f1dff482f1d97c53ca70908dff0b9ddc120b77a", size = 10939401 }, + { url = "https://files.pythonhosted.org/packages/f1/27/e18c93a195d2fad75eb96e1f1cbc431842c332e8eba2e2b77eaf7313c6b7/mypy-1.11.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:801780c56d1cdb896eacd5619a83e427ce436d86a3bdf9112527f24a66618fef", size = 10111697 }, + { url = "https://files.pythonhosted.org/packages/dc/08/cdc1fc6d0d5a67d354741344cc4aa7d53f7128902ebcbe699ddd4f15a61c/mypy-1.11.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41ea707d036a5307ac674ea172875f40c9d55c5394f888b168033177fce47383", size = 12500508 }, + { url = "https://files.pythonhosted.org/packages/64/12/aad3af008c92c2d5d0720ea3b6674ba94a98cdb86888d389acdb5f218c30/mypy-1.11.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6e658bd2d20565ea86da7d91331b0eed6d2eee22dc031579e6297f3e12c758c8", size = 13020712 }, + { url = "https://files.pythonhosted.org/packages/03/e6/a7d97cc124a565be5e9b7d5c2a6ebf082379ffba99646e4863ed5bbcb3c3/mypy-1.11.2-cp310-cp310-win_amd64.whl", hash = "sha256:478db5f5036817fe45adb7332d927daa62417159d49783041338921dcf646fc7", size = 9567319 }, + { url = "https://files.pythonhosted.org/packages/e2/aa/cc56fb53ebe14c64f1fe91d32d838d6f4db948b9494e200d2f61b820b85d/mypy-1.11.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:75746e06d5fa1e91bfd5432448d00d34593b52e7e91a187d981d08d1f33d4385", size = 10859630 }, + { url = 
"https://files.pythonhosted.org/packages/04/c8/b19a760fab491c22c51975cf74e3d253b8c8ce2be7afaa2490fbf95a8c59/mypy-1.11.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a976775ab2256aadc6add633d44f100a2517d2388906ec4f13231fafbb0eccca", size = 10037973 }, + { url = "https://files.pythonhosted.org/packages/88/57/7e7e39f2619c8f74a22efb9a4c4eff32b09d3798335625a124436d121d89/mypy-1.11.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cd953f221ac1379050a8a646585a29574488974f79d8082cedef62744f0a0104", size = 12416659 }, + { url = "https://files.pythonhosted.org/packages/fc/a6/37f7544666b63a27e46c48f49caeee388bf3ce95f9c570eb5cfba5234405/mypy-1.11.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:57555a7715c0a34421013144a33d280e73c08df70f3a18a552938587ce9274f4", size = 12897010 }, + { url = "https://files.pythonhosted.org/packages/84/8b/459a513badc4d34acb31c736a0101c22d2bd0697b969796ad93294165cfb/mypy-1.11.2-cp311-cp311-win_amd64.whl", hash = "sha256:36383a4fcbad95f2657642a07ba22ff797de26277158f1cc7bd234821468b1b6", size = 9562873 }, + { url = "https://files.pythonhosted.org/packages/35/3a/ed7b12ecc3f6db2f664ccf85cb2e004d3e90bec928e9d7be6aa2f16b7cdf/mypy-1.11.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e8960dbbbf36906c5c0b7f4fbf2f0c7ffb20f4898e6a879fcf56a41a08b0d318", size = 10990335 }, + { url = "https://files.pythonhosted.org/packages/04/e4/1a9051e2ef10296d206519f1df13d2cc896aea39e8683302f89bf5792a59/mypy-1.11.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:06d26c277962f3fb50e13044674aa10553981ae514288cb7d0a738f495550b36", size = 10007119 }, + { url = "https://files.pythonhosted.org/packages/f3/3c/350a9da895f8a7e87ade0028b962be0252d152e0c2fbaafa6f0658b4d0d4/mypy-1.11.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6e7184632d89d677973a14d00ae4d03214c8bc301ceefcdaf5c474866814c987", size = 12506856 }, + { url = "https://files.pythonhosted.org/packages/b6/49/ee5adf6a49ff13f4202d949544d3d08abb0ea1f3e7f2a6d5b4c10ba0360a/mypy-1.11.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3a66169b92452f72117e2da3a576087025449018afc2d8e9bfe5ffab865709ca", size = 12952066 }, + { url = "https://files.pythonhosted.org/packages/27/c0/b19d709a42b24004d720db37446a42abadf844d5c46a2c442e2a074d70d9/mypy-1.11.2-cp312-cp312-win_amd64.whl", hash = "sha256:969ea3ef09617aff826885a22ece0ddef69d95852cdad2f60c8bb06bf1f71f70", size = 9664000 }, + { url = "https://files.pythonhosted.org/packages/42/3a/bdf730640ac523229dd6578e8a581795720a9321399de494374afc437ec5/mypy-1.11.2-py3-none-any.whl", hash = "sha256:b499bc07dbdcd3de92b0a8b29fdf592c111276f6a12fe29c30f6c417dd546d12", size = 2619625 }, +] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/98/a4/1ab47638b92648243faf97a5aeb6ea83059cc3624972ab6b8d2316078d3f/mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782", size = 4433 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/e2/5d3f6ada4297caebe1a2add3b126fe800c96f56dbe5d1988a2cbe0b267aa/mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d", size = 4695 }, +] + +[[package]] +name = "nanoid" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/b7/9d/0250bf5935d88e214df469d35eccc0f6ff7e9db046fc8a9aeb4b2a192775/nanoid-2.0.0.tar.gz", hash = "sha256:5a80cad5e9c6e9ae3a41fa2fb34ae189f7cb420b2a5d8f82bd9d23466e4efa68", size = 3290 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2e/0d/8630f13998638dc01e187fadd2e5c6d42d127d08aeb4943d231664d6e539/nanoid-2.0.0-py3-none-any.whl", hash = "sha256:90aefa650e328cffb0893bbd4c236cfd44c48bc1f2d0b525ecc53c3187b653bb", size = 5844 }, +] + +[[package]] +name = "nest-asyncio" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/83/f8/51569ac65d696c8ecbee95938f89d4abf00f47d58d48f6fbabfe8f0baefe/nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe", size = 7418 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/c4/c2971a3ba4c6103a3d10c4b0f24f461ddc027f0f09763220cf35ca1401b3/nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c", size = 5195 }, +] + +[[package]] +name = "networkx" +version = "3.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/04/e6/b164f94c869d6b2c605b5128b7b0cfe912795a87fc90e78533920001f3ec/networkx-3.3.tar.gz", hash = "sha256:0c127d8b2f4865f59ae9cb8aafcd60b5c70f3241ebd66f7defad7c4ab90126c9", size = 2126579 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/38/e9/5f72929373e1a0e8d142a130f3f97e6ff920070f87f91c4e13e40e0fba5a/networkx-3.3-py3-none-any.whl", hash = "sha256:28575580c6ebdaf4505b22c6256a2b9de86b316dc63ba9e93abde3d78dfdbcf2", size = 1702396 }, +] + +[[package]] +name = "nodeenv" +version = "1.9.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314 }, +] + +[[package]] +name = "numpy" +version = "1.26.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/65/6e/09db70a523a96d25e115e71cc56a6f9031e7b8cd166c1ac8438307c14058/numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010", size = 15786129 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a7/94/ace0fdea5241a27d13543ee117cbc65868e82213fb31a8eb7fe9ff23f313/numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0", size = 20631468 }, + { url = "https://files.pythonhosted.org/packages/20/f7/b24208eba89f9d1b58c1668bc6c8c4fd472b20c45573cb767f59d49fb0f6/numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a", size = 13966411 }, + { url = "https://files.pythonhosted.org/packages/fc/a5/4beee6488160798683eed5bdb7eead455892c3b4e1f78d79d8d3f3b084ac/numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4", size = 14219016 }, + { url = 
"https://files.pythonhosted.org/packages/4b/d7/ecf66c1cd12dc28b4040b15ab4d17b773b87fa9d29ca16125de01adb36cd/numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f", size = 18240889 }, + { url = "https://files.pythonhosted.org/packages/24/03/6f229fe3187546435c4f6f89f6d26c129d4f5bed40552899fcf1f0bf9e50/numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a", size = 13876746 }, + { url = "https://files.pythonhosted.org/packages/39/fe/39ada9b094f01f5a35486577c848fe274e374bbf8d8f472e1423a0bbd26d/numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2", size = 18078620 }, + { url = "https://files.pythonhosted.org/packages/d5/ef/6ad11d51197aad206a9ad2286dc1aac6a378059e06e8cf22cd08ed4f20dc/numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07", size = 5972659 }, + { url = "https://files.pythonhosted.org/packages/19/77/538f202862b9183f54108557bfda67e17603fc560c384559e769321c9d92/numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5", size = 15808905 }, + { url = "https://files.pythonhosted.org/packages/11/57/baae43d14fe163fa0e4c47f307b6b2511ab8d7d30177c491960504252053/numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71", size = 20630554 }, + { url = "https://files.pythonhosted.org/packages/1a/2e/151484f49fd03944c4a3ad9c418ed193cfd02724e138ac8a9505d056c582/numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef", size = 13997127 }, + { url = "https://files.pythonhosted.org/packages/79/ae/7e5b85136806f9dadf4878bf73cf223fe5c2636818ba3ab1c585d0403164/numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e", size = 14222994 }, + { url = "https://files.pythonhosted.org/packages/3a/d0/edc009c27b406c4f9cbc79274d6e46d634d139075492ad055e3d68445925/numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5", size = 18252005 }, + { url = "https://files.pythonhosted.org/packages/09/bf/2b1aaf8f525f2923ff6cfcf134ae5e750e279ac65ebf386c75a0cf6da06a/numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a", size = 13885297 }, + { url = "https://files.pythonhosted.org/packages/df/a0/4e0f14d847cfc2a633a1c8621d00724f3206cfeddeb66d35698c4e2cf3d2/numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a", size = 18093567 }, + { url = "https://files.pythonhosted.org/packages/d2/b7/a734c733286e10a7f1a8ad1ae8c90f2d33bf604a96548e0a4a3a6739b468/numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20", size = 5968812 }, + { url = "https://files.pythonhosted.org/packages/3f/6b/5610004206cf7f8e7ad91c5a85a8c71b2f2f8051a0c0c4d5916b76d6cbb2/numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2", size = 15811913 }, + { url = 
"https://files.pythonhosted.org/packages/95/12/8f2020a8e8b8383ac0177dc9570aad031a3beb12e38847f7129bacd96228/numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218", size = 20335901 }, + { url = "https://files.pythonhosted.org/packages/75/5b/ca6c8bd14007e5ca171c7c03102d17b4f4e0ceb53957e8c44343a9546dcc/numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b", size = 13685868 }, + { url = "https://files.pythonhosted.org/packages/79/f8/97f10e6755e2a7d027ca783f63044d5b1bc1ae7acb12afe6a9b4286eac17/numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b", size = 13925109 }, + { url = "https://files.pythonhosted.org/packages/0f/50/de23fde84e45f5c4fda2488c759b69990fd4512387a8632860f3ac9cd225/numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed", size = 17950613 }, + { url = "https://files.pythonhosted.org/packages/4c/0c/9c603826b6465e82591e05ca230dfc13376da512b25ccd0894709b054ed0/numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a", size = 13572172 }, + { url = "https://files.pythonhosted.org/packages/76/8c/2ba3902e1a0fc1c74962ea9bb33a534bb05984ad7ff9515bf8d07527cadd/numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0", size = 17786643 }, + { url = "https://files.pythonhosted.org/packages/28/4a/46d9e65106879492374999e76eb85f87b15328e06bd1550668f79f7b18c6/numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110", size = 5677803 }, + { url = "https://files.pythonhosted.org/packages/16/2e/86f24451c2d530c88daf997cb8d6ac622c1d40d19f5a031ed68a4b73a374/numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818", size = 15517754 }, +] + +[[package]] +name = "nvidia-cublas-cu12" +version = "12.1.3.1" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/37/6d/121efd7382d5b0284239f4ab1fc1590d86d34ed4a4a2fdb13b30ca8e5740/nvidia_cublas_cu12-12.1.3.1-py3-none-manylinux1_x86_64.whl", hash = "sha256:ee53ccca76a6fc08fb9701aa95b6ceb242cdaab118c3bb152af4e579af792728", size = 410594774 }, +] + +[[package]] +name = "nvidia-cuda-cupti-cu12" +version = "12.1.105" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/00/6b218edd739ecfc60524e585ba8e6b00554dd908de2c9c66c1af3e44e18d/nvidia_cuda_cupti_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:e54fde3983165c624cb79254ae9818a456eb6e87a7fd4d56a2352c24ee542d7e", size = 14109015 }, +] + +[[package]] +name = "nvidia-cuda-nvrtc-cu12" +version = "12.1.105" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b6/9f/c64c03f49d6fbc56196664d05dba14e3a561038a81a638eeb47f4d4cfd48/nvidia_cuda_nvrtc_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:339b385f50c309763ca65456ec75e17bbefcbbf2893f462cb8b90584cd27a1c2", size = 23671734 }, +] + +[[package]] +name = "nvidia-cuda-runtime-cu12" +version = "12.1.105" +source = { registry = "https://pypi.org/simple" } 
+wheels = [ + { url = "https://files.pythonhosted.org/packages/eb/d5/c68b1d2cdfcc59e72e8a5949a37ddb22ae6cade80cd4a57a84d4c8b55472/nvidia_cuda_runtime_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:6e258468ddf5796e25f1dc591a31029fa317d97a0a94ed93468fc86301d61e40", size = 823596 }, +] + +[[package]] +name = "nvidia-cudnn-cu12" +version = "9.1.0.70" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "nvidia-cublas-cu12" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/9f/fd/713452cd72343f682b1c7b9321e23829f00b842ceaedcda96e742ea0b0b3/nvidia_cudnn_cu12-9.1.0.70-py3-none-manylinux2014_x86_64.whl", hash = "sha256:165764f44ef8c61fcdfdfdbe769d687e06374059fbb388b6c89ecb0e28793a6f", size = 664752741 }, +] + +[[package]] +name = "nvidia-cufft-cu12" +version = "11.0.2.54" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/86/94/eb540db023ce1d162e7bea9f8f5aa781d57c65aed513c33ee9a5123ead4d/nvidia_cufft_cu12-11.0.2.54-py3-none-manylinux1_x86_64.whl", hash = "sha256:794e3948a1aa71fd817c3775866943936774d1c14e7628c74f6f7417224cdf56", size = 121635161 }, +] + +[[package]] +name = "nvidia-curand-cu12" +version = "10.3.2.106" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/44/31/4890b1c9abc496303412947fc7dcea3d14861720642b49e8ceed89636705/nvidia_curand_cu12-10.3.2.106-py3-none-manylinux1_x86_64.whl", hash = "sha256:9d264c5036dde4e64f1de8c50ae753237c12e0b1348738169cd0f8a536c0e1e0", size = 56467784 }, +] + +[[package]] +name = "nvidia-cusolver-cu12" +version = "11.4.5.107" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "nvidia-cublas-cu12" }, + { name = "nvidia-cusparse-cu12" }, + { name = "nvidia-nvjitlink-cu12" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/bc/1d/8de1e5c67099015c834315e333911273a8c6aaba78923dd1d1e25fc5f217/nvidia_cusolver_cu12-11.4.5.107-py3-none-manylinux1_x86_64.whl", hash = "sha256:8a7ec542f0412294b15072fa7dab71d31334014a69f953004ea7a118206fe0dd", size = 124161928 }, +] + +[[package]] +name = "nvidia-cusparse-cu12" +version = "12.1.0.106" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "nvidia-nvjitlink-cu12" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/65/5b/cfaeebf25cd9fdec14338ccb16f6b2c4c7fa9163aefcf057d86b9cc248bb/nvidia_cusparse_cu12-12.1.0.106-py3-none-manylinux1_x86_64.whl", hash = "sha256:f3b50f42cf363f86ab21f720998517a659a48131e8d538dc02f8768237bd884c", size = 195958278 }, +] + +[[package]] +name = "nvidia-nccl-cu12" +version = "2.20.5" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c1/bb/d09dda47c881f9ff504afd6f9ca4f502ded6d8fc2f572cacc5e39da91c28/nvidia_nccl_cu12-2.20.5-py3-none-manylinux2014_aarch64.whl", hash = "sha256:1fc150d5c3250b170b29410ba682384b14581db722b2531b0d8d33c595f33d01", size = 176238458 }, + { url = "https://files.pythonhosted.org/packages/4b/2a/0a131f572aa09f741c30ccd45a8e56316e8be8dfc7bc19bf0ab7cfef7b19/nvidia_nccl_cu12-2.20.5-py3-none-manylinux2014_x86_64.whl", hash = "sha256:057f6bf9685f75215d0c53bf3ac4a10b3e6578351de307abad9e18a99182af56", size = 176249402 }, +] + +[[package]] +name = "nvidia-nvjitlink-cu12" +version = "12.6.68" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/58/8c/69c9e39cd6bfa813852a94e9bd3c075045e2707d163e9dc2326c82d2c330/nvidia_nvjitlink_cu12-12.6.68-py3-none-manylinux2014_aarch64.whl", hash = "sha256:b3fd0779845f68b92063ab1393abab1ed0a23412fc520df79a8190d098b5cd6b", size = 19253287 }, + { url = "https://files.pythonhosted.org/packages/a8/48/a9775d377cb95585fb188b469387f58ba6738e268de22eae2ad4cedb2c41/nvidia_nvjitlink_cu12-12.6.68-py3-none-manylinux2014_x86_64.whl", hash = "sha256:125a6c2a44e96386dda634e13d944e60b07a0402d391a070e8fb4104b34ea1ab", size = 19725597 }, +] + +[[package]] +name = "nvidia-nvtx-cu12" +version = "12.1.105" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/da/d3/8057f0587683ed2fcd4dbfbdfdfa807b9160b809976099d36b8f60d08f03/nvidia_nvtx_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:dc21cf308ca5691e7c04d962e213f8a4aa9bbfa23d95412f452254c2caeb09e5", size = 99138 }, +] + +[[package]] +name = "oauthlib" +version = "3.2.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6d/fa/fbf4001037904031639e6bfbfc02badfc7e12f137a8afa254df6c4c8a670/oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918", size = 177352 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/80/cab10959dc1faead58dc8384a781dfbf93cb4d33d50988f7a69f1b7c9bbe/oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca", size = 151688 }, +] + +[[package]] +name = "onnxruntime" +version = "1.19.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "coloredlogs" }, + { name = "flatbuffers" }, + { name = "numpy" }, + { name = "packaging" }, + { name = "protobuf" }, + { name = "sympy" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/39/18/272d3d7406909141d3c9943796e3e97cafa53f4342d9231c0cfd8cb05702/onnxruntime-1.19.2-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:84fa57369c06cadd3c2a538ae2a26d76d583e7c34bdecd5769d71ca5c0fc750e", size = 16776408 }, + { url = "https://files.pythonhosted.org/packages/d8/d3/eb93f4ae511cfc725d0c69e07008800f8ac018de19ea1e497b306f174ccc/onnxruntime-1.19.2-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bdc471a66df0c1cdef774accef69e9f2ca168c851ab5e4f2f3341512c7ef4666", size = 11491779 }, + { url = "https://files.pythonhosted.org/packages/ca/4b/ce5958074abe4b6e8d1da9c10e443e01a681558a9ec17e5cc7619438e094/onnxruntime-1.19.2-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e3a4ce906105d99ebbe817f536d50a91ed8a4d1592553f49b3c23c4be2560ae6", size = 13170428 }, + { url = "https://files.pythonhosted.org/packages/ce/0f/6df82dfe02467d12adbaa05c2bd17519c29c7df531ed600231f0c741ad22/onnxruntime-1.19.2-cp310-cp310-win32.whl", hash = "sha256:4b3d723cc154c8ddeb9f6d0a8c0d6243774c6b5930847cc83170bfe4678fafb3", size = 9591305 }, + { url = "https://files.pythonhosted.org/packages/3c/d8/68b63dc86b502169d017a86fe8bc718f4b0055ef1f6895bfaddd04f2eead/onnxruntime-1.19.2-cp310-cp310-win_amd64.whl", hash = "sha256:17ed7382d2c58d4b7354fb2b301ff30b9bf308a1c7eac9546449cd122d21cae5", size = 11084902 }, + { url = "https://files.pythonhosted.org/packages/f0/ff/77bee5df55f034ee81d2e1bc58b2b8511b9c54f06ce6566cb562c5d95aa5/onnxruntime-1.19.2-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:d863e8acdc7232d705d49e41087e10b274c42f09e259016a46f32c34e06dc4fd", size = 
16779187 }, + { url = "https://files.pythonhosted.org/packages/f3/78/e29f5fb76e0f6524f3520e8e5b9d53282784b45d14068c5112db9f712b0a/onnxruntime-1.19.2-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c1dfe4f660a71b31caa81fc298a25f9612815215a47b286236e61d540350d7b6", size = 11496005 }, + { url = "https://files.pythonhosted.org/packages/60/ce/be4152da5c1030ab5a159a4a792ed9abad6ba498d79ef0aeba593ff7b5bf/onnxruntime-1.19.2-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a36511dc07c5c964b916697e42e366fa43c48cdb3d3503578d78cef30417cb84", size = 13167809 }, + { url = "https://files.pythonhosted.org/packages/e1/00/9740a074eb0e0a21ff13a2c4f32aecc5b21110b2c9b9177d8ac132b66e2d/onnxruntime-1.19.2-cp311-cp311-win32.whl", hash = "sha256:50cbb8dc69d6befad4746a69760e5b00cc3ff0a59c6c3fb27f8afa20e2cab7e7", size = 9591445 }, + { url = "https://files.pythonhosted.org/packages/1e/f5/9d995a685f97508b3254f17015b4a78641b0625e79480a7aed7a7a105d7c/onnxruntime-1.19.2-cp311-cp311-win_amd64.whl", hash = "sha256:1c3e5d415b78337fa0b1b75291e9ea9fb2a4c1f148eb5811e7212fed02cfffa8", size = 11085695 }, + { url = "https://files.pythonhosted.org/packages/f2/a5/2a02687a88fc8a2507bef65876c90e96b9f8de5ba1f810acbf67c140fc67/onnxruntime-1.19.2-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:68e7051bef9cfefcbb858d2d2646536829894d72a4130c24019219442b1dd2ed", size = 16790434 }, + { url = "https://files.pythonhosted.org/packages/47/64/da42254ec14452cad2cdd4cf407094841c0a378c0d08944e9a36172197e9/onnxruntime-1.19.2-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d2d366fbcc205ce68a8a3bde2185fd15c604d9645888703785b61ef174265168", size = 11486028 }, + { url = "https://files.pythonhosted.org/packages/b2/92/3574f6836f33b1b25f272293e72538c38451b12c2d9aa08630bb6bc0f057/onnxruntime-1.19.2-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:477b93df4db467e9cbf34051662a4b27c18e131fa1836e05974eae0d6e4cf29b", size = 13175054 }, + { url = "https://files.pythonhosted.org/packages/ff/c9/8c37e413a830cac7f7dc094fffbd0c998c8bcb66a6f0b0a3201a49bc742b/onnxruntime-1.19.2-cp312-cp312-win32.whl", hash = "sha256:9a174073dc5608fad05f7cf7f320b52e8035e73d80b0a23c80f840e5a97c0147", size = 9592681 }, + { url = "https://files.pythonhosted.org/packages/44/c0/59768846533786a82cafb38d8d2f900ad666bc91f0ae634774d286fa3c47/onnxruntime-1.19.2-cp312-cp312-win_amd64.whl", hash = "sha256:190103273ea4507638ffc31d66a980594b237874b65379e273125150eb044857", size = 11086411 }, +] + +[[package]] +name = "openai" +version = "1.46.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "distro" }, + { name = "httpx" }, + { name = "jiter" }, + { name = "pydantic" }, + { name = "sniffio" }, + { name = "tqdm" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fc/6b/470673304605c4aa4ee899dbab6ea12f6a0844e3ee7970ce7ecae0c89aea/openai-1.46.1.tar.gz", hash = "sha256:e5cf7f268bf516de23686d496c9dae7f0dcdcd0e87af4d288deeab8329fcbbaf", size = 297547 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/da/14/aabc614d8c446cb32043ed397dd8536a8f4c43a92c6609c05a958be9b960/openai-1.46.1-py3-none-any.whl", hash = "sha256:7517f07117cf66012bbc55c49fd6b983eaac0f3d2a09c90cba1140d4455e4290", size = 375150 }, +] + +[[package]] +name = "opentelemetry-api" +version = "1.25.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "deprecated" }, + { name = 
"importlib-metadata" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/df/0d/10357006dc10fc65f7c7b46c18232e466e355f9e606ac461cfc7193b4cbe/opentelemetry_api-1.25.0.tar.gz", hash = "sha256:77c4985f62f2614e42ce77ee4c9da5fa5f0bc1e1821085e9a47533a9323ae869", size = 60383 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/b2/4bc5e52c9a23df0ac17dbb23923e609a8269cd67000a712b4f5bcfae1490/opentelemetry_api-1.25.0-py3-none-any.whl", hash = "sha256:757fa1aa020a0f8fa139f8959e53dec2051cc26b832e76fa839a6d76ecefd737", size = 59910 }, +] + +[[package]] +name = "opentelemetry-exporter-otlp-proto-common" +version = "1.25.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-proto" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/37/a7/85ffaaacd712e4634fa1c56cbf79a02cf90b8a178fe1eee2cabfb0b7f44d/opentelemetry_exporter_otlp_proto_common-1.25.0.tar.gz", hash = "sha256:c93f4e30da4eee02bacd1e004eb82ce4da143a2f8e15b987a9f603e0a85407d3", size = 17152 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/05/02/74ac6619eec78c82a923324f916d3eccd2f2254cf4270b669e96b76bf717/opentelemetry_exporter_otlp_proto_common-1.25.0-py3-none-any.whl", hash = "sha256:15637b7d580c2675f70246563363775b4e6de947871e01d0f4e3881d1848d693", size = 17762 }, +] + +[[package]] +name = "opentelemetry-exporter-otlp-proto-grpc" +version = "1.25.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "deprecated" }, + { name = "googleapis-common-protos" }, + { name = "grpcio" }, + { name = "opentelemetry-api" }, + { name = "opentelemetry-exporter-otlp-proto-common" }, + { name = "opentelemetry-proto" }, + { name = "opentelemetry-sdk" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6f/76/94c72787f82130d1a24be64fad34eeaf7fa85cfe9d1740d096f577c5e57d/opentelemetry_exporter_otlp_proto_grpc-1.25.0.tar.gz", hash = "sha256:c0b1661415acec5af87625587efa1ccab68b873745ca0ee96b69bb1042087eac", size = 25278 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b0/f3/e24294e7b3f6d2b9aafc97b9b82e214dfe9ffa152dfecbd897e7ffbf6844/opentelemetry_exporter_otlp_proto_grpc-1.25.0-py3-none-any.whl", hash = "sha256:3131028f0c0a155a64c430ca600fd658e8e37043cb13209f0109db5c1a3e4eb4", size = 18223 }, +] + +[[package]] +name = "opentelemetry-exporter-otlp-proto-http" +version = "1.25.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "deprecated" }, + { name = "googleapis-common-protos" }, + { name = "opentelemetry-api" }, + { name = "opentelemetry-exporter-otlp-proto-common" }, + { name = "opentelemetry-proto" }, + { name = "opentelemetry-sdk" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/72/d9/1c3c518853c27d323a46813d3e99d601959ca2c6963d5217fe2110f0d579/opentelemetry_exporter_otlp_proto_http-1.25.0.tar.gz", hash = "sha256:9f8723859e37c75183ea7afa73a3542f01d0fd274a5b97487ea24cb683d7d684", size = 14048 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1d/b9/a47734f7c5a45619d8c64c227f119092b4679b2c49d37116fda7c0fc4573/opentelemetry_exporter_otlp_proto_http-1.25.0-py3-none-any.whl", hash = "sha256:2eca686ee11b27acd28198b3ea5e5863a53d1266b91cda47c839d95d5e0541a6", size = 16790 }, +] + +[[package]] +name = "opentelemetry-exporter-prometheus" +version = "0.46b0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-api" }, + { name = "opentelemetry-sdk" }, + { name = "prometheus-client" }, +] +sdist = 
{ url = "https://files.pythonhosted.org/packages/5c/c6/d99f666dc1b90ab63c05658a68227da2b424dcca645e18ceee8340be0c59/opentelemetry_exporter_prometheus-0.46b0.tar.gz", hash = "sha256:28cc6456a5d5bf49c34be2f1d22bbc761c36af9b32d909ea5b4c13fe6deac47b", size = 14790 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bd/78/c5be5da62118edfe5425ae6b7b439c660fa11094dc2adfaaa935a22a3449/opentelemetry_exporter_prometheus-0.46b0-py3-none-any.whl", hash = "sha256:caefdeea5c4d52b72479710d22cc4c469d42fa1dba2f4a2e46ae0ebeaf51cd96", size = 12826 }, +] + +[[package]] +name = "opentelemetry-instrumentation" +version = "0.46b0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-api" }, + { name = "setuptools" }, + { name = "wrapt" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0f/20/0a5d980843e048e9516443a91c63a559b40e5d50a730e73e72a5bde727fd/opentelemetry_instrumentation-0.46b0.tar.gz", hash = "sha256:974e0888fb2a1e01c38fbacc9483d024bb1132aad92d6d24e2e5543887a7adda", size = 24048 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/10/e5/d6fff0a6f6fbddf03c7fb48ab47925581c4f1a8268f9ad98e5ea4a8b90a5/opentelemetry_instrumentation-0.46b0-py3-none-any.whl", hash = "sha256:89cd721b9c18c014ca848ccd11181e6b3fd3f6c7669e35d59c48dc527408c18b", size = 29108 }, +] + +[[package]] +name = "opentelemetry-instrumentation-asgi" +version = "0.46b0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "asgiref" }, + { name = "opentelemetry-api" }, + { name = "opentelemetry-instrumentation" }, + { name = "opentelemetry-semantic-conventions" }, + { name = "opentelemetry-util-http" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e4/10/24bedf812bc45f5e8a4a9d08e6c45f5b929ea11f93da1ca35dd5b2dff4cd/opentelemetry_instrumentation_asgi-0.46b0.tar.gz", hash = "sha256:02559f30cf4b7e2a737ab17eb52aa0779bcf4cc06573064f3e2cb4dcc7d3040a", size = 19785 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/47/8d/8955c7fbd949e3ea1c186c7422047f675bf4f7c8976afd2fdf713183318e/opentelemetry_instrumentation_asgi-0.46b0-py3-none-any.whl", hash = "sha256:f13c55c852689573057837a9500aeeffc010c4ba59933c322e8f866573374759", size = 14289 }, +] + +[[package]] +name = "opentelemetry-instrumentation-fastapi" +version = "0.46b0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-api" }, + { name = "opentelemetry-instrumentation" }, + { name = "opentelemetry-instrumentation-asgi" }, + { name = "opentelemetry-semantic-conventions" }, + { name = "opentelemetry-util-http" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/95/08/37b14c610bb2ec8f7c381e15a0a4e4b8da3c9ecdbaa0b868085caba94d4f/opentelemetry_instrumentation_fastapi-0.46b0.tar.gz", hash = "sha256:928a883a36fc89f9702f15edce43d1a7104da93d740281e32d50ffd03dbb4365", size = 13706 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b8/96/905d575947342c4fd6781a28f6d7bc7f4f6670d45e3b1a85f8a06955c9ae/opentelemetry_instrumentation_fastapi-0.46b0-py3-none-any.whl", hash = "sha256:e0f5d150c6c36833dd011f0e6ef5ede6d7406c1aed0c7c98b2d3b38a018d1b33", size = 11342 }, +] + +[[package]] +name = "opentelemetry-proto" +version = "1.25.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c9/3c/28c9ce40eb8ab287471af81659089ca98ef4f7ce289669e23b19c29f24a8/opentelemetry_proto-1.25.0.tar.gz", hash = 
"sha256:35b6ef9dc4a9f7853ecc5006738ad40443701e52c26099e197895cbda8b815a3", size = 35062 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/64/ae/d6b5f11ecbffafe8b6d54130fed0cc78aad3711e00074d63a7359d6dcf3b/opentelemetry_proto-1.25.0-py3-none-any.whl", hash = "sha256:f07e3341c78d835d9b86665903b199893befa5e98866f63d22b00d0b7ca4972f", size = 52450 }, +] + +[[package]] +name = "opentelemetry-sdk" +version = "1.25.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-api" }, + { name = "opentelemetry-semantic-conventions" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/05/3c/77076b77f1d73141adc119f62370ec9456ef314ba0b4e7072e3775c36ef7/opentelemetry_sdk-1.25.0.tar.gz", hash = "sha256:ce7fc319c57707ef5bf8b74fb9f8ebdb8bfafbe11898410e0d2a761d08a98ec7", size = 141042 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ae/b2/729a959a8aa032bce246c791f977161099ab60fb0188408ccec1bf283b00/opentelemetry_sdk-1.25.0-py3-none-any.whl", hash = "sha256:d97ff7ec4b351692e9d5a15af570c693b8715ad78b8aafbec5c7100fe966b4c9", size = 107028 }, +] + +[[package]] +name = "opentelemetry-semantic-conventions" +version = "0.46b0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-api" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/4e/ea/a4a5277247b3d2ed2e23a58b0d509c2eafa4ebb56038ba5b23c0f9ea6242/opentelemetry_semantic_conventions-0.46b0.tar.gz", hash = "sha256:fbc982ecbb6a6e90869b15c1673be90bd18c8a56ff1cffc0864e38e2edffaefa", size = 80198 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fd/41/28dae1ec1fe0151373f06bd06d9170ca14b52d5b3a6c2dc55f85bc219619/opentelemetry_semantic_conventions-0.46b0-py3-none-any.whl", hash = "sha256:6daef4ef9fa51d51855d9f8e0ccd3a1bd59e0e545abe99ac6203804e36ab3e07", size = 130549 }, +] + +[[package]] +name = "opentelemetry-util-http" +version = "0.46b0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f0/91/45bf243850463b2c83000ca129442255eaef7c446bd0f59a2ab54b15abff/opentelemetry_util_http-0.46b0.tar.gz", hash = "sha256:03b6e222642f9c7eae58d9132343e045b50aca9761fcb53709bd2b663571fdf6", size = 7387 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a2/7f/26d3d8880ea79adde8bb7bc306b25ca5134d6f6c3006ba464716405b4729/opentelemetry_util_http-0.46b0-py3-none-any.whl", hash = "sha256:8dc1949ce63caef08db84ae977fdc1848fe6dc38e6bbaad0ae3e6ecd0d451629", size = 6920 }, +] + +[[package]] +name = "orjson" +version = "3.10.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2c/b1/10d5314003aeac7cb27824f502eedcf4f2705efc1b38f70db247e9ff99b5/orjson-3.10.0.tar.gz", hash = "sha256:ba4d8cac5f2e2cff36bea6b6481cdb92b38c202bcec603d6f5ff91960595a1ed", size = 4912978 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/e8/7081fe58bcdecb5152ef7b4aa10c494ce904388f0755fcab0e6c10c18a8e/orjson-3.10.0-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:47af5d4b850a2d1328660661f0881b67fdbe712aea905dadd413bdea6f792c33", size = 252085 }, + { url = "https://files.pythonhosted.org/packages/52/22/3cb99c0226715f1e9c354f7a6e327362789c51595a382658ef1107693195/orjson-3.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c90681333619d78360d13840c7235fdaf01b2b129cb3a4f1647783b1971542b6", size = 147255 }, + { url = 
"https://files.pythonhosted.org/packages/f9/9f/1886012ed163a0fb1a5856e4834cea9965cdb2967fcc89f6885860b95f9b/orjson-3.10.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:400c5b7c4222cb27b5059adf1fb12302eebcabf1978f33d0824aa5277ca899bd", size = 146198 }, + { url = "https://files.pythonhosted.org/packages/b3/f8/68aa7d369d7b5974008a3981e73c5cc73f85795178e731f3aeca1d1226b0/orjson-3.10.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5dcb32e949eae80fb335e63b90e5808b4b0f64e31476b3777707416b41682db5", size = 150191 }, + { url = "https://files.pythonhosted.org/packages/e5/3f/65193bba7b06fcee2ecc1ac88c52b09040aa68e73ae1241ec6b4d4750071/orjson-3.10.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa7d507c7493252c0a0264b5cc7e20fa2f8622b8a83b04d819b5ce32c97cf57b", size = 156524 }, + { url = "https://files.pythonhosted.org/packages/fb/c9/d43cc61ebd197d6b50bcc2f6dad8d4102eaf1750e1d7e52d0b7fa8296f0f/orjson-3.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e286a51def6626f1e0cc134ba2067dcf14f7f4b9550f6dd4535fd9d79000040b", size = 144815 }, + { url = "https://files.pythonhosted.org/packages/38/a6/a93a4cea6978da5b693363f4cd1500445f843a7d3278f5a9ca36d43d5f7e/orjson-3.10.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:8acd4b82a5f3a3ec8b1dc83452941d22b4711964c34727eb1e65449eead353ca", size = 329790 }, + { url = "https://files.pythonhosted.org/packages/22/f4/dbcea09636759a46789f2ed84d616fef9766a0167c40ae1c6cd417ebeddd/orjson-3.10.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:30707e646080dd3c791f22ce7e4a2fc2438765408547c10510f1f690bd336217", size = 311935 }, + { url = "https://files.pythonhosted.org/packages/e1/e4/23ccd2f859f1f9f1dbc7fa41a56e2a9653577d2c988e0b65d539ae851a06/orjson-3.10.0-cp310-none-win32.whl", hash = "sha256:115498c4ad34188dcb73464e8dc80e490a3e5e88a925907b6fedcf20e545001a", size = 140063 }, + { url = "https://files.pythonhosted.org/packages/ed/71/9b653e2ac43769385e89a30153c5374e07788c6d8fc44bb616de427a8aa2/orjson-3.10.0-cp310-none-win_amd64.whl", hash = "sha256:6735dd4a5a7b6df00a87d1d7a02b84b54d215fb7adac50dd24da5997ffb4798d", size = 139201 }, + { url = "https://files.pythonhosted.org/packages/3e/ae/a18a545261c308c3038fdec98b58186d48cf27e009a135c0cd70852fee1d/orjson-3.10.0-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9587053e0cefc284e4d1cd113c34468b7d3f17666d22b185ea654f0775316a26", size = 252083 }, + { url = "https://files.pythonhosted.org/packages/dd/14/5556d32549d82c82664a28234318eb5cb3827b6527585f36ffe733c77b79/orjson-3.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bef1050b1bdc9ea6c0d08468e3e61c9386723633b397e50b82fda37b3563d72", size = 147254 }, + { url = "https://files.pythonhosted.org/packages/d0/ac/9dc8a4d93af4bb80e1d1c62deb550ad7bd8fdaa21f98208c45a670de8891/orjson-3.10.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d16c6963ddf3b28c0d461641517cd312ad6b3cf303d8b87d5ef3fa59d6844337", size = 146202 }, + { url = "https://files.pythonhosted.org/packages/4a/d3/2715231bb5bd61c6dc8f9e80f0683935981d1ef172975a6058121f5ea413/orjson-3.10.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4251964db47ef090c462a2d909f16c7c7d5fe68e341dabce6702879ec26d1134", size = 150191 }, + { url = 
"https://files.pythonhosted.org/packages/53/ef/24aece17ec36d0e81fb50afb407501723c4aeda057499866960c9af78d2d/orjson-3.10.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:73bbbdc43d520204d9ef0817ac03fa49c103c7f9ea94f410d2950755be2c349c", size = 156526 }, + { url = "https://files.pythonhosted.org/packages/d1/57/d18e9d6e627a3bf9a6f3d98f300da10a0a414dcc2dbfe4c41f90fc6c3e44/orjson-3.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:414e5293b82373606acf0d66313aecb52d9c8c2404b1900683eb32c3d042dbd7", size = 144816 }, + { url = "https://files.pythonhosted.org/packages/dc/5b/477650506378258cbceb0c3b7f48dd48c2f173adb0a09090a9270853b229/orjson-3.10.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:feaed5bb09877dc27ed0d37f037ddef6cb76d19aa34b108db270d27d3d2ef747", size = 329784 }, + { url = "https://files.pythonhosted.org/packages/1c/47/b70ec6b8eb7a3e711f4c722b026d50d639948a352ccd40863fcd1391c405/orjson-3.10.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5127478260db640323cea131ee88541cb1a9fbce051f0b22fa2f0892f44da302", size = 311939 }, + { url = "https://files.pythonhosted.org/packages/8f/d9/e40f98f6ecb3b0fdc637d9dd25d0ffaa3f6f97913487ef6b7baff67f7dfb/orjson-3.10.0-cp311-none-win32.whl", hash = "sha256:b98345529bafe3c06c09996b303fc0a21961820d634409b8639bc16bd4f21b63", size = 140084 }, + { url = "https://files.pythonhosted.org/packages/0d/7b/566fa00c023652b4a1d409a26cc4dd71bcdbea86814ba4a05ff6ac6a2c0a/orjson-3.10.0-cp311-none-win_amd64.whl", hash = "sha256:658ca5cee3379dd3d37dbacd43d42c1b4feee99a29d847ef27a1cb18abdfb23f", size = 139211 }, + { url = "https://files.pythonhosted.org/packages/6d/6c/9ebe2c59a07614099f65315be8a4856fe0245e264b0d4b7c90a34d3fb3fe/orjson-3.10.0-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:4329c1d24fd130ee377e32a72dc54a3c251e6706fccd9a2ecb91b3606fddd998", size = 252341 }, + { url = "https://files.pythonhosted.org/packages/d4/e7/552668b791e8908b57a0994e6c53aa76b9d4b4211ead0b8a6de2f20d85ac/orjson-3.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef0f19fdfb6553342b1882f438afd53c7cb7aea57894c4490c43e4431739c700", size = 147243 }, + { url = "https://files.pythonhosted.org/packages/86/0c/609d36429fd68b5d705a1e055b1ae8afb91cbd3fdfe9b89820bbfcbd834b/orjson-3.10.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c4f60db24161534764277f798ef53b9d3063092f6d23f8f962b4a97edfa997a0", size = 146362 }, + { url = "https://files.pythonhosted.org/packages/2d/59/d2dbde3dc8e02454316f7ea15c33501f152ad93aa40d2a307f5672c813c0/orjson-3.10.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1de3fd5c7b208d836f8ecb4526995f0d5877153a4f6f12f3e9bf11e49357de98", size = 150396 }, + { url = "https://files.pythonhosted.org/packages/b9/41/853bbdde934a86bf3bc63912954787576a7f042f61526f3f83f6c56127d2/orjson-3.10.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f93e33f67729d460a177ba285002035d3f11425ed3cebac5f6ded4ef36b28344", size = 156628 }, + { url = "https://files.pythonhosted.org/packages/b8/bc/817fa04a899144d815723538f7dbcd004999576e18fd83f12dd5da553db7/orjson-3.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:237ba922aef472761acd697eef77fef4831ab769a42e83c04ac91e9f9e08fa0e", size = 145043 }, + { url = 
"https://files.pythonhosted.org/packages/47/c4/592c4344520e64c1b27021dbe6ea2ecbe5d9111fb24ad384938b6b6b5408/orjson-3.10.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:98c1bfc6a9bec52bc8f0ab9b86cc0874b0299fccef3562b793c1576cf3abb570", size = 329750 }, + { url = "https://files.pythonhosted.org/packages/0d/71/53b35214f7dd56145ce8400ce6076c21c43620c6e0c1054c10838c596739/orjson-3.10.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:30d795a24be16c03dca0c35ca8f9c8eaaa51e3342f2c162d327bd0225118794a", size = 312176 }, + { url = "https://files.pythonhosted.org/packages/d6/32/87eb4e17f047e712f462f680c2de9cfd2d9bfdd6b014e6148a6eb5eec984/orjson-3.10.0-cp312-none-win32.whl", hash = "sha256:6a3f53dc650bc860eb26ec293dfb489b2f6ae1cbfc409a127b01229980e372f7", size = 140131 }, + { url = "https://files.pythonhosted.org/packages/b9/38/74cd869bab2dff3d94828eefaaf54c55a2a12675b77600bc83225d8441bf/orjson-3.10.0-cp312-none-win_amd64.whl", hash = "sha256:983db1f87c371dc6ffc52931eb75f9fe17dc621273e43ce67bee407d3e5476e9", size = 139360 }, +] + +[[package]] +name = "overrides" +version = "7.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/36/86/b585f53236dec60aba864e050778b25045f857e17f6e5ea0ae95fe80edd2/overrides-7.7.0.tar.gz", hash = "sha256:55158fa3d93b98cc75299b1e67078ad9003ca27945c76162c1c0766d6f91820a", size = 22812 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2c/ab/fc8290c6a4c722e5514d80f62b2dc4c4df1a68a41d1364e625c35990fcf3/overrides-7.7.0-py3-none-any.whl", hash = "sha256:c7ed9d062f78b8e4c1a7b70bd8796b35ead4d9f510227ef9c5dc7626c60d7e49", size = 17832 }, +] + +[[package]] +name = "packaging" +version = "24.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/51/65/50db4dda066951078f0a96cf12f4b9ada6e4b811516bf0262c0f4f7064d4/packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002", size = 148788 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/08/aa/cc0199a5f0ad350994d660967a8efb233fe0416e4639146c089643407ce6/packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124", size = 53985 }, +] + +[[package]] +name = "pandas" +version = "2.2.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy" }, + { name = "python-dateutil" }, + { name = "pytz" }, + { name = "tzdata" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/88/d9/ecf715f34c73ccb1d8ceb82fc01cd1028a65a5f6dbc57bfa6ea155119058/pandas-2.2.2.tar.gz", hash = "sha256:9e79019aba43cb4fda9e4d983f8e88ca0373adbb697ae9c6c43093218de28b54", size = 4398391 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/2d/39600d073ea70b9cafdc51fab91d69c72b49dd92810f24cb5ac6631f387f/pandas-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90c6fca2acf139569e74e8781709dccb6fe25940488755716d1d354d6bc58bce", size = 12551798 }, + { url = "https://files.pythonhosted.org/packages/fd/4b/0cd38e68ab690b9df8ef90cba625bf3f93b82d1c719703b8e1b333b2c72d/pandas-2.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7adfc142dac335d8c1e0dcbd37eb8617eac386596eb9e1a1b77791cf2498238", size = 11287392 }, + { url = "https://files.pythonhosted.org/packages/01/c6/d3d2612aea9b9f28e79a30b864835dad8f542dcf474eee09afeee5d15d75/pandas-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:4abfe0be0d7221be4f12552995e58723c7422c80a659da13ca382697de830c08", size = 15634823 }, + { url = "https://files.pythonhosted.org/packages/89/1b/12521efcbc6058e2673583bb096c2b5046a9df39bd73eca392c1efed24e5/pandas-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8635c16bf3d99040fdf3ca3db669a7250ddf49c55dc4aa8fe0ae0fa8d6dcc1f0", size = 13032214 }, + { url = "https://files.pythonhosted.org/packages/e4/d7/303dba73f1c3a9ef067d23e5afbb6175aa25e8121be79be354dcc740921a/pandas-2.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:40ae1dffb3967a52203105a077415a86044a2bea011b5f321c6aa64b379a3f51", size = 16278302 }, + { url = "https://files.pythonhosted.org/packages/ba/df/8ff7c5ed1cc4da8c6ab674dc8e4860a4310c3880df1283e01bac27a4333d/pandas-2.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8e5a0b00e1e56a842f922e7fae8ae4077aee4af0acb5ae3622bd4b4c30aedf99", size = 13892866 }, + { url = "https://files.pythonhosted.org/packages/69/a6/81d5dc9a612cf0c1810c2ebc4f2afddb900382276522b18d128213faeae3/pandas-2.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:ddf818e4e6c7c6f4f7c8a12709696d193976b591cc7dc50588d3d1a6b5dc8772", size = 11621592 }, + { url = "https://files.pythonhosted.org/packages/1b/70/61704497903d43043e288017cb2b82155c0d41e15f5c17807920877b45c2/pandas-2.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:696039430f7a562b74fa45f540aca068ea85fa34c244d0deee539cb6d70aa288", size = 12574808 }, + { url = "https://files.pythonhosted.org/packages/16/c6/75231fd47afd6b3f89011e7077f1a3958441264aca7ae9ff596e3276a5d0/pandas-2.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8e90497254aacacbc4ea6ae5e7a8cd75629d6ad2b30025a4a8b09aa4faf55151", size = 11304876 }, + { url = "https://files.pythonhosted.org/packages/97/2d/7b54f80b93379ff94afb3bd9b0cd1d17b48183a0d6f98045bc01ce1e06a7/pandas-2.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58b84b91b0b9f4bafac2a0ac55002280c094dfc6402402332c0913a59654ab2b", size = 15602548 }, + { url = "https://files.pythonhosted.org/packages/fc/a5/4d82be566f069d7a9a702dcdf6f9106df0e0b042e738043c0cc7ddd7e3f6/pandas-2.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2123dc9ad6a814bcdea0f099885276b31b24f7edf40f6cdbc0912672e22eee", size = 13031332 }, + { url = "https://files.pythonhosted.org/packages/92/a2/b79c48f530673567805e607712b29814b47dcaf0d167e87145eb4b0118c6/pandas-2.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2925720037f06e89af896c70bca73459d7e6a4be96f9de79e2d440bd499fe0db", size = 16286054 }, + { url = "https://files.pythonhosted.org/packages/40/c7/47e94907f1d8fdb4868d61bd6c93d57b3784a964d52691b77ebfdb062842/pandas-2.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0cace394b6ea70c01ca1595f839cf193df35d1575986e484ad35c4aeae7266c1", size = 13879507 }, + { url = "https://files.pythonhosted.org/packages/ab/63/966db1321a0ad55df1d1fe51505d2cdae191b84c907974873817b0a6e849/pandas-2.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:873d13d177501a28b2756375d59816c365e42ed8417b41665f346289adc68d24", size = 11634249 }, + { url = "https://files.pythonhosted.org/packages/dd/49/de869130028fb8d90e25da3b7d8fb13e40f5afa4c4af1781583eb1ff3839/pandas-2.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9dfde2a0ddef507a631dc9dc4af6a9489d5e2e740e226ad426a05cabfbd7c8ef", size = 12500886 }, + { url = "https://files.pythonhosted.org/packages/db/7c/9a60add21b96140e22465d9adf09832feade45235cd22f4cb1668a25e443/pandas-2.2.2-cp312-cp312-macosx_11_0_arm64.whl", 
hash = "sha256:e9b79011ff7a0f4b1d6da6a61aa1aa604fb312d6647de5bad20013682d1429ce", size = 11340320 }, + { url = "https://files.pythonhosted.org/packages/b0/85/f95b5f322e1ae13b7ed7e97bd999160fa003424711ab4dc8344b8772c270/pandas-2.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cb51fe389360f3b5a4d57dbd2848a5f033350336ca3b340d1c53a1fad33bcad", size = 15204346 }, + { url = "https://files.pythonhosted.org/packages/40/10/79e52ef01dfeb1c1ca47a109a01a248754ebe990e159a844ece12914de83/pandas-2.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eee3a87076c0756de40b05c5e9a6069c035ba43e8dd71c379e68cab2c20f16ad", size = 12733396 }, + { url = "https://files.pythonhosted.org/packages/35/9d/208febf8c4eb5c1d9ea3314d52d8bd415fd0ef0dd66bb24cc5bdbc8fa71a/pandas-2.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3e374f59e440d4ab45ca2fffde54b81ac3834cf5ae2cdfa69c90bc03bde04d76", size = 15858913 }, + { url = "https://files.pythonhosted.org/packages/99/d1/2d9bd05def7a9e08a92ec929b5a4c8d5556ec76fae22b0fa486cbf33ea63/pandas-2.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:43498c0bdb43d55cb162cdc8c06fac328ccb5d2eabe3cadeb3529ae6f0517c32", size = 13417786 }, + { url = "https://files.pythonhosted.org/packages/22/a5/a0b255295406ed54269814bc93723cfd1a0da63fb9aaf99e1364f07923e5/pandas-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:d187d355ecec3629624fccb01d104da7d7f391db0311145817525281e2804d23", size = 11498828 }, +] + +[[package]] +name = "pandas-stubs" +version = "2.2.2.240909" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy" }, + { name = "types-pytz" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/39/d9/0682716a9ba539b78748f026c523ae5f280fc478381f7f1c6c037d0f0fc3/pandas_stubs-2.2.2.240909.tar.gz", hash = "sha256:3c0951a2c3e45e3475aed9d80b7147ae82f176b9e42e9fb321cfdebf3d411b3d", size = 103599 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/06/2d/68d58d819798b466e16c01ca934deada2f9165fb3d062f83abbef2f8067e/pandas_stubs-2.2.2.240909-py3-none-any.whl", hash = "sha256:e230f5fa4065f9417804f4d65cd98f86c002efcc07933e8abcd48c3fad9c30a2", size = 157811 }, +] + +[[package]] +name = "parameterized" +version = "0.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ea/49/00c0c0cc24ff4266025a53e41336b79adaa5a4ebfad214f433d623f9865e/parameterized-0.9.0.tar.gz", hash = "sha256:7fc905272cefa4f364c1a3429cbbe9c0f98b793988efb5bf90aac80f08db09b1", size = 24351 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/00/2f/804f58f0b856ab3bf21617cccf5b39206e6c4c94c2cd227bde125ea6105f/parameterized-0.9.0-py2.py3-none-any.whl", hash = "sha256:4e0758e3d41bea3bbd05ec14fc2c24736723f243b28d702081aef438c9372b1b", size = 20475 }, +] + +[[package]] +name = "parso" +version = "0.8.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/66/94/68e2e17afaa9169cf6412ab0f28623903be73d1b32e208d9e8e541bb086d/parso-0.8.4.tar.gz", hash = "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d", size = 400609 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c6/ac/dac4a63f978e4dcb3c6d3a78c4d8e0192a113d288502a1216950c41b1027/parso-0.8.4-py2.py3-none-any.whl", hash = "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18", size = 103650 }, +] + +[[package]] +name = "passlib" +version = "1.7.4" +source = { registry = 
"https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b6/06/9da9ee59a67fae7761aab3ccc84fa4f3f33f125b370f1ccdb915bf967c11/passlib-1.7.4.tar.gz", hash = "sha256:defd50f72b65c5402ab2c573830a6978e5f202ad0d984793c8dde2c4152ebe04", size = 689844 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3b/a4/ab6b7589382ca3df236e03faa71deac88cae040af60c071a78d254a62172/passlib-1.7.4-py2.py3-none-any.whl", hash = "sha256:aa6bca462b8d8bda89c70b382f0c298a20b5560af6cbfa2dce410c0a2fb669f1", size = 525554 }, +] + +[[package]] +name = "pexpect" +version = "4.9.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "ptyprocess" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/42/92/cc564bf6381ff43ce1f4d06852fc19a2f11d180f23dc32d9588bee2f149d/pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f", size = 166450 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9e/c3/059298687310d527a58bb01f3b1965787ee3b40dce76752eda8b44e9a2c5/pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523", size = 63772 }, +] + +[[package]] +name = "pillow" +version = "10.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/cd/74/ad3d526f3bf7b6d3f408b73fde271ec69dfac8b81341a318ce825f2b3812/pillow-10.4.0.tar.gz", hash = "sha256:166c1cd4d24309b30d61f79f4a9114b7b2313d7450912277855ff5dfd7cd4a06", size = 46555059 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0e/69/a31cccd538ca0b5272be2a38347f8839b97a14be104ea08b0db92f749c74/pillow-10.4.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:4d9667937cfa347525b319ae34375c37b9ee6b525440f3ef48542fcf66f2731e", size = 3509271 }, + { url = "https://files.pythonhosted.org/packages/9a/9e/4143b907be8ea0bce215f2ae4f7480027473f8b61fcedfda9d851082a5d2/pillow-10.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:543f3dc61c18dafb755773efc89aae60d06b6596a63914107f75459cf984164d", size = 3375658 }, + { url = "https://files.pythonhosted.org/packages/8a/25/1fc45761955f9359b1169aa75e241551e74ac01a09f487adaaf4c3472d11/pillow-10.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7928ecbf1ece13956b95d9cbcfc77137652b02763ba384d9ab508099a2eca856", size = 4332075 }, + { url = "https://files.pythonhosted.org/packages/5e/dd/425b95d0151e1d6c951f45051112394f130df3da67363b6bc75dc4c27aba/pillow-10.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4d49b85c4348ea0b31ea63bc75a9f3857869174e2bf17e7aba02945cd218e6f", size = 4444808 }, + { url = "https://files.pythonhosted.org/packages/b1/84/9a15cc5726cbbfe7f9f90bfb11f5d028586595907cd093815ca6644932e3/pillow-10.4.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:6c762a5b0997f5659a5ef2266abc1d8851ad7749ad9a6a5506eb23d314e4f46b", size = 4356290 }, + { url = "https://files.pythonhosted.org/packages/b5/5b/6651c288b08df3b8c1e2f8c1152201e0b25d240e22ddade0f1e242fc9fa0/pillow-10.4.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a985e028fc183bf12a77a8bbf36318db4238a3ded7fa9df1b9a133f1cb79f8fc", size = 4525163 }, + { url = "https://files.pythonhosted.org/packages/07/8b/34854bf11a83c248505c8cb0fcf8d3d0b459a2246c8809b967963b6b12ae/pillow-10.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:812f7342b0eee081eaec84d91423d1b4650bb9828eb53d8511bcef8ce5aecf1e", size = 4463100 }, + { url = 
"https://files.pythonhosted.org/packages/78/63/0632aee4e82476d9cbe5200c0cdf9ba41ee04ed77887432845264d81116d/pillow-10.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ac1452d2fbe4978c2eec89fb5a23b8387aba707ac72810d9490118817d9c0b46", size = 4592880 }, + { url = "https://files.pythonhosted.org/packages/df/56/b8663d7520671b4398b9d97e1ed9f583d4afcbefbda3c6188325e8c297bd/pillow-10.4.0-cp310-cp310-win32.whl", hash = "sha256:bcd5e41a859bf2e84fdc42f4edb7d9aba0a13d29a2abadccafad99de3feff984", size = 2235218 }, + { url = "https://files.pythonhosted.org/packages/f4/72/0203e94a91ddb4a9d5238434ae6c1ca10e610e8487036132ea9bf806ca2a/pillow-10.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:ecd85a8d3e79cd7158dec1c9e5808e821feea088e2f69a974db5edf84dc53141", size = 2554487 }, + { url = "https://files.pythonhosted.org/packages/bd/52/7e7e93d7a6e4290543f17dc6f7d3af4bd0b3dd9926e2e8a35ac2282bc5f4/pillow-10.4.0-cp310-cp310-win_arm64.whl", hash = "sha256:ff337c552345e95702c5fde3158acb0625111017d0e5f24bf3acdb9cc16b90d1", size = 2243219 }, + { url = "https://files.pythonhosted.org/packages/a7/62/c9449f9c3043c37f73e7487ec4ef0c03eb9c9afc91a92b977a67b3c0bbc5/pillow-10.4.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:0a9ec697746f268507404647e531e92889890a087e03681a3606d9b920fbee3c", size = 3509265 }, + { url = "https://files.pythonhosted.org/packages/f4/5f/491dafc7bbf5a3cc1845dc0430872e8096eb9e2b6f8161509d124594ec2d/pillow-10.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dfe91cb65544a1321e631e696759491ae04a2ea11d36715eca01ce07284738be", size = 3375655 }, + { url = "https://files.pythonhosted.org/packages/73/d5/c4011a76f4207a3c151134cd22a1415741e42fa5ddecec7c0182887deb3d/pillow-10.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5dc6761a6efc781e6a1544206f22c80c3af4c8cf461206d46a1e6006e4429ff3", size = 4340304 }, + { url = "https://files.pythonhosted.org/packages/ac/10/c67e20445a707f7a610699bba4fe050583b688d8cd2d202572b257f46600/pillow-10.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e84b6cc6a4a3d76c153a6b19270b3526a5a8ed6b09501d3af891daa2a9de7d6", size = 4452804 }, + { url = "https://files.pythonhosted.org/packages/a9/83/6523837906d1da2b269dee787e31df3b0acb12e3d08f024965a3e7f64665/pillow-10.4.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:bbc527b519bd3aa9d7f429d152fea69f9ad37c95f0b02aebddff592688998abe", size = 4365126 }, + { url = "https://files.pythonhosted.org/packages/ba/e5/8c68ff608a4203085158cff5cc2a3c534ec384536d9438c405ed6370d080/pillow-10.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:76a911dfe51a36041f2e756b00f96ed84677cdeb75d25c767f296c1c1eda1319", size = 4533541 }, + { url = "https://files.pythonhosted.org/packages/f4/7c/01b8dbdca5bc6785573f4cee96e2358b0918b7b2c7b60d8b6f3abf87a070/pillow-10.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:59291fb29317122398786c2d44427bbd1a6d7ff54017075b22be9d21aa59bd8d", size = 4471616 }, + { url = "https://files.pythonhosted.org/packages/c8/57/2899b82394a35a0fbfd352e290945440e3b3785655a03365c0ca8279f351/pillow-10.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:416d3a5d0e8cfe4f27f574362435bc9bae57f679a7158e0096ad2beb427b8696", size = 4600802 }, + { url = "https://files.pythonhosted.org/packages/4d/d7/a44f193d4c26e58ee5d2d9db3d4854b2cfb5b5e08d360a5e03fe987c0086/pillow-10.4.0-cp311-cp311-win32.whl", hash = "sha256:7086cc1d5eebb91ad24ded9f58bec6c688e9f0ed7eb3dbbf1e4800280a896496", size = 2235213 }, + { url = 
"https://files.pythonhosted.org/packages/c1/d0/5866318eec2b801cdb8c82abf190c8343d8a1cd8bf5a0c17444a6f268291/pillow-10.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cbed61494057c0f83b83eb3a310f0bf774b09513307c434d4366ed64f4128a91", size = 2554498 }, + { url = "https://files.pythonhosted.org/packages/d4/c8/310ac16ac2b97e902d9eb438688de0d961660a87703ad1561fd3dfbd2aa0/pillow-10.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:f5f0c3e969c8f12dd2bb7e0b15d5c468b51e5017e01e2e867335c81903046a22", size = 2243219 }, + { url = "https://files.pythonhosted.org/packages/05/cb/0353013dc30c02a8be34eb91d25e4e4cf594b59e5a55ea1128fde1e5f8ea/pillow-10.4.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:673655af3eadf4df6b5457033f086e90299fdd7a47983a13827acf7459c15d94", size = 3509350 }, + { url = "https://files.pythonhosted.org/packages/e7/cf/5c558a0f247e0bf9cec92bff9b46ae6474dd736f6d906315e60e4075f737/pillow-10.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:866b6942a92f56300012f5fbac71f2d610312ee65e22f1aa2609e491284e5597", size = 3374980 }, + { url = "https://files.pythonhosted.org/packages/84/48/6e394b86369a4eb68b8a1382c78dc092245af517385c086c5094e3b34428/pillow-10.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29dbdc4207642ea6aad70fbde1a9338753d33fb23ed6956e706936706f52dd80", size = 4343799 }, + { url = "https://files.pythonhosted.org/packages/3b/f3/a8c6c11fa84b59b9df0cd5694492da8c039a24cd159f0f6918690105c3be/pillow-10.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf2342ac639c4cf38799a44950bbc2dfcb685f052b9e262f446482afaf4bffca", size = 4459973 }, + { url = "https://files.pythonhosted.org/packages/7d/1b/c14b4197b80150fb64453585247e6fb2e1d93761fa0fa9cf63b102fde822/pillow-10.4.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:f5b92f4d70791b4a67157321c4e8225d60b119c5cc9aee8ecf153aace4aad4ef", size = 4370054 }, + { url = "https://files.pythonhosted.org/packages/55/77/40daddf677897a923d5d33329acd52a2144d54a9644f2a5422c028c6bf2d/pillow-10.4.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:86dcb5a1eb778d8b25659d5e4341269e8590ad6b4e8b44d9f4b07f8d136c414a", size = 4539484 }, + { url = "https://files.pythonhosted.org/packages/40/54/90de3e4256b1207300fb2b1d7168dd912a2fb4b2401e439ba23c2b2cabde/pillow-10.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:780c072c2e11c9b2c7ca37f9a2ee8ba66f44367ac3e5c7832afcfe5104fd6d1b", size = 4477375 }, + { url = "https://files.pythonhosted.org/packages/13/24/1bfba52f44193860918ff7c93d03d95e3f8748ca1de3ceaf11157a14cf16/pillow-10.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:37fb69d905be665f68f28a8bba3c6d3223c8efe1edf14cc4cfa06c241f8c81d9", size = 4608773 }, + { url = "https://files.pythonhosted.org/packages/55/04/5e6de6e6120451ec0c24516c41dbaf80cce1b6451f96561235ef2429da2e/pillow-10.4.0-cp312-cp312-win32.whl", hash = "sha256:7dfecdbad5c301d7b5bde160150b4db4c659cee2b69589705b6f8a0c509d9f42", size = 2235690 }, + { url = "https://files.pythonhosted.org/packages/74/0a/d4ce3c44bca8635bd29a2eab5aa181b654a734a29b263ca8efe013beea98/pillow-10.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:1d846aea995ad352d4bdcc847535bd56e0fd88d36829d2c90be880ef1ee4668a", size = 2554951 }, + { url = "https://files.pythonhosted.org/packages/b5/ca/184349ee40f2e92439be9b3502ae6cfc43ac4b50bc4fc6b3de7957563894/pillow-10.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:e553cad5179a66ba15bb18b353a19020e73a7921296a7979c4a2b7f6a5cd57f9", size = 2243427 }, + { url = 
"https://files.pythonhosted.org/packages/c3/00/706cebe7c2c12a6318aabe5d354836f54adff7156fd9e1bd6c89f4ba0e98/pillow-10.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8bc1a764ed8c957a2e9cacf97c8b2b053b70307cf2996aafd70e91a082e70df3", size = 3525685 }, + { url = "https://files.pythonhosted.org/packages/cf/76/f658cbfa49405e5ecbfb9ba42d07074ad9792031267e782d409fd8fe7c69/pillow-10.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6209bb41dc692ddfee4942517c19ee81b86c864b626dbfca272ec0f7cff5d9fb", size = 3374883 }, + { url = "https://files.pythonhosted.org/packages/46/2b/99c28c4379a85e65378211971c0b430d9c7234b1ec4d59b2668f6299e011/pillow-10.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bee197b30783295d2eb680b311af15a20a8b24024a19c3a26431ff83eb8d1f70", size = 4339837 }, + { url = "https://files.pythonhosted.org/packages/f1/74/b1ec314f624c0c43711fdf0d8076f82d9d802afd58f1d62c2a86878e8615/pillow-10.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ef61f5dd14c300786318482456481463b9d6b91ebe5ef12f405afbba77ed0be", size = 4455562 }, + { url = "https://files.pythonhosted.org/packages/4a/2a/4b04157cb7b9c74372fa867096a1607e6fedad93a44deeff553ccd307868/pillow-10.4.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:297e388da6e248c98bc4a02e018966af0c5f92dfacf5a5ca22fa01cb3179bca0", size = 4366761 }, + { url = "https://files.pythonhosted.org/packages/ac/7b/8f1d815c1a6a268fe90481232c98dd0e5fa8c75e341a75f060037bd5ceae/pillow-10.4.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:e4db64794ccdf6cb83a59d73405f63adbe2a1887012e308828596100a0b2f6cc", size = 4536767 }, + { url = "https://files.pythonhosted.org/packages/e5/77/05fa64d1f45d12c22c314e7b97398ffb28ef2813a485465017b7978b3ce7/pillow-10.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd2880a07482090a3bcb01f4265f1936a903d70bc740bfcb1fd4e8a2ffe5cf5a", size = 4477989 }, + { url = "https://files.pythonhosted.org/packages/12/63/b0397cfc2caae05c3fb2f4ed1b4fc4fc878f0243510a7a6034ca59726494/pillow-10.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b35b21b819ac1dbd1233317adeecd63495f6babf21b7b2512d244ff6c6ce309", size = 4610255 }, + { url = "https://files.pythonhosted.org/packages/7b/f9/cfaa5082ca9bc4a6de66ffe1c12c2d90bf09c309a5f52b27759a596900e7/pillow-10.4.0-cp313-cp313-win32.whl", hash = "sha256:551d3fd6e9dc15e4c1eb6fc4ba2b39c0c7933fa113b220057a34f4bb3268a060", size = 2235603 }, + { url = "https://files.pythonhosted.org/packages/01/6a/30ff0eef6e0c0e71e55ded56a38d4859bf9d3634a94a88743897b5f96936/pillow-10.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:030abdbe43ee02e0de642aee345efa443740aa4d828bfe8e2eb11922ea6a21ea", size = 2554972 }, + { url = "https://files.pythonhosted.org/packages/48/2c/2e0a52890f269435eee38b21c8218e102c621fe8d8df8b9dd06fabf879ba/pillow-10.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:5b001114dd152cfd6b23befeb28d7aee43553e2402c9f159807bf55f33af8a8d", size = 2243375 }, + { url = "https://files.pythonhosted.org/packages/38/30/095d4f55f3a053392f75e2eae45eba3228452783bab3d9a920b951ac495c/pillow-10.4.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:5b4815f2e65b30f5fbae9dfffa8636d992d49705723fe86a3661806e069352d4", size = 3493889 }, + { url = "https://files.pythonhosted.org/packages/f3/e8/4ff79788803a5fcd5dc35efdc9386af153569853767bff74540725b45863/pillow-10.4.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8f0aef4ef59694b12cadee839e2ba6afeab89c0f39a3adc02ed51d109117b8da", size = 3346160 }, + { url = 
"https://files.pythonhosted.org/packages/d7/ac/4184edd511b14f760c73f5bb8a5d6fd85c591c8aff7c2229677a355c4179/pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f4727572e2918acaa9077c919cbbeb73bd2b3ebcfe033b72f858fc9fbef0026", size = 3435020 }, + { url = "https://files.pythonhosted.org/packages/da/21/1749cd09160149c0a246a81d646e05f35041619ce76f6493d6a96e8d1103/pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff25afb18123cea58a591ea0244b92eb1e61a1fd497bf6d6384f09bc3262ec3e", size = 3490539 }, + { url = "https://files.pythonhosted.org/packages/b6/f5/f71fe1888b96083b3f6dfa0709101f61fc9e972c0c8d04e9d93ccef2a045/pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dc3e2db6ba09ffd7d02ae9141cfa0ae23393ee7687248d46a7507b75d610f4f5", size = 3476125 }, + { url = "https://files.pythonhosted.org/packages/96/b9/c0362c54290a31866c3526848583a2f45a535aa9d725fd31e25d318c805f/pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:02a2be69f9c9b8c1e97cf2713e789d4e398c751ecfd9967c18d0ce304efbf885", size = 3579373 }, + { url = "https://files.pythonhosted.org/packages/52/3b/ce7a01026a7cf46e5452afa86f97a5e88ca97f562cafa76570178ab56d8d/pillow-10.4.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0755ffd4a0c6f267cccbae2e9903d95477ca2f77c4fcf3a3a09570001856c8a5", size = 2554661 }, +] + +[[package]] +name = "platformdirs" +version = "4.3.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/13/fc/128cc9cb8f03208bdbf93d3aa862e16d376844a14f9a0ce5cf4507372de4/platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907", size = 21302 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3c/a6/bc1012356d8ece4d66dd75c4b9fc6c1f6650ddd5991e421177d9f8f671be/platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb", size = 18439 }, +] + +[[package]] +name = "pluggy" +version = "1.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/96/2d/02d4312c973c6050a18b314a5ad0b3210edb65a906f868e31c111dede4a6/pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1", size = 67955 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669", size = 20556 }, +] + +[[package]] +name = "portalocker" +version = "2.10.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pywin32", marker = "platform_system == 'Windows'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ed/d3/c6c64067759e87af98cc668c1cc75171347d0f1577fab7ca3749134e3cd4/portalocker-2.10.1.tar.gz", hash = "sha256:ef1bf844e878ab08aee7e40184156e1151f228f103aa5c6bd0724cc330960f8f", size = 40891 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9b/fb/a70a4214956182e0d7a9099ab17d50bfcba1056188e9b14f35b9e2b62a0d/portalocker-2.10.1-py3-none-any.whl", hash = "sha256:53a5984ebc86a025552264b459b46a2086e269b21823cb572f8f28ee759e45bf", size = 18423 }, +] + +[[package]] +name = "posthog" +version = "3.6.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "backoff" }, + { name = "monotonic" }, + { name = 
"python-dateutil" }, + { name = "requests" }, + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/15/3b/9894166fbf290fcb9e253dd54c12d1fdef8c723eb5bd1ce8039211de5dd4/posthog-3.6.6.tar.gz", hash = "sha256:1e04783293117109189ad7048f3eedbe21caff0e39bee5e2d47a93dd790fefac", size = 49583 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c4/3e/53cf7e3ead6c0a5f4064fa4e697a2985bace0aae0ed428983774213a3485/posthog-3.6.6-py2.py3-none-any.whl", hash = "sha256:38834fd7f0732582a20d4eb4674c8d5c088e464d14d1b3f8c176e389aecaa4ef", size = 54258 }, +] + +[[package]] +name = "pre-commit" +version = "3.8.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cfgv" }, + { name = "identify" }, + { name = "nodeenv" }, + { name = "pyyaml" }, + { name = "virtualenv" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/64/10/97ee2fa54dff1e9da9badbc5e35d0bbaef0776271ea5907eccf64140f72f/pre_commit-3.8.0.tar.gz", hash = "sha256:8bb6494d4a20423842e198980c9ecf9f96607a07ea29549e180eef9ae80fe7af", size = 177815 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/07/92/caae8c86e94681b42c246f0bca35c059a2f0529e5b92619f6aba4cf7e7b6/pre_commit-3.8.0-py2.py3-none-any.whl", hash = "sha256:9a90a53bf82fdd8778d58085faf8d83df56e40dfe18f45b19446e26bf1b3a63f", size = 204643 }, +] + +[[package]] +name = "prometheus-client" +version = "0.20.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3d/39/3be07741a33356127c4fe633768ee450422c1231c6d34b951fee1458308d/prometheus_client-0.20.0.tar.gz", hash = "sha256:287629d00b147a32dcb2be0b9df905da599b2d82f80377083ec8463309a4bb89", size = 78278 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/98/745b810d822103adca2df8decd4c0bbe839ba7ad3511af3f0d09692fc0f0/prometheus_client-0.20.0-py3-none-any.whl", hash = "sha256:cde524a85bce83ca359cc837f28b8c0db5cac7aa653a588fd7e84ba061c329e7", size = 54474 }, +] + +[[package]] +name = "prompt-toolkit" +version = "3.0.47" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "wcwidth" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/47/6d/0279b119dafc74c1220420028d490c4399b790fc1256998666e3a341879f/prompt_toolkit-3.0.47.tar.gz", hash = "sha256:1e1b29cb58080b1e69f207c893a1a7bf16d127a5c30c9d17a25a5d77792e5360", size = 425859 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e8/23/22750c4b768f09386d1c3cc4337953e8936f48a888fa6dddfb669b2c9088/prompt_toolkit-3.0.47-py3-none-any.whl", hash = "sha256:0d7bfa67001d5e39d02c224b663abc33687405033a8c422d0d675a5a13361d10", size = 386411 }, +] + +[[package]] +name = "proto-plus" +version = "1.24.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3e/fc/e9a65cd52c1330d8d23af6013651a0bc50b6d76bcbdf91fae7cd19c68f29/proto-plus-1.24.0.tar.gz", hash = "sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445", size = 55942 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7c/6f/db31f0711c0402aa477257205ce7d29e86a75cb52cd19f7afb585f75cda0/proto_plus-1.24.0-py3-none-any.whl", hash = "sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12", size = 50080 }, +] + +[[package]] +name = "protobuf" +version = "4.25.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/67/dd/48d5fdb68ec74d70fabcc252e434492e56f70944d9f17b6a15e3746d2295/protobuf-4.25.5.tar.gz", hash = "sha256:7f8249476b4a9473645db7f8ab42b02fe1488cbe5fb72fddd445e0665afd8584", size = 380315 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/00/35/1b3c5a5e6107859c4ca902f4fbb762e48599b78129a05d20684fef4a4d04/protobuf-4.25.5-cp310-abi3-win32.whl", hash = "sha256:5e61fd921603f58d2f5acb2806a929b4675f8874ff5f330b7d6f7e2e784bbcd8", size = 392457 }, + { url = "https://files.pythonhosted.org/packages/a7/ad/bf3f358e90b7e70bf7fb520702cb15307ef268262292d3bdb16ad8ebc815/protobuf-4.25.5-cp310-abi3-win_amd64.whl", hash = "sha256:4be0571adcbe712b282a330c6e89eae24281344429ae95c6d85e79e84780f5ea", size = 413449 }, + { url = "https://files.pythonhosted.org/packages/51/49/d110f0a43beb365758a252203c43eaaad169fe7749da918869a8c991f726/protobuf-4.25.5-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:b2fde3d805354df675ea4c7c6338c1aecd254dfc9925e88c6d31a2bcb97eb173", size = 394248 }, + { url = "https://files.pythonhosted.org/packages/c6/ab/0f384ca0bc6054b1a7b6009000ab75d28a5506e4459378b81280ae7fd358/protobuf-4.25.5-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:919ad92d9b0310070f8356c24b855c98df2b8bd207ebc1c0c6fcc9ab1e007f3d", size = 293717 }, + { url = "https://files.pythonhosted.org/packages/05/a6/094a2640be576d760baa34c902dcb8199d89bce9ed7dd7a6af74dcbbd62d/protobuf-4.25.5-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:fe14e16c22be926d3abfcb500e60cab068baf10b542b8c858fa27e098123e331", size = 294635 }, + { url = "https://files.pythonhosted.org/packages/33/90/f198a61df8381fb43ae0fe81b3d2718e8dcc51ae8502c7657ab9381fbc4f/protobuf-4.25.5-py3-none-any.whl", hash = "sha256:0aebecb809cae990f8129ada5ca273d9d670b76d9bfc9b1809f0a9c02b7dbf41", size = 156467 }, +] + +[[package]] +name = "psutil" +version = "6.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/18/c7/8c6872f7372eb6a6b2e4708b88419fb46b857f7a2e1892966b851cc79fc9/psutil-6.0.0.tar.gz", hash = "sha256:8faae4f310b6d969fa26ca0545338b21f73c6b15db7c4a8d934a5482faa818f2", size = 508067 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c5/66/78c9c3020f573c58101dc43a44f6855d01bbbd747e24da2f0c4491200ea3/psutil-6.0.0-cp27-none-win32.whl", hash = "sha256:02b69001f44cc73c1c5279d02b30a817e339ceb258ad75997325e0e6169d8b35", size = 249766 }, + { url = "https://files.pythonhosted.org/packages/e1/3f/2403aa9558bea4d3854b0e5e567bc3dd8e9fbc1fc4453c0aa9aafeb75467/psutil-6.0.0-cp27-none-win_amd64.whl", hash = "sha256:21f1fb635deccd510f69f485b87433460a603919b45e2a324ad65b0cc74f8fb1", size = 253024 }, + { url = "https://files.pythonhosted.org/packages/0b/37/f8da2fbd29690b3557cca414c1949f92162981920699cd62095a984983bf/psutil-6.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:c588a7e9b1173b6e866756dde596fd4cad94f9399daf99ad8c3258b3cb2b47a0", size = 250961 }, + { url = "https://files.pythonhosted.org/packages/35/56/72f86175e81c656a01c4401cd3b1c923f891b31fbcebe98985894176d7c9/psutil-6.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ed2440ada7ef7d0d608f20ad89a04ec47d2d3ab7190896cd62ca5fc4fe08bf0", size = 287478 }, + { url = "https://files.pythonhosted.org/packages/19/74/f59e7e0d392bc1070e9a70e2f9190d652487ac115bb16e2eff6b22ad1d24/psutil-6.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:5fd9a97c8e94059b0ef54a7d4baf13b405011176c3b6ff257c247cae0d560ecd", size = 290455 }, + { url = "https://files.pythonhosted.org/packages/cd/5f/60038e277ff0a9cc8f0c9ea3d0c5eb6ee1d2470ea3f9389d776432888e47/psutil-6.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2e8d0054fc88153ca0544f5c4d554d42e33df2e009c4ff42284ac9ebdef4132", size = 292046 }, + { url = "https://files.pythonhosted.org/packages/8b/20/2ff69ad9c35c3df1858ac4e094f20bd2374d33c8643cf41da8fd7cdcb78b/psutil-6.0.0-cp37-abi3-win32.whl", hash = "sha256:a495580d6bae27291324fe60cea0b5a7c23fa36a7cd35035a16d93bdcf076b9d", size = 253560 }, + { url = "https://files.pythonhosted.org/packages/73/44/561092313ae925f3acfaace6f9ddc4f6a9c748704317bad9c8c8f8a36a79/psutil-6.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:33ea5e1c975250a720b3a6609c490db40dae5d83a4eb315170c4fe0d8b1f34b3", size = 257399 }, + { url = "https://files.pythonhosted.org/packages/7c/06/63872a64c312a24fb9b4af123ee7007a306617da63ff13bcc1432386ead7/psutil-6.0.0-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:ffe7fc9b6b36beadc8c322f84e1caff51e8703b88eee1da46d1e3a6ae11b4fd0", size = 251988 }, +] + +[[package]] +name = "ptyprocess" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/20/e5/16ff212c1e452235a90aeb09066144d0c5a6a8c0834397e03f5224495c4e/ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220", size = 70762 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/22/a6/858897256d0deac81a172289110f31629fc4cee19b6f01283303e18c8db3/ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35", size = 13993 }, +] + +[[package]] +name = "pulsar-client" +version = "3.5.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/e0/aa/eb3b04be87b961324e49748f3a715a12127d45d76258150bfa61b2a002d8/pulsar_client-3.5.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:c18552edb2f785de85280fe624bc507467152bff810fc81d7660fa2dfa861f38", size = 10953552 }, + { url = "https://files.pythonhosted.org/packages/cc/20/d59bf89ccdda45edd89f5b54bd1e93605ebe5ad3cb73f4f4f5e8eca8f9e6/pulsar_client-3.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18d438e456c146f01be41ef146f649dedc8f7bc714d9eaef94cff2e34099812b", size = 5190714 }, + { url = "https://files.pythonhosted.org/packages/1a/02/ca7e96b97d564d0375b8e3de65f95ac86c8502c40f6ff750e9d145709d9a/pulsar_client-3.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18a26a0719841103c7a89eb1492c4a8fedf89adaa386375baecbb4fa2707e88f", size = 5429820 }, + { url = "https://files.pythonhosted.org/packages/47/f3/682670cdc951b830cd3d8d1287521997327254e59508772664aaa656e246/pulsar_client-3.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ab0e1605dc5f44a126163fd06cd0a768494ad05123f6e0de89a2c71d6e2d2319", size = 5710427 }, + { url = "https://files.pythonhosted.org/packages/bc/00/119cd039286dfc1c91a5580963e9ba79204cd4717b16b7a6fdc57d1c1673/pulsar_client-3.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cdef720891b97656fdce3bf5913ea7729b2156b84ba64314f432c1e72c6117fa", size = 5916490 }, + { url = "https://files.pythonhosted.org/packages/0a/cc/d606b483dbb263cbaf7fc7c3d2ec4032628cf3324266cf9a4ccdb2a73076/pulsar_client-3.5.0-cp310-cp310-win_amd64.whl", 
hash = "sha256:a42544e38773191fe550644a90e8050579476bb2dcf17ac69a4aed62a6cb70e7", size = 3305387 }, + { url = "https://files.pythonhosted.org/packages/0d/2e/aec6886a6d67f09230476182399b7fad694fbcbbaf004ce914725d4eddd9/pulsar_client-3.5.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:fd94432ea5d398ea78f8f2e09a217ec5058d26330c137a22690478c031e116da", size = 10954116 }, + { url = "https://files.pythonhosted.org/packages/43/06/b98df9300f60e5fad3396f843dd633c31176a495a2d60ba111c99511658a/pulsar_client-3.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6252ae462e07ece4071213fdd9c76eab82ca522a749f2dc678037d4cbacd40b", size = 5189618 }, + { url = "https://files.pythonhosted.org/packages/72/05/c9aef7da7802a03c0b65ffe8f00a24289ff992f99ed5d5d1fd0ed63d9cf6/pulsar_client-3.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03b4d440b2d74323784328b082872ee2f206c440b5d224d7941eb3c083ec06c6", size = 5429329 }, + { url = "https://files.pythonhosted.org/packages/06/96/9acfe6f1d827cdd53b8460b04c63b4081333ef64a49a2f425419f1eb6b6b/pulsar_client-3.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f60af840b8d64a2fac5a0c1ce6ae0ddffec5f42267c6ded2c5e74bad8345f2a1", size = 5710106 }, + { url = "https://files.pythonhosted.org/packages/e1/7b/877a06eff5c9ac828cdb75e378ee29b0adac9328da9ee173eaf7076d8c56/pulsar_client-3.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2277a447c3b7f6571cb1eb9fc5c25da3fdd43d0b2fb91cf52054adfadc7d6842", size = 5916541 }, + { url = "https://files.pythonhosted.org/packages/fb/62/ed1da1ef72c95ba6a830e43995550ed0a1d26c223fb4b036ac6cd028c2ed/pulsar_client-3.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:f20f3e9dd50db2a37059abccad42078b7a4754b8bc1d3ae6502e71c1ad2209f0", size = 3305485 }, + { url = "https://files.pythonhosted.org/packages/81/19/4b145766df706aa5e09f60bbf5f87b934e6ac950fddd18f4acd520c465b9/pulsar_client-3.5.0-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:d61f663d85308e12f44033ba95af88730f581a7e8da44f7a5c080a3aaea4878d", size = 10967548 }, + { url = "https://files.pythonhosted.org/packages/bf/bd/9bc05ee861b46884554a4c61f96edb9602de131dd07982c27920e554ab5b/pulsar_client-3.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a1ba0be25b6f747bcb28102b7d906ec1de48dc9f1a2d9eacdcc6f44ab2c9e17", size = 5189598 }, + { url = "https://files.pythonhosted.org/packages/76/00/379bedfa6f1c810553996a4cb0984fa2e2c89afc5953df0936e1c9636003/pulsar_client-3.5.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a181e3e60ac39df72ccb3c415d7aeac61ad0286497a6e02739a560d5af28393a", size = 5430145 }, + { url = "https://files.pythonhosted.org/packages/88/c8/8a37d75aa9132a69a28061c9e5f4b516328a1968b58bbae018f431c6d3d4/pulsar_client-3.5.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3c72895ff7f51347e4f78b0375b2213fa70dd4790bbb78177b4002846f1fd290", size = 5708960 }, + { url = "https://files.pythonhosted.org/packages/6e/9a/abd98661e3f7ae3a8e1d3fb0fc7eba1a30005391ebd575ab06a66021256c/pulsar_client-3.5.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:547dba1b185a17eba915e51d0a3aca27c80747b6187e5cd7a71a3ca33921decc", size = 5915227 }, + { url = "https://files.pythonhosted.org/packages/a2/51/db376181d05716de595515fac736e3d06e96d3345ba0e31c0a90c352eae1/pulsar_client-3.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:443b786eed96bc86d2297a6a42e79f39d1abf217ec603e0bd303f3488c0234af", size = 3306515 }, +] + +[[package]] +name = "pure-eval" +version = 
"0.2.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/cd/05/0a34433a064256a578f1783a10da6df098ceaa4a57bbeaa96a6c0352786b/pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42", size = 19752 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8e/37/efad0257dc6e593a18957422533ff0f87ede7c9c6ea010a2177d738fb82f/pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0", size = 11842 }, +] + +[[package]] +name = "py-cpuinfo" +version = "9.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/37/a8/d832f7293ebb21690860d2e01d8115e5ff6f2ae8bbdc953f0eb0fa4bd2c7/py-cpuinfo-9.0.0.tar.gz", hash = "sha256:3cdbbf3fac90dc6f118bfd64384f309edeadd902d7c8fb17f02ffa1fc3f49690", size = 104716 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e0/a9/023730ba63db1e494a271cb018dcd361bd2c917ba7004c3e49d5daf795a2/py_cpuinfo-9.0.0-py3-none-any.whl", hash = "sha256:859625bc251f64e21f077d099d4162689c762b5d6a4c3c97553d56241c9674d5", size = 22335 }, +] + +[[package]] +name = "pyasn1" +version = "0.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ba/e9/01f1a64245b89f039897cb0130016d79f77d52669aae6ee7b159a6c4c018/pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034", size = 145322 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c8/f1/d6a797abb14f6283c0ddff96bbdd46937f64122b8c925cab503dd37f8214/pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629", size = 83135 }, +] + +[[package]] +name = "pyasn1-modules" +version = "0.4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyasn1" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1d/67/6afbf0d507f73c32d21084a79946bfcfca5fbc62a72057e9c23797a737c9/pyasn1_modules-0.4.1.tar.gz", hash = "sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c", size = 310028 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/77/89/bc88a6711935ba795a679ea6ebee07e128050d6382eaa35a0a47c8032bdc/pyasn1_modules-0.4.1-py3-none-any.whl", hash = "sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd", size = 181537 }, +] + +[[package]] +name = "pycparser" +version = "2.22" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1d/b2/31537cf4b1ca988837256c910a668b553fceb8f069bedc4b1c826024b52c/pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6", size = 172736 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", size = 117552 }, +] + +[[package]] +name = "pydantic" +version = "2.9.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-types" }, + { name = "pydantic-core" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a9/b7/d9e3f12af310e1120c21603644a1cd86f59060e040ec5c3a80b8f05fae30/pydantic-2.9.2.tar.gz", hash = "sha256:d155cef71265d1e9807ed1c32b4c8deec042a44a50a4188b25ac67ecd81a9c0f", 
size = 769917 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/df/e4/ba44652d562cbf0bf320e0f3810206149c8a4e99cdbf66da82e97ab53a15/pydantic-2.9.2-py3-none-any.whl", hash = "sha256:f048cec7b26778210e28a0459867920654d48e5e62db0958433636cde4254f12", size = 434928 }, +] + +[[package]] +name = "pydantic-core" +version = "2.23.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e2/aa/6b6a9b9f8537b872f552ddd46dd3da230367754b6f707b8e1e963f515ea3/pydantic_core-2.23.4.tar.gz", hash = "sha256:2584f7cf844ac4d970fba483a717dbe10c1c1c96a969bf65d61ffe94df1b2863", size = 402156 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5c/8b/d3ae387f66277bd8104096d6ec0a145f4baa2966ebb2cad746c0920c9526/pydantic_core-2.23.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:b10bd51f823d891193d4717448fab065733958bdb6a6b351967bd349d48d5c9b", size = 1867835 }, + { url = "https://files.pythonhosted.org/packages/46/76/f68272e4c3a7df8777798282c5e47d508274917f29992d84e1898f8908c7/pydantic_core-2.23.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4fc714bdbfb534f94034efaa6eadd74e5b93c8fa6315565a222f7b6f42ca1166", size = 1776689 }, + { url = "https://files.pythonhosted.org/packages/cc/69/5f945b4416f42ea3f3bc9d2aaec66c76084a6ff4ff27555bf9415ab43189/pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63e46b3169866bd62849936de036f901a9356e36376079b05efa83caeaa02ceb", size = 1800748 }, + { url = "https://files.pythonhosted.org/packages/50/ab/891a7b0054bcc297fb02d44d05c50e68154e31788f2d9d41d0b72c89fdf7/pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed1a53de42fbe34853ba90513cea21673481cd81ed1be739f7f2efb931b24916", size = 1806469 }, + { url = "https://files.pythonhosted.org/packages/31/7c/6e3fa122075d78f277a8431c4c608f061881b76c2b7faca01d317ee39b5d/pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cfdd16ab5e59fc31b5e906d1a3f666571abc367598e3e02c83403acabc092e07", size = 2002246 }, + { url = "https://files.pythonhosted.org/packages/ad/6f/22d5692b7ab63fc4acbc74de6ff61d185804a83160adba5e6cc6068e1128/pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255a8ef062cbf6674450e668482456abac99a5583bbafb73f9ad469540a3a232", size = 2659404 }, + { url = "https://files.pythonhosted.org/packages/11/ac/1e647dc1121c028b691028fa61a4e7477e6aeb5132628fde41dd34c1671f/pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a7cd62e831afe623fbb7aabbb4fe583212115b3ef38a9f6b71869ba644624a2", size = 2053940 }, + { url = "https://files.pythonhosted.org/packages/91/75/984740c17f12c3ce18b5a2fcc4bdceb785cce7df1511a4ce89bca17c7e2d/pydantic_core-2.23.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f09e2ff1f17c2b51f2bc76d1cc33da96298f0a036a137f5440ab3ec5360b624f", size = 1921437 }, + { url = "https://files.pythonhosted.org/packages/a0/74/13c5f606b64d93f0721e7768cd3e8b2102164866c207b8cd6f90bb15d24f/pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e38e63e6f3d1cec5a27e0afe90a085af8b6806ee208b33030e65b6516353f1a3", size = 1966129 }, + { url = "https://files.pythonhosted.org/packages/18/03/9c4aa5919457c7b57a016c1ab513b1a926ed9b2bb7915bf8e506bf65c34b/pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:0dbd8dbed2085ed23b5c04afa29d8fd2771674223135dc9bc937f3c09284d071", size = 2110908 }, + { url = "https://files.pythonhosted.org/packages/92/2c/053d33f029c5dc65e5cf44ff03ceeefb7cce908f8f3cca9265e7f9b540c8/pydantic_core-2.23.4-cp310-none-win32.whl", hash = "sha256:6531b7ca5f951d663c339002e91aaebda765ec7d61b7d1e3991051906ddde119", size = 1735278 }, + { url = "https://files.pythonhosted.org/packages/de/81/7dfe464eca78d76d31dd661b04b5f2036ec72ea8848dd87ab7375e185c23/pydantic_core-2.23.4-cp310-none-win_amd64.whl", hash = "sha256:7c9129eb40958b3d4500fa2467e6a83356b3b61bfff1b414c7361d9220f9ae8f", size = 1917453 }, + { url = "https://files.pythonhosted.org/packages/5d/30/890a583cd3f2be27ecf32b479d5d615710bb926d92da03e3f7838ff3e58b/pydantic_core-2.23.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:77733e3892bb0a7fa797826361ce8a9184d25c8dffaec60b7ffe928153680ba8", size = 1865160 }, + { url = "https://files.pythonhosted.org/packages/1d/9a/b634442e1253bc6889c87afe8bb59447f106ee042140bd57680b3b113ec7/pydantic_core-2.23.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b84d168f6c48fabd1f2027a3d1bdfe62f92cade1fb273a5d68e621da0e44e6d", size = 1776777 }, + { url = "https://files.pythonhosted.org/packages/75/9a/7816295124a6b08c24c96f9ce73085032d8bcbaf7e5a781cd41aa910c891/pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df49e7a0861a8c36d089c1ed57d308623d60416dab2647a4a17fe050ba85de0e", size = 1799244 }, + { url = "https://files.pythonhosted.org/packages/a9/8f/89c1405176903e567c5f99ec53387449e62f1121894aa9fc2c4fdc51a59b/pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ff02b6d461a6de369f07ec15e465a88895f3223eb75073ffea56b84d9331f607", size = 1805307 }, + { url = "https://files.pythonhosted.org/packages/d5/a5/1a194447d0da1ef492e3470680c66048fef56fc1f1a25cafbea4bc1d1c48/pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:996a38a83508c54c78a5f41456b0103c30508fed9abcad0a59b876d7398f25fd", size = 2000663 }, + { url = "https://files.pythonhosted.org/packages/13/a5/1df8541651de4455e7d587cf556201b4f7997191e110bca3b589218745a5/pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d97683ddee4723ae8c95d1eddac7c192e8c552da0c73a925a89fa8649bf13eea", size = 2655941 }, + { url = "https://files.pythonhosted.org/packages/44/31/a3899b5ce02c4316865e390107f145089876dff7e1dfc770a231d836aed8/pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:216f9b2d7713eb98cb83c80b9c794de1f6b7e3145eef40400c62e86cee5f4e1e", size = 2052105 }, + { url = "https://files.pythonhosted.org/packages/1b/aa/98e190f8745d5ec831f6d5449344c48c0627ac5fed4e5340a44b74878f8e/pydantic_core-2.23.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6f783e0ec4803c787bcea93e13e9932edab72068f68ecffdf86a99fd5918878b", size = 1919967 }, + { url = "https://files.pythonhosted.org/packages/ae/35/b6e00b6abb2acfee3e8f85558c02a0822e9a8b2f2d812ea8b9079b118ba0/pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d0776dea117cf5272382634bd2a5c1b6eb16767c223c6a5317cd3e2a757c61a0", size = 1964291 }, + { url = "https://files.pythonhosted.org/packages/13/46/7bee6d32b69191cd649bbbd2361af79c472d72cb29bb2024f0b6e350ba06/pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d5f7a395a8cf1621939692dba2a6b6a830efa6b3cee787d82c7de1ad2930de64", size = 2109666 }, + { url = 
"https://files.pythonhosted.org/packages/39/ef/7b34f1b122a81b68ed0a7d0e564da9ccdc9a2924c8d6c6b5b11fa3a56970/pydantic_core-2.23.4-cp311-none-win32.whl", hash = "sha256:74b9127ffea03643e998e0c5ad9bd3811d3dac8c676e47db17b0ee7c3c3bf35f", size = 1732940 }, + { url = "https://files.pythonhosted.org/packages/2f/76/37b7e76c645843ff46c1d73e046207311ef298d3f7b2f7d8f6ac60113071/pydantic_core-2.23.4-cp311-none-win_amd64.whl", hash = "sha256:98d134c954828488b153d88ba1f34e14259284f256180ce659e8d83e9c05eaa3", size = 1916804 }, + { url = "https://files.pythonhosted.org/packages/74/7b/8e315f80666194b354966ec84b7d567da77ad927ed6323db4006cf915f3f/pydantic_core-2.23.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f3e0da4ebaef65158d4dfd7d3678aad692f7666877df0002b8a522cdf088f231", size = 1856459 }, + { url = "https://files.pythonhosted.org/packages/14/de/866bdce10ed808323d437612aca1ec9971b981e1c52e5e42ad9b8e17a6f6/pydantic_core-2.23.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f69a8e0b033b747bb3e36a44e7732f0c99f7edd5cea723d45bc0d6e95377ffee", size = 1770007 }, + { url = "https://files.pythonhosted.org/packages/dc/69/8edd5c3cd48bb833a3f7ef9b81d7666ccddd3c9a635225214e044b6e8281/pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723314c1d51722ab28bfcd5240d858512ffd3116449c557a1336cbe3919beb87", size = 1790245 }, + { url = "https://files.pythonhosted.org/packages/80/33/9c24334e3af796ce80d2274940aae38dd4e5676298b4398eff103a79e02d/pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb2802e667b7051a1bebbfe93684841cc9351004e2badbd6411bf357ab8d5ac8", size = 1801260 }, + { url = "https://files.pythonhosted.org/packages/a5/6f/e9567fd90104b79b101ca9d120219644d3314962caa7948dd8b965e9f83e/pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d18ca8148bebe1b0a382a27a8ee60350091a6ddaf475fa05ef50dc35b5df6327", size = 1996872 }, + { url = "https://files.pythonhosted.org/packages/2d/ad/b5f0fe9e6cfee915dd144edbd10b6e9c9c9c9d7a56b69256d124b8ac682e/pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33e3d65a85a2a4a0dc3b092b938a4062b1a05f3a9abde65ea93b233bca0e03f2", size = 2661617 }, + { url = "https://files.pythonhosted.org/packages/06/c8/7d4b708f8d05a5cbfda3243aad468052c6e99de7d0937c9146c24d9f12e9/pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:128585782e5bfa515c590ccee4b727fb76925dd04a98864182b22e89a4e6ed36", size = 2071831 }, + { url = "https://files.pythonhosted.org/packages/89/4d/3079d00c47f22c9a9a8220db088b309ad6e600a73d7a69473e3a8e5e3ea3/pydantic_core-2.23.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:68665f4c17edcceecc112dfed5dbe6f92261fb9d6054b47d01bf6371a6196126", size = 1917453 }, + { url = "https://files.pythonhosted.org/packages/e9/88/9df5b7ce880a4703fcc2d76c8c2d8eb9f861f79d0c56f4b8f5f2607ccec8/pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:20152074317d9bed6b7a95ade3b7d6054845d70584216160860425f4fbd5ee9e", size = 1968793 }, + { url = "https://files.pythonhosted.org/packages/e3/b9/41f7efe80f6ce2ed3ee3c2dcfe10ab7adc1172f778cc9659509a79518c43/pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9261d3ce84fa1d38ed649c3638feefeae23d32ba9182963e465d58d62203bd24", size = 2116872 }, + { url = 
"https://files.pythonhosted.org/packages/63/08/b59b7a92e03dd25554b0436554bf23e7c29abae7cce4b1c459cd92746811/pydantic_core-2.23.4-cp312-none-win32.whl", hash = "sha256:4ba762ed58e8d68657fc1281e9bb72e1c3e79cc5d464be146e260c541ec12d84", size = 1738535 }, + { url = "https://files.pythonhosted.org/packages/88/8d/479293e4d39ab409747926eec4329de5b7129beaedc3786eca070605d07f/pydantic_core-2.23.4-cp312-none-win_amd64.whl", hash = "sha256:97df63000f4fea395b2824da80e169731088656d1818a11b95f3b173747b6cd9", size = 1917992 }, + { url = "https://files.pythonhosted.org/packages/ad/ef/16ee2df472bf0e419b6bc68c05bf0145c49247a1095e85cee1463c6a44a1/pydantic_core-2.23.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7530e201d10d7d14abce4fb54cfe5b94a0aefc87da539d0346a484ead376c3cc", size = 1856143 }, + { url = "https://files.pythonhosted.org/packages/da/fa/bc3dbb83605669a34a93308e297ab22be82dfb9dcf88c6cf4b4f264e0a42/pydantic_core-2.23.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:df933278128ea1cd77772673c73954e53a1c95a4fdf41eef97c2b779271bd0bd", size = 1770063 }, + { url = "https://files.pythonhosted.org/packages/4e/48/e813f3bbd257a712303ebdf55c8dc46f9589ec74b384c9f652597df3288d/pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cb3da3fd1b6a5d0279a01877713dbda118a2a4fc6f0d821a57da2e464793f05", size = 1790013 }, + { url = "https://files.pythonhosted.org/packages/b4/e0/56eda3a37929a1d297fcab1966db8c339023bcca0b64c5a84896db3fcc5c/pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c6dcb030aefb668a2b7009c85b27f90e51e6a3b4d5c9bc4c57631292015b0d", size = 1801077 }, + { url = "https://files.pythonhosted.org/packages/04/be/5e49376769bfbf82486da6c5c1683b891809365c20d7c7e52792ce4c71f3/pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:696dd8d674d6ce621ab9d45b205df149399e4bb9aa34102c970b721554828510", size = 1996782 }, + { url = "https://files.pythonhosted.org/packages/bc/24/e3ee6c04f1d58cc15f37bcc62f32c7478ff55142b7b3e6d42ea374ea427c/pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2971bb5ffe72cc0f555c13e19b23c85b654dd2a8f7ab493c262071377bfce9f6", size = 2661375 }, + { url = "https://files.pythonhosted.org/packages/c1/f8/11a9006de4e89d016b8de74ebb1db727dc100608bb1e6bbe9d56a3cbbcce/pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8394d940e5d400d04cad4f75c0598665cbb81aecefaca82ca85bd28264af7f9b", size = 2071635 }, + { url = "https://files.pythonhosted.org/packages/7c/45/bdce5779b59f468bdf262a5bc9eecbae87f271c51aef628d8c073b4b4b4c/pydantic_core-2.23.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0dff76e0602ca7d4cdaacc1ac4c005e0ce0dcfe095d5b5259163a80d3a10d327", size = 1916994 }, + { url = "https://files.pythonhosted.org/packages/d8/fa/c648308fe711ee1f88192cad6026ab4f925396d1293e8356de7e55be89b5/pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7d32706badfe136888bdea71c0def994644e09fff0bfe47441deaed8e96fdbc6", size = 1968877 }, + { url = "https://files.pythonhosted.org/packages/16/16/b805c74b35607d24d37103007f899abc4880923b04929547ae68d478b7f4/pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ed541d70698978a20eb63d8c5d72f2cc6d7079d9d90f6b50bad07826f1320f5f", size = 2116814 }, + { url = 
"https://files.pythonhosted.org/packages/d1/58/5305e723d9fcdf1c5a655e6a4cc2a07128bf644ff4b1d98daf7a9dbf57da/pydantic_core-2.23.4-cp313-none-win32.whl", hash = "sha256:3d5639516376dce1940ea36edf408c554475369f5da2abd45d44621cb616f769", size = 1738360 }, + { url = "https://files.pythonhosted.org/packages/a5/ae/e14b0ff8b3f48e02394d8acd911376b7b66e164535687ef7dc24ea03072f/pydantic_core-2.23.4-cp313-none-win_amd64.whl", hash = "sha256:5a1504ad17ba4210df3a045132a7baeeba5a200e930f57512ee02909fc5c4cb5", size = 1919411 }, + { url = "https://files.pythonhosted.org/packages/13/a9/5d582eb3204464284611f636b55c0a7410d748ff338756323cb1ce721b96/pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f455ee30a9d61d3e1a15abd5068827773d6e4dc513e795f380cdd59932c782d5", size = 1857135 }, + { url = "https://files.pythonhosted.org/packages/2c/57/faf36290933fe16717f97829eabfb1868182ac495f99cf0eda9f59687c9d/pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1e90d2e3bd2c3863d48525d297cd143fe541be8bbf6f579504b9712cb6b643ec", size = 1740583 }, + { url = "https://files.pythonhosted.org/packages/91/7c/d99e3513dc191c4fec363aef1bf4c8af9125d8fa53af7cb97e8babef4e40/pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e203fdf807ac7e12ab59ca2bfcabb38c7cf0b33c41efeb00f8e5da1d86af480", size = 1793637 }, + { url = "https://files.pythonhosted.org/packages/29/18/812222b6d18c2d13eebbb0f7cdc170a408d9ced65794fdb86147c77e1982/pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e08277a400de01bc72436a0ccd02bdf596631411f592ad985dcee21445bd0068", size = 1941963 }, + { url = "https://files.pythonhosted.org/packages/0f/36/c1f3642ac3f05e6bb4aec3ffc399fa3f84895d259cf5f0ce3054b7735c29/pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f220b0eea5965dec25480b6333c788fb72ce5f9129e8759ef876a1d805d00801", size = 1915332 }, + { url = "https://files.pythonhosted.org/packages/f7/ca/9c0854829311fb446020ebb540ee22509731abad886d2859c855dd29b904/pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d06b0c8da4f16d1d1e352134427cb194a0a6e19ad5db9161bf32b2113409e728", size = 1957926 }, + { url = "https://files.pythonhosted.org/packages/c0/1c/7836b67c42d0cd4441fcd9fafbf6a027ad4b79b6559f80cf11f89fd83648/pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ba1a0996f6c2773bd83e63f18914c1de3c9dd26d55f4ac302a7efe93fb8e7433", size = 2100342 }, + { url = "https://files.pythonhosted.org/packages/a9/f9/b6bcaf874f410564a78908739c80861a171788ef4d4f76f5009656672dfe/pydantic_core-2.23.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:9a5bce9d23aac8f0cf0836ecfc033896aa8443b501c58d0602dbfd5bd5b37753", size = 1920344 }, +] + +[[package]] +name = "pydantic-settings" +version = "2.5.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pydantic" }, + { name = "python-dotenv" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/68/27/0bed9dd26b93328b60a1402febc780e7be72b42847fa8b5c94b7d0aeb6d1/pydantic_settings-2.5.2.tar.gz", hash = "sha256:f90b139682bee4d2065273d5185d71d37ea46cfe57e1b5ae184fc6a0b2484ca0", size = 70938 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/29/8d/29e82e333f32d9e2051c10764b906c2a6cd140992910b5f49762790911ba/pydantic_settings-2.5.2-py3-none-any.whl", hash = "sha256:2c912e55fd5794a59bf8c832b9de832dcfdf4778d79ff79b708744eed499a907", size = 
26864 }, +] + +[[package]] +name = "pygments" +version = "2.18.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8e/62/8336eff65bcbc8e4cb5d05b55faf041285951b6e80f33e2bff2024788f31/pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199", size = 4891905 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f7/3f/01c8b82017c199075f8f788d0d906b9ffbbc5a47dc9918a945e13d5a2bda/pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a", size = 1205513 }, +] + +[[package]] +name = "pyparsing" +version = "3.1.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/83/08/13f3bce01b2061f2bbd582c9df82723de943784cf719a35ac886c652043a/pyparsing-3.1.4.tar.gz", hash = "sha256:f86ec8d1a83f11977c9a6ea7598e8c27fc5cddfa5b07ea2241edbbde1d7bc032", size = 900231 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/0c/0e3c05b1c87bb6a1c76d281b0f35e78d2d80ac91b5f8f524cebf77f51049/pyparsing-3.1.4-py3-none-any.whl", hash = "sha256:a6a7ee4235a3f944aa1fa2249307708f893fe5717dc603503c6c7969c070fb7c", size = 104100 }, +] + +[[package]] +name = "pypdf" +version = "4.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f0/65/2ed7c9e1d31d860f096061b3dd2d665f501e09faaa0409a3f0d719d2a16d/pypdf-4.3.1.tar.gz", hash = "sha256:b2f37fe9a3030aa97ca86067a56ba3f9d3565f9a791b305c7355d8392c30d91b", size = 293266 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3c/60/eccdd92dd4af3e4bea6d6a342f7588c618a15b9bec4b968af581e498bcc4/pypdf-4.3.1-py3-none-any.whl", hash = "sha256:64b31da97eda0771ef22edb1bfecd5deee4b72c3d1736b7df2689805076d6418", size = 295825 }, +] + +[[package]] +name = "pyperclip" +version = "1.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/30/23/2f0a3efc4d6a32f3b63cdff36cd398d9701d26cda58e3ab97ac79fb5e60d/pyperclip-1.9.0.tar.gz", hash = "sha256:b7de0142ddc81bfc5c7507eea19da920b92252b548b96186caf94a5e2527d310", size = 20961 } + +[[package]] +name = "pypika" +version = "0.48.9" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c7/2c/94ed7b91db81d61d7096ac8f2d325ec562fc75e35f3baea8749c85b28784/PyPika-0.48.9.tar.gz", hash = "sha256:838836a61747e7c8380cd1b7ff638694b7a7335345d0f559b04b2cd832ad5378", size = 67259 } + +[[package]] +name = "pyproject-hooks" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c7/07/6f63dda440d4abb191b91dc383b472dae3dd9f37e4c1e4a5c3db150531c6/pyproject_hooks-1.1.0.tar.gz", hash = "sha256:4b37730834edbd6bd37f26ece6b44802fb1c1ee2ece0e54ddff8bfc06db86965", size = 7838 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ae/f3/431b9d5fe7d14af7a32340792ef43b8a714e7726f1d7b69cc4e8e7a3f1d7/pyproject_hooks-1.1.0-py3-none-any.whl", hash = "sha256:7ceeefe9aec63a1064c18d939bdc3adf2d8aa1988a510afec15151578b232aa2", size = 9184 }, +] + +[[package]] +name = "pyreadline3" +version = "3.5.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0f/49/4cea918a08f02817aabae639e3d0ac046fef9f9180518a3ad394e22da148/pyreadline3-3.5.4.tar.gz", 
hash = "sha256:8d57d53039a1c75adba8e50dd3d992b28143480816187ea5efbd5c78e6c885b7", size = 99839 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/dc/491b7661614ab97483abf2056be1deee4dc2490ecbf7bff9ab5cdbac86e1/pyreadline3-3.5.4-py3-none-any.whl", hash = "sha256:eaf8e6cc3c49bcccf145fc6067ba8643d1df34d604a1ec0eccbf7a18e6d3fae6", size = 83178 }, +] + +[[package]] +name = "pysbd" +version = "0.3.4" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/48/0a/c99fb7d7e176f8b176ef19704a32e6a9c6aafdf19ef75a187f701fc15801/pysbd-0.3.4-py3-none-any.whl", hash = "sha256:cd838939b7b0b185fcf86b0baf6636667dfb6e474743beeff878e9f42e022953", size = 71082 }, +] + +[[package]] +name = "pytest" +version = "8.3.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8b/6c/62bbd536103af674e227c41a8f3dcd022d591f6eed5facb5a0f31ee33bbc/pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181", size = 1442487 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6b/77/7440a06a8ead44c7757a64362dd22df5760f9b12dc5f11b6188cd2fc27a0/pytest-8.3.3-py3-none-any.whl", hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2", size = 342341 }, +] + +[[package]] +name = "pytest-asyncio" +version = "0.24.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/52/6d/c6cf50ce320cf8611df7a1254d86233b3df7cc07f9b5f5cbcb82e08aa534/pytest_asyncio-0.24.0.tar.gz", hash = "sha256:d081d828e576d85f875399194281e92bf8a68d60d72d1a2faf2feddb6c46b276", size = 49855 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/96/31/6607dab48616902f76885dfcf62c08d929796fc3b2d2318faf9fd54dbed9/pytest_asyncio-0.24.0-py3-none-any.whl", hash = "sha256:a811296ed596b69bf0b6f3dc40f83bcaf341b155a269052d82efa2b25ac7037b", size = 18024 }, +] + +[[package]] +name = "pytest-cov" +version = "5.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "coverage", extra = ["toml"] }, + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/74/67/00efc8d11b630c56f15f4ad9c7f9223f1e5ec275aaae3fa9118c6a223ad2/pytest-cov-5.0.0.tar.gz", hash = "sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857", size = 63042 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/3a/af5b4fa5961d9a1e6237b530eb87dd04aea6eb83da09d2a4073d81b54ccf/pytest_cov-5.0.0-py3-none-any.whl", hash = "sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652", size = 21990 }, +] + +[[package]] +name = "pytest-flakefinder" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ec/53/69c56a93ea057895b5761c5318455804873a6cd9d796d7c55d41c2358125/pytest-flakefinder-1.1.0.tar.gz", hash = "sha256:e2412a1920bdb8e7908783b20b3d57e9dad590cc39a93e8596ffdd493b403e0e", size = 6795 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/33/8b/06787150d0fd0cbd3a8054262b56f91631c7778c1bc91bf4637e47f909ad/pytest_flakefinder-1.1.0-py2.py3-none-any.whl", hash = "sha256:741e0e8eea427052f5b8c89c2b3c3019a50c39a59ce4df6a305a2c2d9ba2bd13", size = 4644 }, +] + +[[package]] +name = "pytest-instafail" +version = "0.5.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/86/bd/e0ba6c3cd20b9aa445f0af229f3a9582cce589f083537978a23e6f14e310/pytest-instafail-0.5.0.tar.gz", hash = "sha256:33a606f7e0c8e646dc3bfee0d5e3a4b7b78ef7c36168cfa1f3d93af7ca706c9e", size = 5849 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e8/c0/c32dc39fc172e684fdb3d30169843efb65c067be1e12689af4345731126e/pytest_instafail-0.5.0-py3-none-any.whl", hash = "sha256:6855414487e9e4bb76a118ce952c3c27d3866af15487506c4ded92eb72387819", size = 4176 }, +] + +[[package]] +name = "pytest-mock" +version = "3.14.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c6/90/a955c3ab35ccd41ad4de556596fa86685bf4fc5ffcc62d22d856cfd4e29a/pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0", size = 32814 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f2/3b/b26f90f74e2986a82df6e7ac7e319b8ea7ccece1caec9f8ab6104dc70603/pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f", size = 9863 }, +] + +[[package]] +name = "pytest-profiling" +version = "1.7.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "gprof2dot" }, + { name = "pytest" }, + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/39/70/22a4b33739f07f1732a63e33bbfbf68e0fa58cfba9d200e76d01921eddbf/pytest-profiling-1.7.0.tar.gz", hash = "sha256:93938f147662225d2b8bd5af89587b979652426a8a6ffd7e73ec4a23e24b7f29", size = 30985 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d9/71/cdb746eaee0d3be65fd777b4ac821f5f051063f3084d4a200ecfd7f7ab40/pytest_profiling-1.7.0-py2.py3-none-any.whl", hash = "sha256:999cc9ac94f2e528e3f5d43465da277429984a1c237ae9818f8cfd0b06acb019", size = 8255 }, +] + +[[package]] +name = "pytest-split" +version = "0.9.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5e/a9/7b8327a605a48d5094b331de10e01f1f1aec0cb8c5790b5e4d98ac327f05/pytest_split-0.9.0.tar.gz", hash = "sha256:ca52527e4d9024f6ec3aba723527bd276d12096024999b1f5b8445a38da1e81c", size = 13599 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6b/67/8cff7bf04d78ac7fbb88b0985061347943dc3cbeafada27b4accb4527579/pytest_split-0.9.0-py3-none-any.whl", hash = "sha256:9e197df601828d76a1ab615158d9c6253ec9f96e46c1d3ea27187aa5ac0ef9de", size = 11790 }, +] + +[[package]] +name = "pytest-sugar" +version = "1.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "packaging" }, + { name = "pytest" }, + { name = "termcolor" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f5/ac/5754f5edd6d508bc6493bc37d74b928f102a5fff82d9a80347e180998f08/pytest-sugar-1.0.0.tar.gz", hash = "sha256:6422e83258f5b0c04ce7c632176c7732cab5fdb909cb39cca5c9139f81276c0a", size = 14992 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/92/fb/889f1b69da2f13691de09a111c16c4766a433382d44aa0ecf221deded44a/pytest_sugar-1.0.0-py3-none-any.whl", hash = "sha256:70ebcd8fc5795dc457ff8b69d266a4e2e8a74ae0c3edc749381c64b5246c8dfd", size = 10171 }, +] + +[[package]] +name = "pytest-xdist" +version = "3.6.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "execnet" }, + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/41/c4/3c310a19bc1f1e9ef50075582652673ef2bfc8cd62afef9585683821902f/pytest_xdist-3.6.1.tar.gz", hash = "sha256:ead156a4db231eec769737f57668ef58a2084a34b2e55c4a8fa20d861107300d", size = 84060 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6d/82/1d96bf03ee4c0fdc3c0cbe61470070e659ca78dc0086fb88b66c185e2449/pytest_xdist-3.6.1-py3-none-any.whl", hash = "sha256:9ed4adfb68a016610848639bb7e02c9352d5d9f03d04809919e2dafc3be4cca7", size = 46108 }, +] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892 }, +] + +[[package]] +name = "python-docx" +version = "1.1.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "lxml" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/35/e4/386c514c53684772885009c12b67a7edd526c15157778ac1b138bc75063e/python_docx-1.1.2.tar.gz", hash = "sha256:0cf1f22e95b9002addca7948e16f2cd7acdfd498047f1941ca5d293db7762efd", size = 5656581 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3e/3d/330d9efbdb816d3f60bf2ad92f05e1708e4a1b9abe80461ac3444c83f749/python_docx-1.1.2-py3-none-any.whl", hash = "sha256:08c20d6058916fb19853fcf080f7f42b6270d89eac9fa5f8c15f691c0017fabe", size = 244315 }, +] + +[[package]] +name = "python-dotenv" +version = "1.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/bc/57/e84d88dfe0aec03b7a2d4327012c1627ab5f03652216c63d49846d7a6c58/python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca", size = 39115 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6a/3e/b68c118422ec867fa7ab88444e1274aa40681c606d59ac27de5a5588f082/python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a", size = 19863 }, +] + +[[package]] +name = "python-jose" +version = "3.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "ecdsa" }, + { name = "pyasn1" }, + { name = "rsa" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e4/19/b2c86504116dc5f0635d29f802da858404d77d930a25633d2e86a64a35b3/python-jose-3.3.0.tar.gz", hash = "sha256:55779b5e6ad599c6336191246e95eb2293a9ddebd555f796a65f838f07e5d78a", size = 129068 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/bd/2d/e94b2f7bab6773c70efc70a61d66e312e1febccd9e0db6b9e0adf58cbad1/python_jose-3.3.0-py2.py3-none-any.whl", hash = "sha256:9b1376b023f8b298536eedd47ae1089bcdb848f1535ab30555cd92002d78923a", size = 33530 }, +] + +[[package]] +name = "python-multipart" +version = "0.0.7" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/67/94/bb4778be5d4c18329d60276d4e58b3974e2dce8ec3bee5569bfe9c81f36e/python_multipart-0.0.7.tar.gz", hash = "sha256:288a6c39b06596c1b988bb6794c6fbc80e6c369e35e5062637df256bee0c9af9", size = 31129 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/94/35/142fff3d85da49377ada6936ad9b776263549ab22656969b2fcd0bdb10f7/python_multipart-0.0.7-py3-none-any.whl", hash = "sha256:b1fef9a53b74c795e2347daac8c54b252d9e0df9c619712691c1cc8021bd3c49", size = 22191 }, +] + +[[package]] +name = "pytz" +version = "2024.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3a/31/3c70bf7603cc2dca0f19bdc53b4537a797747a58875b552c8c413d963a3f/pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a", size = 319692 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/11/c3/005fcca25ce078d2cc29fd559379817424e94885510568bc1bc53d7d5846/pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725", size = 508002 }, +] + +[[package]] +name = "pywin32" +version = "306" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/08/dc/28c668097edfaf4eac4617ef7adf081b9cf50d254672fcf399a70f5efc41/pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d", size = 8506422 }, + { url = "https://files.pythonhosted.org/packages/d3/d6/891894edec688e72c2e308b3243fad98b4066e1839fd2fe78f04129a9d31/pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8", size = 9226392 }, + { url = "https://files.pythonhosted.org/packages/8b/1e/fc18ad83ca553e01b97aa8393ff10e33c1fb57801db05488b83282ee9913/pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407", size = 8507689 }, + { url = "https://files.pythonhosted.org/packages/7e/9e/ad6b1ae2a5ad1066dc509350e0fbf74d8d50251a51e420a2a8feaa0cecbd/pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e", size = 9227547 }, + { url = "https://files.pythonhosted.org/packages/91/20/f744bff1da8f43388498503634378dbbefbe493e65675f2cc52f7185c2c2/pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a", size = 10388324 }, + { url = "https://files.pythonhosted.org/packages/14/91/17e016d5923e178346aabda3dfec6629d1a26efe587d19667542105cf0a6/pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b", size = 8507705 }, + { url = "https://files.pythonhosted.org/packages/83/1c/25b79fc3ec99b19b0a0730cc47356f7e2959863bf9f3cd314332bddb4f68/pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e", size = 9227429 }, + { url = 
"https://files.pythonhosted.org/packages/1c/43/e3444dc9a12f8365d9603c2145d16bf0a2f8180f343cf87be47f5579e547/pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040", size = 10388145 }, +] + +[[package]] +name = "pyyaml" +version = "6.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9b/95/a3fac87cb7158e231b5a6012e438c647e1a87f09f8e0d123acec8ab8bf71/PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086", size = 184199 }, + { url = "https://files.pythonhosted.org/packages/c7/7a/68bd47624dab8fd4afbfd3c48e3b79efe09098ae941de5b58abcbadff5cb/PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf", size = 171758 }, + { url = "https://files.pythonhosted.org/packages/49/ee/14c54df452143b9ee9f0f29074d7ca5516a36edb0b4cc40c3f280131656f/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237", size = 718463 }, + { url = "https://files.pythonhosted.org/packages/4d/61/de363a97476e766574650d742205be468921a7b532aa2499fcd886b62530/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b", size = 719280 }, + { url = "https://files.pythonhosted.org/packages/6b/4e/1523cb902fd98355e2e9ea5e5eb237cbc5f3ad5f3075fa65087aa0ecb669/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed", size = 751239 }, + { url = "https://files.pythonhosted.org/packages/b7/33/5504b3a9a4464893c32f118a9cc045190a91637b119a9c881da1cf6b7a72/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180", size = 695802 }, + { url = "https://files.pythonhosted.org/packages/5c/20/8347dcabd41ef3a3cdc4f7b7a2aff3d06598c8779faa189cdbf878b626a4/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68", size = 720527 }, + { url = "https://files.pythonhosted.org/packages/be/aa/5afe99233fb360d0ff37377145a949ae258aaab831bde4792b32650a4378/PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99", size = 144052 }, + { url = "https://files.pythonhosted.org/packages/b5/84/0fa4b06f6d6c958d207620fc60005e241ecedceee58931bb20138e1e5776/PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e", size = 161774 }, + { url = "https://files.pythonhosted.org/packages/f8/aa/7af4e81f7acba21a4c6be026da38fd2b872ca46226673c89a758ebdc4fd2/PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774", size = 184612 }, + { url = "https://files.pythonhosted.org/packages/8b/62/b9faa998fd185f65c1371643678e4d58254add437edb764a08c5a98fb986/PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee", size = 172040 }, + { url = "https://files.pythonhosted.org/packages/ad/0c/c804f5f922a9a6563bab712d8dcc70251e8af811fce4524d57c2c0fd49a4/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c", size = 736829 }, + { url = "https://files.pythonhosted.org/packages/51/16/6af8d6a6b210c8e54f1406a6b9481febf9c64a3109c541567e35a49aa2e7/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317", size = 764167 }, + { url = "https://files.pythonhosted.org/packages/75/e4/2c27590dfc9992f73aabbeb9241ae20220bd9452df27483b6e56d3975cc5/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85", size = 762952 }, + { url = "https://files.pythonhosted.org/packages/9b/97/ecc1abf4a823f5ac61941a9c00fe501b02ac3ab0e373c3857f7d4b83e2b6/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4", size = 735301 }, + { url = "https://files.pythonhosted.org/packages/45/73/0f49dacd6e82c9430e46f4a027baa4ca205e8b0a9dce1397f44edc23559d/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e", size = 756638 }, + { url = "https://files.pythonhosted.org/packages/22/5f/956f0f9fc65223a58fbc14459bf34b4cc48dec52e00535c79b8db361aabd/PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5", size = 143850 }, + { url = "https://files.pythonhosted.org/packages/ed/23/8da0bbe2ab9dcdd11f4f4557ccaf95c10b9811b13ecced089d43ce59c3c8/PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44", size = 161980 }, + { url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873 }, + { url = "https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302 }, + { url = "https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154 }, + { url = "https://files.pythonhosted.org/packages/95/0f/b8938f1cbd09739c6da569d172531567dbcc9789e0029aa070856f123984/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", size = 766223 }, + { url = "https://files.pythonhosted.org/packages/b9/2b/614b4752f2e127db5cc206abc23a8c19678e92b23c3db30fc86ab731d3bd/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", size = 767542 }, + { url = 
"https://files.pythonhosted.org/packages/d4/00/dd137d5bcc7efea1836d6264f049359861cf548469d18da90cd8216cf05f/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", size = 731164 }, + { url = "https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", size = 756611 }, + { url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", size = 140591 }, + { url = "https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", size = 156338 }, + { url = "https://files.pythonhosted.org/packages/ef/e3/3af305b830494fa85d95f6d95ef7fa73f2ee1cc8ef5b495c7c3269fb835f/PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", size = 181309 }, + { url = "https://files.pythonhosted.org/packages/45/9f/3b1c20a0b7a3200524eb0076cc027a970d320bd3a6592873c85c92a08731/PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", size = 171679 }, + { url = "https://files.pythonhosted.org/packages/7c/9a/337322f27005c33bcb656c655fa78325b730324c78620e8328ae28b64d0c/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", size = 733428 }, + { url = "https://files.pythonhosted.org/packages/a3/69/864fbe19e6c18ea3cc196cbe5d392175b4cf3d5d0ac1403ec3f2d237ebb5/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", size = 763361 }, + { url = "https://files.pythonhosted.org/packages/04/24/b7721e4845c2f162d26f50521b825fb061bc0a5afcf9a386840f23ea19fa/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", size = 759523 }, + { url = "https://files.pythonhosted.org/packages/2b/b2/e3234f59ba06559c6ff63c4e10baea10e5e7df868092bf9ab40e5b9c56b6/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", size = 726660 }, + { url = "https://files.pythonhosted.org/packages/fe/0f/25911a9f080464c59fab9027482f822b86bf0608957a5fcc6eaac85aa515/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", size = 751597 }, + { url = "https://files.pythonhosted.org/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", size = 140527 }, + { url = "https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446 }, +] + +[[package]] +name = "pyzmq" +version = "26.2.0" 
+source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi", marker = "implementation_name == 'pypy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fd/05/bed626b9f7bb2322cdbbf7b4bd8f54b1b617b0d2ab2d3547d6e39428a48e/pyzmq-26.2.0.tar.gz", hash = "sha256:070672c258581c8e4f640b5159297580a9974b026043bd4ab0470be9ed324f1f", size = 271975 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1f/a8/9837c39aba390eb7d01924ace49d761c8dbe7bc2d6082346d00c8332e431/pyzmq-26.2.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:ddf33d97d2f52d89f6e6e7ae66ee35a4d9ca6f36eda89c24591b0c40205a3629", size = 1340058 }, + { url = "https://files.pythonhosted.org/packages/a2/1f/a006f2e8e4f7d41d464272012695da17fb95f33b54342612a6890da96ff6/pyzmq-26.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dacd995031a01d16eec825bf30802fceb2c3791ef24bcce48fa98ce40918c27b", size = 1008818 }, + { url = "https://files.pythonhosted.org/packages/b6/09/b51b6683fde5ca04593a57bbe81788b6b43114d8f8ee4e80afc991e14760/pyzmq-26.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89289a5ee32ef6c439086184529ae060c741334b8970a6855ec0b6ad3ff28764", size = 673199 }, + { url = "https://files.pythonhosted.org/packages/c9/78/486f3e2e824f3a645238332bf5a4c4b4477c3063033a27c1e4052358dee2/pyzmq-26.2.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5506f06d7dc6ecf1efacb4a013b1f05071bb24b76350832c96449f4a2d95091c", size = 911762 }, + { url = "https://files.pythonhosted.org/packages/5e/3b/2eb1667c9b866f53e76ee8b0c301b0469745a23bd5a87b7ee3d5dd9eb6e5/pyzmq-26.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ea039387c10202ce304af74def5021e9adc6297067f3441d348d2b633e8166a", size = 868773 }, + { url = "https://files.pythonhosted.org/packages/16/29/ca99b4598a9dc7e468b5417eda91f372b595be1e3eec9b7cbe8e5d3584e8/pyzmq-26.2.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a2224fa4a4c2ee872886ed00a571f5e967c85e078e8e8c2530a2fb01b3309b88", size = 868834 }, + { url = "https://files.pythonhosted.org/packages/ad/e5/9efaeb1d2f4f8c50da04144f639b042bc52869d3a206d6bf672ab3522163/pyzmq-26.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:28ad5233e9c3b52d76196c696e362508959741e1a005fb8fa03b51aea156088f", size = 1202861 }, + { url = "https://files.pythonhosted.org/packages/c3/62/c721b5608a8ac0a69bb83cbb7d07a56f3ff00b3991a138e44198a16f94c7/pyzmq-26.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:1c17211bc037c7d88e85ed8b7d8f7e52db6dc8eca5590d162717c654550f7282", size = 1515304 }, + { url = "https://files.pythonhosted.org/packages/87/84/e8bd321aa99b72f48d4606fc5a0a920154125bd0a4608c67eab742dab087/pyzmq-26.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b8f86dd868d41bea9a5f873ee13bf5551c94cf6bc51baebc6f85075971fe6eea", size = 1414712 }, + { url = "https://files.pythonhosted.org/packages/cd/cd/420e3fd1ac6977b008b72e7ad2dae6350cc84d4c5027fc390b024e61738f/pyzmq-26.2.0-cp310-cp310-win32.whl", hash = "sha256:46a446c212e58456b23af260f3d9fb785054f3e3653dbf7279d8f2b5546b21c2", size = 578113 }, + { url = "https://files.pythonhosted.org/packages/5c/57/73930d56ed45ae0cb4946f383f985c855c9b3d4063f26416998f07523c0e/pyzmq-26.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:49d34ab71db5a9c292a7644ce74190b1dd5a3475612eefb1f8be1d6961441971", size = 641631 }, + { url = "https://files.pythonhosted.org/packages/61/d2/ae6ac5c397f1ccad59031c64beaafce7a0d6182e0452cc48f1c9c87d2dd0/pyzmq-26.2.0-cp310-cp310-win_arm64.whl", 
hash = "sha256:bfa832bfa540e5b5c27dcf5de5d82ebc431b82c453a43d141afb1e5d2de025fa", size = 543528 }, + { url = "https://files.pythonhosted.org/packages/12/20/de7442172f77f7c96299a0ac70e7d4fb78cd51eca67aa2cf552b66c14196/pyzmq-26.2.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:8f7e66c7113c684c2b3f1c83cdd3376103ee0ce4c49ff80a648643e57fb22218", size = 1340639 }, + { url = "https://files.pythonhosted.org/packages/98/4d/5000468bd64c7910190ed0a6c76a1ca59a68189ec1f007c451dc181a22f4/pyzmq-26.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3a495b30fc91db2db25120df5847d9833af237546fd59170701acd816ccc01c4", size = 1008710 }, + { url = "https://files.pythonhosted.org/packages/e1/bf/c67fd638c2f9fbbab8090a3ee779370b97c82b84cc12d0c498b285d7b2c0/pyzmq-26.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77eb0968da535cba0470a5165468b2cac7772cfb569977cff92e240f57e31bef", size = 673129 }, + { url = "https://files.pythonhosted.org/packages/86/94/99085a3f492aa538161cbf27246e8886ff850e113e0c294a5b8245f13b52/pyzmq-26.2.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ace4f71f1900a548f48407fc9be59c6ba9d9aaf658c2eea6cf2779e72f9f317", size = 910107 }, + { url = "https://files.pythonhosted.org/packages/31/1d/346809e8a9b999646d03f21096428453465b1bca5cd5c64ecd048d9ecb01/pyzmq-26.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:92a78853d7280bffb93df0a4a6a2498cba10ee793cc8076ef797ef2f74d107cf", size = 867960 }, + { url = "https://files.pythonhosted.org/packages/ab/68/6fb6ae5551846ad5beca295b7bca32bf0a7ce19f135cb30e55fa2314e6b6/pyzmq-26.2.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:689c5d781014956a4a6de61d74ba97b23547e431e9e7d64f27d4922ba96e9d6e", size = 869204 }, + { url = "https://files.pythonhosted.org/packages/0f/f9/18417771dee223ccf0f48e29adf8b4e25ba6d0e8285e33bcbce078070bc3/pyzmq-26.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0aca98bc423eb7d153214b2df397c6421ba6373d3397b26c057af3c904452e37", size = 1203351 }, + { url = "https://files.pythonhosted.org/packages/e0/46/f13e67fe0d4f8a2315782cbad50493de6203ea0d744610faf4d5f5b16e90/pyzmq-26.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1f3496d76b89d9429a656293744ceca4d2ac2a10ae59b84c1da9b5165f429ad3", size = 1514204 }, + { url = "https://files.pythonhosted.org/packages/50/11/ddcf7343b7b7a226e0fc7b68cbf5a5bb56291fac07f5c3023bb4c319ebb4/pyzmq-26.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5c2b3bfd4b9689919db068ac6c9911f3fcb231c39f7dd30e3138be94896d18e6", size = 1414339 }, + { url = "https://files.pythonhosted.org/packages/01/14/1c18d7d5b7be2708f513f37c61bfadfa62161c10624f8733f1c8451b3509/pyzmq-26.2.0-cp311-cp311-win32.whl", hash = "sha256:eac5174677da084abf378739dbf4ad245661635f1600edd1221f150b165343f4", size = 576928 }, + { url = "https://files.pythonhosted.org/packages/3b/1b/0a540edd75a41df14ec416a9a500b9fec66e554aac920d4c58fbd5756776/pyzmq-26.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:5a509df7d0a83a4b178d0f937ef14286659225ef4e8812e05580776c70e155d5", size = 642317 }, + { url = "https://files.pythonhosted.org/packages/98/77/1cbfec0358078a4c5add529d8a70892db1be900980cdb5dd0898b3d6ab9d/pyzmq-26.2.0-cp311-cp311-win_arm64.whl", hash = "sha256:c0e6091b157d48cbe37bd67233318dbb53e1e6327d6fc3bb284afd585d141003", size = 543834 }, + { url = "https://files.pythonhosted.org/packages/28/2f/78a766c8913ad62b28581777ac4ede50c6d9f249d39c2963e279524a1bbe/pyzmq-26.2.0-cp312-cp312-macosx_10_15_universal2.whl", hash = 
"sha256:ded0fc7d90fe93ae0b18059930086c51e640cdd3baebdc783a695c77f123dcd9", size = 1343105 }, + { url = "https://files.pythonhosted.org/packages/b7/9c/4b1e2d3d4065be715e007fe063ec7885978fad285f87eae1436e6c3201f4/pyzmq-26.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:17bf5a931c7f6618023cdacc7081f3f266aecb68ca692adac015c383a134ca52", size = 1008365 }, + { url = "https://files.pythonhosted.org/packages/4f/ef/5a23ec689ff36d7625b38d121ef15abfc3631a9aecb417baf7a4245e4124/pyzmq-26.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55cf66647e49d4621a7e20c8d13511ef1fe1efbbccf670811864452487007e08", size = 665923 }, + { url = "https://files.pythonhosted.org/packages/ae/61/d436461a47437d63c6302c90724cf0981883ec57ceb6073873f32172d676/pyzmq-26.2.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4661c88db4a9e0f958c8abc2b97472e23061f0bc737f6f6179d7a27024e1faa5", size = 903400 }, + { url = "https://files.pythonhosted.org/packages/47/42/fc6d35ecefe1739a819afaf6f8e686f7f02a4dd241c78972d316f403474c/pyzmq-26.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea7f69de383cb47522c9c208aec6dd17697db7875a4674c4af3f8cfdac0bdeae", size = 860034 }, + { url = "https://files.pythonhosted.org/packages/07/3b/44ea6266a6761e9eefaa37d98fabefa112328808ac41aa87b4bbb668af30/pyzmq-26.2.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:7f98f6dfa8b8ccaf39163ce872bddacca38f6a67289116c8937a02e30bbe9711", size = 860579 }, + { url = "https://files.pythonhosted.org/packages/38/6f/4df2014ab553a6052b0e551b37da55166991510f9e1002c89cab7ce3b3f2/pyzmq-26.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e3e0210287329272539eea617830a6a28161fbbd8a3271bf4150ae3e58c5d0e6", size = 1196246 }, + { url = "https://files.pythonhosted.org/packages/38/9d/ee240fc0c9fe9817f0c9127a43238a3e28048795483c403cc10720ddef22/pyzmq-26.2.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6b274e0762c33c7471f1a7471d1a2085b1a35eba5cdc48d2ae319f28b6fc4de3", size = 1507441 }, + { url = "https://files.pythonhosted.org/packages/85/4f/01711edaa58d535eac4a26c294c617c9a01f09857c0ce191fd574d06f359/pyzmq-26.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:29c6a4635eef69d68a00321e12a7d2559fe2dfccfa8efae3ffb8e91cd0b36a8b", size = 1406498 }, + { url = "https://files.pythonhosted.org/packages/07/18/907134c85c7152f679ed744e73e645b365f3ad571f38bdb62e36f347699a/pyzmq-26.2.0-cp312-cp312-win32.whl", hash = "sha256:989d842dc06dc59feea09e58c74ca3e1678c812a4a8a2a419046d711031f69c7", size = 575533 }, + { url = "https://files.pythonhosted.org/packages/ce/2c/a6f4a20202a4d3c582ad93f95ee78d79bbdc26803495aec2912b17dbbb6c/pyzmq-26.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:2a50625acdc7801bc6f74698c5c583a491c61d73c6b7ea4dee3901bb99adb27a", size = 637768 }, + { url = "https://files.pythonhosted.org/packages/5f/0e/eb16ff731632d30554bf5af4dbba3ffcd04518219d82028aea4ae1b02ca5/pyzmq-26.2.0-cp312-cp312-win_arm64.whl", hash = "sha256:4d29ab8592b6ad12ebbf92ac2ed2bedcfd1cec192d8e559e2e099f648570e19b", size = 540675 }, + { url = "https://files.pythonhosted.org/packages/04/a7/0f7e2f6c126fe6e62dbae0bc93b1bd3f1099cf7fea47a5468defebe3f39d/pyzmq-26.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9dd8cd1aeb00775f527ec60022004d030ddc51d783d056e3e23e74e623e33726", size = 1006564 }, + { url = "https://files.pythonhosted.org/packages/31/b6/a187165c852c5d49f826a690857684333a6a4a065af0a6015572d2284f6a/pyzmq-26.2.0-cp313-cp313-macosx_10_15_universal2.whl", hash = 
"sha256:28c812d9757fe8acecc910c9ac9dafd2ce968c00f9e619db09e9f8f54c3a68a3", size = 1340447 }, + { url = "https://files.pythonhosted.org/packages/68/ba/f4280c58ff71f321602a6e24fd19879b7e79793fb8ab14027027c0fb58ef/pyzmq-26.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d80b1dd99c1942f74ed608ddb38b181b87476c6a966a88a950c7dee118fdf50", size = 665485 }, + { url = "https://files.pythonhosted.org/packages/77/b5/c987a5c53c7d8704216f29fc3d810b32f156bcea488a940e330e1bcbb88d/pyzmq-26.2.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c997098cc65e3208eca09303630e84d42718620e83b733d0fd69543a9cab9cb", size = 903484 }, + { url = "https://files.pythonhosted.org/packages/29/c9/07da157d2db18c72a7eccef8e684cefc155b712a88e3d479d930aa9eceba/pyzmq-26.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ad1bc8d1b7a18497dda9600b12dc193c577beb391beae5cd2349184db40f187", size = 859981 }, + { url = "https://files.pythonhosted.org/packages/43/09/e12501bd0b8394b7d02c41efd35c537a1988da67fc9c745cae9c6c776d31/pyzmq-26.2.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:bea2acdd8ea4275e1278350ced63da0b166421928276c7c8e3f9729d7402a57b", size = 860334 }, + { url = "https://files.pythonhosted.org/packages/eb/ff/f5ec1d455f8f7385cc0a8b2acd8c807d7fade875c14c44b85c1bddabae21/pyzmq-26.2.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:23f4aad749d13698f3f7b64aad34f5fc02d6f20f05999eebc96b89b01262fb18", size = 1196179 }, + { url = "https://files.pythonhosted.org/packages/ec/8a/bb2ac43295b1950fe436a81fc5b298be0b96ac76fb029b514d3ed58f7b27/pyzmq-26.2.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:a4f96f0d88accc3dbe4a9025f785ba830f968e21e3e2c6321ccdfc9aef755115", size = 1507668 }, + { url = "https://files.pythonhosted.org/packages/a9/49/dbc284ebcfd2dca23f6349227ff1616a7ee2c4a35fe0a5d6c3deff2b4fed/pyzmq-26.2.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ced65e5a985398827cc9276b93ef6dfabe0273c23de8c7931339d7e141c2818e", size = 1406539 }, + { url = "https://files.pythonhosted.org/packages/00/68/093cdce3fe31e30a341d8e52a1ad86392e13c57970d722c1f62a1d1a54b6/pyzmq-26.2.0-cp313-cp313-win32.whl", hash = "sha256:31507f7b47cc1ead1f6e86927f8ebb196a0bab043f6345ce070f412a59bf87b5", size = 575567 }, + { url = "https://files.pythonhosted.org/packages/92/ae/6cc4657148143412b5819b05e362ae7dd09fb9fe76e2a539dcff3d0386bc/pyzmq-26.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:70fc7fcf0410d16ebdda9b26cbd8bf8d803d220a7f3522e060a69a9c87bf7bad", size = 637551 }, + { url = "https://files.pythonhosted.org/packages/6c/67/fbff102e201688f97c8092e4c3445d1c1068c2f27bbd45a578df97ed5f94/pyzmq-26.2.0-cp313-cp313-win_arm64.whl", hash = "sha256:c3789bd5768ab5618ebf09cef6ec2b35fed88709b104351748a63045f0ff9797", size = 540378 }, + { url = "https://files.pythonhosted.org/packages/3f/fe/2d998380b6e0122c6c4bdf9b6caf490831e5f5e2d08a203b5adff060c226/pyzmq-26.2.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:034da5fc55d9f8da09015d368f519478a52675e558c989bfcb5cf6d4e16a7d2a", size = 1007378 }, + { url = "https://files.pythonhosted.org/packages/4a/f4/30d6e7157f12b3a0390bde94d6a8567cdb88846ed068a6e17238a4ccf600/pyzmq-26.2.0-cp313-cp313t-macosx_10_15_universal2.whl", hash = "sha256:c92d73464b886931308ccc45b2744e5968cbaade0b1d6aeb40d8ab537765f5bc", size = 1329532 }, + { url = "https://files.pythonhosted.org/packages/82/86/3fe917870e15ee1c3ad48229a2a64458e36036e64b4afa9659045d82bfa8/pyzmq-26.2.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:794a4562dcb374f7dbbfb3f51d28fb40123b5a2abadee7b4091f93054909add5", size = 653242 }, + { url = "https://files.pythonhosted.org/packages/50/2d/242e7e6ef6c8c19e6cb52d095834508cd581ffb925699fd3c640cdc758f1/pyzmq-26.2.0-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aee22939bb6075e7afededabad1a56a905da0b3c4e3e0c45e75810ebe3a52672", size = 888404 }, + { url = "https://files.pythonhosted.org/packages/ac/11/7270566e1f31e4ea73c81ec821a4b1688fd551009a3d2bab11ec66cb1e8f/pyzmq-26.2.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ae90ff9dad33a1cfe947d2c40cb9cb5e600d759ac4f0fd22616ce6540f72797", size = 845858 }, + { url = "https://files.pythonhosted.org/packages/91/d5/72b38fbc69867795c8711bdd735312f9fef1e3d9204e2f63ab57085434b9/pyzmq-26.2.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:43a47408ac52647dfabbc66a25b05b6a61700b5165807e3fbd40063fcaf46386", size = 847375 }, + { url = "https://files.pythonhosted.org/packages/dd/9a/10ed3c7f72b4c24e719c59359fbadd1a27556a28b36cdf1cd9e4fb7845d5/pyzmq-26.2.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:25bf2374a2a8433633c65ccb9553350d5e17e60c8eb4de4d92cc6bd60f01d306", size = 1183489 }, + { url = "https://files.pythonhosted.org/packages/72/2d/8660892543fabf1fe41861efa222455811adac9f3c0818d6c3170a1153e3/pyzmq-26.2.0-cp313-cp313t-musllinux_1_1_i686.whl", hash = "sha256:007137c9ac9ad5ea21e6ad97d3489af654381324d5d3ba614c323f60dab8fae6", size = 1492932 }, + { url = "https://files.pythonhosted.org/packages/7b/d6/32fd69744afb53995619bc5effa2a405ae0d343cd3e747d0fbc43fe894ee/pyzmq-26.2.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:470d4a4f6d48fb34e92d768b4e8a5cc3780db0d69107abf1cd7ff734b9766eb0", size = 1392485 }, + { url = "https://files.pythonhosted.org/packages/53/fb/36b2b2548286e9444e52fcd198760af99fd89102b5be50f0660fcfe902df/pyzmq-26.2.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:706e794564bec25819d21a41c31d4df2d48e1cc4b061e8d345d7fb4dd3e94072", size = 906955 }, + { url = "https://files.pythonhosted.org/packages/77/8f/6ce54f8979a01656e894946db6299e2273fcee21c8e5fa57c6295ef11f57/pyzmq-26.2.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b435f2753621cd36e7c1762156815e21c985c72b19135dac43a7f4f31d28dd1", size = 565701 }, + { url = "https://files.pythonhosted.org/packages/ee/1c/bf8cd66730a866b16db8483286078892b7f6536f8c389fb46e4beba0a970/pyzmq-26.2.0-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:160c7e0a5eb178011e72892f99f918c04a131f36056d10d9c1afb223fc952c2d", size = 794312 }, + { url = "https://files.pythonhosted.org/packages/71/43/91fa4ff25bbfdc914ab6bafa0f03241d69370ef31a761d16bb859f346582/pyzmq-26.2.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c4a71d5d6e7b28a47a394c0471b7e77a0661e2d651e7ae91e0cab0a587859ca", size = 752775 }, + { url = "https://files.pythonhosted.org/packages/ec/d2/3b2ab40f455a256cb6672186bea95cd97b459ce4594050132d71e76f0d6f/pyzmq-26.2.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:90412f2db8c02a3864cbfc67db0e3dcdbda336acf1c469526d3e869394fe001c", size = 550762 }, +] + +[[package]] +name = "qdrant-client" +version = "1.11.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "grpcio" }, + { name = "grpcio-tools" }, + { name = "httpx", extra = ["http2"] }, + { name = "numpy" }, + { name = "portalocker" }, + { name = "pydantic" }, + { name = "urllib3" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/f1/19/286cc74a86681e41c1bbcbc720c6f3191e6e0cf9ca17473542c657b541ae/qdrant_client-1.11.2.tar.gz", hash = "sha256:0d5aa3f778077762963a754459c9c7144ba48e13dea62e559323924126a1b4a4", size = 229220 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/33/a9/a701893ed83a90a5fefd1190430c36b97df22387dfa871113d8d2428f469/qdrant_client-1.11.2-py3-none-any.whl", hash = "sha256:3151e3da61588ad138dfcd6760c2f13e57251c8b0c62001bfd0e03bb7bcd6c8e", size = 258914 }, +] + +[[package]] +name = "redis" +version = "5.0.8" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "async-timeout", marker = "python_full_version < '3.11.3'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/48/10/defc227d65ea9c2ff5244645870859865cba34da7373477c8376629746ec/redis-5.0.8.tar.gz", hash = "sha256:0c5b10d387568dfe0698c6fad6615750c24170e548ca2deac10c649d463e9870", size = 4595651 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c5/d1/19a9c76811757684a0f74adc25765c8a901d67f9f6472ac9d57c844a23c8/redis-5.0.8-py3-none-any.whl", hash = "sha256:56134ee08ea909106090934adc36f65c9bcbbaecea5b21ba704ba6fb561f8eb4", size = 255608 }, +] + +[[package]] +name = "regex" +version = "2023.12.25" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b5/39/31626e7e75b187fae7f121af3c538a991e725c744ac893cc2cfd70ce2853/regex-2023.12.25.tar.gz", hash = "sha256:29171aa128da69afdf4bde412d5bedc335f2ca8fcfe4489038577d05f16181e5", size = 394706 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/59/d6/3d8fb38120053e4d7b196f32fa5c3a760f8349cdee02c021617e6e653e61/regex-2023.12.25-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0694219a1d54336fd0445ea382d49d36882415c0134ee1e8332afd1529f0baa5", size = 497367 }, + { url = "https://files.pythonhosted.org/packages/8a/8d/8c70bce12045fa622949d3fd3e4e64a01b506a3e670dada8c5f9b3be1e34/regex-2023.12.25-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b014333bd0217ad3d54c143de9d4b9a3ca1c5a29a6d0d554952ea071cff0f1f8", size = 296412 }, + { url = "https://files.pythonhosted.org/packages/3d/d8/e5f7fcd33adaa3ce346ff5baf4319956873c49cbb0ed11566f921883096b/regex-2023.12.25-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d865984b3f71f6d0af64d0d88f5733521698f6c16f445bb09ce746c92c97c586", size = 291028 }, + { url = "https://files.pythonhosted.org/packages/2e/15/58c7b42d4ebc85b88696483c739d2c3b1db7234d7ab3c1aef50cf9b88d51/regex-2023.12.25-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e0eabac536b4cc7f57a5f3d095bfa557860ab912f25965e08fe1545e2ed8b4c", size = 774099 }, + { url = "https://files.pythonhosted.org/packages/40/ef/acde6b823da62186d4309de039e470e3f08311e5b40b754aec187d82939f/regex-2023.12.25-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c25a8ad70e716f96e13a637802813f65d8a6760ef48672aa3502f4c24ea8b400", size = 814912 }, + { url = "https://files.pythonhosted.org/packages/7a/00/8b2322e246d0a392c91bdb43750bb900fab5d48d693c1497b3ea6656f851/regex-2023.12.25-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9b6d73353f777630626f403b0652055ebfe8ff142a44ec2cf18ae470395766e", size = 800527 }, + { url = "https://files.pythonhosted.org/packages/81/8a/96a62ce98e8ff1b16db56fde3debc8a571f6b7ea42ee137eb0d995cdfa26/regex-2023.12.25-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:a9cc99d6946d750eb75827cb53c4371b8b0fe89c733a94b1573c9dd16ea6c9e4", size = 773955 }, + { url = "https://files.pythonhosted.org/packages/d6/3b/909ab8c13caf117cab2d494f4e0ba5c973a66014b15e8ccd5ec1a704f179/regex-2023.12.25-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88d1f7bef20c721359d8675f7d9f8e414ec5003d8f642fdfd8087777ff7f94b5", size = 762996 }, + { url = "https://files.pythonhosted.org/packages/3f/b1/df76e0c38fcb7b64b23bd86de820c1cfa7b3b35005122b468df8e93f2bfa/regex-2023.12.25-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cb3fe77aec8f1995611f966d0c656fdce398317f850d0e6e7aebdfe61f40e1cd", size = 690363 }, + { url = "https://files.pythonhosted.org/packages/a4/db/7d05718f5157257ee9f980d381f54efdaccb95c0db8e05071ce4d8ee3347/regex-2023.12.25-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7aa47c2e9ea33a4a2a05f40fcd3ea36d73853a2aae7b4feab6fc85f8bf2c9704", size = 743898 }, + { url = "https://files.pythonhosted.org/packages/2d/06/8c07ade57639bd30543b96715a0c1eef72d65aabdf7ff6f0b6b1f8bd371f/regex-2023.12.25-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:df26481f0c7a3f8739fecb3e81bc9da3fcfae34d6c094563b9d4670b047312e1", size = 731377 }, + { url = "https://files.pythonhosted.org/packages/05/3c/e77e4c13492d34171af2765c4263d35573b4b8d813f58bb33dae3da5c897/regex-2023.12.25-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c40281f7d70baf6e0db0c2f7472b31609f5bc2748fe7275ea65a0b4601d9b392", size = 764034 }, + { url = "https://files.pythonhosted.org/packages/b8/5d/d2f0a1091c00ee5a854199423609c69eaa8b48a8352a6626c0ae85265b6a/regex-2023.12.25-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:d94a1db462d5690ebf6ae86d11c5e420042b9898af5dcf278bd97d6bda065423", size = 768580 }, + { url = "https://files.pythonhosted.org/packages/b5/51/e884e1e021a8819251e09606354733a62decffd703ad6fd1ed9098a003a0/regex-2023.12.25-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ba1b30765a55acf15dce3f364e4928b80858fa8f979ad41f862358939bdd1f2f", size = 744705 }, + { url = "https://files.pythonhosted.org/packages/ac/fc/b7b7da0eb7110d1c4529b9d74d5d1ba92f85f0ce32be72f490f5eebfcdab/regex-2023.12.25-cp310-cp310-win32.whl", hash = "sha256:150c39f5b964e4d7dba46a7962a088fbc91f06e606f023ce57bb347a3b2d4630", size = 257749 }, + { url = "https://files.pythonhosted.org/packages/83/eb/144d2db5cf2ac3989d0ea4273040218d68bd67422133548da47043423594/regex-2023.12.25-cp310-cp310-win_amd64.whl", hash = "sha256:09da66917262d9481c719599116c7dc0c321ffcec4b1f510c4f8a066f8768105", size = 269481 }, + { url = "https://files.pythonhosted.org/packages/27/98/e2f151d958bea25682118c68f22e49fe98d8797aadfbf0d5df0288118c6d/regex-2023.12.25-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1b9d811f72210fa9306aeb88385b8f8bcef0dfbf3873410413c00aa94c56c2b6", size = 497418 }, + { url = "https://files.pythonhosted.org/packages/dc/c2/b3c89e9c8933ceb2a8f56fcd25f1133f21d8e490fbdbd76160dfc2c83a6e/regex-2023.12.25-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d902a43085a308cef32c0d3aea962524b725403fd9373dea18110904003bac97", size = 296466 }, + { url = "https://files.pythonhosted.org/packages/60/9e/4b0223e05776aa3be806a902093b2ab1de3ba26b652d92065d5c7e1d4df3/regex-2023.12.25-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d166eafc19f4718df38887b2bbe1467a4f74a9830e8605089ea7a30dd4da8887", size = 291038 }, + { url = 
"https://files.pythonhosted.org/packages/9b/71/b55b5ffc75918a96ea99794783524609ac3ff9e2d8f51e7ece8648a968f6/regex-2023.12.25-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7ad32824b7f02bb3c9f80306d405a1d9b7bb89362d68b3c5a9be53836caebdb", size = 783871 }, + { url = "https://files.pythonhosted.org/packages/c1/69/b9671621092a1f9b16892bc638368efb3ce00648ce79b91d472feaa740c9/regex-2023.12.25-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:636ba0a77de609d6510235b7f0e77ec494d2657108f777e8765efc060094c98c", size = 823445 }, + { url = "https://files.pythonhosted.org/packages/8d/fc/8ade283909c52f795bdc9b9fe44f85c6da5417f9be84c3d245706406551e/regex-2023.12.25-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fda75704357805eb953a3ee15a2b240694a9a514548cd49b3c5124b4e2ad01b", size = 810161 }, + { url = "https://files.pythonhosted.org/packages/8d/6b/2f6478814954c07c04ba60b78d688d3d7bab10d786e0b6c1db607e4f6673/regex-2023.12.25-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f72cbae7f6b01591f90814250e636065850c5926751af02bb48da94dfced7baa", size = 785105 }, + { url = "https://files.pythonhosted.org/packages/2a/3a/9601d6e8a49ce7a124268c4c79d54f22416242e5096cd4fca07f7bfac46b/regex-2023.12.25-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db2a0b1857f18b11e3b0e54ddfefc96af46b0896fb678c85f63fb8c37518b3e7", size = 772823 }, + { url = "https://files.pythonhosted.org/packages/c8/b5/882aa0697e46d29a9f796c91221e03b1beec3c29664718c7d26ce05e7fb8/regex-2023.12.25-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7502534e55c7c36c0978c91ba6f61703faf7ce733715ca48f499d3dbbd7657e0", size = 749953 }, + { url = "https://files.pythonhosted.org/packages/00/d4/d876ce23d76103db84f3b2aeb3cba7c6b9b5750a2e2125ef6bfa2be53deb/regex-2023.12.25-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e8c7e08bb566de4faaf11984af13f6bcf6a08f327b13631d41d62592681d24fe", size = 738427 }, + { url = "https://files.pythonhosted.org/packages/70/0f/311ada39601c7bd7904b6ab3b01b414438a16efab5f2009f35a273999942/regex-2023.12.25-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:283fc8eed679758de38fe493b7d7d84a198b558942b03f017b1f94dda8efae80", size = 770450 }, + { url = "https://files.pythonhosted.org/packages/e3/66/29a1feac5c69907fedd6b3d8562d5ddc7c28fdf8585da6484617fe4c0b5e/regex-2023.12.25-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:f44dd4d68697559d007462b0a3a1d9acd61d97072b71f6d1968daef26bc744bd", size = 776326 }, + { url = "https://files.pythonhosted.org/packages/97/33/101559f6506a98b55613efa484d072d23fdeca3ef6876d43a8c49c7ec65f/regex-2023.12.25-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:67d3ccfc590e5e7197750fcb3a2915b416a53e2de847a728cfa60141054123d4", size = 752942 }, + { url = "https://files.pythonhosted.org/packages/92/2a/6431462df58f29515be33fa8b3800efa73b2be47664e71af557101e2a733/regex-2023.12.25-cp311-cp311-win32.whl", hash = "sha256:68191f80a9bad283432385961d9efe09d783bcd36ed35a60fb1ff3f1ec2efe87", size = 257757 }, + { url = "https://files.pythonhosted.org/packages/a8/01/18232f93672c1d530834e2e0568a80eaab1df12d67ae499b1762ab462b5c/regex-2023.12.25-cp311-cp311-win_amd64.whl", hash = "sha256:7d2af3f6b8419661a0c421584cfe8aaec1c0e435ce7e47ee2a97e344b98f794f", size = 269492 }, + { url = 
"https://files.pythonhosted.org/packages/8b/b8/14527ca54351156f65c90f8728ee62e646a484dbce0e4cbffb34489e5bb0/regex-2023.12.25-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8a0ccf52bb37d1a700375a6b395bff5dd15c50acb745f7db30415bae3c2b0715", size = 500440 }, + { url = "https://files.pythonhosted.org/packages/0b/d4/5498d06a7a05be1b3e1e553d60fb61292afe5ca9fdc2aea5283f30651f1b/regex-2023.12.25-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c3c4a78615b7762740531c27cf46e2f388d8d727d0c0c739e72048beb26c8a9d", size = 298103 }, + { url = "https://files.pythonhosted.org/packages/66/65/90e759a89534b850fa20e533e587748e967c44f58333b40f6d62718df1b1/regex-2023.12.25-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ad83e7545b4ab69216cef4cc47e344d19622e28aabec61574b20257c65466d6a", size = 292245 }, + { url = "https://files.pythonhosted.org/packages/b5/29/ddfd602f350a5f71926fec1f6f1ba9f5fcc7a05b36b364009904a119dfc7/regex-2023.12.25-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7a635871143661feccce3979e1727c4e094f2bdfd3ec4b90dfd4f16f571a87a", size = 786060 }, + { url = "https://files.pythonhosted.org/packages/1b/aa/f9beeee2217de48fd47d68fc5ea9655f66440b33fa8212bad42427fe3587/regex-2023.12.25-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d498eea3f581fbe1b34b59c697512a8baef88212f92e4c7830fcc1499f5b45a5", size = 829520 }, + { url = "https://files.pythonhosted.org/packages/a2/da/2b04560d91bdf49d3ca519c08db68a5d37d02e526b491f1a5c179ec3d21d/regex-2023.12.25-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:43f7cd5754d02a56ae4ebb91b33461dc67be8e3e0153f593c509e21d219c5060", size = 814727 }, + { url = "https://files.pythonhosted.org/packages/fe/4e/242050c3ff38c08f16b31a5a338525def3f85b819fc0c5a97c35217098a7/regex-2023.12.25-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51f4b32f793812714fd5307222a7f77e739b9bc566dc94a18126aba3b92b98a3", size = 789110 }, + { url = "https://files.pythonhosted.org/packages/f9/ef/14fcc5f19b0e72b64d4d530ae9bb8ba9739f6ced9c80d061c68ff93d5ebc/regex-2023.12.25-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba99d8077424501b9616b43a2d208095746fb1284fc5ba490139651f971d39d9", size = 777017 }, + { url = "https://files.pythonhosted.org/packages/48/d7/41efecdd60b117d60618620b0d2af5d0638d1955c9266a5492235ed38fc8/regex-2023.12.25-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4bfc2b16e3ba8850e0e262467275dd4d62f0d045e0e9eda2bc65078c0110a11f", size = 751262 }, + { url = "https://files.pythonhosted.org/packages/8d/4d/5546af3d7b50ccc10eb511bec0a1029821882be76c49d8c79116163e6a62/regex-2023.12.25-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8c2c19dae8a3eb0ea45a8448356ed561be843b13cbc34b840922ddf565498c1c", size = 742481 }, + { url = "https://files.pythonhosted.org/packages/c6/b2/5f135bae42695796b5b68eb7d1aa00d39d16c39e1a60a3e0892ac8c73edc/regex-2023.12.25-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:60080bb3d8617d96f0fb7e19796384cc2467447ef1c491694850ebd3670bc457", size = 775170 }, + { url = "https://files.pythonhosted.org/packages/12/ea/73cc9fea46f631a2b36347b7de9d20c9120a45b53924496fe75b9b467682/regex-2023.12.25-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b77e27b79448e34c2c51c09836033056a0547aa360c45eeeb67803da7b0eedaf", size = 779331 }, + { url = 
"https://files.pythonhosted.org/packages/fa/53/b473865d5b44d1395874f0b88df5143def8ef2f7bd11424083260aa93461/regex-2023.12.25-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:518440c991f514331f4850a63560321f833979d145d7d81186dbe2f19e27ae3d", size = 759727 }, + { url = "https://files.pythonhosted.org/packages/64/c7/700257786f4d4974993364469438ac7498288c2b4aa683dc3230de3fd42d/regex-2023.12.25-cp312-cp312-win32.whl", hash = "sha256:e2610e9406d3b0073636a3a2e80db05a02f0c3169b5632022b4e81c0364bcda5", size = 258108 }, + { url = "https://files.pythonhosted.org/packages/1d/af/4bd17254cdda1d8092460ee5561f013c4ca9c33ecf1aab81b44280327cab/regex-2023.12.25-cp312-cp312-win_amd64.whl", hash = "sha256:cc37b9aeebab425f11f27e5e9e6cf580be7206c6582a64467a14dda211abc232", size = 268934 }, +] + +[[package]] +name = "requests" +version = "2.32.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "charset-normalizer" }, + { name = "idna" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/63/70/2bf7780ad2d390a8d301ad0b550f1581eadbd9a20f896afe06353c2a2913/requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", size = 131218 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f9/9b/335f9764261e915ed497fcdeb11df5dfd6f7bf257d4a6a2a686d80da4d54/requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6", size = 64928 }, +] + +[[package]] +name = "requests-oauthlib" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "oauthlib" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/42/f2/05f29bc3913aea15eb670be136045bf5c5bbf4b99ecb839da9b422bb2c85/requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9", size = 55650 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3b/5d/63d4ae3b9daea098d5d6f5da83984853c1bbacd5dc826764b249fe119d24/requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36", size = 24179 }, +] + +[[package]] +name = "respx" +version = "0.21.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "httpx" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fd/72/979e475ade69bcbb18288604aacbdc77b44b3bd1133e2c16660282a9f4b8/respx-0.21.1.tar.gz", hash = "sha256:0bd7fe21bfaa52106caa1223ce61224cf30786985f17c63c5d71eff0307ee8af", size = 28306 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a5/5c/428523509b26c243c1e93aa2ae385def597ef1fbdbbd47978430ba19037d/respx-0.21.1-py2.py3-none-any.whl", hash = "sha256:05f45de23f0c785862a2c92a3e173916e8ca88e4caad715dd5f68584d6053c20", size = 25130 }, +] + +[[package]] +name = "rich" +version = "13.8.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markdown-it-py" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/92/76/40f084cb7db51c9d1fa29a7120717892aeda9a7711f6225692c957a93535/rich-13.8.1.tar.gz", hash = "sha256:8260cda28e3db6bf04d2d1ef4dbc03ba80a824c88b0e7668a0f23126a424844a", size = 222080 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b0/11/dadb85e2bd6b1f1ae56669c3e1f0410797f9605d752d68fb47b77f525b31/rich-13.8.1-py3-none-any.whl", hash = 
"sha256:1760a3c0848469b97b558fc61c85233e3dafb69c7a071b4d60c38099d3cd4c06", size = 241608 }, +] + +[[package]] +name = "rsa" +version = "4.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyasn1" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/aa/65/7d973b89c4d2351d7fb232c2e452547ddfa243e93131e7cfa766da627b52/rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21", size = 29711 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/49/97/fa78e3d2f65c02c8e1268b9aba606569fe97f6c8f7c2d74394553347c145/rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7", size = 34315 }, +] + +[[package]] +name = "ruff" +version = "0.6.7" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8d/7c/3045a526c57cef4b5ec4d5d154692e31429749a49810a53e785de334c4f6/ruff-0.6.7.tar.gz", hash = "sha256:44e52129d82266fa59b587e2cd74def5637b730a69c4542525dfdecfaae38bd5", size = 3073785 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/22/c4/1c5c636f83f905c537785016e9cdd7a36df53c025a2d07940580ecb37bcf/ruff-0.6.7-py3-none-linux_armv6l.whl", hash = "sha256:08277b217534bfdcc2e1377f7f933e1c7957453e8a79764d004e44c40db923f2", size = 10336748 }, + { url = "https://files.pythonhosted.org/packages/84/d9/aa15a56be7ad796f4d7625362aff588f9fc013bbb7323a63571628a2cf2d/ruff-0.6.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:c6707a32e03b791f4448dc0dce24b636cbcdee4dd5607adc24e5ee73fd86c00a", size = 9958833 }, + { url = "https://files.pythonhosted.org/packages/27/25/5dd1c32bfc3ad3136c8ebe84312d1bdd2e6c908ac7f60692ec009b7050a8/ruff-0.6.7-py3-none-macosx_11_0_arm64.whl", hash = "sha256:533d66b7774ef224e7cf91506a7dafcc9e8ec7c059263ec46629e54e7b1f90ab", size = 9633369 }, + { url = "https://files.pythonhosted.org/packages/0e/3e/01b25484f3cb08fe6fddedf1f55f3f3c0af861a5b5f5082fbe60ab4b2596/ruff-0.6.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17a86aac6f915932d259f7bec79173e356165518859f94649d8c50b81ff087e9", size = 10637415 }, + { url = "https://files.pythonhosted.org/packages/8a/c9/5bb9b849e4777e0f961de43edf95d2af0ab34999a5feee957be096887876/ruff-0.6.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b3f8822defd260ae2460ea3832b24d37d203c3577f48b055590a426a722d50ef", size = 10097389 }, + { url = "https://files.pythonhosted.org/packages/52/cf/e08f1c290c7d848ddfb2ae811f24f445c18e1d3e50e01c38ffa7f5a50494/ruff-0.6.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9ba4efe5c6dbbb58be58dd83feedb83b5e95c00091bf09987b4baf510fee5c99", size = 10951440 }, + { url = "https://files.pythonhosted.org/packages/a2/2d/ca8aa0da5841913c302d8034c6de0ce56c401c685184d8dd23cfdd0003f9/ruff-0.6.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:525201b77f94d2b54868f0cbe5edc018e64c22563da6c5c2e5c107a4e85c1c0d", size = 11708900 }, + { url = "https://files.pythonhosted.org/packages/89/fc/9a83c57baee977c82392e19a328b52cebdaf61601af3d99498e278ef5104/ruff-0.6.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8854450839f339e1049fdbe15d875384242b8e85d5c6947bb2faad33c651020b", size = 11258892 }, + { url = "https://files.pythonhosted.org/packages/d3/a3/254cc7afef702c68ae9079290c2a1477ae0e81478589baf745026d8a4eb5/ruff-0.6.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:2f0b62056246234d59cbf2ea66e84812dc9ec4540518e37553513392c171cb18", size = 12367932 }, + { url = "https://files.pythonhosted.org/packages/9f/55/53f10c1bd8c3b2ae79aed18e62b22c6346f9296aa0ec80489b8442bd06a9/ruff-0.6.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b1462fa56c832dc0cea5b4041cfc9c97813505d11cce74ebc6d1aae068de36b", size = 10838629 }, + { url = "https://files.pythonhosted.org/packages/84/72/fb335c2b25432c63d15383ecbd7bfc1915e68cdf8d086a08042052144255/ruff-0.6.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:02b083770e4cdb1495ed313f5694c62808e71764ec6ee5db84eedd82fd32d8f5", size = 10648824 }, + { url = "https://files.pythonhosted.org/packages/92/a8/d57e135a8ad99b6a0c6e2a5c590bcacdd57f44340174f4409c3893368610/ruff-0.6.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:0c05fd37013de36dfa883a3854fae57b3113aaa8abf5dea79202675991d48624", size = 10174368 }, + { url = "https://files.pythonhosted.org/packages/a7/6f/1a30a6e81dcf2fa9ff3f7011eb87fe76c12a3c6bba74db6a1977d763de1f/ruff-0.6.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:f49c9caa28d9bbfac4a637ae10327b3db00f47d038f3fbb2195c4d682e925b14", size = 10514383 }, + { url = "https://files.pythonhosted.org/packages/0b/25/df6f2575bc9fe43a6dedfd8dee12896f09a94303e2c828d5f85856bb69a0/ruff-0.6.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:a0e1655868164e114ba43a908fd2d64a271a23660195017c17691fb6355d59bb", size = 10902340 }, + { url = "https://files.pythonhosted.org/packages/68/62/f2c1031e2fb7b94f9bf0603744e73db4ef90081b0eb1b9639a6feefd52ea/ruff-0.6.7-py3-none-win32.whl", hash = "sha256:a939ca435b49f6966a7dd64b765c9df16f1faed0ca3b6f16acdf7731969deb35", size = 8448033 }, + { url = "https://files.pythonhosted.org/packages/97/80/193d1604a3f7d75eb1b2a7ce6bf0fdbdbc136889a65caacea6ffb29501b1/ruff-0.6.7-py3-none-win_amd64.whl", hash = "sha256:590445eec5653f36248584579c06252ad2e110a5d1f32db5420de35fb0e1c977", size = 9273543 }, + { url = "https://files.pythonhosted.org/packages/8e/a8/4abb5a9f58f51e4b1ea386be5ab2e547035bc1ee57200d1eca2f8909a33e/ruff-0.6.7-py3-none-win_arm64.whl", hash = "sha256:b28f0d5e2f771c1fe3c7a45d3f53916fc74a480698c4b5731f0bea61e52137c8", size = 8618044 }, +] + +[[package]] +name = "s3transfer" +version = "0.10.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "botocore" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/cb/67/94c6730ee4c34505b14d94040e2f31edf144c230b6b49e971b4f25ff8fab/s3transfer-0.10.2.tar.gz", hash = "sha256:0711534e9356d3cc692fdde846b4a1e4b0cb6519971860796e6bc4c7aea00ef6", size = 144095 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3c/4a/b221409913760d26cf4498b7b1741d510c82d3ad38381984a3ddc135ec66/s3transfer-0.10.2-py3-none-any.whl", hash = "sha256:eca1c20de70a39daee580aef4986996620f365c4e0fda6a86100231d62f1bf69", size = 82716 }, +] + +[[package]] +name = "safetensors" +version = "0.4.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/cb/46/a1c56ed856c6ac3b1a8b37abe5be0cac53219367af1331e721b04d122577/safetensors-0.4.5.tar.gz", hash = "sha256:d73de19682deabb02524b3d5d1f8b3aaba94c72f1bbfc7911b9b9d5d391c0310", size = 65702 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/38/10/0798ec2c8704c2d172620d8a3725bed92cdd75516357b1a3e64d4229ea4e/safetensors-0.4.5-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:a63eaccd22243c67e4f2b1c3e258b257effc4acd78f3b9d397edc8cf8f1298a7", size = 392312 }, + { url = 
"https://files.pythonhosted.org/packages/2b/9e/9648d8dbb485c40a4a0212b7537626ae440b48156cc74601ca0b7a7615e0/safetensors-0.4.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:23fc9b4ec7b602915cbb4ec1a7c1ad96d2743c322f20ab709e2c35d1b66dad27", size = 381858 }, + { url = "https://files.pythonhosted.org/packages/8b/67/49556aeacc00df353767ed31d68b492fecf38c3f664c52692e4d92aa0032/safetensors-0.4.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6885016f34bef80ea1085b7e99b3c1f92cb1be78a49839203060f67b40aee761", size = 441382 }, + { url = "https://files.pythonhosted.org/packages/5d/ce/e9f4869a37bb11229e6cdb4e73a6ef23b4f360eee9dca5f7e40982779704/safetensors-0.4.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:133620f443450429322f238fda74d512c4008621227fccf2f8cf4a76206fea7c", size = 439001 }, + { url = "https://files.pythonhosted.org/packages/a0/27/aee8cf031b89c34caf83194ec6b7f2eed28d053fff8b6da6d00c85c56035/safetensors-0.4.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4fb3e0609ec12d2a77e882f07cced530b8262027f64b75d399f1504ffec0ba56", size = 478026 }, + { url = "https://files.pythonhosted.org/packages/da/33/1d9fc4805c623636e7d460f28eec92ebd1856f7a552df8eb78398a1ef4de/safetensors-0.4.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d0f1dd769f064adc33831f5e97ad07babbd728427f98e3e1db6902e369122737", size = 495545 }, + { url = "https://files.pythonhosted.org/packages/b9/df/6f766b56690709d22e83836e4067a1109a7d84ea152a6deb5692743a2805/safetensors-0.4.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c6d156bdb26732feada84f9388a9f135528c1ef5b05fae153da365ad4319c4c5", size = 435016 }, + { url = "https://files.pythonhosted.org/packages/90/fa/7bc3f18086201b1e55a42c88b822ae197d0158e12c54cd45c887305f1b7e/safetensors-0.4.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9e347d77e2c77eb7624400ccd09bed69d35c0332f417ce8c048d404a096c593b", size = 456273 }, + { url = "https://files.pythonhosted.org/packages/3e/59/2ae50150d37a65c1c5f01aec74dc737707b8bbecdc76307e5a1a12c8a376/safetensors-0.4.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9f556eea3aec1d3d955403159fe2123ddd68e880f83954ee9b4a3f2e15e716b6", size = 619669 }, + { url = "https://files.pythonhosted.org/packages/fe/43/10f0bb597aef62c9c154152e265057089f3c729bdd980e6c32c3ec2407a4/safetensors-0.4.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9483f42be3b6bc8ff77dd67302de8ae411c4db39f7224dec66b0eb95822e4163", size = 605212 }, + { url = "https://files.pythonhosted.org/packages/7c/75/ede6887ea0ceaba55730988bfc7668dc147a8758f907fa6db26fbb681b8e/safetensors-0.4.5-cp310-none-win32.whl", hash = "sha256:7389129c03fadd1ccc37fd1ebbc773f2b031483b04700923c3511d2a939252cc", size = 272652 }, + { url = "https://files.pythonhosted.org/packages/ba/f0/919c72a9eef843781e652d0650f2819039943e69b69d5af2d0451a23edc3/safetensors-0.4.5-cp310-none-win_amd64.whl", hash = "sha256:e98ef5524f8b6620c8cdef97220c0b6a5c1cef69852fcd2f174bb96c2bb316b1", size = 285879 }, + { url = "https://files.pythonhosted.org/packages/9a/a5/25bcf75e373412daf1fd88045ab3aa8140a0d804ef0e70712c4f2c5b94d8/safetensors-0.4.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:21f848d7aebd5954f92538552d6d75f7c1b4500f51664078b5b49720d180e47c", size = 392256 }, + { url = "https://files.pythonhosted.org/packages/08/8c/ece3bf8756506a890bd980eca02f47f9d98dfbf5ce16eda1368f53560f67/safetensors-0.4.5-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:bb07000b19d41e35eecef9a454f31a8b4718a185293f0d0b1c4b61d6e4487971", size = 381490 }, + { url = "https://files.pythonhosted.org/packages/39/83/c4a7ce01d626e46ea2b45887f2e59b16441408031e2ce2f9fe01860c6946/safetensors-0.4.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09dedf7c2fda934ee68143202acff6e9e8eb0ddeeb4cfc24182bef999efa9f42", size = 441093 }, + { url = "https://files.pythonhosted.org/packages/47/26/cc52de647e71bd9a0b0d78ead0d31d9c462b35550a817aa9e0cab51d6db4/safetensors-0.4.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:59b77e4b7a708988d84f26de3ebead61ef1659c73dcbc9946c18f3b1786d2688", size = 438960 }, + { url = "https://files.pythonhosted.org/packages/06/78/332538546775ee97e749867df2d58f2282d9c48a1681e4891eed8b94ec94/safetensors-0.4.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5d3bc83e14d67adc2e9387e511097f254bd1b43c3020440e708858c684cbac68", size = 478031 }, + { url = "https://files.pythonhosted.org/packages/d9/03/a3c8663f1ddda54e624ecf43fce651659b49e8e1603c52c3e464b442acfa/safetensors-0.4.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:39371fc551c1072976073ab258c3119395294cf49cdc1f8476794627de3130df", size = 494754 }, + { url = "https://files.pythonhosted.org/packages/e6/ee/69e498a892f208bd1da4104d4b9be887f8611bf4942144718b6738482250/safetensors-0.4.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6c19feda32b931cae0acd42748a670bdf56bee6476a046af20181ad3fee4090", size = 435013 }, + { url = "https://files.pythonhosted.org/packages/a2/61/f0cfce984515b86d1260f556ba3b782158e2855e6a318446ac2613786fa9/safetensors-0.4.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a659467495de201e2f282063808a41170448c78bada1e62707b07a27b05e6943", size = 455984 }, + { url = "https://files.pythonhosted.org/packages/e7/a9/3e3b48fcaade3eb4e347d39ebf0bd44291db21a3e4507854b42a7cb910ac/safetensors-0.4.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bad5e4b2476949bcd638a89f71b6916fa9a5cae5c1ae7eede337aca2100435c0", size = 619513 }, + { url = "https://files.pythonhosted.org/packages/80/23/2a7a1be24258c0e44c1d356896fd63dc0545a98d2d0184925fa09cd3ec76/safetensors-0.4.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a3a315a6d0054bc6889a17f5668a73f94f7fe55121ff59e0a199e3519c08565f", size = 604841 }, + { url = "https://files.pythonhosted.org/packages/b4/5c/34d082ff1fffffd8545fb22cbae3285ab4236f1f0cfc64b7e58261c2363b/safetensors-0.4.5-cp311-none-win32.whl", hash = "sha256:a01e232e6d3d5cf8b1667bc3b657a77bdab73f0743c26c1d3c5dd7ce86bd3a92", size = 272602 }, + { url = "https://files.pythonhosted.org/packages/6d/41/948c96c8a7e9fef57c2e051f1871c108a6dbbc6d285598bdb1d89b98617c/safetensors-0.4.5-cp311-none-win_amd64.whl", hash = "sha256:cbd39cae1ad3e3ef6f63a6f07296b080c951f24cec60188378e43d3713000c04", size = 285973 }, + { url = "https://files.pythonhosted.org/packages/bf/ac/5a63082f931e99200db95fd46fb6734f050bb6e96bf02521904c6518b7aa/safetensors-0.4.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:473300314e026bd1043cef391bb16a8689453363381561b8a3e443870937cc1e", size = 392015 }, + { url = "https://files.pythonhosted.org/packages/73/95/ab32aa6e9bdc832ff87784cdf9da26192b93de3ef82b8d1ada8f345c5044/safetensors-0.4.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:801183a0f76dc647f51a2d9141ad341f9665602a7899a693207a82fb102cc53e", size = 381774 }, + { url = 
"https://files.pythonhosted.org/packages/d6/6c/7e04b7626809fc63f3698f4c50e43aff2864b40089aa4506c918a75b8eed/safetensors-0.4.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1524b54246e422ad6fb6aea1ac71edeeb77666efa67230e1faf6999df9b2e27f", size = 441134 }, + { url = "https://files.pythonhosted.org/packages/58/2b/ffe7c86a277e6c1595fbdf415cfe2903f253f574a5405e93fda8baaa582c/safetensors-0.4.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b3139098e3e8b2ad7afbca96d30ad29157b50c90861084e69fcb80dec7430461", size = 438467 }, + { url = "https://files.pythonhosted.org/packages/67/9c/f271bd804e08c7fda954d17b70ff281228a88077337a9e70feace4f4cc93/safetensors-0.4.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65573dc35be9059770808e276b017256fa30058802c29e1038eb1c00028502ea", size = 476566 }, + { url = "https://files.pythonhosted.org/packages/4c/ad/4cf76a3e430a8a26108407fa6cb93e6f80d996a5cb75d9540c8fe3862990/safetensors-0.4.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fd33da8e9407559f8779c82a0448e2133737f922d71f884da27184549416bfed", size = 492253 }, + { url = "https://files.pythonhosted.org/packages/d9/40/a6f75ea449a9647423ec8b6f72c16998d35aa4b43cb38536ac060c5c7bf5/safetensors-0.4.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3685ce7ed036f916316b567152482b7e959dc754fcc4a8342333d222e05f407c", size = 434769 }, + { url = "https://files.pythonhosted.org/packages/52/47/d4b49b1231abf3131f7bb0bc60ebb94b27ee33e0a1f9569da05f8ac65dee/safetensors-0.4.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dde2bf390d25f67908278d6f5d59e46211ef98e44108727084d4637ee70ab4f1", size = 457166 }, + { url = "https://files.pythonhosted.org/packages/c3/cd/006468b03b0fa42ff82d795d47c4193e99001e96c3f08bd62ef1b5cab586/safetensors-0.4.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7469d70d3de970b1698d47c11ebbf296a308702cbaae7fcb993944751cf985f4", size = 619280 }, + { url = "https://files.pythonhosted.org/packages/22/4d/b6208d918e83daa84b424c0ac3191ae61b44b3191613a3a5a7b38f94b8ad/safetensors-0.4.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3a6ba28118636a130ccbb968bc33d4684c48678695dba2590169d5ab03a45646", size = 605390 }, + { url = "https://files.pythonhosted.org/packages/e8/20/bf0e01825dc01ed75538021a98b9a046e60ead63c6c6700764c821a8c873/safetensors-0.4.5-cp312-none-win32.whl", hash = "sha256:c859c7ed90b0047f58ee27751c8e56951452ed36a67afee1b0a87847d065eec6", size = 273250 }, + { url = "https://files.pythonhosted.org/packages/f1/5f/ab6b6cec85b40789801f35b7d2fb579ae242d8193929974a106d5ff5c835/safetensors-0.4.5-cp312-none-win_amd64.whl", hash = "sha256:b5a8810ad6a6f933fff6c276eae92c1da217b39b4d8b1bc1c0b8af2d270dc532", size = 286307 }, + { url = "https://files.pythonhosted.org/packages/90/61/0e27b1403e311cba0be20026bee4ee822d90eda7dad372179e7f18bb99f3/safetensors-0.4.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:25e5f8e2e92a74f05b4ca55686234c32aac19927903792b30ee6d7bd5653d54e", size = 392062 }, + { url = "https://files.pythonhosted.org/packages/b1/9f/cc31fafc9f5d79da10a83a820ca37f069bab0717895ad8cbcacf629dd1c5/safetensors-0.4.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:81efb124b58af39fcd684254c645e35692fea81c51627259cdf6d67ff4458916", size = 382517 }, + { url = 
"https://files.pythonhosted.org/packages/a4/c7/4fda8a0ebb96662550433378f4a74c677fa5fc4d0a43a7ec287d1df254a9/safetensors-0.4.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:585f1703a518b437f5103aa9cf70e9bd437cb78eea9c51024329e4fb8a3e3679", size = 441378 }, + { url = "https://files.pythonhosted.org/packages/14/31/9abb431f6209de9c80dab83e1112ebd769f1e32e7ab7ab228a02424a4693/safetensors-0.4.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4b99fbf72e3faf0b2f5f16e5e3458b93b7d0a83984fe8d5364c60aa169f2da89", size = 438831 }, + { url = "https://files.pythonhosted.org/packages/37/37/99bfb195578a808b8d045159ee9264f8da58d017ac0701853dcacda14d4e/safetensors-0.4.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b17b299ca9966ca983ecda1c0791a3f07f9ca6ab5ded8ef3d283fff45f6bcd5f", size = 477112 }, + { url = "https://files.pythonhosted.org/packages/7d/05/fac3ef107e60d2a78532bed171a91669d4bb259e1236f5ea8c67a6976c75/safetensors-0.4.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:76ded72f69209c9780fdb23ea89e56d35c54ae6abcdec67ccb22af8e696e449a", size = 493373 }, + { url = "https://files.pythonhosted.org/packages/cf/7a/825800ee8c68214b4fd3506d5e19209338c69b41e01c6e14dd13969cc8b9/safetensors-0.4.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2783956926303dcfeb1de91a4d1204cd4089ab441e622e7caee0642281109db3", size = 435422 }, + { url = "https://files.pythonhosted.org/packages/5e/6c/7a3233c08bde558d6c33a41219119866cb596139a4673cc6c24024710ffd/safetensors-0.4.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d94581aab8c6b204def4d7320f07534d6ee34cd4855688004a4354e63b639a35", size = 457382 }, + { url = "https://files.pythonhosted.org/packages/a0/58/0b7bcba3788ff503990cf9278d611b56c029400612ba93e772c987b5aa03/safetensors-0.4.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:67e1e7cb8678bb1b37ac48ec0df04faf689e2f4e9e81e566b5c63d9f23748523", size = 619301 }, + { url = "https://files.pythonhosted.org/packages/82/cc/9c2cf58611daf1c83ce5d37f9de66353e23fcda36008b13fd3409a760aa3/safetensors-0.4.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:dbd280b07e6054ea68b0cb4b16ad9703e7d63cd6890f577cb98acc5354780142", size = 605580 }, + { url = "https://files.pythonhosted.org/packages/cf/ff/037ae4c0ee32db496669365e66079b6329906c6814722b159aa700e67208/safetensors-0.4.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:fdadf66b5a22ceb645d5435a0be7a0292ce59648ca1d46b352f13cff3ea80410", size = 392951 }, + { url = "https://files.pythonhosted.org/packages/f1/d6/6621e16b35bf83ae099eaab07338f04991a26c9aa43879d05f19f35e149c/safetensors-0.4.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d42ffd4c2259f31832cb17ff866c111684c87bd930892a1ba53fed28370c918c", size = 383417 }, + { url = "https://files.pythonhosted.org/packages/ae/88/3068e1bb16f5e9f9068901de3cf7b3db270b9bfe6e7d51d4b55c1da0425d/safetensors-0.4.5-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd8a1f6d2063a92cd04145c7fd9e31a1c7d85fbec20113a14b487563fdbc0597", size = 442311 }, + { url = "https://files.pythonhosted.org/packages/f7/15/a2bb77ebbaa76b61ec2e9f731fe4db7f9473fd855d881957c51b3a168892/safetensors-0.4.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:951d2fcf1817f4fb0ef0b48f6696688a4e852a95922a042b3f96aaa67eedc920", size = 436678 }, + { url = 
"https://files.pythonhosted.org/packages/ec/79/9608c4546cdbfe3860dd7aa59e3562c9289113398b1a0bd89b68ce0a9d41/safetensors-0.4.5-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ac85d9a8c1af0e3132371d9f2d134695a06a96993c2e2f0bbe25debb9e3f67a", size = 457316 }, + { url = "https://files.pythonhosted.org/packages/0f/23/b17b483f2857835962ad33e38014efd4911791187e177bc23b057d35bee8/safetensors-0.4.5-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e3cec4a29eb7fe8da0b1c7988bc3828183080439dd559f720414450de076fcab", size = 620565 }, + { url = "https://files.pythonhosted.org/packages/19/46/5d11dc300feaad285c2f1bd784ff3f689f5e0ab6be49aaf568f3a77019eb/safetensors-0.4.5-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:21742b391b859e67b26c0b2ac37f52c9c0944a879a25ad2f9f9f3cd61e7fda8f", size = 606660 }, +] + +[[package]] +name = "schema" +version = "0.7.7" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d4/01/0ea2e66bad2f13271e93b729c653747614784d3ebde219679e41ccdceecd/schema-0.7.7.tar.gz", hash = "sha256:7da553abd2958a19dc2547c388cde53398b39196175a9be59ea1caf5ab0a1807", size = 44245 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ad/1b/81855a88c6db2b114d5b2e9f96339190d5ee4d1b981d217fa32127bb00e0/schema-0.7.7-py2.py3-none-any.whl", hash = "sha256:5d976a5b50f36e74e2157b47097b60002bd4d42e65425fcc9c9befadb4255dde", size = 18632 }, +] + +[[package]] +name = "scikit-learn" +version = "1.5.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "joblib" }, + { name = "numpy" }, + { name = "scipy" }, + { name = "threadpoolctl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/37/59/44985a2bdc95c74e34fef3d10cb5d93ce13b0e2a7baefffe1b53853b502d/scikit_learn-1.5.2.tar.gz", hash = "sha256:b4237ed7b3fdd0a4882792e68ef2545d5baa50aca3bb45aa7df468138ad8f94d", size = 7001680 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/98/89/be41419b4bec629a4691183a5eb1796f91252a13a5ffa243fd958cad7e91/scikit_learn-1.5.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:299406827fb9a4f862626d0fe6c122f5f87f8910b86fe5daa4c32dcd742139b6", size = 12106070 }, + { url = "https://files.pythonhosted.org/packages/bf/e0/3b6d777d375f3b685f433c93384cdb724fb078e1dc8f8ff0950467e56c30/scikit_learn-1.5.2-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:2d4cad1119c77930b235579ad0dc25e65c917e756fe80cab96aa3b9428bd3fb0", size = 10971758 }, + { url = "https://files.pythonhosted.org/packages/7b/31/eb7dd56c371640753953277de11356c46a3149bfeebb3d7dcd90b993715a/scikit_learn-1.5.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c412ccc2ad9bf3755915e3908e677b367ebc8d010acbb3f182814524f2e5540", size = 12500080 }, + { url = "https://files.pythonhosted.org/packages/4c/1e/a7c7357e704459c7d56a18df4a0bf08669442d1f8878cc0864beccd6306a/scikit_learn-1.5.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a686885a4b3818d9e62904d91b57fa757fc2bed3e465c8b177be652f4dd37c8", size = 13347241 }, + { url = "https://files.pythonhosted.org/packages/48/76/154ebda6794faf0b0f3ccb1b5cd9a19f0a63cb9e1f3d2c61b6114002677b/scikit_learn-1.5.2-cp310-cp310-win_amd64.whl", hash = "sha256:c15b1ca23d7c5f33cc2cb0a0d6aaacf893792271cddff0edbd6a40e8319bc113", size = 11000477 }, + { url = "https://files.pythonhosted.org/packages/ff/91/609961972f694cb9520c4c3d201e377a26583e1eb83bc5a334c893729214/scikit_learn-1.5.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:03b6158efa3faaf1feea3faa884c840ebd61b6484167c711548fce208ea09445", size = 12088580 }, + { url = "https://files.pythonhosted.org/packages/cd/7a/19fe32c810c5ceddafcfda16276d98df299c8649e24e84d4f00df4a91e01/scikit_learn-1.5.2-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:1ff45e26928d3b4eb767a8f14a9a6efbf1cbff7c05d1fb0f95f211a89fd4f5de", size = 10975994 }, + { url = "https://files.pythonhosted.org/packages/4c/75/62e49f8a62bf3c60b0e64d0fce540578ee4f0e752765beb2e1dc7c6d6098/scikit_learn-1.5.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f763897fe92d0e903aa4847b0aec0e68cadfff77e8a0687cabd946c89d17e675", size = 12465782 }, + { url = "https://files.pythonhosted.org/packages/49/21/3723de321531c9745e40f1badafd821e029d346155b6c79704e0b7197552/scikit_learn-1.5.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8b0ccd4a902836493e026c03256e8b206656f91fbcc4fde28c57a5b752561f1", size = 13322034 }, + { url = "https://files.pythonhosted.org/packages/17/1c/ccdd103cfcc9435a18819856fbbe0c20b8fa60bfc3343580de4be13f0668/scikit_learn-1.5.2-cp311-cp311-win_amd64.whl", hash = "sha256:6c16d84a0d45e4894832b3c4d0bf73050939e21b99b01b6fd59cbb0cf39163b6", size = 11015224 }, + { url = "https://files.pythonhosted.org/packages/a4/db/b485c1ac54ff3bd9e7e6b39d3cc6609c4c76a65f52ab0a7b22b6c3ab0e9d/scikit_learn-1.5.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f932a02c3f4956dfb981391ab24bda1dbd90fe3d628e4b42caef3e041c67707a", size = 12110344 }, + { url = "https://files.pythonhosted.org/packages/54/1a/7deb52fa23aebb855431ad659b3c6a2e1709ece582cb3a63d66905e735fe/scikit_learn-1.5.2-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:3b923d119d65b7bd555c73be5423bf06c0105678ce7e1f558cb4b40b0a5502b1", size = 11033502 }, + { url = "https://files.pythonhosted.org/packages/a1/32/4a7a205b14c11225609b75b28402c196e4396ac754dab6a81971b811781c/scikit_learn-1.5.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f60021ec1574e56632be2a36b946f8143bf4e5e6af4a06d85281adc22938e0dd", size = 12085794 }, + { url = "https://files.pythonhosted.org/packages/c6/29/044048c5e911373827c0e1d3051321b9183b2a4f8d4e2f11c08fcff83f13/scikit_learn-1.5.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:394397841449853c2290a32050382edaec3da89e35b3e03d6cc966aebc6a8ae6", size = 12945797 }, + { url = "https://files.pythonhosted.org/packages/aa/ce/c0b912f2f31aeb1b756a6ba56bcd84dd1f8a148470526a48515a3f4d48cd/scikit_learn-1.5.2-cp312-cp312-win_amd64.whl", hash = "sha256:57cc1786cfd6bd118220a92ede80270132aa353647684efa385a74244a41e3b1", size = 10985467 }, +] + +[[package]] +name = "scipy" +version = "1.14.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/62/11/4d44a1f274e002784e4dbdb81e0ea96d2de2d1045b2132d5af62cc31fd28/scipy-1.14.1.tar.gz", hash = "sha256:5a275584e726026a5699459aa72f828a610821006228e841b94275c4a7c08417", size = 58620554 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/64/68/3bc0cfaf64ff507d82b1e5d5b64521df4c8bf7e22bc0b897827cbee9872c/scipy-1.14.1-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:b28d2ca4add7ac16ae8bb6632a3c86e4b9e4d52d3e34267f6e1b0c1f8d87e389", size = 39069598 }, + { url = "https://files.pythonhosted.org/packages/43/a5/8d02f9c372790326ad405d94f04d4339482ec082455b9e6e288f7100513b/scipy-1.14.1-cp310-cp310-macosx_12_0_arm64.whl", hash = 
"sha256:d0d2821003174de06b69e58cef2316a6622b60ee613121199cb2852a873f8cf3", size = 29879676 }, + { url = "https://files.pythonhosted.org/packages/07/42/0e0bea9666fcbf2cb6ea0205db42c81b1f34d7b729ba251010edf9c80ebd/scipy-1.14.1-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:8bddf15838ba768bb5f5083c1ea012d64c9a444e16192762bd858f1e126196d0", size = 23088696 }, + { url = "https://files.pythonhosted.org/packages/15/47/298ab6fef5ebf31b426560e978b8b8548421d4ed0bf99263e1eb44532306/scipy-1.14.1-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:97c5dddd5932bd2a1a31c927ba5e1463a53b87ca96b5c9bdf5dfd6096e27efc3", size = 25470699 }, + { url = "https://files.pythonhosted.org/packages/d8/df/cdb6be5274bc694c4c22862ac3438cb04f360ed9df0aecee02ce0b798380/scipy-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ff0a7e01e422c15739ecd64432743cf7aae2b03f3084288f399affcefe5222d", size = 35606631 }, + { url = "https://files.pythonhosted.org/packages/47/78/b0c2c23880dd1e99e938ad49ccfb011ae353758a2dc5ed7ee59baff684c3/scipy-1.14.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e32dced201274bf96899e6491d9ba3e9a5f6b336708656466ad0522d8528f69", size = 41178528 }, + { url = "https://files.pythonhosted.org/packages/5d/aa/994b45c34b897637b853ec04334afa55a85650a0d11dacfa67232260fb0a/scipy-1.14.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8426251ad1e4ad903a4514712d2fa8fdd5382c978010d1c6f5f37ef286a713ad", size = 42784535 }, + { url = "https://files.pythonhosted.org/packages/e7/1c/8daa6df17a945cb1a2a1e3bae3c49643f7b3b94017ff01a4787064f03f84/scipy-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:a49f6ed96f83966f576b33a44257d869756df6cf1ef4934f59dd58b25e0327e5", size = 44772117 }, + { url = "https://files.pythonhosted.org/packages/b2/ab/070ccfabe870d9f105b04aee1e2860520460ef7ca0213172abfe871463b9/scipy-1.14.1-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:2da0469a4ef0ecd3693761acbdc20f2fdeafb69e6819cc081308cc978153c675", size = 39076999 }, + { url = "https://files.pythonhosted.org/packages/a7/c5/02ac82f9bb8f70818099df7e86c3ad28dae64e1347b421d8e3adf26acab6/scipy-1.14.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:c0ee987efa6737242745f347835da2cc5bb9f1b42996a4d97d5c7ff7928cb6f2", size = 29894570 }, + { url = "https://files.pythonhosted.org/packages/ed/05/7f03e680cc5249c4f96c9e4e845acde08eb1aee5bc216eff8a089baa4ddb/scipy-1.14.1-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:3a1b111fac6baec1c1d92f27e76511c9e7218f1695d61b59e05e0fe04dc59617", size = 23103567 }, + { url = "https://files.pythonhosted.org/packages/5e/fc/9f1413bef53171f379d786aabc104d4abeea48ee84c553a3e3d8c9f96a9c/scipy-1.14.1-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:8475230e55549ab3f207bff11ebfc91c805dc3463ef62eda3ccf593254524ce8", size = 25499102 }, + { url = "https://files.pythonhosted.org/packages/c2/4b/b44bee3c2ddc316b0159b3d87a3d467ef8d7edfd525e6f7364a62cd87d90/scipy-1.14.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:278266012eb69f4a720827bdd2dc54b2271c97d84255b2faaa8f161a158c3b37", size = 35586346 }, + { url = "https://files.pythonhosted.org/packages/93/6b/701776d4bd6bdd9b629c387b5140f006185bd8ddea16788a44434376b98f/scipy-1.14.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fef8c87f8abfb884dac04e97824b61299880c43f4ce675dd2cbeadd3c9b466d2", size = 41165244 }, + { url = 
"https://files.pythonhosted.org/packages/06/57/e6aa6f55729a8f245d8a6984f2855696c5992113a5dc789065020f8be753/scipy-1.14.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b05d43735bb2f07d689f56f7b474788a13ed8adc484a85aa65c0fd931cf9ccd2", size = 42817917 }, + { url = "https://files.pythonhosted.org/packages/ea/c2/5ecadc5fcccefaece775feadcd795060adf5c3b29a883bff0e678cfe89af/scipy-1.14.1-cp311-cp311-win_amd64.whl", hash = "sha256:716e389b694c4bb564b4fc0c51bc84d381735e0d39d3f26ec1af2556ec6aad94", size = 44781033 }, + { url = "https://files.pythonhosted.org/packages/c0/04/2bdacc8ac6387b15db6faa40295f8bd25eccf33f1f13e68a72dc3c60a99e/scipy-1.14.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:631f07b3734d34aced009aaf6fedfd0eb3498a97e581c3b1e5f14a04164a456d", size = 39128781 }, + { url = "https://files.pythonhosted.org/packages/c8/53/35b4d41f5fd42f5781dbd0dd6c05d35ba8aa75c84ecddc7d44756cd8da2e/scipy-1.14.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:af29a935803cc707ab2ed7791c44288a682f9c8107bc00f0eccc4f92c08d6e07", size = 29939542 }, + { url = "https://files.pythonhosted.org/packages/66/67/6ef192e0e4d77b20cc33a01e743b00bc9e68fb83b88e06e636d2619a8767/scipy-1.14.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:2843f2d527d9eebec9a43e6b406fb7266f3af25a751aa91d62ff416f54170bc5", size = 23148375 }, + { url = "https://files.pythonhosted.org/packages/f6/32/3a6dedd51d68eb7b8e7dc7947d5d841bcb699f1bf4463639554986f4d782/scipy-1.14.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:eb58ca0abd96911932f688528977858681a59d61a7ce908ffd355957f7025cfc", size = 25578573 }, + { url = "https://files.pythonhosted.org/packages/f0/5a/efa92a58dc3a2898705f1dc9dbaf390ca7d4fba26d6ab8cfffb0c72f656f/scipy-1.14.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:30ac8812c1d2aab7131a79ba62933a2a76f582d5dbbc695192453dae67ad6310", size = 35319299 }, + { url = "https://files.pythonhosted.org/packages/8e/ee/8a26858ca517e9c64f84b4c7734b89bda8e63bec85c3d2f432d225bb1886/scipy-1.14.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f9ea80f2e65bdaa0b7627fb00cbeb2daf163caa015e59b7516395fe3bd1e066", size = 40849331 }, + { url = "https://files.pythonhosted.org/packages/a5/cd/06f72bc9187840f1c99e1a8750aad4216fc7dfdd7df46e6280add14b4822/scipy-1.14.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:edaf02b82cd7639db00dbff629995ef185c8df4c3ffa71a5562a595765a06ce1", size = 42544049 }, + { url = "https://files.pythonhosted.org/packages/aa/7d/43ab67228ef98c6b5dd42ab386eae2d7877036970a0d7e3dd3eb47a0d530/scipy-1.14.1-cp312-cp312-win_amd64.whl", hash = "sha256:2ff38e22128e6c03ff73b6bb0f85f897d2362f8c052e3b8ad00532198fbdae3f", size = 44521212 }, + { url = "https://files.pythonhosted.org/packages/50/ef/ac98346db016ff18a6ad7626a35808f37074d25796fd0234c2bb0ed1e054/scipy-1.14.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1729560c906963fc8389f6aac023739ff3983e727b1a4d87696b7bf108316a79", size = 39091068 }, + { url = "https://files.pythonhosted.org/packages/b9/cc/70948fe9f393b911b4251e96b55bbdeaa8cca41f37c26fd1df0232933b9e/scipy-1.14.1-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:4079b90df244709e675cdc8b93bfd8a395d59af40b72e339c2287c91860deb8e", size = 29875417 }, + { url = "https://files.pythonhosted.org/packages/3b/2e/35f549b7d231c1c9f9639f9ef49b815d816bf54dd050da5da1c11517a218/scipy-1.14.1-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:e0cf28db0f24a38b2a0ca33a85a54852586e43cf6fd876365c86e0657cfe7d73", size = 23084508 }, + { url = 
"https://files.pythonhosted.org/packages/3f/d6/b028e3f3e59fae61fb8c0f450db732c43dd1d836223a589a8be9f6377203/scipy-1.14.1-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:0c2f95de3b04e26f5f3ad5bb05e74ba7f68b837133a4492414b3afd79dfe540e", size = 25503364 }, + { url = "https://files.pythonhosted.org/packages/a7/2f/6c142b352ac15967744d62b165537a965e95d557085db4beab2a11f7943b/scipy-1.14.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b99722ea48b7ea25e8e015e8341ae74624f72e5f21fc2abd45f3a93266de4c5d", size = 35292639 }, + { url = "https://files.pythonhosted.org/packages/56/46/2449e6e51e0d7c3575f289f6acb7f828938eaab8874dbccfeb0cd2b71a27/scipy-1.14.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5149e3fd2d686e42144a093b206aef01932a0059c2a33ddfa67f5f035bdfe13e", size = 40798288 }, + { url = "https://files.pythonhosted.org/packages/32/cd/9d86f7ed7f4497c9fd3e39f8918dd93d9f647ba80d7e34e4946c0c2d1a7c/scipy-1.14.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e4f5a7c49323533f9103d4dacf4e4f07078f360743dec7f7596949149efeec06", size = 42524647 }, + { url = "https://files.pythonhosted.org/packages/f5/1b/6ee032251bf4cdb0cc50059374e86a9f076308c1512b61c4e003e241efb7/scipy-1.14.1-cp313-cp313-win_amd64.whl", hash = "sha256:baff393942b550823bfce952bb62270ee17504d02a1801d7fd0719534dfb9c84", size = 44469524 }, +] + +[[package]] +name = "sentence-transformers" +version = "3.1.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "huggingface-hub" }, + { name = "pillow" }, + { name = "scikit-learn" }, + { name = "scipy" }, + { name = "torch" }, + { name = "tqdm" }, + { name = "transformers" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0f/09/f1c25d63d55941d8914f5df91c00a30cd0c10748458e49b4a90f97aa190d/sentence_transformers-3.1.1.tar.gz", hash = "sha256:8f00020ef4ad6b918475c38af545c22f61403b67eb22d994860bab06902db160", size = 191595 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/75/0c/0bbbf03748c3c7c69f41f016b14cbee946cbd8880d0fb91a05c6f7b7a176/sentence_transformers-3.1.1-py3-none-any.whl", hash = "sha256:c73bf6f17e3676bb9372a6133a254ebfb5907586b470f2bac5a840c64c3cf97e", size = 245290 }, +] + +[[package]] +name = "sentry-sdk" +version = "2.14.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3c/23/6527e56fb17817153c37d702d6b9ed0a2f75ed213fd98a176c1b8894ad20/sentry_sdk-2.14.0.tar.gz", hash = "sha256:1e0e2eaf6dad918c7d1e0edac868a7bf20017b177f242cefe2a6bcd47955961d", size = 282948 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/40/de/956ce1d71459fa1af0486ca141fc605ac16f7c8855750668ff663e2b436a/sentry_sdk-2.14.0-py2.py3-none-any.whl", hash = "sha256:b8bc3dc51d06590df1291b7519b85c75e2ced4f28d9ea655b6d54033503b5bf4", size = 311425 }, +] + +[package.optional-dependencies] +fastapi = [ + { name = "fastapi" }, +] +loguru = [ + { name = "loguru" }, +] + +[[package]] +name = "setuptools" +version = "75.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/27/b8/f21073fde99492b33ca357876430822e4800cdf522011f18041351dfa74b/setuptools-75.1.0.tar.gz", hash = "sha256:d59a21b17a275fb872a9c3dae73963160ae079f1049ed956880cd7c09b120538", size = 1348057 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/ff/ae/f19306b5a221f6a436d8f2238d5b80925004093fa3edea59835b514d9057/setuptools-75.1.0-py3-none-any.whl", hash = "sha256:35ab7fd3bcd95e6b7fd704e4a1539513edad446c097797f2985e0e4b960772f2", size = 1248506 }, +] + +[[package]] +name = "shapely" +version = "2.0.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/4a/89/0d20bac88016be35ff7d3c0c2ae64b477908f1b1dfa540c5d69ac7af07fe/shapely-2.0.6.tar.gz", hash = "sha256:997f6159b1484059ec239cacaa53467fd8b5564dabe186cd84ac2944663b0bf6", size = 282361 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/17/d4/f84bbbdb7771f5b9ade94db2398b256cf1471f1eb0ca8afbe0f6ca725d5a/shapely-2.0.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29a34e068da2d321e926b5073539fd2a1d4429a2c656bd63f0bd4c8f5b236d0b", size = 1449635 }, + { url = "https://files.pythonhosted.org/packages/03/10/bd6edb66ed0a845f0809f7ce653596f6fd9c6be675b3653872f47bf49f82/shapely-2.0.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e1c84c3f53144febf6af909d6b581bc05e8785d57e27f35ebaa5c1ab9baba13b", size = 1296756 }, + { url = "https://files.pythonhosted.org/packages/af/09/6374c11cb493a9970e8c04d7be25f578a37f6494a2fecfbed3a447b16b2c/shapely-2.0.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ad2fae12dca8d2b727fa12b007e46fbc522148a584f5d6546c539f3464dccde", size = 2381960 }, + { url = "https://files.pythonhosted.org/packages/2b/a6/302e0d9c210ccf4d1ffadf7ab941797d3255dcd5f93daa73aaf116a4db39/shapely-2.0.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3304883bd82d44be1b27a9d17f1167fda8c7f5a02a897958d86c59ec69b705e", size = 2468133 }, + { url = "https://files.pythonhosted.org/packages/8c/be/e448681dc485f2931d4adee93d531fce93608a3ee59433303cc1a46e21a5/shapely-2.0.6-cp310-cp310-win32.whl", hash = "sha256:3ec3a0eab496b5e04633a39fa3d5eb5454628228201fb24903d38174ee34565e", size = 1294982 }, + { url = "https://files.pythonhosted.org/packages/cd/4c/6f4a6fc085e3be01c4c9de0117a2d373bf9fec5f0426cf4d5c94090a5a4d/shapely-2.0.6-cp310-cp310-win_amd64.whl", hash = "sha256:28f87cdf5308a514763a5c38de295544cb27429cfa655d50ed8431a4796090c4", size = 1441141 }, + { url = "https://files.pythonhosted.org/packages/37/15/269d8e1f7f658a37e61f7028683c546f520e4e7cedba1e32c77ff9d3a3c7/shapely-2.0.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5aeb0f51a9db176da9a30cb2f4329b6fbd1e26d359012bb0ac3d3c7781667a9e", size = 1449578 }, + { url = "https://files.pythonhosted.org/packages/37/63/e182e43081fffa0a2d970c480f2ef91647a6ab94098f61748c23c2a485f2/shapely-2.0.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9a7a78b0d51257a367ee115f4d41ca4d46edbd0dd280f697a8092dd3989867b2", size = 1296792 }, + { url = "https://files.pythonhosted.org/packages/6e/5a/d019f69449329dcd517355444fdb9ddd58bec5e080b8bdba007e8e4c546d/shapely-2.0.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f32c23d2f43d54029f986479f7c1f6e09c6b3a19353a3833c2ffb226fb63a855", size = 2443997 }, + { url = "https://files.pythonhosted.org/packages/25/aa/53f145e5a610a49af9ac49f2f1be1ec8659ebd5c393d66ac94e57c83b00e/shapely-2.0.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3dc9fb0eb56498912025f5eb352b5126f04801ed0e8bdbd867d21bdbfd7cbd0", size = 2528334 }, + { url = 
"https://files.pythonhosted.org/packages/64/64/0c7b0a22b416d36f6296b92bb4219d82b53d0a7c47e16fd0a4c85f2f117c/shapely-2.0.6-cp311-cp311-win32.whl", hash = "sha256:d93b7e0e71c9f095e09454bf18dad5ea716fb6ced5df3cb044564a00723f339d", size = 1294669 }, + { url = "https://files.pythonhosted.org/packages/b1/5a/6a67d929c467a1973b6bb9f0b00159cc343b02bf9a8d26db1abd2f87aa23/shapely-2.0.6-cp311-cp311-win_amd64.whl", hash = "sha256:c02eb6bf4cfb9fe6568502e85bb2647921ee49171bcd2d4116c7b3109724ef9b", size = 1442032 }, + { url = "https://files.pythonhosted.org/packages/46/77/efd9f9d4b6a762f976f8b082f54c9be16f63050389500fb52e4f6cc07c1a/shapely-2.0.6-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cec9193519940e9d1b86a3b4f5af9eb6910197d24af02f247afbfb47bcb3fab0", size = 1450326 }, + { url = "https://files.pythonhosted.org/packages/68/53/5efa6e7a4036a94fe6276cf7bbb298afded51ca3396b03981ad680c8cc7d/shapely-2.0.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83b94a44ab04a90e88be69e7ddcc6f332da7c0a0ebb1156e1c4f568bbec983c3", size = 1298480 }, + { url = "https://files.pythonhosted.org/packages/88/a2/1be1db4fc262e536465a52d4f19d85834724fedf2299a1b9836bc82fe8fa/shapely-2.0.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:537c4b2716d22c92036d00b34aac9d3775e3691f80c7aa517c2c290351f42cd8", size = 2439311 }, + { url = "https://files.pythonhosted.org/packages/d5/7d/9a57e187cbf2fbbbdfd4044a4f9ce141c8d221f9963750d3b001f0ec080d/shapely-2.0.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98fea108334be345c283ce74bf064fa00cfdd718048a8af7343c59eb40f59726", size = 2524835 }, + { url = "https://files.pythonhosted.org/packages/6d/0a/f407509ab56825f39bf8cfce1fb410238da96cf096809c3e404e5bc71ea1/shapely-2.0.6-cp312-cp312-win32.whl", hash = "sha256:42fd4cd4834747e4990227e4cbafb02242c0cffe9ce7ef9971f53ac52d80d55f", size = 1295613 }, + { url = "https://files.pythonhosted.org/packages/7b/b3/857afd9dfbfc554f10d683ac412eac6fa260d1f4cd2967ecb655c57e831a/shapely-2.0.6-cp312-cp312-win_amd64.whl", hash = "sha256:665990c84aece05efb68a21b3523a6b2057e84a1afbef426ad287f0796ef8a48", size = 1442539 }, + { url = "https://files.pythonhosted.org/packages/34/e8/d164ef5b0eab86088cde06dee8415519ffd5bb0dd1bd9d021e640e64237c/shapely-2.0.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:42805ef90783ce689a4dde2b6b2f261e2c52609226a0438d882e3ced40bb3013", size = 1445344 }, + { url = "https://files.pythonhosted.org/packages/ce/e2/9fba7ac142f7831757a10852bfa465683724eadbc93d2d46f74a16f9af04/shapely-2.0.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6d2cb146191a47bd0cee8ff5f90b47547b82b6345c0d02dd8b25b88b68af62d7", size = 1296182 }, + { url = "https://files.pythonhosted.org/packages/cf/dc/790d4bda27d196cd56ec66975eaae3351c65614cafd0e16ddde39ec9fb92/shapely-2.0.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3fdef0a1794a8fe70dc1f514440aa34426cc0ae98d9a1027fb299d45741c381", size = 2423426 }, + { url = "https://files.pythonhosted.org/packages/af/b0/f8169f77eac7392d41e231911e0095eb1148b4d40c50ea9e34d999c89a7e/shapely-2.0.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c665a0301c645615a107ff7f52adafa2153beab51daf34587170d85e8ba6805", size = 2513249 }, + { url = "https://files.pythonhosted.org/packages/f6/1d/a8c0e9ab49ff2f8e4dedd71b0122eafb22a18ad7e9d256025e1f10c84704/shapely-2.0.6-cp313-cp313-win32.whl", hash = "sha256:0334bd51828f68cd54b87d80b3e7cee93f249d82ae55a0faf3ea21c9be7b323a", size = 1294848 }, + { url = 
"https://files.pythonhosted.org/packages/23/38/2bc32dd1e7e67a471d4c60971e66df0bdace88656c47a9a728ace0091075/shapely-2.0.6-cp313-cp313-win_amd64.whl", hash = "sha256:d37d070da9e0e0f0a530a621e17c0b8c3c9d04105655132a87cfff8bd77cc4c2", size = 1441371 }, +] + +[[package]] +name = "shellingham" +version = "1.5.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/58/15/8b3609fd3830ef7b27b655beb4b4e9c62313a4e8da8c676e142cc210d58e/shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de", size = 10310 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755 }, +] + +[[package]] +name = "six" +version = "1.16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/71/39/171f1c67cd00715f190ba0b100d606d440a28c93c7714febeca8b79af85e/six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", size = 34041 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d9/5a/e7c31adbe875f2abbb91bd84cf2dc52d792b5a01506781dbcf25c91daf11/six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254", size = 11053 }, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235 }, +] + +[[package]] +name = "soupsieve" +version = "2.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d7/ce/fbaeed4f9fb8b2daa961f90591662df6a86c1abf25c548329a86920aedfb/soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb", size = 101569 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/c2/fe97d779f3ef3b15f05c94a2f1e3d21732574ed441687474db9d342a7315/soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9", size = 36186 }, +] + +[[package]] +name = "spider-client" +version = "0.0.27" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/70/fc/a2a4cc112c467f89921328d005c0ac2df9c81f62c8a6d445f747252f5856/spider-client-0.0.27.tar.gz", hash = "sha256:c3feaf5c491bd9a6c509efa0c8789452497073d9f68e70fc90e7626a6a8365aa", size = 5755 } + +[[package]] +name = "sqlalchemy" +version = "2.0.35" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "greenlet", marker = "platform_machine == 'AMD64' or platform_machine == 'WIN32' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'ppc64le' or platform_machine == 'win32' or platform_machine == 'x86_64'" }, + { name = "typing-extensions" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/36/48/4f190a83525f5cefefa44f6adc9e6386c4de5218d686c27eda92eb1f5424/sqlalchemy-2.0.35.tar.gz", hash = "sha256:e11d7ea4d24f0a262bccf9a7cd6284c976c5369dac21db237cff59586045ab9f", size = 9562798 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1a/61/19395d0ae78c94f6f80c8adf39a142f3fe56cfb2235d8f2317d6dae1bf0e/SQLAlchemy-2.0.35-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:67219632be22f14750f0d1c70e62f204ba69d28f62fd6432ba05ab295853de9b", size = 2090086 }, + { url = "https://files.pythonhosted.org/packages/e6/82/06b5fcbe5d49043e40cf4e01e3b33c471c8d9292d478420b08538cae8928/SQLAlchemy-2.0.35-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4668bd8faf7e5b71c0319407b608f278f279668f358857dbfd10ef1954ac9f90", size = 2081278 }, + { url = "https://files.pythonhosted.org/packages/68/d1/7fb7ee46949a5fb34005795b1fc06a8fef67587a66da731c14e545f7eb5b/SQLAlchemy-2.0.35-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb8bea573863762bbf45d1e13f87c2d2fd32cee2dbd50d050f83f87429c9e1ea", size = 3063763 }, + { url = "https://files.pythonhosted.org/packages/7e/ff/a1eacd78b31e52a5073e9924fb4722ecc2a72f093ca8181ed81fc61aed2e/SQLAlchemy-2.0.35-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f552023710d4b93d8fb29a91fadf97de89c5926c6bd758897875435f2a939f33", size = 3072032 }, + { url = "https://files.pythonhosted.org/packages/21/ae/ddfecf149a6d16af87408bca7bd108eef7ef23d376cc8464317efb3cea3f/SQLAlchemy-2.0.35-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:016b2e665f778f13d3c438651dd4de244214b527a275e0acf1d44c05bc6026a9", size = 3028092 }, + { url = "https://files.pythonhosted.org/packages/cc/51/3e84d42121662a160bacd311cfacb29c1e6a229d59dd8edb09caa8ab283b/SQLAlchemy-2.0.35-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7befc148de64b6060937231cbff8d01ccf0bfd75aa26383ffdf8d82b12ec04ff", size = 3053543 }, + { url = "https://files.pythonhosted.org/packages/3e/7a/039c78105958da3fc361887f0a82c974cb6fa5bba965c1689ec778be1c01/SQLAlchemy-2.0.35-cp310-cp310-win32.whl", hash = "sha256:22b83aed390e3099584b839b93f80a0f4a95ee7f48270c97c90acd40ee646f0b", size = 2062372 }, + { url = "https://files.pythonhosted.org/packages/a2/50/f31e927d32f9729f69d150ffe47e7cf51e3e0bb2148fc400b3e93a92ca4c/SQLAlchemy-2.0.35-cp310-cp310-win_amd64.whl", hash = "sha256:a29762cd3d116585278ffb2e5b8cc311fb095ea278b96feef28d0b423154858e", size = 2086485 }, + { url = "https://files.pythonhosted.org/packages/c3/46/9215a35bf98c3a2528e987791e6180eb51624d2c7d5cb8e2d96a6450b657/SQLAlchemy-2.0.35-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e21f66748ab725ade40fa7af8ec8b5019c68ab00b929f6643e1b1af461eddb60", size = 2091274 }, + { url = "https://files.pythonhosted.org/packages/1e/69/919673c5101a0c633658d58b11b454b251ca82300941fba801201434755d/SQLAlchemy-2.0.35-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8a6219108a15fc6d24de499d0d515c7235c617b2540d97116b663dade1a54d62", size = 2081672 }, + { url = "https://files.pythonhosted.org/packages/67/ea/a6b0597cbda12796be2302153369dbbe90573fdab3bc4885f8efac499247/SQLAlchemy-2.0.35-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:042622a5306c23b972192283f4e22372da3b8ddf5f7aac1cc5d9c9b222ab3ff6", size = 3200083 }, + { url = "https://files.pythonhosted.org/packages/8c/d6/97bdc8d714fb21762f2092511f380f18cdb2d985d516071fa925bb433a90/SQLAlchemy-2.0.35-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:627dee0c280eea91aed87b20a1f849e9ae2fe719d52cbf847c0e0ea34464b3f7", size = 3200080 }, + { url = "https://files.pythonhosted.org/packages/87/d2/8c2adaf2ade4f6f1b725acd0b0be9210bb6a2df41024729a8eec6a86fe5a/SQLAlchemy-2.0.35-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4fdcd72a789c1c31ed242fd8c1bcd9ea186a98ee8e5408a50e610edfef980d71", size = 3137108 }, + { url = "https://files.pythonhosted.org/packages/7e/ae/ea05d0bfa8f2b25ae34591895147152854fc950f491c4ce362ae06035db8/SQLAlchemy-2.0.35-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:89b64cd8898a3a6f642db4eb7b26d1b28a497d4022eccd7717ca066823e9fb01", size = 3157437 }, + { url = "https://files.pythonhosted.org/packages/fe/5d/8ad6df01398388a766163d27960b3365f1bbd8bb7b05b5cad321a8b69b25/SQLAlchemy-2.0.35-cp311-cp311-win32.whl", hash = "sha256:6a93c5a0dfe8d34951e8a6f499a9479ffb9258123551fa007fc708ae2ac2bc5e", size = 2061935 }, + { url = "https://files.pythonhosted.org/packages/ff/68/8557efc0c32c8e2c147cb6512237448b8ed594a57cd015fda67f8e56bb3f/SQLAlchemy-2.0.35-cp311-cp311-win_amd64.whl", hash = "sha256:c68fe3fcde03920c46697585620135b4ecfdfc1ed23e75cc2c2ae9f8502c10b8", size = 2087281 }, + { url = "https://files.pythonhosted.org/packages/2f/2b/fff87e6db0da31212c98bbc445f83fb608ea92b96bda3f3f10e373bac76c/SQLAlchemy-2.0.35-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:eb60b026d8ad0c97917cb81d3662d0b39b8ff1335e3fabb24984c6acd0c900a2", size = 2089790 }, + { url = "https://files.pythonhosted.org/packages/68/92/4bb761bd82764d5827bf6b6095168c40fb5dbbd23670203aef2f96ba6bc6/SQLAlchemy-2.0.35-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6921ee01caf375363be5e9ae70d08ce7ca9d7e0e8983183080211a062d299468", size = 2080266 }, + { url = "https://files.pythonhosted.org/packages/22/46/068a65db6dc253c6f25a7598d99e0a1d60b14f661f9d09ef6c73c718fa4e/SQLAlchemy-2.0.35-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8cdf1a0dbe5ced887a9b127da4ffd7354e9c1a3b9bb330dce84df6b70ccb3a8d", size = 3229760 }, + { url = "https://files.pythonhosted.org/packages/6e/36/59830dafe40dda592304debd4cd86e583f63472f3a62c9e2695a5795e786/SQLAlchemy-2.0.35-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93a71c8601e823236ac0e5d087e4f397874a421017b3318fd92c0b14acf2b6db", size = 3240649 }, + { url = "https://files.pythonhosted.org/packages/00/50/844c50c6996f9c7f000c959dd1a7436a6c94e449ee113046a1d19e470089/SQLAlchemy-2.0.35-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e04b622bb8a88f10e439084486f2f6349bf4d50605ac3e445869c7ea5cf0fa8c", size = 3176138 }, + { url = "https://files.pythonhosted.org/packages/df/d2/336b18cac68eecb67de474fc15c85f13be4e615c6f5bae87ea38c6734ce0/SQLAlchemy-2.0.35-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1b56961e2d31389aaadf4906d453859f35302b4eb818d34a26fab72596076bb8", size = 3202753 }, + { url = "https://files.pythonhosted.org/packages/f0/f3/ee1e62fabdc10910b5ef720ae08e59bc785f26652876af3a50b89b97b412/SQLAlchemy-2.0.35-cp312-cp312-win32.whl", hash = "sha256:0f9f3f9a3763b9c4deb8c5d09c4cc52ffe49f9876af41cc1b2ad0138878453cf", size = 2060113 }, + { url = "https://files.pythonhosted.org/packages/60/63/a3cef44a52979169d884f3583d0640e64b3c28122c096474a1d7cfcaf1f3/SQLAlchemy-2.0.35-cp312-cp312-win_amd64.whl", hash = "sha256:25b0f63e7fcc2a6290cb5f7f5b4fc4047843504983a28856ce9b35d8f7de03cc", size = 2085839 }, + { url = "https://files.pythonhosted.org/packages/0e/c6/33c706449cdd92b1b6d756b247761e27d32230fd6b2de5f44c4c3e5632b2/SQLAlchemy-2.0.35-py3-none-any.whl", hash = 
"sha256:2ab3f0336c0387662ce6221ad30ab3a5e6499aab01b9790879b6578fd9b8faa1", size = 1881276 }, +] + +[[package]] +name = "sqlmodel" +version = "0.0.18" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pydantic" }, + { name = "sqlalchemy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/32/19/d0b363befa90c168941f4f7826f6a9d7211f4aa2b99660ac0410bf5803f8/sqlmodel-0.0.18.tar.gz", hash = "sha256:2e520efe03810ef2c268a1004cfc5ef8f8a936312232f38d6c8e62c11af2cac3", size = 109419 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cc/5f/8838e6b1b6673709e93386d6d42d28030883079b5ebcbdc7a37f2953e993/sqlmodel-0.0.18-py3-none-any.whl", hash = "sha256:d70fdf8fe595e30a918660cf4537b9c5fc2fffdbfcba851a0135de73c3ebcbb7", size = 26507 }, +] + +[[package]] +name = "stack-data" +version = "0.6.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "asttokens" }, + { name = "executing" }, + { name = "pure-eval" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/28/e3/55dcc2cfbc3ca9c29519eb6884dd1415ecb53b0e934862d3559ddcb7e20b/stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9", size = 44707 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f1/7b/ce1eafaf1a76852e2ec9b22edecf1daa58175c090266e9f6c64afcd81d91/stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695", size = 24521 }, +] + +[[package]] +name = "starlette" +version = "0.37.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/61/b5/6bceb93ff20bd7ca36e6f7c540581abb18f53130fabb30ba526e26fd819b/starlette-0.37.2.tar.gz", hash = "sha256:9af890290133b79fc3db55474ade20f6220a364a0402e0b556e7cd5e1e093823", size = 2843736 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fd/18/31fa32ed6c68ba66220204ef0be798c349d0a20c1901f9d4a794e08c76d8/starlette-0.37.2-py3-none-any.whl", hash = "sha256:6fe59f29268538e5d0d182f2791a479a0c64638e6935d1c6989e63fb2699c6ee", size = 71908 }, +] + +[[package]] +name = "sympy" +version = "1.13.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mpmath" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/11/8a/5a7fd6284fa8caac23a26c9ddf9c30485a48169344b4bd3b0f02fef1890f/sympy-1.13.3.tar.gz", hash = "sha256:b27fd2c6530e0ab39e275fc9b683895367e51d5da91baa8d3d64db2565fec4d9", size = 7533196 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/99/ff/c87e0622b1dadea79d2fb0b25ade9ed98954c9033722eb707053d310d4f3/sympy-1.13.3-py3-none-any.whl", hash = "sha256:54612cf55a62755ee71824ce692986f23c88ffa77207b30c1368eda4a7060f73", size = 6189483 }, +] + +[[package]] +name = "tabulate" +version = "0.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ec/fe/802052aecb21e3797b8f7902564ab6ea0d60ff8ca23952079064155d1ae1/tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c", size = 81090 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/40/44/4a5f08c96eb108af5cb50b41f76142f0afa346dfa99d5296fe7202a11854/tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f", size = 35252 }, +] + +[[package]] +name = "tenacity" +version = "8.5.0" +source = { registry = "https://pypi.org/simple" } 
+sdist = { url = "https://files.pythonhosted.org/packages/a3/4d/6a19536c50b849338fcbe9290d562b52cbdcf30d8963d3588a68a4107df1/tenacity-8.5.0.tar.gz", hash = "sha256:8bc6c0c8a09b31e6cad13c47afbed1a567518250a9a171418582ed8d9c20ca78", size = 47309 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d2/3f/8ba87d9e287b9d385a02a7114ddcef61b26f86411e121c9003eb509a1773/tenacity-8.5.0-py3-none-any.whl", hash = "sha256:b594c2a5945830c267ce6b79a166228323ed52718f30302c1359836112346687", size = 28165 }, +] + +[[package]] +name = "termcolor" +version = "2.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/10/56/d7d66a84f96d804155f6ff2873d065368b25a07222a6fd51c4f24ef6d764/termcolor-2.4.0.tar.gz", hash = "sha256:aab9e56047c8ac41ed798fa36d892a37aca6b3e9159f3e0c24bc64a9b3ac7b7a", size = 12664 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d9/5f/8c716e47b3a50cbd7c146f45881e11d9414def768b7cd9c5e6650ec2a80a/termcolor-2.4.0-py3-none-any.whl", hash = "sha256:9297c0df9c99445c2412e832e882a7884038a25617c60cea2ad69488d4040d63", size = 7719 }, +] + +[[package]] +name = "threadpoolctl" +version = "3.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/bd/55/b5148dcbf72f5cde221f8bfe3b6a540da7aa1842f6b491ad979a6c8b84af/threadpoolctl-3.5.0.tar.gz", hash = "sha256:082433502dd922bf738de0d8bcc4fdcbf0979ff44c42bd40f5af8a282f6fa107", size = 41936 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4b/2c/ffbf7a134b9ab11a67b0cf0726453cedd9c5043a4fe7a35d1cefa9a1bcfb/threadpoolctl-3.5.0-py3-none-any.whl", hash = "sha256:56c1e26c150397e58c4926da8eeee87533b1e32bef131bd4bf6a2f45f3185467", size = 18414 }, +] + +[[package]] +name = "tiktoken" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "regex" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c4/4a/abaec53e93e3ef37224a4dd9e2fc6bb871e7a538c2b6b9d2a6397271daf4/tiktoken-0.7.0.tar.gz", hash = "sha256:1077266e949c24e0291f6c350433c6f0971365ece2b173a23bc3b9f9defef6b6", size = 33437 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/96/10/28d59d43d72a0ebd4211371d0bf10c935cdecbb62b812ae04c58bfc37d96/tiktoken-0.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:485f3cc6aba7c6b6ce388ba634fbba656d9ee27f766216f45146beb4ac18b25f", size = 961465 }, + { url = "https://files.pythonhosted.org/packages/f8/0c/d4125348dedd1f8f38e3f85245e7fc38858ffc77c9b7edfb762a8191ba0b/tiktoken-0.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e54be9a2cd2f6d6ffa3517b064983fb695c9a9d8aa7d574d1ef3c3f931a99225", size = 906849 }, + { url = "https://files.pythonhosted.org/packages/b9/ab/f9c7675747f259d133d66065106cf732a7c2bef6043062fbca8e011f7f4d/tiktoken-0.7.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79383a6e2c654c6040e5f8506f3750db9ddd71b550c724e673203b4f6b4b4590", size = 1048795 }, + { url = "https://files.pythonhosted.org/packages/e7/8c/7d1007557b343d5cf18349802e94d3a14397121e9105b4661f8cd753f9bf/tiktoken-0.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d4511c52caacf3c4981d1ae2df85908bd31853f33d30b345c8b6830763f769c", size = 1080866 }, + { url = "https://files.pythonhosted.org/packages/72/40/61d6354cb64a563fce475a2907039be9fe809ca5f801213856353b01a35b/tiktoken-0.7.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:13c94efacdd3de9aff824a788353aa5749c0faee1fbe3816df365ea450b82311", size = 1092776 }, + { url = "https://files.pythonhosted.org/packages/f2/6c/83ca40527d072739f0704b9f59b325786c444ca63672a77cb69adc8181f7/tiktoken-0.7.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8e58c7eb29d2ab35a7a8929cbeea60216a4ccdf42efa8974d8e176d50c9a3df5", size = 1142591 }, + { url = "https://files.pythonhosted.org/packages/ec/1f/a5d72755118e9e1b62cdf3ef9138eb83d49088f3cb37a9540025c81c0e75/tiktoken-0.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:21a20c3bd1dd3e55b91c1331bf25f4af522c525e771691adbc9a69336fa7f702", size = 798864 }, + { url = "https://files.pythonhosted.org/packages/22/eb/57492b2568eea1d546da5cc1ae7559d924275280db80ba07e6f9b89a914b/tiktoken-0.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:10c7674f81e6e350fcbed7c09a65bca9356eaab27fb2dac65a1e440f2bcfe30f", size = 961468 }, + { url = "https://files.pythonhosted.org/packages/30/ef/e07dbfcb2f85c84abaa1b035a9279575a8da0236305491dc22ae099327f7/tiktoken-0.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:084cec29713bc9d4189a937f8a35dbdfa785bd1235a34c1124fe2323821ee93f", size = 907005 }, + { url = "https://files.pythonhosted.org/packages/ea/9b/f36db825b1e9904c3a2646439cb9923fc1e09208e2e071c6d9dd64ead131/tiktoken-0.7.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:811229fde1652fedcca7c6dfe76724d0908775b353556d8a71ed74d866f73f7b", size = 1049183 }, + { url = "https://files.pythonhosted.org/packages/61/b4/b80d1fe33015e782074e96bbbf4108ccd283b8deea86fb43c15d18b7c351/tiktoken-0.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86b6e7dc2e7ad1b3757e8a24597415bafcfb454cebf9a33a01f2e6ba2e663992", size = 1080830 }, + { url = "https://files.pythonhosted.org/packages/2a/40/c66ff3a21af6d62a7e0ff428d12002c4e0389f776d3ff96dcaa0bb354eee/tiktoken-0.7.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1063c5748be36344c7e18c7913c53e2cca116764c2080177e57d62c7ad4576d1", size = 1092967 }, + { url = "https://files.pythonhosted.org/packages/2e/80/f4c9e255ff236e6a69ce44b927629cefc1b63d3a00e2d1c9ed540c9492d2/tiktoken-0.7.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:20295d21419bfcca092644f7e2f2138ff947a6eb8cfc732c09cc7d76988d4a89", size = 1142682 }, + { url = "https://files.pythonhosted.org/packages/b1/10/c04b4ff592a5f46b28ebf4c2353f735c02ae7f0ce1b165d00748ced6467e/tiktoken-0.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:959d993749b083acc57a317cbc643fb85c014d055b2119b739487288f4e5d1cb", size = 799009 }, + { url = "https://files.pythonhosted.org/packages/1d/46/4cdda4186ce900608f522da34acf442363346688c71b938a90a52d7b84cc/tiktoken-0.7.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:71c55d066388c55a9c00f61d2c456a6086673ab7dec22dd739c23f77195b1908", size = 960446 }, + { url = "https://files.pythonhosted.org/packages/b6/30/09ced367d280072d7a3e21f34263dfbbf6378661e7a0f6414e7c18971083/tiktoken-0.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:09ed925bccaa8043e34c519fbb2f99110bd07c6fd67714793c21ac298e449410", size = 906652 }, + { url = "https://files.pythonhosted.org/packages/e6/7b/c949e4954441a879a67626963dff69096e3c774758b9f2bb0853f7b4e1e7/tiktoken-0.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03c6c40ff1db0f48a7b4d2dafeae73a5607aacb472fa11f125e7baf9dce73704", size = 1047904 }, + { url = 
"https://files.pythonhosted.org/packages/50/81/1842a22f15586072280364c2ab1e40835adaf64e42fe80e52aff921ee021/tiktoken-0.7.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d20b5c6af30e621b4aca094ee61777a44118f52d886dbe4f02b70dfe05c15350", size = 1079836 }, + { url = "https://files.pythonhosted.org/packages/6d/87/51a133a3d5307cf7ae3754249b0faaa91d3414b85c3d36f80b54d6817aa6/tiktoken-0.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d427614c3e074004efa2f2411e16c826f9df427d3c70a54725cae860f09e4bf4", size = 1092472 }, + { url = "https://files.pythonhosted.org/packages/a5/1f/c93517dc6d3b2c9e988b8e24f87a8b2d4a4ab28920a3a3f3ea338397ae0c/tiktoken-0.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8c46d7af7b8c6987fac9b9f61041b452afe92eb087d29c9ce54951280f899a97", size = 1141881 }, + { url = "https://files.pythonhosted.org/packages/bf/4b/48ca098cb580c099b5058bf62c4cb5e90ca6130fa43ef4df27088536245b/tiktoken-0.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:0bc603c30b9e371e7c4c7935aba02af5994a909fc3c0fe66e7004070858d3f8f", size = 799281 }, +] + +[[package]] +name = "tokenizers" +version = "0.19.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "huggingface-hub" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/48/04/2071c150f374aab6d5e92aaec38d0f3c368d227dd9e0469a1f0966ac68d1/tokenizers-0.19.1.tar.gz", hash = "sha256:ee59e6680ed0fdbe6b724cf38bd70400a0c1dd623b07ac729087270caeac88e3", size = 321039 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c1/60/91cac8d496b304ec5a22f07606893cad35ea8e1a8406dc8909e365f97a80/tokenizers-0.19.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:952078130b3d101e05ecfc7fc3640282d74ed26bcf691400f872563fca15ac97", size = 2533301 }, + { url = "https://files.pythonhosted.org/packages/4c/12/9cb68762ff5fee1efd51aefe2f62cb225f26f060a68a3779e1060bbc7a59/tokenizers-0.19.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:82c8b8063de6c0468f08e82c4e198763e7b97aabfe573fd4cf7b33930ca4df77", size = 2440223 }, + { url = "https://files.pythonhosted.org/packages/e4/03/b2020e6a78fb994cff1ec962adc157c23109172a46b4fe451d6d0dd33fdb/tokenizers-0.19.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f03727225feaf340ceeb7e00604825addef622d551cbd46b7b775ac834c1e1c4", size = 3683779 }, + { url = "https://files.pythonhosted.org/packages/50/4e/2e5549a26dc6f9e434f83bebf16c2d7dc9dc3477cc0ec8b23ede4d465b90/tokenizers-0.19.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:453e4422efdfc9c6b6bf2eae00d5e323f263fff62b29a8c9cd526c5003f3f642", size = 3569431 }, + { url = "https://files.pythonhosted.org/packages/75/79/158626bd794e75551e0c6bb93f1cd3c9ba08ba14b181b98f09e95994f609/tokenizers-0.19.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:02e81bf089ebf0e7f4df34fa0207519f07e66d8491d963618252f2e0729e0b46", size = 3424739 }, + { url = "https://files.pythonhosted.org/packages/65/8e/5f4316976c26009f1ae0b6543f3d97af29afa5ba5dc145251e6a07314618/tokenizers-0.19.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b07c538ba956843833fee1190cf769c60dc62e1cf934ed50d77d5502194d63b1", size = 3965791 }, + { url = "https://files.pythonhosted.org/packages/6a/e1/5dbac9618709972434eea072670cd69fba1aa988e6200f16057722b4bf96/tokenizers-0.19.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e28cab1582e0eec38b1f38c1c1fb2e56bce5dc180acb1724574fc5f47da2a4fe", size = 4049879 }, + { url = 
"https://files.pythonhosted.org/packages/40/4f/eb78de4af3b17b589f43a369cbf0c3a7173f25c3d2cd93068852c07689aa/tokenizers-0.19.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b01afb7193d47439f091cd8f070a1ced347ad0f9144952a30a41836902fe09e", size = 3607049 }, + { url = "https://files.pythonhosted.org/packages/f5/f8/141dcb0f88e9452af8d20d14dd53aab5937222a2bb4f2c04bfed6829263c/tokenizers-0.19.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7fb297edec6c6841ab2e4e8f357209519188e4a59b557ea4fafcf4691d1b4c98", size = 9634084 }, + { url = "https://files.pythonhosted.org/packages/2e/be/debb7caa3f88ed54015170db16e07aa3a5fea2d3983d0dde92f98d888dc8/tokenizers-0.19.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2e8a3dd055e515df7054378dc9d6fa8c8c34e1f32777fb9a01fea81496b3f9d3", size = 9949480 }, + { url = "https://files.pythonhosted.org/packages/7a/e7/26bedf5d270d293d572a90bd66b0b030012aedb95d8ee87e8bcd446b76fb/tokenizers-0.19.1-cp310-none-win32.whl", hash = "sha256:7ff898780a155ea053f5d934925f3902be2ed1f4d916461e1a93019cc7250837", size = 2041462 }, + { url = "https://files.pythonhosted.org/packages/f4/85/d999b9a05fd101d48f1a365d68be0b109277bb25c89fb37a389d669f9185/tokenizers-0.19.1-cp310-none-win_amd64.whl", hash = "sha256:bea6f9947e9419c2fda21ae6c32871e3d398cba549b93f4a65a2d369662d9403", size = 2220036 }, + { url = "https://files.pythonhosted.org/packages/c8/d6/6e1d728d765eb4102767f071bf7f6439ab10d7f4a975c9217db65715207a/tokenizers-0.19.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:5c88d1481f1882c2e53e6bb06491e474e420d9ac7bdff172610c4f9ad3898059", size = 2533448 }, + { url = "https://files.pythonhosted.org/packages/90/79/d17a0f491d10817cd30f1121a07aa09c8e97a81114b116e473baf1577f09/tokenizers-0.19.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ddf672ed719b4ed82b51499100f5417d7d9f6fb05a65e232249268f35de5ed14", size = 2440254 }, + { url = "https://files.pythonhosted.org/packages/c7/28/2d11c3ff94f9d42eceb2ea549a06e3f166fe391c5a025e5d96fac898a3ac/tokenizers-0.19.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:dadc509cc8a9fe460bd274c0e16ac4184d0958117cf026e0ea8b32b438171594", size = 3684971 }, + { url = "https://files.pythonhosted.org/packages/36/c6/537f22b57e6003904d35d07962dbde2f2e9bdd791d0241da976a4c7f8194/tokenizers-0.19.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfedf31824ca4915b511b03441784ff640378191918264268e6923da48104acc", size = 3568894 }, + { url = "https://files.pythonhosted.org/packages/af/ef/3c1deed14ec59b2c8e7e2fa27b2a53f7d101181277a43b89ab17d891ef2e/tokenizers-0.19.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac11016d0a04aa6487b1513a3a36e7bee7eec0e5d30057c9c0408067345c48d2", size = 3426873 }, + { url = "https://files.pythonhosted.org/packages/06/db/c0320c4798ac6bd12d2ef895bec9d10d216a3b4d6fff10e9d68883ea7edc/tokenizers-0.19.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:76951121890fea8330d3a0df9a954b3f2a37e3ec20e5b0530e9a0044ca2e11fe", size = 3965050 }, + { url = "https://files.pythonhosted.org/packages/4c/8a/a166888d6cb14db55f5eb7ce0b1d4777d145aa27cbf4f945712cf6c29935/tokenizers-0.19.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b342d2ce8fc8d00f376af068e3274e2e8649562e3bc6ae4a67784ded6b99428d", size = 4047855 }, + { url = 
"https://files.pythonhosted.org/packages/a7/03/fb50fc03f86016b227a967c8d474f90230c885c0d18f78acdfda7a96ce56/tokenizers-0.19.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d16ff18907f4909dca9b076b9c2d899114dd6abceeb074eca0c93e2353f943aa", size = 3608228 }, + { url = "https://files.pythonhosted.org/packages/5b/cd/0385e1026e1e03732fd398e964792a3a8433918b166748c82507e014d748/tokenizers-0.19.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:706a37cc5332f85f26efbe2bdc9ef8a9b372b77e4645331a405073e4b3a8c1c6", size = 9633115 }, + { url = "https://files.pythonhosted.org/packages/25/50/8f8ad0bbdaf09d04b15e6502d1fa1c653754ed7e016e4ae009726aa1a4e4/tokenizers-0.19.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:16baac68651701364b0289979ecec728546133e8e8fe38f66fe48ad07996b88b", size = 9949062 }, + { url = "https://files.pythonhosted.org/packages/db/11/31be66710f1d14526f3588a441efadeb184e1e68458067007b20ead03c59/tokenizers-0.19.1-cp311-none-win32.whl", hash = "sha256:9ed240c56b4403e22b9584ee37d87b8bfa14865134e3e1c3fb4b2c42fafd3256", size = 2041039 }, + { url = "https://files.pythonhosted.org/packages/65/8e/6d7d72b28f22c422cff8beae10ac3c2e4376b9be721ef8167b7eecd1da62/tokenizers-0.19.1-cp311-none-win_amd64.whl", hash = "sha256:ad57d59341710b94a7d9dbea13f5c1e7d76fd8d9bcd944a7a6ab0b0da6e0cc66", size = 2220386 }, + { url = "https://files.pythonhosted.org/packages/63/90/2890cd096898dcdb596ee172cde40c0f54a9cf43b0736aa260a5501252af/tokenizers-0.19.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:621d670e1b1c281a1c9698ed89451395d318802ff88d1fc1accff0867a06f153", size = 2530580 }, + { url = "https://files.pythonhosted.org/packages/74/d1/f4e1e950adb36675dfd8f9d0f4be644f3f3aaf22a5677a4f5c81282b662e/tokenizers-0.19.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d924204a3dbe50b75630bd16f821ebda6a5f729928df30f582fb5aade90c818a", size = 2436682 }, + { url = "https://files.pythonhosted.org/packages/ed/30/89b321a16c58d233e301ec15072c0d3ed5014825e72da98604cd3ab2fba1/tokenizers-0.19.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:4f3fefdc0446b1a1e6d81cd4c07088ac015665d2e812f6dbba4a06267d1a2c95", size = 3693494 }, + { url = "https://files.pythonhosted.org/packages/05/40/fa899f32de483500fbc78befd378fd7afba4270f17db707d1a78c0a4ddc3/tokenizers-0.19.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9620b78e0b2d52ef07b0d428323fb34e8ea1219c5eac98c2596311f20f1f9266", size = 3566541 }, + { url = "https://files.pythonhosted.org/packages/67/14/e7da32ae5fb4971830f1ef335932fae3fa57e76b537e852f146c850aefdf/tokenizers-0.19.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04ce49e82d100594715ac1b2ce87d1a36e61891a91de774755f743babcd0dd52", size = 3430792 }, + { url = "https://files.pythonhosted.org/packages/f2/4b/aae61bdb6ab584d2612170801703982ee0e35f8b6adacbeefe5a3b277621/tokenizers-0.19.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5c2ff13d157afe413bf7e25789879dd463e5a4abfb529a2d8f8473d8042e28f", size = 3962812 }, + { url = "https://files.pythonhosted.org/packages/0a/b6/f7b7ef89c4da7b20256e6eab23d3835f05d1ca8f451d31c16cbfe3cd9eb6/tokenizers-0.19.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3174c76efd9d08f836bfccaca7cfec3f4d1c0a4cf3acbc7236ad577cc423c840", size = 4024688 }, + { url = 
"https://files.pythonhosted.org/packages/80/54/12047a69f5b382d7ee72044dc89151a2dd0d13b2c9bdcc22654883704d31/tokenizers-0.19.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c9d5b6c0e7a1e979bec10ff960fae925e947aab95619a6fdb4c1d8ff3708ce3", size = 3610961 }, + { url = "https://files.pythonhosted.org/packages/52/b7/1e8a913d18ac28feeda42d4d2d51781874398fb59cd1c1e2653a4b5742ed/tokenizers-0.19.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a179856d1caee06577220ebcfa332af046d576fb73454b8f4d4b0ba8324423ea", size = 9631367 }, + { url = "https://files.pythonhosted.org/packages/ac/3d/2284f6d99f8f21d09352b88b8cfefa24ab88468d962aeb0aa15c20d76b32/tokenizers-0.19.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:952b80dac1a6492170f8c2429bd11fcaa14377e097d12a1dbe0ef2fb2241e16c", size = 9950121 }, + { url = "https://files.pythonhosted.org/packages/2a/94/ec3369dbc9b7200c14c8c7a1a04c78b7a7398d0c001e1b7d1ffe30eb93a0/tokenizers-0.19.1-cp312-none-win32.whl", hash = "sha256:01d62812454c188306755c94755465505836fd616f75067abcae529c35edeb57", size = 2044069 }, + { url = "https://files.pythonhosted.org/packages/0c/97/80bff6937e0c67d30c0facacd4f0bcf4254e581aa4995c73cef8c8640e56/tokenizers-0.19.1-cp312-none-win_amd64.whl", hash = "sha256:b70bfbe3a82d3e3fb2a5e9b22a39f8d1740c96c68b6ace0086b39074f08ab89a", size = 2214527 }, + { url = "https://files.pythonhosted.org/packages/cf/7b/38fb7207cde3d1dc5272411cd18178e6437cdc1ef08cac5d0e8cfd57f38c/tokenizers-0.19.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3b11853f17b54c2fe47742c56d8a33bf49ce31caf531e87ac0d7d13d327c9334", size = 2532668 }, + { url = "https://files.pythonhosted.org/packages/1d/0d/2c452fe17fc17f0cdb713acb811eebb1f714b8c21d497c4672af4f491229/tokenizers-0.19.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d26194ef6c13302f446d39972aaa36a1dda6450bc8949f5eb4c27f51191375bd", size = 2438321 }, + { url = "https://files.pythonhosted.org/packages/19/e0/f9e915d028b45798723eab59c253da28040aa66b9f31dcb7cfc3be88fa37/tokenizers-0.19.1-pp310-pypy310_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:e8d1ed93beda54bbd6131a2cb363a576eac746d5c26ba5b7556bc6f964425594", size = 3682304 }, + { url = "https://files.pythonhosted.org/packages/ce/2b/db8a94608c392752681c2ca312487b7cd5bcc4f77e24a90daa4916138271/tokenizers-0.19.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca407133536f19bdec44b3da117ef0d12e43f6d4b56ac4c765f37eca501c7bda", size = 3566208 }, + { url = "https://files.pythonhosted.org/packages/d8/58/2e998462677c4c0eb5123ce386bcb488a155664d273d0283122866515f09/tokenizers-0.19.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce05fde79d2bc2e46ac08aacbc142bead21614d937aac950be88dc79f9db9022", size = 3605791 }, + { url = "https://files.pythonhosted.org/packages/83/ac/26bc2e2bb2a054dc2e51699628936f5474e093b68da6ccdde04b2fc39ab8/tokenizers-0.19.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:35583cd46d16f07c054efd18b5d46af4a2f070a2dd0a47914e66f3ff5efb2b1e", size = 9632867 }, + { url = "https://files.pythonhosted.org/packages/45/b6/36c1bb106bbe96012c9367df89ed01599cada036c0b96d38fbbdbeb75c9f/tokenizers-0.19.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:43350270bfc16b06ad3f6f07eab21f089adb835544417afda0f83256a8bf8b75", size = 9945103 }, +] + +[[package]] +name = "tomli" +version = "2.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/c0/3f/d7af728f075fb08564c5949a9c95e44352e23dee646869fa104a3b2060a3/tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f", size = 15164 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/97/75/10a9ebee3fd790d20926a90a2547f0bf78f371b2f13aa822c759680ca7b9/tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc", size = 12757 }, +] + +[[package]] +name = "torch" +version = "2.4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "filelock" }, + { name = "fsspec" }, + { name = "jinja2" }, + { name = "networkx" }, + { name = "nvidia-cublas-cu12", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" }, + { name = "nvidia-cuda-cupti-cu12", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" }, + { name = "nvidia-cuda-nvrtc-cu12", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" }, + { name = "nvidia-cuda-runtime-cu12", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" }, + { name = "nvidia-cudnn-cu12", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" }, + { name = "nvidia-cufft-cu12", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" }, + { name = "nvidia-curand-cu12", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" }, + { name = "nvidia-cusolver-cu12", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" }, + { name = "nvidia-cusparse-cu12", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" }, + { name = "nvidia-nccl-cu12", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" }, + { name = "nvidia-nvtx-cu12", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" }, + { name = "sympy" }, + { name = "triton", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" }, + { name = "typing-extensions" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/41/05/d540049b1832d1062510efc6829634b7fbef5394c757d8312414fb65a3cb/torch-2.4.1-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:362f82e23a4cd46341daabb76fba08f04cd646df9bfaf5da50af97cb60ca4971", size = 797072810 }, + { url = "https://files.pythonhosted.org/packages/a0/12/2162df9c47386ae7cedbc938f9703fee4792d93504fab8608d541e71ece3/torch-2.4.1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:e8ac1985c3ff0f60d85b991954cfc2cc25f79c84545aead422763148ed2759e3", size = 89699259 }, + { url = "https://files.pythonhosted.org/packages/5d/4c/b2a59ff0e265f5ee154f0d81e948b1518b94f545357731e1a3245ee5d45b/torch-2.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:91e326e2ccfb1496e3bee58f70ef605aeb27bd26be07ba64f37dcaac3d070ada", size = 199433813 }, + { url = "https://files.pythonhosted.org/packages/dc/fb/1333ba666bbd53846638dd75a7a1d4eaf964aff1c482fc046e2311a1b499/torch-2.4.1-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:d36a8ef100f5bff3e9c3cea934b9e0d7ea277cb8210c7152d34a9a6c5830eadd", size = 62139309 }, + { url = "https://files.pythonhosted.org/packages/ea/ea/4ab009e953bca6ff35ad75b8ab58c0923308636c182c145dc63084f7d136/torch-2.4.1-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:0b5f88afdfa05a335d80351e3cea57d38e578c8689f751d35e0ff36bce872113", size = 797111232 }, + { url = 
"https://files.pythonhosted.org/packages/8f/a1/b31f94b4631c1731261db9fdc9a749ef58facc3b76094a6fe974f611f239/torch-2.4.1-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:ef503165f2341942bfdf2bd520152f19540d0c0e34961232f134dc59ad435be8", size = 89719574 }, + { url = "https://files.pythonhosted.org/packages/5a/6a/775b93d6888c31f1f1fc457e4f5cc89f0984412d5dcdef792b8f2aa6e812/torch-2.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:092e7c2280c860eff762ac08c4bdcd53d701677851670695e0c22d6d345b269c", size = 199436128 }, + { url = "https://files.pythonhosted.org/packages/1f/34/c93873c37f93154d982172755f7e504fdbae6c760499303a3111ce6ce327/torch-2.4.1-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:ddddbd8b066e743934a4200b3d54267a46db02106876d21cf31f7da7a96f98ea", size = 62145176 }, + { url = "https://files.pythonhosted.org/packages/cc/df/5204a13a7a973c23c7ade615bafb1a3112b5d0ec258d8390f078fa4ab0f7/torch-2.4.1-cp312-cp312-manylinux1_x86_64.whl", hash = "sha256:fdc4fe11db3eb93c1115d3e973a27ac7c1a8318af8934ffa36b0370efe28e042", size = 797019590 }, + { url = "https://files.pythonhosted.org/packages/4f/16/d23a689e5ef8001ed2ace1a3a59f2fda842889b0c3f3877799089925282a/torch-2.4.1-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:18835374f599207a9e82c262153c20ddf42ea49bc76b6eadad8e5f49729f6e4d", size = 89613802 }, + { url = "https://files.pythonhosted.org/packages/a8/e0/ca8354dfb8d834a76da51b06e8248b70fc182bc163540507919124974bdf/torch-2.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:ebea70ff30544fc021d441ce6b219a88b67524f01170b1c538d7d3ebb5e7f56c", size = 199387694 }, + { url = "https://files.pythonhosted.org/packages/ac/30/8b6f77ea4ce84f015ee024b8dfef0dac289396254e8bfd493906d4cbb848/torch-2.4.1-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:72b484d5b6cec1a735bf3fa5a1c4883d01748698c5e9cfdbeb4ffab7c7987e0d", size = 62123443 }, +] + +[[package]] +name = "tornado" +version = "6.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/66/398ac7167f1c7835406888a386f6d0d26ee5dbf197d8a571300be57662d3/tornado-6.4.1.tar.gz", hash = "sha256:92d3ab53183d8c50f8204a51e6f91d18a15d5ef261e84d452800d4ff6fc504e9", size = 500623 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/00/d9/c33be3c1a7564f7d42d87a8d186371a75fd142097076767a5c27da941fef/tornado-6.4.1-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:163b0aafc8e23d8cdc3c9dfb24c5368af84a81e3364745ccb4427669bf84aec8", size = 435924 }, + { url = "https://files.pythonhosted.org/packages/2e/0f/721e113a2fac2f1d7d124b3279a1da4c77622e104084f56119875019ffab/tornado-6.4.1-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6d5ce3437e18a2b66fbadb183c1d3364fb03f2be71299e7d10dbeeb69f4b2a14", size = 433883 }, + { url = "https://files.pythonhosted.org/packages/13/cf/786b8f1e6fe1c7c675e79657448178ad65e41c1c9765ef82e7f6f765c4c5/tornado-6.4.1-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2e20b9113cd7293f164dc46fffb13535266e713cdb87bd2d15ddb336e96cfc4", size = 437224 }, + { url = "https://files.pythonhosted.org/packages/e4/8e/a6ce4b8d5935558828b0f30f3afcb2d980566718837b3365d98e34f6067e/tornado-6.4.1-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ae50a504a740365267b2a8d1a90c9fbc86b780a39170feca9bcc1787ff80842", size = 436597 }, + { url = 
"https://files.pythonhosted.org/packages/22/d4/54f9d12668b58336bd30defe0307e6c61589a3e687b05c366f804b7faaf0/tornado-6.4.1-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:613bf4ddf5c7a95509218b149b555621497a6cc0d46ac341b30bd9ec19eac7f3", size = 436797 }, + { url = "https://files.pythonhosted.org/packages/cf/3f/2c792e7afa7dd8b24fad7a2ed3c2f24a5ec5110c7b43a64cb6095cc106b8/tornado-6.4.1-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:25486eb223babe3eed4b8aecbac33b37e3dd6d776bc730ca14e1bf93888b979f", size = 437516 }, + { url = "https://files.pythonhosted.org/packages/71/63/c8fc62745e669ac9009044b889fc531b6f88ac0f5f183cac79eaa950bb23/tornado-6.4.1-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:454db8a7ecfcf2ff6042dde58404164d969b6f5d58b926da15e6b23817950fc4", size = 436958 }, + { url = "https://files.pythonhosted.org/packages/94/d4/f8ac1f5bd22c15fad3b527e025ce219bd526acdbd903f52053df2baecc8b/tornado-6.4.1-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a02a08cc7a9314b006f653ce40483b9b3c12cda222d6a46d4ac63bb6c9057698", size = 436882 }, + { url = "https://files.pythonhosted.org/packages/4b/3e/a8124c21cc0bbf144d7903d2a0cadab15cadaf683fa39a0f92bc567f0d4d/tornado-6.4.1-cp38-abi3-win32.whl", hash = "sha256:d9a566c40b89757c9aa8e6f032bcdb8ca8795d7c1a9762910c722b1635c9de4d", size = 438092 }, + { url = "https://files.pythonhosted.org/packages/d9/2f/3f2f05e84a7aff787a96d5fb06821323feb370fe0baed4db6ea7b1088f32/tornado-6.4.1-cp38-abi3-win_amd64.whl", hash = "sha256:b24b8982ed444378d7f21d563f4180a2de31ced9d8d84443907a0a64da2072e7", size = 438532 }, +] + +[[package]] +name = "tqdm" +version = "4.66.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "platform_system == 'Windows'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/58/83/6ba9844a41128c62e810fddddd72473201f3eacde02046066142a2d96cc5/tqdm-4.66.5.tar.gz", hash = "sha256:e1020aef2e5096702d8a025ac7d16b1577279c9d63f8375b63083e9a5f0fcbad", size = 169504 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/48/5d/acf5905c36149bbaec41ccf7f2b68814647347b72075ac0b1fe3022fdc73/tqdm-4.66.5-py3-none-any.whl", hash = "sha256:90279a3770753eafc9194a0364852159802111925aa30eb3f9d85b0e805ac7cd", size = 78351 }, +] + +[[package]] +name = "traitlets" +version = "5.14.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/eb/79/72064e6a701c2183016abbbfedaba506d81e30e232a68c9f0d6f6fcd1574/traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7", size = 161621 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/00/c0/8f5d070730d7836adc9c9b6408dec68c6ced86b304a9b26a14df072a6e8c/traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f", size = 85359 }, +] + +[[package]] +name = "transformers" +version = "4.44.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "filelock" }, + { name = "huggingface-hub" }, + { name = "numpy" }, + { name = "packaging" }, + { name = "pyyaml" }, + { name = "regex" }, + { name = "requests" }, + { name = "safetensors" }, + { name = "tokenizers" }, + { name = "tqdm" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f8/a3/81de49357a3c6ac4421d48d9662b53293838f217baf3f3bb9eb55f89fab6/transformers-4.44.2.tar.gz", hash = 
"sha256:36aa17cc92ee154058e426d951684a2dab48751b35b49437896f898931270826", size = 8110312 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/75/35/07c9879163b603f0e464b0f6e6e628a2340cfc7cdc5ca8e7d52d776710d4/transformers-4.44.2-py3-none-any.whl", hash = "sha256:1c02c65e7bfa5e52a634aff3da52138b583fc6f263c1f28d547dc144ba3d412d", size = 9465369 }, +] + +[[package]] +name = "triton" +version = "3.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "filelock" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/45/27/14cc3101409b9b4b9241d2ba7deaa93535a217a211c86c4cc7151fb12181/triton-3.0.0-1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e1efef76935b2febc365bfadf74bcb65a6f959a9872e5bddf44cc9e0adce1e1a", size = 209376304 }, + { url = "https://files.pythonhosted.org/packages/33/3e/a2f59384587eff6aeb7d37b6780de7fedd2214935e27520430ca9f5b7975/triton-3.0.0-1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5ce8520437c602fb633f1324cc3871c47bee3b67acf9756c1a66309b60e3216c", size = 209438883 }, + { url = "https://files.pythonhosted.org/packages/fe/7b/7757205dee3628f75e7991021d15cd1bd0c9b044ca9affe99b50879fc0e1/triton-3.0.0-1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:34e509deb77f1c067d8640725ef00c5cbfcb2052a1a3cb6a6d343841f92624eb", size = 209464695 }, +] + +[[package]] +name = "typer" +version = "0.12.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "rich" }, + { name = "shellingham" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c5/58/a79003b91ac2c6890fc5d90145c662fd5771c6f11447f116b63300436bc9/typer-0.12.5.tar.gz", hash = "sha256:f592f089bedcc8ec1b974125d64851029c3b1af145f04aca64d69410f0c9b722", size = 98953 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a8/2b/886d13e742e514f704c33c4caa7df0f3b89e5a25ef8db02aa9ca3d9535d5/typer-0.12.5-py3-none-any.whl", hash = "sha256:62fe4e471711b147e3365034133904df3e235698399bc4de2b36c8579298d52b", size = 47288 }, +] + +[[package]] +name = "types-cffi" +version = "1.16.0.20240331" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "types-setuptools" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/91/c8/81e5699160b91f0f91eea852d84035c412bfb4b3a29389701044400ab379/types-cffi-1.16.0.20240331.tar.gz", hash = "sha256:b8b20d23a2b89cfed5f8c5bc53b0cb8677c3aac6d970dbc771e28b9c698f5dee", size = 11318 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/69/7a/98f5d2493a652cec05d3b09be59202d202004a41fca9c70d224782611365/types_cffi-1.16.0.20240331-py3-none-any.whl", hash = "sha256:a363e5ea54a4eb6a4a105d800685fde596bc318089b025b27dee09849fe41ff0", size = 14550 }, +] + +[[package]] +name = "types-google-cloud-ndb" +version = "2.3.0.20240813" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9f/48/37fb4586c437768b55385fed2bcd696d742c7a37c8728ae644fbac2d3363/types-google-cloud-ndb-2.3.0.20240813.tar.gz", hash = "sha256:f69b4f1abc4a2c423b288ffc48d2994b59358bfc151824614abc1d3f7f19f18d", size = 14673 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/83/3f/c794e4a6500ea79d1866868c4801319f843503a31b1dc34deace81ad3b7c/types_google_cloud_ndb-2.3.0.20240813-py3-none-any.whl", hash = "sha256:79404e04e97324d0b6466f297e92e734a38fb9cd064c2f3816820311bc6c3f57", size = 18010 }, +] + +[[package]] 
+name = "types-markdown" +version = "3.7.0.20240822" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/37/5d/2ac1b166d6f251d67c3e5d4b6095e122bafea0e184d54122aa13efc2dd27/types-Markdown-3.7.0.20240822.tar.gz", hash = "sha256:183557c9f4f865bdefd8f5f96a38145c31819271cde111d35557c3bd2069e78d", size = 13187 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/49/25/381b2e70da6f14084b64578a1f251b50f1ccd3197a3353389b5b6189b4db/types_Markdown-3.7.0.20240822-py3-none-any.whl", hash = "sha256:bec91c410aaf2470ffdb103e38438fbcc53689b00133f19e64869eb138432ad7", size = 18976 }, +] + +[[package]] +name = "types-passlib" +version = "1.7.7.20240819" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d3/19/5041c4bce2909c67fc3f9471ad67972d94c31cb591a970a8faf1220a3717/types-passlib-1.7.7.20240819.tar.gz", hash = "sha256:8fc8df71623845032293d5cf7f8091f0adfeba02d387a2888684b8413f14b3d0", size = 18386 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f1/4b/606ac25e89908e4577cd1aa19ffbebe55a6720cff69303db68701f3cc388/types_passlib-1.7.7.20240819-py3-none-any.whl", hash = "sha256:c4d299083497b66e12258c7b77c08952574213fdf7009da3135d8181a6a25f23", size = 33240 }, +] + +[[package]] +name = "types-pillow" +version = "10.2.0.20240822" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/18/4a/4495264dddaa600d65d68bcedb64dcccf9d9da61adff51f7d2ffd8e4c9ce/types-Pillow-10.2.0.20240822.tar.gz", hash = "sha256:559fb52a2ef991c326e4a0d20accb3bb63a7ba8d40eb493e0ecb0310ba52f0d3", size = 35389 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/66/23/e81a5354859831fcf54d488d33b80ba6133ea84f874a9c0ec40a4881e133/types_Pillow-10.2.0.20240822-py3-none-any.whl", hash = "sha256:d9dab025aba07aeb12fd50a6799d4eac52a9603488eca09d7662543983f16c5d", size = 54354 }, +] + +[[package]] +name = "types-pyasn1" +version = "0.6.0.20240913" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c5/e2/42410b64ba53584a8f2b681e76b23569a61869a22325cfeef2728e999ffd/types-pyasn1-0.6.0.20240913.tar.gz", hash = "sha256:a1da054db13d3d4ccfa69c515678154014336ad3d9f9ade01845f9edb1a2bc71", size = 12375 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ff/6a/847b2a137dba37974e17cc2ae5f62bed0d68006ac609a063810038adecff/types_pyasn1-0.6.0.20240913-py3-none-any.whl", hash = "sha256:95f3cb1fbd63ff91cd0410945f8aeae6b0be359533c00f39d8e17124884157af", size = 19338 }, +] + +[[package]] +name = "types-pyopenssl" +version = "24.1.0.20240722" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cryptography" }, + { name = "types-cffi" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/93/29/47a346550fd2020dac9a7a6d033ea03fccb92fa47c726056618cc889745e/types-pyOpenSSL-24.1.0.20240722.tar.gz", hash = "sha256:47913b4678a01d879f503a12044468221ed8576263c1540dcb0484ca21b08c39", size = 8458 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/98/05/c868a850b6fbb79c26f5f299b768ee0adc1f9816d3461dcf4287916f655b/types_pyOpenSSL-24.1.0.20240722-py3-none-any.whl", hash = "sha256:6a7a5d2ec042537934cfb4c9d4deb0e16c4c6250b09358df1f083682fe6fda54", size = 7499 }, +] + +[[package]] +name = "types-python-jose" +version = "3.3.4.20240106" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "types-pyasn1" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/4d/83/5824277f62a0a07ab3eaade216f19b59fadea4efdad9071d70799d97f170/types-python-jose-3.3.4.20240106.tar.gz", hash = "sha256:b18cf8c5080bbfe1ef7c3b707986435d9efca3e90889acb6a06f65e06bc3405a", size = 6937 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/84/bd/fe23e814f1ca70f8427bf5defb981b8e0636731863c7836a6d6b6e49c715/types_python_jose-3.3.4.20240106-py3-none-any.whl", hash = "sha256:b515a6c0c61f5e2a53bc93e3a2b024cbd42563e2e19cbde9fd1c2cc2cfe77ccc", size = 9712 }, +] + +[[package]] +name = "types-pytz" +version = "2024.2.0.20240913" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/da/cf/a4811b07d7309d9eecf0f383ca5747ce90f8a0d860acb2050bc57f3c9379/types-pytz-2024.2.0.20240913.tar.gz", hash = "sha256:4433b5df4a6fc587bbed41716d86a5ba5d832b4378e506f40d34bc9c81df2c24", size = 5462 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b6/a6/8846372f55c6bb470ff7207e4dc601017e264e5fe7d79a441ece3545b36c/types_pytz-2024.2.0.20240913-py3-none-any.whl", hash = "sha256:a1eebf57ebc6e127a99d2fa2ba0a88d2b173784ef9b3defcc2004ab6855a44df", size = 5251 }, +] + +[[package]] +name = "types-pywin32" +version = "306.0.0.20240822" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d3/1c/2b808a59acf6713b7eae38fd324a98be56f9a4f5c0b860b7f062808d036e/types-pywin32-306.0.0.20240822.tar.gz", hash = "sha256:34d22b58aaa2cc86fe585b6e2e1eda88a60b010badea0e0e4a410ebe28744645", size = 257145 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c3/ac/e60d44fb61ec74725ca1e611872573c3a5043ba6629833b6ff760b6ed758/types_pywin32-306.0.0.20240822-py3-none-any.whl", hash = "sha256:31a16f7eaf711166e8aec50ee1ddf0f16b4512e19ecc92a019ae7a0860b64bad", size = 320285 }, +] + +[[package]] +name = "types-pyyaml" +version = "6.0.12.20240917" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/92/7d/a95df0a11f95c8f48d7683f03e4aed1a2c0fc73e9de15cca4d38034bea1a/types-PyYAML-6.0.12.20240917.tar.gz", hash = "sha256:d1405a86f9576682234ef83bcb4e6fff7c9305c8b1fbad5e0bcd4f7dbdc9c587", size = 12381 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9e/2c/c1d81d680997d24b0542aa336f0a65bd7835e5224b7670f33a7d617da379/types_PyYAML-6.0.12.20240917-py3-none-any.whl", hash = "sha256:392b267f1c0fe6022952462bf5d6523f31e37f6cea49b14cee7ad634b6301570", size = 15264 }, +] + +[[package]] +name = "types-redis" +version = "4.6.0.20240903" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cryptography" }, + { name = "types-pyopenssl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9e/d6/1eb493bb75cc0f60cedc4ec53ad581804b518a43fcb0c6b66760a175af6e/types-redis-4.6.0.20240903.tar.gz", hash = "sha256:4bab1a378dbf23c2c95c370dfdb89a8f033957c4fd1a53fee71b529c182fe008", size = 49572 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/51/c65f5729c1adb5f5a3f19fce0f500c19196cd1f91c00815fc573ceadc7f5/types_redis-4.6.0.20240903-py3-none-any.whl", hash = "sha256:0e7537e5c085fe96b7d468d5edae0cf667b4ba4b62c6e4a5dfc340bd3b868c23", size = 58719 }, +] + +[[package]] +name = "types-requests" +version = "2.32.0.20240914" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "urllib3" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/9b/9e/aea33405c230cc3984c9f1065012d3a2003cef910730c367a0e91e7a4901/types-requests-2.32.0.20240914.tar.gz", hash = "sha256:2850e178db3919d9bf809e434eef65ba49d0e7e33ac92d588f4a5e295fffd405", size = 18030 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8f/55/ea44dad71b9d92f86198f7448f5ba46ac919355f4f69bb1c0fa1af02b1b4/types_requests-2.32.0.20240914-py3-none-any.whl", hash = "sha256:59c2f673eb55f32a99b2894faf6020e1a9f4a402ad0f192bfee0b64469054310", size = 15838 }, +] + +[[package]] +name = "types-setuptools" +version = "75.1.0.20240917" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/48/91/c1168caa2a5ba14c01b146b516fab2d8646887cb5db7e78e13b9c6da88d2/types-setuptools-75.1.0.20240917.tar.gz", hash = "sha256:12f12a165e7ed383f31def705e5c0fa1c26215dd466b0af34bd042f7d5331f55", size = 42585 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/40/4c/a4c87d86ba18ff00773ab8591c79c23a6938293ab3e2cec2b2eb4ca5b644/types_setuptools-75.1.0.20240917-py3-none-any.whl", hash = "sha256:06f78307e68d1bbde6938072c57b81cf8a99bc84bd6dc7e4c5014730b097dc0c", size = 65516 }, +] + +[[package]] +name = "typing-extensions" +version = "4.12.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/df/db/f35a00659bc03fec321ba8bce9420de607a1d37f8342eee1863174c69557/typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8", size = 85321 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/26/9f/ad63fc0248c5379346306f8668cda6e2e2e9c95e01216d2b8ffd9ff037d0/typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d", size = 37438 }, +] + +[[package]] +name = "typing-inspect" +version = "0.9.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mypy-extensions" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/dc/74/1789779d91f1961fa9438e9a8710cdae6bd138c80d7303996933d117264a/typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78", size = 13825 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/65/f3/107a22063bf27bdccf2024833d3445f4eea42b2e598abfbd46f6a63b6cb0/typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f", size = 8827 }, +] + +[[package]] +name = "tzdata" +version = "2024.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/74/5b/e025d02cb3b66b7b76093404392d4b44343c69101cc85f4d180dd5784717/tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd", size = 190559 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/65/58/f9c9e6be752e9fcb8b6a0ee9fb87e6e7a1f6bcab2cdc73f02bb7ba91ada0/tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252", size = 345370 }, +] + +[[package]] +name = "uncurl" +version = "0.0.11" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyperclip" }, + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e2/90/95297af714749e5f6dedb8677bd3b6087dc3e31a212633f90c92bbda24c0/uncurl-0.0.11.tar.gz", hash = 
"sha256:530c9bbd4d118f4cde6194165ff484cc25b0661cd256f19e9d5fcb53fc077790", size = 3288 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7f/f4/20f213a91ef5a09f53d24333e002a71fbb53b38d5eaa9e49d39f7ce1941e/uncurl-0.0.11-py3-none-any.whl", hash = "sha256:5961e93f07a5c9f2ef8ae4245bd92b0a6ce503c851de980f5b70080ae74cdc59", size = 7367 }, +] + +[[package]] +name = "urllib3" +version = "2.2.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ed/63/22ba4ebfe7430b76388e7cd448d5478814d3032121827c12a2cc287e2260/urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9", size = 300677 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ce/d9/5f4c13cecde62396b0d3fe530a50ccea91e7dfc1ccf0e09c228841bb5ba8/urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac", size = 126338 }, +] + +[[package]] +name = "uvicorn" +version = "0.30.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "h11" }, + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5a/01/5e637e7aa9dd031be5376b9fb749ec20b86f5a5b6a49b87fabd374d5fa9f/uvicorn-0.30.6.tar.gz", hash = "sha256:4b15decdda1e72be08209e860a1e10e92439ad5b97cf44cc945fcbee66fc5788", size = 42825 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f5/8e/cdc7d6263db313030e4c257dd5ba3909ebc4e4fb53ad62d5f09b1a2f5458/uvicorn-0.30.6-py3-none-any.whl", hash = "sha256:65fd46fe3fda5bdc1b03b94eb634923ff18cd35b2f084813ea79d1f103f711b5", size = 62835 }, +] + +[package.optional-dependencies] +standard = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "httptools" }, + { name = "python-dotenv" }, + { name = "pyyaml" }, + { name = "uvloop", marker = "platform_python_implementation != 'PyPy' and sys_platform != 'cygwin' and sys_platform != 'win32'" }, + { name = "watchfiles" }, + { name = "websockets" }, +] + +[[package]] +name = "uvloop" +version = "0.20.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/bc/f1/dc9577455e011ad43d9379e836ee73f40b4f99c02946849a44f7ae64835e/uvloop-0.20.0.tar.gz", hash = "sha256:4603ca714a754fc8d9b197e325db25b2ea045385e8a3ad05d3463de725fdf469", size = 2329938 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f3/69/cc1ad125ea8ce4a4d3ba7d9836062c3fc9063cf163ddf0f168e73f3268e3/uvloop-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:9ebafa0b96c62881d5cafa02d9da2e44c23f9f0cd829f3a32a6aff771449c996", size = 1363922 }, + { url = "https://files.pythonhosted.org/packages/f7/45/5a3f7a32372e4a90dfd83f30507183ec38990b8c5930ed7e36c6a15af47b/uvloop-0.20.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:35968fc697b0527a06e134999eef859b4034b37aebca537daeb598b9d45a137b", size = 760386 }, + { url = "https://files.pythonhosted.org/packages/9e/a5/9e973b25ade12c938940751bce71d0cb36efee3489014471f7d9c0a3c379/uvloop-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b16696f10e59d7580979b420eedf6650010a4a9c3bd8113f24a103dfdb770b10", size = 3432586 }, + { url = "https://files.pythonhosted.org/packages/a9/e0/0bec8a25b2e9cf14fdfcf0229637b437c923b4e5ca22f8e988363c49bb51/uvloop-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:9b04d96188d365151d1af41fa2d23257b674e7ead68cfd61c725a422764062ae", size = 3431802 }, + { url = "https://files.pythonhosted.org/packages/95/3b/14cef46dcec6237d858666a4a1fdb171361528c70fcd930bfc312920e7a9/uvloop-0.20.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:94707205efbe809dfa3a0d09c08bef1352f5d3d6612a506f10a319933757c006", size = 4144444 }, + { url = "https://files.pythonhosted.org/packages/9d/5a/0ac516562ff783f760cab3b061f10fdeb4a9f985ad4b44e7e4564ff11691/uvloop-0.20.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:89e8d33bb88d7263f74dc57d69f0063e06b5a5ce50bb9a6b32f5fcbe655f9e73", size = 4147039 }, + { url = "https://files.pythonhosted.org/packages/64/bf/45828beccf685b7ed9638d9b77ef382b470c6ca3b5bff78067e02ffd5663/uvloop-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e50289c101495e0d1bb0bfcb4a60adde56e32f4449a67216a1ab2750aa84f037", size = 1320593 }, + { url = "https://files.pythonhosted.org/packages/27/c0/3c24e50bee7802a2add96ca9f0d5eb0ebab07e0a5615539d38aeb89499b9/uvloop-0.20.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e237f9c1e8a00e7d9ddaa288e535dc337a39bcbf679f290aee9d26df9e72bce9", size = 736676 }, + { url = "https://files.pythonhosted.org/packages/83/ce/ffa3c72954eae36825acfafd2b6a9221d79abd2670c0d25e04d6ef4a2007/uvloop-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:746242cd703dc2b37f9d8b9f173749c15e9a918ddb021575a0205ec29a38d31e", size = 3494573 }, + { url = "https://files.pythonhosted.org/packages/46/6d/4caab3a36199ba52b98d519feccfcf48921d7a6649daf14a93c7e77497e9/uvloop-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82edbfd3df39fb3d108fc079ebc461330f7c2e33dbd002d146bf7c445ba6e756", size = 3489932 }, + { url = "https://files.pythonhosted.org/packages/e4/4f/49c51595bd794945c88613df88922c38076eae2d7653f4624aa6f4980b07/uvloop-0.20.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:80dc1b139516be2077b3e57ce1cb65bfed09149e1d175e0478e7a987863b68f0", size = 4185596 }, + { url = "https://files.pythonhosted.org/packages/b8/94/7e256731260d313f5049717d1c4582d52a3b132424c95e16954a50ab95d3/uvloop-0.20.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4f44af67bf39af25db4c1ac27e82e9665717f9c26af2369c404be865c8818dcf", size = 4185746 }, + { url = "https://files.pythonhosted.org/packages/2d/64/31cbd379d6e260ac8de3f672f904e924f09715c3f192b09f26cc8e9f574c/uvloop-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:4b75f2950ddb6feed85336412b9a0c310a2edbcf4cf931aa5cfe29034829676d", size = 1324302 }, + { url = "https://files.pythonhosted.org/packages/1e/6b/9207e7177ff30f78299401f2e1163ea41130d4fd29bcdc6d12572c06b728/uvloop-0.20.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:77fbc69c287596880ecec2d4c7a62346bef08b6209749bf6ce8c22bbaca0239e", size = 738105 }, + { url = "https://files.pythonhosted.org/packages/c1/ba/b64b10f577519d875992dc07e2365899a1a4c0d28327059ce1e1bdfb6854/uvloop-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6462c95f48e2d8d4c993a2950cd3d31ab061864d1c226bbf0ee2f1a8f36674b9", size = 4090658 }, + { url = "https://files.pythonhosted.org/packages/0a/f8/5ceea6876154d926604f10c1dd896adf9bce6d55a55911364337b8a5ed8d/uvloop-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:649c33034979273fa71aa25d0fe120ad1777c551d8c4cd2c0c9851d88fcb13ab", size = 4173357 }, + { url = 
"https://files.pythonhosted.org/packages/18/b2/117ab6bfb18274753fbc319607bf06e216bd7eea8be81d5bac22c912d6a7/uvloop-0.20.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3a609780e942d43a275a617c0839d85f95c334bad29c4c0918252085113285b5", size = 4029868 }, + { url = "https://files.pythonhosted.org/packages/6f/52/deb4be09060637ef4752adaa0b75bf770c20c823e8108705792f99cd4a6f/uvloop-0.20.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aea15c78e0d9ad6555ed201344ae36db5c63d428818b4b2a42842b3870127c00", size = 4115980 }, +] + +[[package]] +name = "vine" +version = "5.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/bd/e4/d07b5f29d283596b9727dd5275ccbceb63c44a1a82aa9e4bfd20426762ac/vine-5.1.0.tar.gz", hash = "sha256:8b62e981d35c41049211cf62a0a1242d8c1ee9bd15bb196ce38aefd6799e61e0", size = 48980 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/03/ff/7c0c86c43b3cbb927e0ccc0255cb4057ceba4799cd44ae95174ce8e8b5b2/vine-5.1.0-py3-none-any.whl", hash = "sha256:40fdf3c48b2cfe1c38a49e9ae2da6fda88e4794c810050a728bd7413811fb1dc", size = 9636 }, +] + +[[package]] +name = "virtualenv" +version = "20.26.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "distlib" }, + { name = "filelock" }, + { name = "platformdirs" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/bf/4c/66ce54c8736ff164e85117ca36b02a1e14c042a6963f85eeda82664fda4e/virtualenv-20.26.5.tar.gz", hash = "sha256:ce489cac131aa58f4b25e321d6d186171f78e6cb13fafbf32a840cee67733ff4", size = 9371932 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c6/1d/e1a44fdd6d30829ba21fc58b5d98a67e7aae8f4165f11d091e53aec12560/virtualenv-20.26.5-py3-none-any.whl", hash = "sha256:4f3ac17b81fba3ce3bd6f4ead2749a72da5929c01774948e243db9ba41df4ff6", size = 5999288 }, +] + +[[package]] +name = "vulture" +version = "2.12" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "tomli", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/07/8f/5e24a3587d7b034544ec6fc5db2cb5e9f9c2ff86e800d73ab10f5ca806c0/vulture-2.12.tar.gz", hash = "sha256:c35e98e992eb84b01cdadbfeb0aca2d44363e7dfe6c19416f65001ae69360ccc", size = 56419 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d7/ff/c0a36b26ceba87c3d4a43b3dc7bc93c774e0b07c656641b3688404571b13/vulture-2.12-py2.py3-none-any.whl", hash = "sha256:68ee4c4ce0128bb504dc7c2df4244c97ef5a2e29af42f27a976a6e30906e993a", size = 27570 }, +] + +[[package]] +name = "watchfiles" +version = "0.24.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c8/27/2ba23c8cc85796e2d41976439b08d52f691655fdb9401362099502d1f0cf/watchfiles-0.24.0.tar.gz", hash = "sha256:afb72325b74fa7a428c009c1b8be4b4d7c2afedafb2982827ef2156646df2fe1", size = 37870 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/89/a1/631c12626378b9f1538664aa221feb5c60dfafbd7f60b451f8d0bdbcdedd/watchfiles-0.24.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:083dc77dbdeef09fa44bb0f4d1df571d2e12d8a8f985dccde71ac3ac9ac067a0", size = 375096 }, + { url = "https://files.pythonhosted.org/packages/f7/5c/f27c979c8a10aaa2822286c1bffdce3db731cd1aa4224b9f86623e94bbfe/watchfiles-0.24.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e94e98c7cb94cfa6e071d401ea3342767f28eb5a06a58fafdc0d2a4974f4f35c", size = 367425 }, + { url = 
"https://files.pythonhosted.org/packages/74/0d/1889e5649885484d29f6c792ef274454d0a26b20d6ed5fdba5409335ccb6/watchfiles-0.24.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82ae557a8c037c42a6ef26c494d0631cacca040934b101d001100ed93d43f361", size = 437705 }, + { url = "https://files.pythonhosted.org/packages/85/8a/01d9a22e839f0d1d547af11b1fcac6ba6f889513f1b2e6f221d9d60d9585/watchfiles-0.24.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:acbfa31e315a8f14fe33e3542cbcafc55703b8f5dcbb7c1eecd30f141df50db3", size = 433636 }, + { url = "https://files.pythonhosted.org/packages/62/32/a93db78d340c7ef86cde469deb20e36c6b2a873edee81f610e94bbba4e06/watchfiles-0.24.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b74fdffce9dfcf2dc296dec8743e5b0332d15df19ae464f0e249aa871fc1c571", size = 451069 }, + { url = "https://files.pythonhosted.org/packages/99/c2/e9e2754fae3c2721c9a7736f92dab73723f1968ed72535fff29e70776008/watchfiles-0.24.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:449f43f49c8ddca87c6b3980c9284cab6bd1f5c9d9a2b00012adaaccd5e7decd", size = 469306 }, + { url = "https://files.pythonhosted.org/packages/4c/45/f317d9e3affb06c3c27c478de99f7110143e87f0f001f0f72e18d0e1ddce/watchfiles-0.24.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4abf4ad269856618f82dee296ac66b0cd1d71450fc3c98532d93798e73399b7a", size = 476187 }, + { url = "https://files.pythonhosted.org/packages/ac/d3/f1f37248abe0114916921e638f71c7d21fe77e3f2f61750e8057d0b68ef2/watchfiles-0.24.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f895d785eb6164678ff4bb5cc60c5996b3ee6df3edb28dcdeba86a13ea0465e", size = 425743 }, + { url = "https://files.pythonhosted.org/packages/2b/e8/c7037ea38d838fd81a59cd25761f106ee3ef2cfd3261787bee0c68908171/watchfiles-0.24.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7ae3e208b31be8ce7f4c2c0034f33406dd24fbce3467f77223d10cd86778471c", size = 612327 }, + { url = "https://files.pythonhosted.org/packages/a0/c5/0e6e228aafe01a7995fbfd2a4edb221bb11a2744803b65a5663fb85e5063/watchfiles-0.24.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2efec17819b0046dde35d13fb8ac7a3ad877af41ae4640f4109d9154ed30a188", size = 595096 }, + { url = "https://files.pythonhosted.org/packages/63/d5/4780e8bf3de3b4b46e7428a29654f7dc041cad6b19fd86d083e4b6f64bbe/watchfiles-0.24.0-cp310-none-win32.whl", hash = "sha256:6bdcfa3cd6fdbdd1a068a52820f46a815401cbc2cb187dd006cb076675e7b735", size = 264149 }, + { url = "https://files.pythonhosted.org/packages/fe/1b/5148898ba55fc9c111a2a4a5fb67ad3fa7eb2b3d7f0618241ed88749313d/watchfiles-0.24.0-cp310-none-win_amd64.whl", hash = "sha256:54ca90a9ae6597ae6dc00e7ed0a040ef723f84ec517d3e7ce13e63e4bc82fa04", size = 277542 }, + { url = "https://files.pythonhosted.org/packages/85/02/366ae902cd81ca5befcd1854b5c7477b378f68861597cef854bd6dc69fbe/watchfiles-0.24.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:bdcd5538e27f188dd3c804b4a8d5f52a7fc7f87e7fd6b374b8e36a4ca03db428", size = 375579 }, + { url = "https://files.pythonhosted.org/packages/bc/67/d8c9d256791fe312fea118a8a051411337c948101a24586e2df237507976/watchfiles-0.24.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2dadf8a8014fde6addfd3c379e6ed1a981c8f0a48292d662e27cabfe4239c83c", size = 367726 }, + { url = 
"https://files.pythonhosted.org/packages/b1/dc/a8427b21ef46386adf824a9fec4be9d16a475b850616cfd98cf09a97a2ef/watchfiles-0.24.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6509ed3f467b79d95fc62a98229f79b1a60d1b93f101e1c61d10c95a46a84f43", size = 437735 }, + { url = "https://files.pythonhosted.org/packages/3a/21/0b20bef581a9fbfef290a822c8be645432ceb05fb0741bf3c032e0d90d9a/watchfiles-0.24.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8360f7314a070c30e4c976b183d1d8d1585a4a50c5cb603f431cebcbb4f66327", size = 433644 }, + { url = "https://files.pythonhosted.org/packages/1c/e8/d5e5f71cc443c85a72e70b24269a30e529227986096abe091040d6358ea9/watchfiles-0.24.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:316449aefacf40147a9efaf3bd7c9bdd35aaba9ac5d708bd1eb5763c9a02bef5", size = 450928 }, + { url = "https://files.pythonhosted.org/packages/61/ee/bf17f5a370c2fcff49e1fec987a6a43fd798d8427ea754ce45b38f9e117a/watchfiles-0.24.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73bde715f940bea845a95247ea3e5eb17769ba1010efdc938ffcb967c634fa61", size = 469072 }, + { url = "https://files.pythonhosted.org/packages/a3/34/03b66d425986de3fc6077e74a74c78da298f8cb598887f664a4485e55543/watchfiles-0.24.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3770e260b18e7f4e576edca4c0a639f704088602e0bc921c5c2e721e3acb8d15", size = 475517 }, + { url = "https://files.pythonhosted.org/packages/70/eb/82f089c4f44b3171ad87a1b433abb4696f18eb67292909630d886e073abe/watchfiles-0.24.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa0fd7248cf533c259e59dc593a60973a73e881162b1a2f73360547132742823", size = 425480 }, + { url = "https://files.pythonhosted.org/packages/53/20/20509c8f5291e14e8a13104b1808cd7cf5c44acd5feaecb427a49d387774/watchfiles-0.24.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d7a2e3b7f5703ffbd500dabdefcbc9eafeff4b9444bbdd5d83d79eedf8428fab", size = 612322 }, + { url = "https://files.pythonhosted.org/packages/df/2b/5f65014a8cecc0a120f5587722068a975a692cadbe9fe4ea56b3d8e43f14/watchfiles-0.24.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d831ee0a50946d24a53821819b2327d5751b0c938b12c0653ea5be7dea9c82ec", size = 595094 }, + { url = "https://files.pythonhosted.org/packages/18/98/006d8043a82c0a09d282d669c88e587b3a05cabdd7f4900e402250a249ac/watchfiles-0.24.0-cp311-none-win32.whl", hash = "sha256:49d617df841a63b4445790a254013aea2120357ccacbed00253f9c2b5dc24e2d", size = 264191 }, + { url = "https://files.pythonhosted.org/packages/8a/8b/badd9247d6ec25f5f634a9b3d0d92e39c045824ec7e8afcedca8ee52c1e2/watchfiles-0.24.0-cp311-none-win_amd64.whl", hash = "sha256:d3dcb774e3568477275cc76554b5a565024b8ba3a0322f77c246bc7111c5bb9c", size = 277527 }, + { url = "https://files.pythonhosted.org/packages/af/19/35c957c84ee69d904299a38bae3614f7cede45f07f174f6d5a2f4dbd6033/watchfiles-0.24.0-cp311-none-win_arm64.whl", hash = "sha256:9301c689051a4857d5b10777da23fafb8e8e921bcf3abe6448a058d27fb67633", size = 266253 }, + { url = "https://files.pythonhosted.org/packages/35/82/92a7bb6dc82d183e304a5f84ae5437b59ee72d48cee805a9adda2488b237/watchfiles-0.24.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:7211b463695d1e995ca3feb38b69227e46dbd03947172585ecb0588f19b0d87a", size = 374137 }, + { url = "https://files.pythonhosted.org/packages/87/91/49e9a497ddaf4da5e3802d51ed67ff33024597c28f652b8ab1e7c0f5718b/watchfiles-0.24.0-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:4b8693502d1967b00f2fb82fc1e744df128ba22f530e15b763c8d82baee15370", size = 367733 }, + { url = "https://files.pythonhosted.org/packages/0d/d8/90eb950ab4998effea2df4cf3a705dc594f6bc501c5a353073aa990be965/watchfiles-0.24.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdab9555053399318b953a1fe1f586e945bc8d635ce9d05e617fd9fe3a4687d6", size = 437322 }, + { url = "https://files.pythonhosted.org/packages/6c/a2/300b22e7bc2a222dd91fce121cefa7b49aa0d26a627b2777e7bdfcf1110b/watchfiles-0.24.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:34e19e56d68b0dad5cff62273107cf5d9fbaf9d75c46277aa5d803b3ef8a9e9b", size = 433409 }, + { url = "https://files.pythonhosted.org/packages/99/44/27d7708a43538ed6c26708bcccdde757da8b7efb93f4871d4cc39cffa1cc/watchfiles-0.24.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:41face41f036fee09eba33a5b53a73e9a43d5cb2c53dad8e61fa6c9f91b5a51e", size = 452142 }, + { url = "https://files.pythonhosted.org/packages/b0/ec/c4e04f755be003129a2c5f3520d2c47026f00da5ecb9ef1e4f9449637571/watchfiles-0.24.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5148c2f1ea043db13ce9b0c28456e18ecc8f14f41325aa624314095b6aa2e9ea", size = 469414 }, + { url = "https://files.pythonhosted.org/packages/c5/4e/cdd7de3e7ac6432b0abf282ec4c1a1a2ec62dfe423cf269b86861667752d/watchfiles-0.24.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e4bd963a935aaf40b625c2499f3f4f6bbd0c3776f6d3bc7c853d04824ff1c9f", size = 472962 }, + { url = "https://files.pythonhosted.org/packages/27/69/e1da9d34da7fc59db358424f5d89a56aaafe09f6961b64e36457a80a7194/watchfiles-0.24.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c79d7719d027b7a42817c5d96461a99b6a49979c143839fc37aa5748c322f234", size = 425705 }, + { url = "https://files.pythonhosted.org/packages/e8/c1/24d0f7357be89be4a43e0a656259676ea3d7a074901f47022f32e2957798/watchfiles-0.24.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:32aa53a9a63b7f01ed32e316e354e81e9da0e6267435c7243bf8ae0f10b428ef", size = 612851 }, + { url = "https://files.pythonhosted.org/packages/c7/af/175ba9b268dec56f821639c9893b506c69fd999fe6a2e2c51de420eb2f01/watchfiles-0.24.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ce72dba6a20e39a0c628258b5c308779b8697f7676c254a845715e2a1039b968", size = 594868 }, + { url = "https://files.pythonhosted.org/packages/44/81/1f701323a9f70805bc81c74c990137123344a80ea23ab9504a99492907f8/watchfiles-0.24.0-cp312-none-win32.whl", hash = "sha256:d9018153cf57fc302a2a34cb7564870b859ed9a732d16b41a9b5cb2ebed2d444", size = 264109 }, + { url = "https://files.pythonhosted.org/packages/b4/0b/32cde5bc2ebd9f351be326837c61bdeb05ad652b793f25c91cac0b48a60b/watchfiles-0.24.0-cp312-none-win_amd64.whl", hash = "sha256:551ec3ee2a3ac9cbcf48a4ec76e42c2ef938a7e905a35b42a1267fa4b1645896", size = 277055 }, + { url = "https://files.pythonhosted.org/packages/4b/81/daade76ce33d21dbec7a15afd7479de8db786e5f7b7d249263b4ea174e08/watchfiles-0.24.0-cp312-none-win_arm64.whl", hash = "sha256:b52a65e4ea43c6d149c5f8ddb0bef8d4a1e779b77591a458a893eb416624a418", size = 266169 }, + { url = "https://files.pythonhosted.org/packages/30/dc/6e9f5447ae14f645532468a84323a942996d74d5e817837a5c8ce9d16c69/watchfiles-0.24.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:3d2e3ab79a1771c530233cadfd277fcc762656d50836c77abb2e5e72b88e3a48", size = 373764 }, + { url = 
"https://files.pythonhosted.org/packages/79/c0/c3a9929c372816c7fc87d8149bd722608ea58dc0986d3ef7564c79ad7112/watchfiles-0.24.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:327763da824817b38ad125dcd97595f942d720d32d879f6c4ddf843e3da3fe90", size = 367873 }, + { url = "https://files.pythonhosted.org/packages/2e/11/ff9a4445a7cfc1c98caf99042df38964af12eed47d496dd5d0d90417349f/watchfiles-0.24.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd82010f8ab451dabe36054a1622870166a67cf3fce894f68895db6f74bbdc94", size = 438381 }, + { url = "https://files.pythonhosted.org/packages/48/a3/763ba18c98211d7bb6c0f417b2d7946d346cdc359d585cc28a17b48e964b/watchfiles-0.24.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d64ba08db72e5dfd5c33be1e1e687d5e4fcce09219e8aee893a4862034081d4e", size = 432809 }, + { url = "https://files.pythonhosted.org/packages/30/4c/616c111b9d40eea2547489abaf4ffc84511e86888a166d3a4522c2ba44b5/watchfiles-0.24.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1cf1f6dd7825053f3d98f6d33f6464ebdd9ee95acd74ba2c34e183086900a827", size = 451801 }, + { url = "https://files.pythonhosted.org/packages/b6/be/d7da83307863a422abbfeb12903a76e43200c90ebe5d6afd6a59d158edea/watchfiles-0.24.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:43e3e37c15a8b6fe00c1bce2473cfa8eb3484bbeecf3aefbf259227e487a03df", size = 468886 }, + { url = "https://files.pythonhosted.org/packages/1d/d3/3dfe131ee59d5e90b932cf56aba5c996309d94dafe3d02d204364c23461c/watchfiles-0.24.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88bcd4d0fe1d8ff43675360a72def210ebad3f3f72cabfeac08d825d2639b4ab", size = 472973 }, + { url = "https://files.pythonhosted.org/packages/42/6c/279288cc5653a289290d183b60a6d80e05f439d5bfdfaf2d113738d0f932/watchfiles-0.24.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:999928c6434372fde16c8f27143d3e97201160b48a614071261701615a2a156f", size = 425282 }, + { url = "https://files.pythonhosted.org/packages/d6/d7/58afe5e85217e845edf26d8780c2d2d2ae77675eeb8d1b8b8121d799ce52/watchfiles-0.24.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:30bbd525c3262fd9f4b1865cb8d88e21161366561cd7c9e1194819e0a33ea86b", size = 612540 }, + { url = "https://files.pythonhosted.org/packages/6d/d5/b96eeb9fe3fda137200dd2f31553670cbc731b1e13164fd69b49870b76ec/watchfiles-0.24.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:edf71b01dec9f766fb285b73930f95f730bb0943500ba0566ae234b5c1618c18", size = 593625 }, + { url = "https://files.pythonhosted.org/packages/c1/e5/c326fe52ee0054107267608d8cea275e80be4455b6079491dfd9da29f46f/watchfiles-0.24.0-cp313-none-win32.whl", hash = "sha256:f4c96283fca3ee09fb044f02156d9570d156698bc3734252175a38f0e8975f07", size = 263899 }, + { url = "https://files.pythonhosted.org/packages/a6/8b/8a7755c5e7221bb35fe4af2dc44db9174f90ebf0344fd5e9b1e8b42d381e/watchfiles-0.24.0-cp313-none-win_amd64.whl", hash = "sha256:a974231b4fdd1bb7f62064a0565a6b107d27d21d9acb50c484d2cdba515b9366", size = 276622 }, + { url = "https://files.pythonhosted.org/packages/df/94/1ad200e937ec91b2a9d6b39ae1cf9c2b1a9cc88d5ceb43aa5c6962eb3c11/watchfiles-0.24.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:632676574429bee8c26be8af52af20e0c718cc7f5f67f3fb658c71928ccd4f7f", size = 376986 }, + { url = 
"https://files.pythonhosted.org/packages/ee/fd/d9e020d687ccf90fe95efc513fbb39a8049cf5a3ff51f53c59fcf4c47a5d/watchfiles-0.24.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:a2a9891723a735d3e2540651184be6fd5b96880c08ffe1a98bae5017e65b544b", size = 369445 }, + { url = "https://files.pythonhosted.org/packages/43/cb/c0279b35053555d10ef03559c5aebfcb0c703d9c70a7b4e532df74b9b0e8/watchfiles-0.24.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a7fa2bc0efef3e209a8199fd111b8969fe9db9c711acc46636686331eda7dd4", size = 439383 }, + { url = "https://files.pythonhosted.org/packages/8b/c4/08b3c2cda45db5169148a981c2100c744a4a222fa7ae7644937c0c002069/watchfiles-0.24.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01550ccf1d0aed6ea375ef259706af76ad009ef5b0203a3a4cce0f6024f9b68a", size = 426804 }, +] + +[[package]] +name = "wcwidth" +version = "0.2.13" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6c/63/53559446a878410fc5a5974feb13d31d78d752eb18aeba59c7fef1af7598/wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5", size = 101301 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fd/84/fd2ba7aafacbad3c4201d395674fc6348826569da3c0937e75505ead3528/wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859", size = 34166 }, +] + +[[package]] +name = "websocket-client" +version = "1.8.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e6/30/fba0d96b4b5fbf5948ed3f4681f7da2f9f64512e1d303f94b4cc174c24a5/websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da", size = 54648 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/84/44687a29792a70e111c5c477230a72c4b957d88d16141199bf9acb7537a3/websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526", size = 58826 }, +] + +[[package]] +name = "websockets" +version = "13.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8f/1c/78687e0267b09412409ac134f10fd14d14ac6475da892a8b09a02d0f6ae2/websockets-13.0.1.tar.gz", hash = "sha256:4d6ece65099411cfd9a48d13701d7438d9c34f479046b34c50ff60bb8834e43e", size = 149769 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f7/bd/224fd6c4c0d60645444bb77cabf3633a6c14a47e2d03cdbc2136486c51f7/websockets-13.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1841c9082a3ba4a05ea824cf6d99570a6a2d8849ef0db16e9c826acb28089e8f", size = 150946 }, + { url = "https://files.pythonhosted.org/packages/44/5b/16f06fa678432d0cdbc55477bb6f0215c42b31615948bd63a884c294e0a5/websockets-13.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c5870b4a11b77e4caa3937142b650fbbc0914a3e07a0cf3131f35c0587489c1c", size = 148600 }, + { url = "https://files.pythonhosted.org/packages/5a/33/c57b4ecdd26510ffcda37d30073097f1e9015b316fe21b513360bf2d8ee2/websockets-13.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f1d3d1f2eb79fe7b0fb02e599b2bf76a7619c79300fc55f0b5e2d382881d4f7f", size = 148853 }, + { url = "https://files.pythonhosted.org/packages/71/a3/6a8a0e86c44fc39fab83fc6b946f9f7d53e5be6824916450dac637937086/websockets-13.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:15c7d62ee071fa94a2fc52c2b472fed4af258d43f9030479d9c4a2de885fd543", size = 157935 }, + { url = "https://files.pythonhosted.org/packages/a0/58/ba14373234d2b7cce48031f7bd05ab2d23a11ffa0d35c3348d5729fa0527/websockets-13.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6724b554b70d6195ba19650fef5759ef11346f946c07dbbe390e039bcaa7cc3d", size = 156949 }, + { url = "https://files.pythonhosted.org/packages/7d/8a/8e2319207bae70156d0505bf91e192de015ee91ccc5b1afb406bb7db3819/websockets-13.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56a952fa2ae57a42ba7951e6b2605e08a24801a4931b5644dfc68939e041bc7f", size = 157260 }, + { url = "https://files.pythonhosted.org/packages/03/cd/31ff415c4b0dc3c185bd87c412affdc5fab42c700b04d02b380bfb789310/websockets-13.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:17118647c0ea14796364299e942c330d72acc4b248e07e639d34b75067b3cdd8", size = 157661 }, + { url = "https://files.pythonhosted.org/packages/e8/58/a95d1dc6f589cbbfca0918d160ff27c920ab2e94637b750591c6f226cf27/websockets-13.0.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:64a11aae1de4c178fa653b07d90f2fb1a2ed31919a5ea2361a38760192e1858b", size = 157078 }, + { url = "https://files.pythonhosted.org/packages/ce/02/207f49e1c22c8fad9e6353815de698e778d365609801dc2387e01e0f94a2/websockets-13.0.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0617fd0b1d14309c7eab6ba5deae8a7179959861846cbc5cb528a7531c249448", size = 157027 }, + { url = "https://files.pythonhosted.org/packages/3b/aa/e59d994712635e9e6bc883471e12cc493e3a704e4e22e9d4a59ff1491161/websockets-13.0.1-cp310-cp310-win32.whl", hash = "sha256:11f9976ecbc530248cf162e359a92f37b7b282de88d1d194f2167b5e7ad80ce3", size = 151776 }, + { url = "https://files.pythonhosted.org/packages/7b/f9/83bc78788d6ce5492fa44133708584a885080aa7c790be2532f326948115/websockets-13.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:c3c493d0e5141ec055a7d6809a28ac2b88d5b878bb22df8c621ebe79a61123d0", size = 152206 }, + { url = "https://files.pythonhosted.org/packages/20/95/e002ec55688b751d3c9cc131c1960af7e440d95e1954c441535b9da2bf36/websockets-13.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:699ba9dd6a926f82a277063603fc8d586b89f4cb128efc353b749b641fcddda7", size = 150948 }, + { url = "https://files.pythonhosted.org/packages/62/6b/85fb8c13b278db7d45e27ff6ee0db3009b0fadef7c37c85e6cb4a0fbf08e/websockets-13.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cf2fae6d85e5dc384bf846f8243ddaa9197f3a1a70044f59399af001fd1f51d4", size = 148599 }, + { url = "https://files.pythonhosted.org/packages/e8/2e/c80cafbab86f8c399ba8323efff298b7062055724146391443d266e9c49b/websockets-13.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:52aed6ef21a0f1a2a5e310fb5c42d7555e9c5855476bbd7173c3aa3d8a0302f2", size = 148851 }, + { url = "https://files.pythonhosted.org/packages/2e/67/631d4b1f28fef6f12730c0cbe982203a9d6814768c2ab1e0a352d9a07a97/websockets-13.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8eb2b9a318542153674c6e377eb8cb9ca0fc011c04475110d3477862f15d29f0", size = 158509 }, + { url = "https://files.pythonhosted.org/packages/9b/e8/ba740eab2a9c5b903ea94d9a2a448db63f0a296265aee976d17abf734758/websockets-13.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5df891c86fe68b2c38da55b7aea7095beca105933c697d719f3f45f4220a5e0e", size = 157507 }, + { url = 
"https://files.pythonhosted.org/packages/f8/4e/ffa2f1aad2da67e483fb7bad6c69f80c786f4e85d1942a39d7b275b084ed/websockets-13.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fac2d146ff30d9dd2fcf917e5d147db037a5c573f0446c564f16f1f94cf87462", size = 157881 }, + { url = "https://files.pythonhosted.org/packages/c0/85/0cbfe7b0e0dd3d885cd87b0523c6690ae7369feaf3aab5a23e95bdb4fefa/websockets-13.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b8ac5b46fd798bbbf2ac6620e0437c36a202b08e1f827832c4bf050da081b501", size = 158187 }, + { url = "https://files.pythonhosted.org/packages/39/29/d9df0a1daedebefaeea88fb8071539604df09fd0f1bfb73bf58333aa3eb6/websockets-13.0.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:46af561eba6f9b0848b2c9d2427086cabadf14e0abdd9fde9d72d447df268418", size = 157626 }, + { url = "https://files.pythonhosted.org/packages/7d/9a/f88e186059f6b89f8bb08461d9fda7a26940b7b8897c7d7f02aead40b7e4/websockets-13.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b5a06d7f60bc2fc378a333978470dfc4e1415ee52f5f0fce4f7853eb10c1e9df", size = 157575 }, + { url = "https://files.pythonhosted.org/packages/cf/e4/ecdb8352ebab2e44c10b9d6f50008f95e30bb0a7ef0e6b66cb475d539d74/websockets-13.0.1-cp311-cp311-win32.whl", hash = "sha256:556e70e4f69be1082e6ef26dcb70efcd08d1850f5d6c5f4f2bcb4e397e68f01f", size = 151779 }, + { url = "https://files.pythonhosted.org/packages/12/40/46967d00640e6c3231b73d310617927a11c91bcc044dd5a0860a3c457c33/websockets-13.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:67494e95d6565bf395476e9d040037ff69c8b3fa356a886b21d8422ad86ae075", size = 152206 }, + { url = "https://files.pythonhosted.org/packages/4e/51/23ed2d239f1c3087c1431d41cfd159865df0bc35bb0c89973e3b6a0fff9b/websockets-13.0.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f9c9e258e3d5efe199ec23903f5da0eeaad58cf6fccb3547b74fd4750e5ac47a", size = 150953 }, + { url = "https://files.pythonhosted.org/packages/57/8d/814a7ef62b916b0f39108ad2e4d9b4cb0f8c640f8c30202fb63041598ada/websockets-13.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6b41a1b3b561f1cba8321fb32987552a024a8f67f0d05f06fcf29f0090a1b956", size = 148610 }, + { url = "https://files.pythonhosted.org/packages/ad/8b/a378d21124011737e0e490a8a6ef778914b03e50c8d938de2f2170a20dbd/websockets-13.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f73e676a46b0fe9426612ce8caeca54c9073191a77c3e9d5c94697aef99296af", size = 148849 }, + { url = "https://files.pythonhosted.org/packages/46/d2/814a61226af313c1bc289cfe3a10f87bf426b6f2d9df0f927c47afab7612/websockets-13.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f613289f4a94142f914aafad6c6c87903de78eae1e140fa769a7385fb232fdf", size = 158772 }, + { url = "https://files.pythonhosted.org/packages/a1/7e/5987299eb7e131216c9027b05a65f149cbc2bde7c582e694d9eed6ec3d40/websockets-13.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f52504023b1480d458adf496dc1c9e9811df4ba4752f0bc1f89ae92f4f07d0c", size = 157724 }, + { url = "https://files.pythonhosted.org/packages/94/6e/eaf95894042ba8a05a125fe8bcf9ee3572fef6edbcbf49478f4991c027cc/websockets-13.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:139add0f98206cb74109faf3611b7783ceafc928529c62b389917a037d4cfdf4", size = 158152 }, + { url = 
"https://files.pythonhosted.org/packages/ce/ba/a1315d569cc2dadaafda74a9cea16ab5d68142525937f1994442d969b306/websockets-13.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:47236c13be337ef36546004ce8c5580f4b1150d9538b27bf8a5ad8edf23ccfab", size = 158442 }, + { url = "https://files.pythonhosted.org/packages/90/9b/59866695cfd05e785c90932fef3dae4682eb4e06e7076b7c53478f25faad/websockets-13.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c44ca9ade59b2e376612df34e837013e2b273e6c92d7ed6636d0556b6f4db93d", size = 157823 }, + { url = "https://files.pythonhosted.org/packages/9b/47/20af68a313b6453d2d094ccc497b7232e8475175d234e3e5bef5088521e5/websockets-13.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9bbc525f4be3e51b89b2a700f5746c2a6907d2e2ef4513a8daafc98198b92237", size = 157818 }, + { url = "https://files.pythonhosted.org/packages/f8/bb/60aaedc80e388e978617dda1ff38788780c6b0f6e462b85368cb934131a5/websockets-13.0.1-cp312-cp312-win32.whl", hash = "sha256:3624fd8664f2577cf8de996db3250662e259bfbc870dd8ebdcf5d7c6ac0b5185", size = 151785 }, + { url = "https://files.pythonhosted.org/packages/16/2e/e47692f569e1be2e66c1dbc5e85ea4d2cc93b80027fbafa28ae8b0dee52c/websockets-13.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0513c727fb8adffa6d9bf4a4463b2bade0186cbd8c3604ae5540fae18a90cb99", size = 152214 }, + { url = "https://files.pythonhosted.org/packages/46/37/d8ef4b68684d1fa368a5c64be466db07fc58b68163bc2496db2d4cc208ff/websockets-13.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:1ee4cc030a4bdab482a37462dbf3ffb7e09334d01dd37d1063be1136a0d825fa", size = 150962 }, + { url = "https://files.pythonhosted.org/packages/95/49/78aeb3af08ec9887a9065e85cef9d7e199d6c6261fcd39eec087f3a62328/websockets-13.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:dbb0b697cc0655719522406c059eae233abaa3243821cfdfab1215d02ac10231", size = 148621 }, + { url = "https://files.pythonhosted.org/packages/31/0d/dc9b7cec8deaee452092a631ccda894bd7098859f71dd7639b4b5b9c615c/websockets-13.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:acbebec8cb3d4df6e2488fbf34702cbc37fc39ac7abf9449392cefb3305562e9", size = 148853 }, + { url = "https://files.pythonhosted.org/packages/16/bf/734cbd815d7bc94cffe35c934f4e08b619bf3b47df1c6c7af21c1d35bcfe/websockets-13.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63848cdb6fcc0bf09d4a155464c46c64ffdb5807ede4fb251da2c2692559ce75", size = 158741 }, + { url = "https://files.pythonhosted.org/packages/af/9b/756f89b12fee8931785531a314e6f087b21774a7f8c60878e597c684f91b/websockets-13.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:872afa52a9f4c414d6955c365b6588bc4401272c629ff8321a55f44e3f62b553", size = 157690 }, + { url = "https://files.pythonhosted.org/packages/d3/37/31f97132d2262e666b797e250879ca833eab55115f88043b3952a2840eb8/websockets-13.0.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05e70fec7c54aad4d71eae8e8cab50525e899791fc389ec6f77b95312e4e9920", size = 158132 }, + { url = "https://files.pythonhosted.org/packages/41/ce/59c8d44e148c002fec506a9527504fb4281676e2e75c2ee5a58180f1b99a/websockets-13.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e82db3756ccb66266504f5a3de05ac6b32f287faacff72462612120074103329", size = 158490 }, + { url = 
"https://files.pythonhosted.org/packages/1a/74/5b31ce0f318b902c0d70c031f8e1228ba1a4d95a46b2a24a2a5ac17f9cf0/websockets-13.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4e85f46ce287f5c52438bb3703d86162263afccf034a5ef13dbe4318e98d86e7", size = 157879 }, + { url = "https://files.pythonhosted.org/packages/0d/a7/6eac4f04177644bbc98deb98d11770cc7fbc216f6f67ab187c150540fd52/websockets-13.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f3fea72e4e6edb983908f0db373ae0732b275628901d909c382aae3b592589f2", size = 157873 }, + { url = "https://files.pythonhosted.org/packages/72/f6/b8b30a3b134dfdb4ccd1694befa48fddd43783957c988a1dab175732af33/websockets-13.0.1-cp313-cp313-win32.whl", hash = "sha256:254ecf35572fca01a9f789a1d0f543898e222f7b69ecd7d5381d8d8047627bdb", size = 151782 }, + { url = "https://files.pythonhosted.org/packages/3e/88/d94ccc006c69583168aa9dd73b3f1885c8931f2c676f4bdd8cbfae91c7b6/websockets-13.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:ca48914cdd9f2ccd94deab5bcb5ac98025a5ddce98881e5cce762854a5de330b", size = 152212 }, + { url = "https://files.pythonhosted.org/packages/ae/d8/9d0e5c836f89147aa769b72e2d82217ae1c17ffd5f375de8d785e1e16870/websockets-13.0.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:faef9ec6354fe4f9a2c0bbb52fb1ff852effc897e2a4501e25eb3a47cb0a4f89", size = 148629 }, + { url = "https://files.pythonhosted.org/packages/9c/ff/005a440db101d298b42cc7565579ed55a7e12ccc0c6ea0491e53bb073930/websockets-13.0.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:03d3f9ba172e0a53e37fa4e636b86cc60c3ab2cfee4935e66ed1d7acaa4625ad", size = 148863 }, + { url = "https://files.pythonhosted.org/packages/9f/06/44d7c7d48e0beaecbacaf0020eafccd490741e496622da6b2a5626fe6689/websockets-13.0.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d450f5a7a35662a9b91a64aefa852f0c0308ee256122f5218a42f1d13577d71e", size = 150226 }, + { url = "https://files.pythonhosted.org/packages/48/6f/861ba99aa3c5cb54412c3870d5549e466d82d2f7c440b435e23ca6496865/websockets-13.0.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3f55b36d17ac50aa8a171b771e15fbe1561217510c8768af3d546f56c7576cdc", size = 149833 }, + { url = "https://files.pythonhosted.org/packages/8d/a0/9fb50648f69ed341e30096356a815c89c4f9daef24a32e9754dbdc3de8a8/websockets-13.0.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14b9c006cac63772b31abbcd3e3abb6228233eec966bf062e89e7fa7ae0b7333", size = 149778 }, + { url = "https://files.pythonhosted.org/packages/f1/ba/48b5b8343e6f62a8a809ffe987d4d7c911cedcb1b8353f3da615f2609893/websockets-13.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b79915a1179a91f6c5f04ece1e592e2e8a6bd245a0e45d12fd56b2b59e559a32", size = 152259 }, + { url = "https://files.pythonhosted.org/packages/fd/bd/d34c4b7918453506d2149208b175368738148ffc4ba256d7fd8708956732/websockets-13.0.1-py3-none-any.whl", hash = "sha256:b80f0c51681c517604152eb6a572f5a9378f877763231fddb883ba2f968e8817", size = 145262 }, +] + +[[package]] +name = "win32-setctime" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6b/dd/f95a13d2b235a28d613ba23ebad55191514550debb968b46aab99f2e3a30/win32_setctime-1.1.0.tar.gz", hash = "sha256:15cf5750465118d6929ae4de4eb46e8edae9a5634350c01ba582df868e932cb2", size = 3676 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/0a/e6/a7d828fef907843b2a5773ebff47fb79ac0c1c88d60c0ca9530ee941e248/win32_setctime-1.1.0-py3-none-any.whl", hash = "sha256:231db239e959c2fe7eb1d7dc129f11172354f98361c4fa2d6d2d7e278baa8aad", size = 3604 }, +] + +[[package]] +name = "wrapt" +version = "1.16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/95/4c/063a912e20bcef7124e0df97282a8af3ff3e4b603ce84c481d6d7346be0a/wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d", size = 53972 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a8/c6/5375258add3777494671d8cec27cdf5402abd91016dee24aa2972c61fedf/wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4", size = 37315 }, + { url = "https://files.pythonhosted.org/packages/32/12/e11adfde33444986135d8881b401e4de6cbb4cced046edc6b464e6ad7547/wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020", size = 38160 }, + { url = "https://files.pythonhosted.org/packages/70/7d/3dcc4a7e96f8d3e398450ec7703db384413f79bd6c0196e0e139055ce00f/wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440", size = 80419 }, + { url = "https://files.pythonhosted.org/packages/d1/c4/8dfdc3c2f0b38be85c8d9fdf0011ebad2f54e40897f9549a356bebb63a97/wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487", size = 72669 }, + { url = "https://files.pythonhosted.org/packages/49/83/b40bc1ad04a868b5b5bcec86349f06c1ee1ea7afe51dc3e46131e4f39308/wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf", size = 80271 }, + { url = "https://files.pythonhosted.org/packages/19/d4/cd33d3a82df73a064c9b6401d14f346e1d2fb372885f0295516ec08ed2ee/wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72", size = 84748 }, + { url = "https://files.pythonhosted.org/packages/ef/58/2fde309415b5fa98fd8f5f4a11886cbf276824c4c64d45a39da342fff6fe/wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0", size = 77522 }, + { url = "https://files.pythonhosted.org/packages/07/44/359e4724a92369b88dbf09878a7cde7393cf3da885567ea898e5904049a3/wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136", size = 84780 }, + { url = "https://files.pythonhosted.org/packages/88/8f/706f2fee019360cc1da652353330350c76aa5746b4e191082e45d6838faf/wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d", size = 35335 }, + { url = "https://files.pythonhosted.org/packages/19/2b/548d23362e3002ebbfaefe649b833fa43f6ca37ac3e95472130c4b69e0b4/wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2", size = 37528 }, + { url = 
"https://files.pythonhosted.org/packages/fd/03/c188ac517f402775b90d6f312955a5e53b866c964b32119f2ed76315697e/wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09", size = 37313 }, + { url = "https://files.pythonhosted.org/packages/0f/16/ea627d7817394db04518f62934a5de59874b587b792300991b3c347ff5e0/wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d", size = 38164 }, + { url = "https://files.pythonhosted.org/packages/7f/a7/f1212ba098f3de0fd244e2de0f8791ad2539c03bef6c05a9fcb03e45b089/wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389", size = 80890 }, + { url = "https://files.pythonhosted.org/packages/b7/96/bb5e08b3d6db003c9ab219c487714c13a237ee7dcc572a555eaf1ce7dc82/wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060", size = 73118 }, + { url = "https://files.pythonhosted.org/packages/6e/52/2da48b35193e39ac53cfb141467d9f259851522d0e8c87153f0ba4205fb1/wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1", size = 80746 }, + { url = "https://files.pythonhosted.org/packages/11/fb/18ec40265ab81c0e82a934de04596b6ce972c27ba2592c8b53d5585e6bcd/wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3", size = 85668 }, + { url = "https://files.pythonhosted.org/packages/0f/ef/0ecb1fa23145560431b970418dce575cfaec555ab08617d82eb92afc7ccf/wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956", size = 78556 }, + { url = "https://files.pythonhosted.org/packages/25/62/cd284b2b747f175b5a96cbd8092b32e7369edab0644c45784871528eb852/wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d", size = 85712 }, + { url = "https://files.pythonhosted.org/packages/e5/a7/47b7ff74fbadf81b696872d5ba504966591a3468f1bc86bca2f407baef68/wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362", size = 35327 }, + { url = "https://files.pythonhosted.org/packages/cf/c3/0084351951d9579ae83a3d9e38c140371e4c6b038136909235079f2e6e78/wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89", size = 37523 }, + { url = "https://files.pythonhosted.org/packages/92/17/224132494c1e23521868cdd57cd1e903f3b6a7ba6996b7b8f077ff8ac7fe/wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b", size = 37614 }, + { url = "https://files.pythonhosted.org/packages/6a/d7/cfcd73e8f4858079ac59d9db1ec5a1349bc486ae8e9ba55698cc1f4a1dff/wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36", size = 38316 }, + { url = "https://files.pythonhosted.org/packages/7e/79/5ff0a5c54bda5aec75b36453d06be4f83d5cd4932cc84b7cb2b52cee23e2/wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73", size = 86322 }, + { url = "https://files.pythonhosted.org/packages/c4/81/e799bf5d419f422d8712108837c1d9bf6ebe3cb2a81ad94413449543a923/wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809", size = 79055 }, + { url = "https://files.pythonhosted.org/packages/62/62/30ca2405de6a20448ee557ab2cd61ab9c5900be7cbd18a2639db595f0b98/wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b", size = 87291 }, + { url = "https://files.pythonhosted.org/packages/49/4e/5d2f6d7b57fc9956bf06e944eb00463551f7d52fc73ca35cfc4c2cdb7aed/wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81", size = 90374 }, + { url = "https://files.pythonhosted.org/packages/a6/9b/c2c21b44ff5b9bf14a83252a8b973fb84923764ff63db3e6dfc3895cf2e0/wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9", size = 83896 }, + { url = "https://files.pythonhosted.org/packages/14/26/93a9fa02c6f257df54d7570dfe8011995138118d11939a4ecd82cb849613/wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c", size = 91738 }, + { url = "https://files.pythonhosted.org/packages/a2/5b/4660897233eb2c8c4de3dc7cefed114c61bacb3c28327e64150dc44ee2f6/wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc", size = 35568 }, + { url = "https://files.pythonhosted.org/packages/5c/cc/8297f9658506b224aa4bd71906447dea6bb0ba629861a758c28f67428b91/wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8", size = 37653 }, + { url = "https://files.pythonhosted.org/packages/ff/21/abdedb4cdf6ff41ebf01a74087740a709e2edb146490e4d9beea054b0b7a/wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1", size = 23362 }, +] + +[[package]] +name = "yarl" +version = "1.11.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "idna" }, + { name = "multidict" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e4/3d/4924f9ed49698bac5f112bc9b40aa007bbdcd702462c1df3d2e1383fb158/yarl-1.11.1.tar.gz", hash = "sha256:1bb2d9e212fb7449b8fb73bc461b51eaa17cc8430b4a87d87be7b25052d92f53", size = 162095 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/da/a3/4e67b1463c12ba178aace33b62468377473c77b33a95bcb12b67b2b93817/yarl-1.11.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:400cd42185f92de559d29eeb529e71d80dfbd2f45c36844914a4a34297ca6f00", size = 188473 }, + { url = "https://files.pythonhosted.org/packages/f3/86/c0c76e69a390fb43533783582714e8a58003f443b81cac1605ce71cade00/yarl-1.11.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8258c86f47e080a258993eed877d579c71da7bda26af86ce6c2d2d072c11320d", size = 114362 }, + { url = "https://files.pythonhosted.org/packages/07/ef/e6bee78c1bf432de839148fe9fdc1cf5e7fbd6402d8b0b7d7a1522fb9733/yarl-1.11.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2164cd9725092761fed26f299e3f276bb4b537ca58e6ff6b252eae9631b5c96e", size = 112537 }, + { url = 
"https://files.pythonhosted.org/packages/37/f4/3406e76ed71e4d3023dbae4514513a387e2e753cb8a4cadd6ff9ba08a046/yarl-1.11.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08ea567c16f140af8ddc7cb58e27e9138a1386e3e6e53982abaa6f2377b38cc", size = 442573 }, + { url = "https://files.pythonhosted.org/packages/37/15/98b4951271a693142e551fea24bca1e96be71b5256b3091dbe8433532a45/yarl-1.11.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:768ecc550096b028754ea28bf90fde071c379c62c43afa574edc6f33ee5daaec", size = 468046 }, + { url = "https://files.pythonhosted.org/packages/88/1a/f10b88c4d8200708cbc799aad978a37a0ab15a4a72511c60bed11ee585c4/yarl-1.11.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2909fa3a7d249ef64eeb2faa04b7957e34fefb6ec9966506312349ed8a7e77bf", size = 462124 }, + { url = "https://files.pythonhosted.org/packages/02/a3/97b527b5c4551c3b17fd095fe019435664330060b3879c8c1ae80985d4bc/yarl-1.11.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01a8697ec24f17c349c4f655763c4db70eebc56a5f82995e5e26e837c6eb0e49", size = 446807 }, + { url = "https://files.pythonhosted.org/packages/40/06/da47aae54f1bb8ac0668d68bbdde40ba761643f253b2c16fdb4362af8ca3/yarl-1.11.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e286580b6511aac7c3268a78cdb861ec739d3e5a2a53b4809faef6b49778eaff", size = 431778 }, + { url = "https://files.pythonhosted.org/packages/ba/a1/54992cd68f61c11d975184f4c8a4c7f43a838e7c6ce183030a3fc0a257a6/yarl-1.11.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4179522dc0305c3fc9782549175c8e8849252fefeb077c92a73889ccbcd508ad", size = 443702 }, + { url = "https://files.pythonhosted.org/packages/5c/8b/adf290dc272a1a30a0e9dc04e2e62486be80f371bd9da2e9899f8e6181f3/yarl-1.11.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:27fcb271a41b746bd0e2a92182df507e1c204759f460ff784ca614e12dd85145", size = 448289 }, + { url = "https://files.pythonhosted.org/packages/fc/98/e6ad935fa009890b9ef2769266dc9dceaeee5a7f9a57bc7daf50b5b6c305/yarl-1.11.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:f61db3b7e870914dbd9434b560075e0366771eecbe6d2b5561f5bc7485f39efd", size = 471660 }, + { url = "https://files.pythonhosted.org/packages/91/5d/1ad82849ce3c02661395f5097878c58ecabc4dac5d2d98e4f85949386448/yarl-1.11.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:c92261eb2ad367629dc437536463dc934030c9e7caca861cc51990fe6c565f26", size = 469830 }, + { url = "https://files.pythonhosted.org/packages/e0/70/376046a7f69cfec814b97fb8bf1af6f16dcbe37fd0ef89a9f87b04156923/yarl-1.11.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d95b52fbef190ca87d8c42f49e314eace4fc52070f3dfa5f87a6594b0c1c6e46", size = 457671 }, + { url = "https://files.pythonhosted.org/packages/33/49/825f84f9a5d26d26fbf82531cee3923f356e2d8efc1819b85ada508fa91f/yarl-1.11.1-cp310-cp310-win32.whl", hash = "sha256:489fa8bde4f1244ad6c5f6d11bb33e09cf0d1d0367edb197619c3e3fc06f3d91", size = 101184 }, + { url = "https://files.pythonhosted.org/packages/b0/29/2a08a45b9f2eddd1b840813698ee655256f43b507c12f7f86df947cf5f8f/yarl-1.11.1-cp310-cp310-win_amd64.whl", hash = "sha256:476e20c433b356e16e9a141449f25161e6b69984fb4cdbd7cd4bd54c17844998", size = 110175 }, + { url = "https://files.pythonhosted.org/packages/af/f1/f3e6be722461cab1e7c6aea657685897956d6e4743940d685d167914e31c/yarl-1.11.1-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:946eedc12895873891aaceb39bceb484b4977f70373e0122da483f6c38faaa68", size = 188410 }, + { url = "https://files.pythonhosted.org/packages/4b/c1/21cc66b263fdc2ec10b6459aed5b239f07eed91a77438d88f0e1bd70e202/yarl-1.11.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:21a7c12321436b066c11ec19c7e3cb9aec18884fe0d5b25d03d756a9e654edfe", size = 114293 }, + { url = "https://files.pythonhosted.org/packages/31/7a/0ecab63a166a22357772f4a2852c859e2d5a7b02a5c58803458dd516e6b4/yarl-1.11.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c35f493b867912f6fda721a59cc7c4766d382040bdf1ddaeeaa7fa4d072f4675", size = 112548 }, + { url = "https://files.pythonhosted.org/packages/57/5d/78152026864475e841fdae816499345364c8e364b45ea6accd0814a295f0/yarl-1.11.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25861303e0be76b60fddc1250ec5986c42f0a5c0c50ff57cc30b1be199c00e63", size = 485002 }, + { url = "https://files.pythonhosted.org/packages/d3/70/2e880d74aeb4908d45c6403e46bbd4aa866ae31ddb432318d9b8042fe0f6/yarl-1.11.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4b53f73077e839b3f89c992223f15b1d2ab314bdbdf502afdc7bb18e95eae27", size = 504850 }, + { url = "https://files.pythonhosted.org/packages/06/58/5676a47b6d2751853f89d1d68b6a54d725366da6a58482f2410fa7eb38af/yarl-1.11.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:327c724b01b8641a1bf1ab3b232fb638706e50f76c0b5bf16051ab65c868fac5", size = 499291 }, + { url = "https://files.pythonhosted.org/packages/4d/e5/b56d535703a63a8d86ac82059e630e5ba9c0d5626d9c5ac6af53eed815c2/yarl-1.11.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4307d9a3417eea87715c9736d050c83e8c1904e9b7aada6ce61b46361b733d92", size = 487818 }, + { url = "https://files.pythonhosted.org/packages/f3/b4/6b95e1e0983593f4145518980b07126a27e2a4938cb6afb8b592ce6fc2c9/yarl-1.11.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48a28bed68ab8fb7e380775f0029a079f08a17799cb3387a65d14ace16c12e2b", size = 470447 }, + { url = "https://files.pythonhosted.org/packages/a8/e5/5d349b7b04ed4247d4f717f271fce601a79d10e2ac81166c13f97c4973a9/yarl-1.11.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:067b961853c8e62725ff2893226fef3d0da060656a9827f3f520fb1d19b2b68a", size = 484544 }, + { url = "https://files.pythonhosted.org/packages/fa/dc/ce90e9d85ef2233e81148a9658e4ea8372c6de070ce96c5c8bd3ff365144/yarl-1.11.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8215f6f21394d1f46e222abeb06316e77ef328d628f593502d8fc2a9117bde83", size = 482409 }, + { url = "https://files.pythonhosted.org/packages/4c/a1/17c0a03615b0cd213aee2e318a0fbd3d07259c37976d85af9eec6184c589/yarl-1.11.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:498442e3af2a860a663baa14fbf23fb04b0dd758039c0e7c8f91cb9279799bff", size = 512970 }, + { url = "https://files.pythonhosted.org/packages/6c/ed/1e317799d54c79a3e4846db597510f5c84fb7643bb8703a3848136d40809/yarl-1.11.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:69721b8effdb588cb055cc22f7c5105ca6fdaa5aeb3ea09021d517882c4a904c", size = 515203 }, + { url = "https://files.pythonhosted.org/packages/7a/37/9a4e2d73953956fa686fa0f0c4a0881245f39423fa75875d981b4f680611/yarl-1.11.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1e969fa4c1e0b1a391f3fcbcb9ec31e84440253325b534519be0d28f4b6b533e", size = 497323 }, + { url = 
"https://files.pythonhosted.org/packages/a3/c3/a25ae9c85c0e50a8722aecc486ac5ba53b28d1384548df99b2145cb69862/yarl-1.11.1-cp311-cp311-win32.whl", hash = "sha256:7d51324a04fc4b0e097ff8a153e9276c2593106a811704025bbc1d6916f45ca6", size = 101226 }, + { url = "https://files.pythonhosted.org/packages/90/6d/c62ba0ae0232a0b0012706a7735a16b44a03216fedfb6ea0bcda79d1e12c/yarl-1.11.1-cp311-cp311-win_amd64.whl", hash = "sha256:15061ce6584ece023457fb8b7a7a69ec40bf7114d781a8c4f5dcd68e28b5c53b", size = 110471 }, + { url = "https://files.pythonhosted.org/packages/3b/05/379002019a0c9d5dc0c4cc6f71e324ea43461ae6f58e94ee87e07b8ffa90/yarl-1.11.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:a4264515f9117be204935cd230fb2a052dd3792789cc94c101c535d349b3dab0", size = 189044 }, + { url = "https://files.pythonhosted.org/packages/23/d5/e62cfba5ceaaf92ee4f9af6f9c9ab2f2b47d8ad48687fa69570a93b0872c/yarl-1.11.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f41fa79114a1d2eddb5eea7b912d6160508f57440bd302ce96eaa384914cd265", size = 114867 }, + { url = "https://files.pythonhosted.org/packages/b1/10/6abc0bd7e7fe7c6b9b9e9ce0ff558912c9ecae65a798f5442020ef9e4177/yarl-1.11.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:02da8759b47d964f9173c8675710720b468aa1c1693be0c9c64abb9d8d9a4867", size = 112737 }, + { url = "https://files.pythonhosted.org/packages/37/a5/ad026afde5efe1849f4f55bd9f9a2cb5b006511b324db430ae5336104fb3/yarl-1.11.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9361628f28f48dcf8b2f528420d4d68102f593f9c2e592bfc842f5fb337e44fd", size = 482887 }, + { url = "https://files.pythonhosted.org/packages/f8/82/b8bee972617b800319b4364cfcd69bfaf7326db052e91a56e63986cc3e05/yarl-1.11.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b91044952da03b6f95fdba398d7993dd983b64d3c31c358a4c89e3c19b6f7aef", size = 498635 }, + { url = "https://files.pythonhosted.org/packages/af/ad/ac688503b134e02e8505415f0b8e94dc8e92a97e82abdd9736658389b5ae/yarl-1.11.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:74db2ef03b442276d25951749a803ddb6e270d02dda1d1c556f6ae595a0d76a8", size = 496198 }, + { url = "https://files.pythonhosted.org/packages/ce/f2/b6cae0ad1afed6e95f82ab2cb9eb5b63e41f1463ece2a80c39d80cf6167a/yarl-1.11.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e975a2211952a8a083d1b9d9ba26472981ae338e720b419eb50535de3c02870", size = 489068 }, + { url = "https://files.pythonhosted.org/packages/c8/f4/355e69b5563154b40550233ffba8f6099eac0c99788600191967763046cf/yarl-1.11.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8aef97ba1dd2138112890ef848e17d8526fe80b21f743b4ee65947ea184f07a2", size = 468286 }, + { url = "https://files.pythonhosted.org/packages/26/3d/3c37f3f150faf87b086f7915724f2fcb9ff2f7c9d3f6c0f42b7722bd9b77/yarl-1.11.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a7915ea49b0c113641dc4d9338efa9bd66b6a9a485ffe75b9907e8573ca94b84", size = 484568 }, + { url = "https://files.pythonhosted.org/packages/94/ee/d591abbaea3b14e0f68bdec5cbcb75f27107190c51889d518bafe5d8f120/yarl-1.11.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:504cf0d4c5e4579a51261d6091267f9fd997ef58558c4ffa7a3e1460bd2336fa", size = 484947 }, + { url = "https://files.pythonhosted.org/packages/57/70/ad1c65a13315f03ff0c63fd6359dd40d8198e2a42e61bf86507602a0364f/yarl-1.11.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = 
"sha256:3de5292f9f0ee285e6bd168b2a77b2a00d74cbcfa420ed078456d3023d2f6dff", size = 505610 }, + { url = "https://files.pythonhosted.org/packages/4c/8c/6086dec0f8d7df16d136b38f373c49cf3d2fb94464e5a10bf788b36f3f54/yarl-1.11.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:a34e1e30f1774fa35d37202bbeae62423e9a79d78d0874e5556a593479fdf239", size = 515951 }, + { url = "https://files.pythonhosted.org/packages/49/79/e0479e9a3bbb7bdcb82779d89711b97cea30902a4bfe28d681463b7071ce/yarl-1.11.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:66b63c504d2ca43bf7221a1f72fbe981ff56ecb39004c70a94485d13e37ebf45", size = 501273 }, + { url = "https://files.pythonhosted.org/packages/8e/85/eab962453e81073276b22f3d1503dffe6bfc3eb9cd0f31899970de05d490/yarl-1.11.1-cp312-cp312-win32.whl", hash = "sha256:a28b70c9e2213de425d9cba5ab2e7f7a1c8ca23a99c4b5159bf77b9c31251447", size = 101139 }, + { url = "https://files.pythonhosted.org/packages/5d/de/618b3e5cab10af8a2ed3eb625dac61c1d16eb155d1f56f9fdb3500786c12/yarl-1.11.1-cp312-cp312-win_amd64.whl", hash = "sha256:17b5a386d0d36fb828e2fb3ef08c8829c1ebf977eef88e5367d1c8c94b454639", size = 110504 }, + { url = "https://files.pythonhosted.org/packages/07/b7/948e4f427817e0178f3737adf6712fea83f76921e11e2092f403a8a9dc4a/yarl-1.11.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:1fa2e7a406fbd45b61b4433e3aa254a2c3e14c4b3186f6e952d08a730807fa0c", size = 185061 }, + { url = "https://files.pythonhosted.org/packages/f3/67/8d91ad79a3b907b4fef27fafa912350554443ba53364fff3c347b41105cb/yarl-1.11.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:750f656832d7d3cb0c76be137ee79405cc17e792f31e0a01eee390e383b2936e", size = 113056 }, + { url = "https://files.pythonhosted.org/packages/a1/77/6b2348a753702fa87f435cc33dcec21981aaca8ef98a46566a7b29940b4a/yarl-1.11.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0b8486f322d8f6a38539136a22c55f94d269addb24db5cb6f61adc61eabc9d93", size = 110958 }, + { url = "https://files.pythonhosted.org/packages/8e/3e/6eadf32656741549041f549a392f3b15245d3a0a0b12a9bc22bd6b69621f/yarl-1.11.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3fce4da3703ee6048ad4138fe74619c50874afe98b1ad87b2698ef95bf92c96d", size = 470326 }, + { url = "https://files.pythonhosted.org/packages/3d/a4/1b641a8c7899eeaceec45ff105a2e7206ec0eb0fb9d86403963cc8521c5e/yarl-1.11.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8ed653638ef669e0efc6fe2acb792275cb419bf9cb5c5049399f3556995f23c7", size = 484778 }, + { url = "https://files.pythonhosted.org/packages/8a/f5/80c142f34779a5c26002b2bf1f73b9a9229aa9e019ee6f9fd9d3e9704e78/yarl-1.11.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18ac56c9dd70941ecad42b5a906820824ca72ff84ad6fa18db33c2537ae2e089", size = 485568 }, + { url = "https://files.pythonhosted.org/packages/f8/f2/6b40ffea2d5d3a11f514ab23c30d14f52600c36a3210786f5974b6701bb8/yarl-1.11.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:688654f8507464745ab563b041d1fb7dab5d9912ca6b06e61d1c4708366832f5", size = 477801 }, + { url = "https://files.pythonhosted.org/packages/4c/1a/e60c116f3241e4842ed43c104eb2751abe02f6bac0301cdae69e4fda9c3a/yarl-1.11.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4973eac1e2ff63cf187073cd4e1f1148dcd119314ab79b88e1b3fad74a18c9d5", size = 455361 }, + { url = 
"https://files.pythonhosted.org/packages/b9/98/fe0aeee425a4bc5cd3ed86e867661d2bfa782544fa07a8e3dcd97d51ae3d/yarl-1.11.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:964a428132227edff96d6f3cf261573cb0f1a60c9a764ce28cda9525f18f7786", size = 473893 }, + { url = "https://files.pythonhosted.org/packages/6b/9b/677455d146bd3cecd350673f0e4bb28854af66726493ace3b640e9c5552b/yarl-1.11.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:6d23754b9939cbab02c63434776df1170e43b09c6a517585c7ce2b3d449b7318", size = 476407 }, + { url = "https://files.pythonhosted.org/packages/33/ca/ce85766247a9a9b56654428fb78a3e14ea6947a580a9c4e891b3aa7da322/yarl-1.11.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c2dc4250fe94d8cd864d66018f8344d4af50e3758e9d725e94fecfa27588ff82", size = 490848 }, + { url = "https://files.pythonhosted.org/packages/6d/d6/717f0f19bcf2c4705ad95550b4b6319a0d8d1d4f137ea5e223207f00df50/yarl-1.11.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09696438cb43ea6f9492ef237761b043f9179f455f405279e609f2bc9100212a", size = 501084 }, + { url = "https://files.pythonhosted.org/packages/14/b5/b93c70d9a462b802c8df65c64b85f49d86b4ba70c393fbad95cf7ec053cb/yarl-1.11.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:999bfee0a5b7385a0af5ffb606393509cfde70ecca4f01c36985be6d33e336da", size = 491776 }, + { url = "https://files.pythonhosted.org/packages/03/0f/5a52eaa402a6a93265ba82f42c6f6085ccbe483e1b058ad34207e75812b1/yarl-1.11.1-cp313-cp313-win32.whl", hash = "sha256:ce928c9c6409c79e10f39604a7e214b3cb69552952fbda8d836c052832e6a979", size = 485250 }, + { url = "https://files.pythonhosted.org/packages/dd/97/946d26a5d82706a6769399cabd472c59f9a3227ce1432afb4739b9c29572/yarl-1.11.1-cp313-cp313-win_amd64.whl", hash = "sha256:501c503eed2bb306638ccb60c174f856cc3246c861829ff40eaa80e2f0330367", size = 492590 }, + { url = "https://files.pythonhosted.org/packages/5b/b3/841f7d706137bdc8b741c6826106b6f703155076d58f1830f244da857451/yarl-1.11.1-py3-none-any.whl", hash = "sha256:72bf26f66456baa0584eff63e44545c9f0eaed9b73cb6601b647c91f14c11f38", size = 38648 }, +] + +[[package]] +name = "zipp" +version = "3.20.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/54/bf/5c0000c44ebc80123ecbdddba1f5dcd94a5ada602a9c225d84b5aaa55e86/zipp-3.20.2.tar.gz", hash = "sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29", size = 24199 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/62/8b/5ba542fa83c90e09eac972fc9baca7a88e7e7ca4b221a89251954019308b/zipp-3.20.2-py3-none-any.whl", hash = "sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350", size = 9200 }, +] + +[[package]] +name = "zstandard" +version = "0.23.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi", marker = "platform_python_implementation == 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ed/f6/2ac0287b442160a89d726b17a9184a4c615bb5237db763791a7fd16d9df1/zstandard-0.23.0.tar.gz", hash = "sha256:b2d8c62d08e7255f68f7a740bae85b3c9b8e5466baa9cbf7f57f1cde0ac6bc09", size = 681701 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/55/bd0487e86679db1823fc9ee0d8c9c78ae2413d34c0b461193b5f4c31d22f/zstandard-0.23.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bf0a05b6059c0528477fba9054d09179beb63744355cab9f38059548fedd46a9", size = 788701 }, + { url = 
"https://files.pythonhosted.org/packages/e1/8a/ccb516b684f3ad987dfee27570d635822e3038645b1a950c5e8022df1145/zstandard-0.23.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fc9ca1c9718cb3b06634c7c8dec57d24e9438b2aa9a0f02b8bb36bf478538880", size = 633678 }, + { url = "https://files.pythonhosted.org/packages/12/89/75e633d0611c028e0d9af6df199423bf43f54bea5007e6718ab7132e234c/zstandard-0.23.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77da4c6bfa20dd5ea25cbf12c76f181a8e8cd7ea231c673828d0386b1740b8dc", size = 4941098 }, + { url = "https://files.pythonhosted.org/packages/4a/7a/bd7f6a21802de358b63f1ee636ab823711c25ce043a3e9f043b4fcb5ba32/zstandard-0.23.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2170c7e0367dde86a2647ed5b6f57394ea7f53545746104c6b09fc1f4223573", size = 5308798 }, + { url = "https://files.pythonhosted.org/packages/79/3b/775f851a4a65013e88ca559c8ae42ac1352db6fcd96b028d0df4d7d1d7b4/zstandard-0.23.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c16842b846a8d2a145223f520b7e18b57c8f476924bda92aeee3a88d11cfc391", size = 5341840 }, + { url = "https://files.pythonhosted.org/packages/09/4f/0cc49570141dd72d4d95dd6fcf09328d1b702c47a6ec12fbed3b8aed18a5/zstandard-0.23.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:157e89ceb4054029a289fb504c98c6a9fe8010f1680de0201b3eb5dc20aa6d9e", size = 5440337 }, + { url = "https://files.pythonhosted.org/packages/e7/7c/aaa7cd27148bae2dc095191529c0570d16058c54c4597a7d118de4b21676/zstandard-0.23.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:203d236f4c94cd8379d1ea61db2fce20730b4c38d7f1c34506a31b34edc87bdd", size = 4861182 }, + { url = "https://files.pythonhosted.org/packages/ac/eb/4b58b5c071d177f7dc027129d20bd2a44161faca6592a67f8fcb0b88b3ae/zstandard-0.23.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:dc5d1a49d3f8262be192589a4b72f0d03b72dcf46c51ad5852a4fdc67be7b9e4", size = 4932936 }, + { url = "https://files.pythonhosted.org/packages/44/f9/21a5fb9bb7c9a274b05ad700a82ad22ce82f7ef0f485980a1e98ed6e8c5f/zstandard-0.23.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:752bf8a74412b9892f4e5b58f2f890a039f57037f52c89a740757ebd807f33ea", size = 5464705 }, + { url = "https://files.pythonhosted.org/packages/49/74/b7b3e61db3f88632776b78b1db597af3f44c91ce17d533e14a25ce6a2816/zstandard-0.23.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:80080816b4f52a9d886e67f1f96912891074903238fe54f2de8b786f86baded2", size = 4857882 }, + { url = "https://files.pythonhosted.org/packages/4a/7f/d8eb1cb123d8e4c541d4465167080bec88481ab54cd0b31eb4013ba04b95/zstandard-0.23.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:84433dddea68571a6d6bd4fbf8ff398236031149116a7fff6f777ff95cad3df9", size = 4697672 }, + { url = "https://files.pythonhosted.org/packages/5e/05/f7dccdf3d121309b60342da454d3e706453a31073e2c4dac8e1581861e44/zstandard-0.23.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ab19a2d91963ed9e42b4e8d77cd847ae8381576585bad79dbd0a8837a9f6620a", size = 5206043 }, + { url = "https://files.pythonhosted.org/packages/86/9d/3677a02e172dccd8dd3a941307621c0cbd7691d77cb435ac3c75ab6a3105/zstandard-0.23.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:59556bf80a7094d0cfb9f5e50bb2db27fefb75d5138bb16fb052b61b0e0eeeb0", size = 5667390 }, + { url = 
"https://files.pythonhosted.org/packages/41/7e/0012a02458e74a7ba122cd9cafe491facc602c9a17f590367da369929498/zstandard-0.23.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:27d3ef2252d2e62476389ca8f9b0cf2bbafb082a3b6bfe9d90cbcbb5529ecf7c", size = 5198901 }, + { url = "https://files.pythonhosted.org/packages/65/3a/8f715b97bd7bcfc7342d8adcd99a026cb2fb550e44866a3b6c348e1b0f02/zstandard-0.23.0-cp310-cp310-win32.whl", hash = "sha256:5d41d5e025f1e0bccae4928981e71b2334c60f580bdc8345f824e7c0a4c2a813", size = 430596 }, + { url = "https://files.pythonhosted.org/packages/19/b7/b2b9eca5e5a01111e4fe8a8ffb56bdcdf56b12448a24effe6cfe4a252034/zstandard-0.23.0-cp310-cp310-win_amd64.whl", hash = "sha256:519fbf169dfac1222a76ba8861ef4ac7f0530c35dd79ba5727014613f91613d4", size = 495498 }, + { url = "https://files.pythonhosted.org/packages/9e/40/f67e7d2c25a0e2dc1744dd781110b0b60306657f8696cafb7ad7579469bd/zstandard-0.23.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:34895a41273ad33347b2fc70e1bff4240556de3c46c6ea430a7ed91f9042aa4e", size = 788699 }, + { url = "https://files.pythonhosted.org/packages/e8/46/66d5b55f4d737dd6ab75851b224abf0afe5774976fe511a54d2eb9063a41/zstandard-0.23.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:77ea385f7dd5b5676d7fd943292ffa18fbf5c72ba98f7d09fc1fb9e819b34c23", size = 633681 }, + { url = "https://files.pythonhosted.org/packages/63/b6/677e65c095d8e12b66b8f862b069bcf1f1d781b9c9c6f12eb55000d57583/zstandard-0.23.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:983b6efd649723474f29ed42e1467f90a35a74793437d0bc64a5bf482bedfa0a", size = 4944328 }, + { url = "https://files.pythonhosted.org/packages/59/cc/e76acb4c42afa05a9d20827116d1f9287e9c32b7ad58cc3af0721ce2b481/zstandard-0.23.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80a539906390591dd39ebb8d773771dc4db82ace6372c4d41e2d293f8e32b8db", size = 5311955 }, + { url = "https://files.pythonhosted.org/packages/78/e4/644b8075f18fc7f632130c32e8f36f6dc1b93065bf2dd87f03223b187f26/zstandard-0.23.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:445e4cb5048b04e90ce96a79b4b63140e3f4ab5f662321975679b5f6360b90e2", size = 5344944 }, + { url = "https://files.pythonhosted.org/packages/76/3f/dbafccf19cfeca25bbabf6f2dd81796b7218f768ec400f043edc767015a6/zstandard-0.23.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd30d9c67d13d891f2360b2a120186729c111238ac63b43dbd37a5a40670b8ca", size = 5442927 }, + { url = "https://files.pythonhosted.org/packages/0c/c3/d24a01a19b6733b9f218e94d1a87c477d523237e07f94899e1c10f6fd06c/zstandard-0.23.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d20fd853fbb5807c8e84c136c278827b6167ded66c72ec6f9a14b863d809211c", size = 4864910 }, + { url = "https://files.pythonhosted.org/packages/1c/a9/cf8f78ead4597264f7618d0875be01f9bc23c9d1d11afb6d225b867cb423/zstandard-0.23.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ed1708dbf4d2e3a1c5c69110ba2b4eb6678262028afd6c6fbcc5a8dac9cda68e", size = 4935544 }, + { url = "https://files.pythonhosted.org/packages/2c/96/8af1e3731b67965fb995a940c04a2c20997a7b3b14826b9d1301cf160879/zstandard-0.23.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:be9b5b8659dff1f913039c2feee1aca499cfbc19e98fa12bc85e037c17ec6ca5", size = 5467094 }, + { url = 
"https://files.pythonhosted.org/packages/ff/57/43ea9df642c636cb79f88a13ab07d92d88d3bfe3e550b55a25a07a26d878/zstandard-0.23.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:65308f4b4890aa12d9b6ad9f2844b7ee42c7f7a4fd3390425b242ffc57498f48", size = 4860440 }, + { url = "https://files.pythonhosted.org/packages/46/37/edb78f33c7f44f806525f27baa300341918fd4c4af9472fbc2c3094be2e8/zstandard-0.23.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:98da17ce9cbf3bfe4617e836d561e433f871129e3a7ac16d6ef4c680f13a839c", size = 4700091 }, + { url = "https://files.pythonhosted.org/packages/c1/f1/454ac3962671a754f3cb49242472df5c2cced4eb959ae203a377b45b1a3c/zstandard-0.23.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:8ed7d27cb56b3e058d3cf684d7200703bcae623e1dcc06ed1e18ecda39fee003", size = 5208682 }, + { url = "https://files.pythonhosted.org/packages/85/b2/1734b0fff1634390b1b887202d557d2dd542de84a4c155c258cf75da4773/zstandard-0.23.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:b69bb4f51daf461b15e7b3db033160937d3ff88303a7bc808c67bbc1eaf98c78", size = 5669707 }, + { url = "https://files.pythonhosted.org/packages/52/5a/87d6971f0997c4b9b09c495bf92189fb63de86a83cadc4977dc19735f652/zstandard-0.23.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:034b88913ecc1b097f528e42b539453fa82c3557e414b3de9d5632c80439a473", size = 5201792 }, + { url = "https://files.pythonhosted.org/packages/79/02/6f6a42cc84459d399bd1a4e1adfc78d4dfe45e56d05b072008d10040e13b/zstandard-0.23.0-cp311-cp311-win32.whl", hash = "sha256:f2d4380bf5f62daabd7b751ea2339c1a21d1c9463f1feb7fc2bdcea2c29c3160", size = 430586 }, + { url = "https://files.pythonhosted.org/packages/be/a2/4272175d47c623ff78196f3c10e9dc7045c1b9caf3735bf041e65271eca4/zstandard-0.23.0-cp311-cp311-win_amd64.whl", hash = "sha256:62136da96a973bd2557f06ddd4e8e807f9e13cbb0bfb9cc06cfe6d98ea90dfe0", size = 495420 }, + { url = "https://files.pythonhosted.org/packages/7b/83/f23338c963bd9de687d47bf32efe9fd30164e722ba27fb59df33e6b1719b/zstandard-0.23.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b4567955a6bc1b20e9c31612e615af6b53733491aeaa19a6b3b37f3b65477094", size = 788713 }, + { url = "https://files.pythonhosted.org/packages/5b/b3/1a028f6750fd9227ee0b937a278a434ab7f7fdc3066c3173f64366fe2466/zstandard-0.23.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e172f57cd78c20f13a3415cc8dfe24bf388614324d25539146594c16d78fcc8", size = 633459 }, + { url = "https://files.pythonhosted.org/packages/26/af/36d89aae0c1f95a0a98e50711bc5d92c144939efc1f81a2fcd3e78d7f4c1/zstandard-0.23.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0e166f698c5a3e914947388c162be2583e0c638a4703fc6a543e23a88dea3c1", size = 4945707 }, + { url = "https://files.pythonhosted.org/packages/cd/2e/2051f5c772f4dfc0aae3741d5fc72c3dcfe3aaeb461cc231668a4db1ce14/zstandard-0.23.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12a289832e520c6bd4dcaad68e944b86da3bad0d339ef7989fb7e88f92e96072", size = 5306545 }, + { url = "https://files.pythonhosted.org/packages/0a/9e/a11c97b087f89cab030fa71206963090d2fecd8eb83e67bb8f3ffb84c024/zstandard-0.23.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d50d31bfedd53a928fed6707b15a8dbeef011bb6366297cc435accc888b27c20", size = 5337533 }, + { url = "https://files.pythonhosted.org/packages/fc/79/edeb217c57fe1bf16d890aa91a1c2c96b28c07b46afed54a5dcf310c3f6f/zstandard-0.23.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:72c68dda124a1a138340fb62fa21b9bf4848437d9ca60bd35db36f2d3345f373", size = 5436510 }, + { url = "https://files.pythonhosted.org/packages/81/4f/c21383d97cb7a422ddf1ae824b53ce4b51063d0eeb2afa757eb40804a8ef/zstandard-0.23.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53dd9d5e3d29f95acd5de6802e909ada8d8d8cfa37a3ac64836f3bc4bc5512db", size = 4859973 }, + { url = "https://files.pythonhosted.org/packages/ab/15/08d22e87753304405ccac8be2493a495f529edd81d39a0870621462276ef/zstandard-0.23.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:6a41c120c3dbc0d81a8e8adc73312d668cd34acd7725f036992b1b72d22c1772", size = 4936968 }, + { url = "https://files.pythonhosted.org/packages/eb/fa/f3670a597949fe7dcf38119a39f7da49a8a84a6f0b1a2e46b2f71a0ab83f/zstandard-0.23.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:40b33d93c6eddf02d2c19f5773196068d875c41ca25730e8288e9b672897c105", size = 5467179 }, + { url = "https://files.pythonhosted.org/packages/4e/a9/dad2ab22020211e380adc477a1dbf9f109b1f8d94c614944843e20dc2a99/zstandard-0.23.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9206649ec587e6b02bd124fb7799b86cddec350f6f6c14bc82a2b70183e708ba", size = 4848577 }, + { url = "https://files.pythonhosted.org/packages/08/03/dd28b4484b0770f1e23478413e01bee476ae8227bbc81561f9c329e12564/zstandard-0.23.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:76e79bc28a65f467e0409098fa2c4376931fd3207fbeb6b956c7c476d53746dd", size = 4693899 }, + { url = "https://files.pythonhosted.org/packages/2b/64/3da7497eb635d025841e958bcd66a86117ae320c3b14b0ae86e9e8627518/zstandard-0.23.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:66b689c107857eceabf2cf3d3fc699c3c0fe8ccd18df2219d978c0283e4c508a", size = 5199964 }, + { url = "https://files.pythonhosted.org/packages/43/a4/d82decbab158a0e8a6ebb7fc98bc4d903266bce85b6e9aaedea1d288338c/zstandard-0.23.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9c236e635582742fee16603042553d276cca506e824fa2e6489db04039521e90", size = 5655398 }, + { url = "https://files.pythonhosted.org/packages/f2/61/ac78a1263bc83a5cf29e7458b77a568eda5a8f81980691bbc6eb6a0d45cc/zstandard-0.23.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a8fffdbd9d1408006baaf02f1068d7dd1f016c6bcb7538682622c556e7b68e35", size = 5191313 }, + { url = "https://files.pythonhosted.org/packages/e7/54/967c478314e16af5baf849b6ee9d6ea724ae5b100eb506011f045d3d4e16/zstandard-0.23.0-cp312-cp312-win32.whl", hash = "sha256:dc1d33abb8a0d754ea4763bad944fd965d3d95b5baef6b121c0c9013eaf1907d", size = 430877 }, + { url = "https://files.pythonhosted.org/packages/75/37/872d74bd7739639c4553bf94c84af7d54d8211b626b352bc57f0fd8d1e3f/zstandard-0.23.0-cp312-cp312-win_amd64.whl", hash = "sha256:64585e1dba664dc67c7cdabd56c1e5685233fbb1fc1966cfba2a340ec0dfff7b", size = 495595 }, + { url = "https://files.pythonhosted.org/packages/80/f1/8386f3f7c10261fe85fbc2c012fdb3d4db793b921c9abcc995d8da1b7a80/zstandard-0.23.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:576856e8594e6649aee06ddbfc738fec6a834f7c85bf7cadd1c53d4a58186ef9", size = 788975 }, + { url = "https://files.pythonhosted.org/packages/16/e8/cbf01077550b3e5dc86089035ff8f6fbbb312bc0983757c2d1117ebba242/zstandard-0.23.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:38302b78a850ff82656beaddeb0bb989a0322a8bbb1bf1ab10c17506681d772a", size = 633448 }, + { url = 
"https://files.pythonhosted.org/packages/06/27/4a1b4c267c29a464a161aeb2589aff212b4db653a1d96bffe3598f3f0d22/zstandard-0.23.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2240ddc86b74966c34554c49d00eaafa8200a18d3a5b6ffbf7da63b11d74ee2", size = 4945269 }, + { url = "https://files.pythonhosted.org/packages/7c/64/d99261cc57afd9ae65b707e38045ed8269fbdae73544fd2e4a4d50d0ed83/zstandard-0.23.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ef230a8fd217a2015bc91b74f6b3b7d6522ba48be29ad4ea0ca3a3775bf7dd5", size = 5306228 }, + { url = "https://files.pythonhosted.org/packages/7a/cf/27b74c6f22541f0263016a0fd6369b1b7818941de639215c84e4e94b2a1c/zstandard-0.23.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:774d45b1fac1461f48698a9d4b5fa19a69d47ece02fa469825b442263f04021f", size = 5336891 }, + { url = "https://files.pythonhosted.org/packages/fa/18/89ac62eac46b69948bf35fcd90d37103f38722968e2981f752d69081ec4d/zstandard-0.23.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f77fa49079891a4aab203d0b1744acc85577ed16d767b52fc089d83faf8d8ed", size = 5436310 }, + { url = "https://files.pythonhosted.org/packages/a8/a8/5ca5328ee568a873f5118d5b5f70d1f36c6387716efe2e369010289a5738/zstandard-0.23.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ac184f87ff521f4840e6ea0b10c0ec90c6b1dcd0bad2f1e4a9a1b4fa177982ea", size = 4859912 }, + { url = "https://files.pythonhosted.org/packages/ea/ca/3781059c95fd0868658b1cf0440edd832b942f84ae60685d0cfdb808bca1/zstandard-0.23.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c363b53e257246a954ebc7c488304b5592b9c53fbe74d03bc1c64dda153fb847", size = 4936946 }, + { url = "https://files.pythonhosted.org/packages/ce/11/41a58986f809532742c2b832c53b74ba0e0a5dae7e8ab4642bf5876f35de/zstandard-0.23.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:e7792606d606c8df5277c32ccb58f29b9b8603bf83b48639b7aedf6df4fe8171", size = 5466994 }, + { url = "https://files.pythonhosted.org/packages/83/e3/97d84fe95edd38d7053af05159465d298c8b20cebe9ccb3d26783faa9094/zstandard-0.23.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a0817825b900fcd43ac5d05b8b3079937073d2b1ff9cf89427590718b70dd840", size = 4848681 }, + { url = "https://files.pythonhosted.org/packages/6e/99/cb1e63e931de15c88af26085e3f2d9af9ce53ccafac73b6e48418fd5a6e6/zstandard-0.23.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:9da6bc32faac9a293ddfdcb9108d4b20416219461e4ec64dfea8383cac186690", size = 4694239 }, + { url = "https://files.pythonhosted.org/packages/ab/50/b1e703016eebbc6501fc92f34db7b1c68e54e567ef39e6e59cf5fb6f2ec0/zstandard-0.23.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:fd7699e8fd9969f455ef2926221e0233f81a2542921471382e77a9e2f2b57f4b", size = 5200149 }, + { url = "https://files.pythonhosted.org/packages/aa/e0/932388630aaba70197c78bdb10cce2c91fae01a7e553b76ce85471aec690/zstandard-0.23.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:d477ed829077cd945b01fc3115edd132c47e6540ddcd96ca169facff28173057", size = 5655392 }, + { url = "https://files.pythonhosted.org/packages/02/90/2633473864f67a15526324b007a9f96c96f56d5f32ef2a56cc12f9548723/zstandard-0.23.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa6ce8b52c5987b3e34d5674b0ab529a4602b632ebab0a93b07bfb4dfc8f8a33", size = 5191299 }, + { url = 
"https://files.pythonhosted.org/packages/b0/4c/315ca5c32da7e2dc3455f3b2caee5c8c2246074a61aac6ec3378a97b7136/zstandard-0.23.0-cp313-cp313-win32.whl", hash = "sha256:a9b07268d0c3ca5c170a385a0ab9fb7fdd9f5fd866be004c4ea39e44edce47dd", size = 430862 }, + { url = "https://files.pythonhosted.org/packages/a2/bf/c6aaba098e2d04781e8f4f7c0ba3c7aa73d00e4c436bcc0cf059a66691d1/zstandard-0.23.0-cp313-cp313-win_amd64.whl", hash = "sha256:f3513916e8c645d0610815c257cbfd3242adfd5c4cfa78be514e5a3ebb42a41b", size = 495578 }, +] diff --git a/src/backend/langflow/version/__init__.py b/src/backend/langflow/version/__init__.py index 336c63fea7bc..7980c0451962 100644 --- a/src/backend/langflow/version/__init__.py +++ b/src/backend/langflow/version/__init__.py @@ -1 +1 @@ -from .version import __version__, is_pre_release # noqa: F401 +"""Version package.""" diff --git a/src/backend/langflow/version/version.py b/src/backend/langflow/version/version.py index dce827029431..377539fb6bcc 100644 --- a/src/backend/langflow/version/version.py +++ b/src/backend/langflow/version/version.py @@ -1,10 +1,43 @@ -from importlib import metadata - -try: - __version__ = metadata.version("langflow") - # Check if the version is a pre-release version - is_pre_release = any(label in __version__ for label in ["a", "b", "rc", "dev", "post"]) -except metadata.PackageNotFoundError: - __version__ = "" - is_pre_release = False -del metadata +"""Module for package versioning.""" + +import contextlib + + +def get_version() -> str: + """Retrieves the version of the package from a possible list of package names. + + This accounts for after package names are updated for -nightly builds. + + Returns: + str: The version of the package + + Raises: + ValueError: If the package is not found from the list of package names. + """ + from importlib import metadata + + pkg_names = [ + "langflow", + "langflow-base", + "langflow-nightly", + "langflow-base-nightly", + ] + _version = None + for pkg_name in pkg_names: + with contextlib.suppress(ImportError, metadata.PackageNotFoundError): + _version = metadata.version(pkg_name) + + if _version is None: + msg = f"Package not found from options {pkg_names}" + raise ValueError(msg) + + return _version + + +def is_pre_release(v: str) -> bool: + """Returns a boolean indicating whether the version is a pre-release version. + + Returns a boolean indicating whether the version is a pre-release version, + as per the definition of a pre-release segment from PEP 440. 
+ """ + return any(label in v for label in ["a", "b", "rc"]) diff --git a/src/backend/tests/.test_durations b/src/backend/tests/.test_durations index fb51e5b430f0..67d867e18eaa 100644 --- a/src/backend/tests/.test_durations +++ b/src/backend/tests/.test_durations @@ -1,4 +1,10 @@ { + "src/backend/tests/performance/test_server_init.py::test_create_starter_projects": 9.349637124105357, + "src/backend/tests/performance/test_server_init.py::test_get_and_cache_all_types_dict": 0.009816041041631252, + "src/backend/tests/performance/test_server_init.py::test_initialize_services": 0.5938955409801565, + "src/backend/tests/performance/test_server_init.py::test_initialize_super_user": 0.31904370902338997, + "src/backend/tests/performance/test_server_init.py::test_load_flows": 0.002782625029794872, + "src/backend/tests/performance/test_server_init.py::test_setup_llm_caching": 0.01124733401229605, "src/backend/tests/test_endpoints.py::test_build_vertex_invalid_flow_id": 1.8161861660000795, "src/backend/tests/test_endpoints.py::test_build_vertex_invalid_vertex_id": 1.6184064170001875, "src/backend/tests/test_endpoints.py::test_get_all": 3.8724166670003797, @@ -61,302 +67,786 @@ "src/backend/tests/test_webhook.py::test_webhook_endpoint": 8.848518459000388, "src/backend/tests/test_webhook.py::test_webhook_flow_on_run_endpoint": 4.675444458000584, "src/backend/tests/test_webhook.py::test_webhook_with_random_payload": 5.161753501000476, - "src/backend/tests/unit/api/test_api_utils.py::test_get_outdated_components": 1.1425726240004224, - "src/backend/tests/unit/api/test_api_utils.py::test_get_suggestion_message": 1.7624517080002988, - "src/backend/tests/unit/components/prompts/test_prompt_component.py::TestPromptComponent::test_post_code_processing": 0.0013335419994291442, - "src/backend/tests/unit/custom/custom_component/test_component.py::test_set_invalid_output": 0.00042366600018795, - "src/backend/tests/unit/exceptions/test_api.py::test_api_exception": 1.1389405410004656, - "src/backend/tests/unit/exceptions/test_api.py::test_api_exception_no_flow": 1.152361500999632, - "src/backend/tests/unit/graph/graph/test_base.py::test_graph": 0.010227958999621478, - "src/backend/tests/unit/graph/graph/test_base.py::test_graph_functional": 0.010483666000254743, - "src/backend/tests/unit/graph/graph/test_base.py::test_graph_functional_async_start": 0.011119625999981508, - "src/backend/tests/unit/graph/graph/test_base.py::test_graph_functional_start": 0.011757042999761325, - "src/backend/tests/unit/graph/graph/test_base.py::test_graph_functional_start_end": 0.023863916999744106, - "src/backend/tests/unit/graph/graph/test_base.py::test_graph_not_prepared": 0.017605375000130152, - "src/backend/tests/unit/graph/graph/test_base.py::test_graph_with_edge": 0.011328083999615046, - "src/backend/tests/unit/graph/graph/test_runnable_vertices_manager.py::test_add_to_vertices_being_run": 1.1834923340002206, - "src/backend/tests/unit/graph/graph/test_runnable_vertices_manager.py::test_are_all_predecessors_fulfilled": 1.1408760409995011, - "src/backend/tests/unit/graph/graph/test_runnable_vertices_manager.py::test_are_all_predecessors_fulfilled__wrong": 1.9469006670001363, - "src/backend/tests/unit/graph/graph/test_runnable_vertices_manager.py::test_build_run_map": 1.1431461249999302, - "src/backend/tests/unit/graph/graph/test_runnable_vertices_manager.py::test_from_dict": 1.1375733340000806, - "src/backend/tests/unit/graph/graph/test_runnable_vertices_manager.py::test_from_dict_without_run_map__bad_case": 1.1561181250003756, - 
"src/backend/tests/unit/graph/graph/test_runnable_vertices_manager.py::test_from_dict_without_run_predecessors__bad_case": 1.1398941239999658, - "src/backend/tests/unit/graph/graph/test_runnable_vertices_manager.py::test_from_dict_without_vertices_being_run__bad_case": 1.1692108339998413, - "src/backend/tests/unit/graph/graph/test_runnable_vertices_manager.py::test_from_dict_without_vertices_to_run__bad_case": 1.8318268329999228, - "src/backend/tests/unit/graph/graph/test_runnable_vertices_manager.py::test_is_vertex_runnable": 1.1589285000000018, - "src/backend/tests/unit/graph/graph/test_runnable_vertices_manager.py::test_is_vertex_runnable__wrong_is_active": 1.1601161660000798, - "src/backend/tests/unit/graph/graph/test_runnable_vertices_manager.py::test_is_vertex_runnable__wrong_run_predecessors": 1.1846444589996281, - "src/backend/tests/unit/graph/graph/test_runnable_vertices_manager.py::test_is_vertex_runnable__wrong_vertices_to_run": 1.156324541000231, - "src/backend/tests/unit/graph/graph/test_runnable_vertices_manager.py::test_pickle": 1.1490497489999143, - "src/backend/tests/unit/graph/graph/test_runnable_vertices_manager.py::test_remove_from_predecessors": 1.17061966700021, - "src/backend/tests/unit/graph/graph/test_runnable_vertices_manager.py::test_remove_vertex_from_runnables": 1.1513506659998711, - "src/backend/tests/unit/graph/graph/test_runnable_vertices_manager.py::test_to_dict": 1.0720182910004041, - "src/backend/tests/unit/graph/graph/test_runnable_vertices_manager.py::test_update_run_state": 1.152722833000098, - "src/backend/tests/unit/graph/graph/test_runnable_vertices_manager.py::test_update_vertex_run_state": 1.1509739160001118, - "src/backend/tests/unit/graph/graph/test_runnable_vertices_manager.py::test_update_vertex_run_state__bad_case": 1.1737124579994997, - "src/backend/tests/unit/graph/graph/test_utils.py::test_get_successors_a": 1.1541486669998449, - "src/backend/tests/unit/graph/graph/test_utils.py::test_get_successors_z": 1.152469375999317, - "src/backend/tests/unit/graph/graph/test_utils.py::test_sort_up_to_vertex_a": 1.2232545840001876, - "src/backend/tests/unit/graph/graph/test_utils.py::test_sort_up_to_vertex_g": 1.2133457499999167, - "src/backend/tests/unit/graph/graph/test_utils.py::test_sort_up_to_vertex_h": 1.206607958999939, - "src/backend/tests/unit/graph/graph/test_utils.py::test_sort_up_to_vertex_invalid_vertex": 1.2181356250002864, - "src/backend/tests/unit/graph/graph/test_utils.py::test_sort_up_to_vertex_m": 1.2004494170000726, - "src/backend/tests/unit/graph/graph/test_utils.py::test_sort_up_to_vertex_n_is_start": 1.1721724999997605, - "src/backend/tests/unit/graph/graph/test_utils.py::test_sort_up_to_vertex_t": 1.233656458999576, - "src/backend/tests/unit/graph/graph/test_utils.py::test_sort_up_to_vertex_x": 2.1063177910000377, - "src/backend/tests/unit/graph/graph/test_utils.py::test_sort_up_to_vertex_z": 1.1587511679999807, - "src/backend/tests/unit/graph/test_graph.py::test_build_edges": 1.2613786659999278, - "src/backend/tests/unit/graph/test_graph.py::test_build_nodes": 1.236596333000307, - "src/backend/tests/unit/graph/test_graph.py::test_build_params": 1.1992265409999163, - "src/backend/tests/unit/graph/test_graph.py::test_circular_dependencies": 1.2119703740004297, - "src/backend/tests/unit/graph/test_graph.py::test_find_last_node": 1.1726129999997283, - "src/backend/tests/unit/graph/test_graph.py::test_get_node": 1.2559688749997804, - "src/backend/tests/unit/graph/test_graph.py::test_get_node_neighbors_basic": 2.205406665999817, - 
"src/backend/tests/unit/graph/test_graph.py::test_get_root_vertex": 1.2303205419998449, - "src/backend/tests/unit/graph/test_graph.py::test_get_vertices_with_target": 1.2130661250002959, - "src/backend/tests/unit/graph/test_graph.py::test_graph_structure": 1.226738209999894, - "src/backend/tests/unit/graph/test_graph.py::test_invalid_node_types": 1.218165749000491, - "src/backend/tests/unit/graph/test_graph.py::test_matched_type": 1.2096231260002241, - "src/backend/tests/unit/graph/test_graph.py::test_pickle_graph": 1.1924404169999434, - "src/backend/tests/unit/graph/test_graph.py::test_process_flow": 1.198885958000119, - "src/backend/tests/unit/graph/test_graph.py::test_process_flow_one_group": 1.1860583339998811, - "src/backend/tests/unit/graph/test_graph.py::test_process_flow_vector_store_grouped": 1.1757077080001181, - "src/backend/tests/unit/graph/test_graph.py::test_set_new_target_handle": 1.2467699590001757, - "src/backend/tests/unit/graph/test_graph.py::test_ungroup_node": 1.200591749999603, - "src/backend/tests/unit/graph/test_graph.py::test_update_source_handle": 1.2464087500002279, - "src/backend/tests/unit/graph/test_graph.py::test_update_target_handle_proxy": 1.2738795420000315, - "src/backend/tests/unit/graph/test_graph.py::test_update_template": 2.3875174170002538, - "src/backend/tests/unit/graph/test_graph.py::test_validate_edges": 1.2202941670002474, - "src/backend/tests/unit/initial_setup/starter_projects/test_memory_chatbot.py::test_memory_chatbot": 1.2400888339998346, - "src/backend/tests/unit/initial_setup/starter_projects/test_memory_chatbot.py::test_memory_chatbot_dump_components_and_edges": 1.2050360830003228, - "src/backend/tests/unit/initial_setup/starter_projects/test_memory_chatbot.py::test_memory_chatbot_dump_structure": 1.226008957999511, - "src/backend/tests/unit/initial_setup/starter_projects/test_vector_store_rag.py::test_vector_store_rag": 0.07454762600036702, - "src/backend/tests/unit/initial_setup/starter_projects/test_vector_store_rag.py::test_vector_store_rag_add": 0.06540679199997612, - "src/backend/tests/unit/initial_setup/starter_projects/test_vector_store_rag.py::test_vector_store_rag_dump": 0.03427487499993731, - "src/backend/tests/unit/initial_setup/starter_projects/test_vector_store_rag.py::test_vector_store_rag_dump_components_and_edges": 0.03775970900005632, - "src/backend/tests/unit/inputs/test_inputs.py::test_bool_input_invalid": 0.0001977499996428378, - "src/backend/tests/unit/inputs/test_inputs.py::test_bool_input_valid": 0.0003290839995315764, - "src/backend/tests/unit/inputs/test_inputs.py::test_data_input_valid": 0.0005500840002241603, - "src/backend/tests/unit/inputs/test_inputs.py::test_dict_input_invalid": 0.00021833400023751892, - "src/backend/tests/unit/inputs/test_inputs.py::test_dict_input_valid": 0.0002739589999691816, - "src/backend/tests/unit/inputs/test_inputs.py::test_dropdown_input_invalid": 0.00028154100027677487, - "src/backend/tests/unit/inputs/test_inputs.py::test_dropdown_input_valid": 0.00018683299958865973, - "src/backend/tests/unit/inputs/test_inputs.py::test_file_input_valid": 0.0005212080000092101, - "src/backend/tests/unit/inputs/test_inputs.py::test_float_input_invalid": 0.00016741600029490655, - "src/backend/tests/unit/inputs/test_inputs.py::test_float_input_valid": 0.0001750840001477627, - "src/backend/tests/unit/inputs/test_inputs.py::test_handle_input_invalid": 0.00030137500016280683, - "src/backend/tests/unit/inputs/test_inputs.py::test_handle_input_valid": 0.00028079100002287305, - 
"src/backend/tests/unit/inputs/test_inputs.py::test_instantiate_input_comprehensive": 0.00022679100038658362, - "src/backend/tests/unit/inputs/test_inputs.py::test_instantiate_input_invalid": 0.0002707490002649138, - "src/backend/tests/unit/inputs/test_inputs.py::test_instantiate_input_valid": 0.000520291999237088, - "src/backend/tests/unit/inputs/test_inputs.py::test_int_input_invalid": 0.000933000999793876, - "src/backend/tests/unit/inputs/test_inputs.py::test_int_input_valid": 0.00018916599947260693, - "src/backend/tests/unit/inputs/test_inputs.py::test_message_text_input_invalid": 0.0002494579998710833, - "src/backend/tests/unit/inputs/test_inputs.py::test_message_text_input_valid": 0.00048650000053385156, - "src/backend/tests/unit/inputs/test_inputs.py::test_multiline_input_invalid": 0.0018433750001349836, - "src/backend/tests/unit/inputs/test_inputs.py::test_multiline_input_valid": 0.0006555419995493139, - "src/backend/tests/unit/inputs/test_inputs.py::test_multiline_secret_input_invalid": 0.0003428750001148728, - "src/backend/tests/unit/inputs/test_inputs.py::test_multiline_secret_input_valid": 0.001843831999849499, - "src/backend/tests/unit/inputs/test_inputs.py::test_multiselect_input_invalid": 0.00019454099992799456, - "src/backend/tests/unit/inputs/test_inputs.py::test_multiselect_input_valid": 0.0003385830000297574, - "src/backend/tests/unit/inputs/test_inputs.py::test_nested_dict_input_invalid": 0.00046958300026744837, - "src/backend/tests/unit/inputs/test_inputs.py::test_nested_dict_input_valid": 0.0007980010000210314, - "src/backend/tests/unit/inputs/test_inputs.py::test_prompt_input_valid": 0.0006562079997820547, - "src/backend/tests/unit/inputs/test_inputs.py::test_secret_str_input_invalid": 0.00018541600002208725, - "src/backend/tests/unit/inputs/test_inputs.py::test_secret_str_input_valid": 0.00017520799929116038, - "src/backend/tests/unit/inputs/test_inputs.py::test_str_input_invalid": 0.00023595900029249606, - "src/backend/tests/unit/inputs/test_inputs.py::test_str_input_valid": 0.00019979200033048983, - "src/backend/tests/unit/inputs/test_inputs.py::test_table_input_invalid": 0.00026916500019069645, - "src/backend/tests/unit/inputs/test_inputs.py::test_table_input_valid": 0.00023066599987942027, - "src/backend/tests/unit/schema/test_schema_message.py::test_message_async_prompt_serialization": 0.2658414169995922, - "src/backend/tests/unit/schema/test_schema_message.py::test_message_prompt_serialization": 0.0005853330003446899, - "src/backend/tests/unit/test_api_key.py::test_create_api_key": 1.668041834000178, - "src/backend/tests/unit/test_api_key.py::test_delete_api_key": 1.6283347500002492, - "src/backend/tests/unit/test_api_key.py::test_get_api_keys": 1.5674538329994903, + "src/backend/tests/unit/api/test_api_utils.py::test_get_outdated_components": 0.00046070897951722145, + "src/backend/tests/unit/api/test_api_utils.py::test_get_suggestion_message": 0.0004775830893777311, + "src/backend/tests/unit/api/v1/test_api_key.py::test_create_api_key_route": 2.5889793329988606, + "src/backend/tests/unit/api/v1/test_api_key.py::test_create_folder": 5.879420668003149, + "src/backend/tests/unit/api/v1/test_api_key.py::test_delete_api_key_route": 5.08323762496002, + "src/backend/tests/unit/api/v1/test_api_key.py::test_save_store_api_key": 2.5483837079955265, + "src/backend/tests/unit/api/v1/test_endpoints.py::test_get_config": 1.8300487080705352, + "src/backend/tests/unit/api/v1/test_endpoints.py::test_get_version": 15.56689295801334, + 
"src/backend/tests/unit/api/v1/test_endpoints.py::test_update_component_outputs": 2.663210333965253, + "src/backend/tests/unit/api/v1/test_flows.py::test_create_flow": 4.495548666047398, + "src/backend/tests/unit/api/v1/test_flows.py::test_create_flows": 3.1307485009310767, + "src/backend/tests/unit/api/v1/test_flows.py::test_read_basic_examples": 2.3698979579494335, + "src/backend/tests/unit/api/v1/test_flows.py::test_read_flow": 2.789128624019213, + "src/backend/tests/unit/api/v1/test_flows.py::test_read_flows": 4.866618833038956, + "src/backend/tests/unit/api/v1/test_flows.py::test_update_flow": 2.358723249984905, + "src/backend/tests/unit/api/v1/test_folders.py::test_create_folder": 4.585997875023168, + "src/backend/tests/unit/api/v1/test_folders.py::test_read_folder": 2.081491626042407, + "src/backend/tests/unit/api/v1/test_folders.py::test_read_folders": 2.056147459021304, + "src/backend/tests/unit/api/v1/test_folders.py::test_update_folder": 13.467484833963681, + "src/backend/tests/unit/api/v1/test_starter_projects.py::test_get_starter_projects": 5.260392207012046, + "src/backend/tests/unit/api/v1/test_store.py::test_check_if_store_is_enabled": 1.7977962500299327, + "src/backend/tests/unit/api/v1/test_users.py::test_add_user": 2.418213166005444, + "src/backend/tests/unit/api/v1/test_users.py::test_delete_user": 5.889092375058681, + "src/backend/tests/unit/api/v1/test_users.py::test_patch_user": 2.9018829600536264, + "src/backend/tests/unit/api/v1/test_users.py::test_read_all_users": 2.8212402089848183, + "src/backend/tests/unit/api/v1/test_users.py::test_read_current_user": 2.27289370901417, + "src/backend/tests/unit/api/v1/test_users.py::test_reset_password": 2.837956835050136, + "src/backend/tests/unit/api/v1/test_validate.py::test_post_validate_code": 3.654669084062334, + "src/backend/tests/unit/api/v1/test_validate.py::test_post_validate_prompt": 3.0125097080599517, + "src/backend/tests/unit/api/v1/test_variable.py::test_create_variable": 1.7783849989646114, + "src/backend/tests/unit/api/v1/test_variable.py::test_create_variable__Exception": 5.891528583015315, + "src/backend/tests/unit/api/v1/test_variable.py::test_create_variable__HTTPException": 2.8841335409670137, + "src/backend/tests/unit/api/v1/test_variable.py::test_create_variable__exception": 4.416810167022049, + "src/backend/tests/unit/api/v1/test_variable.py::test_create_variable__httpexception": 2.779732874012552, + "src/backend/tests/unit/api/v1/test_variable.py::test_create_variable__variable_name_alread_exists": 3.690157334029209, + "src/backend/tests/unit/api/v1/test_variable.py::test_create_variable__variable_name_already_exists": 13.62650291697355, + "src/backend/tests/unit/api/v1/test_variable.py::test_create_variable__variable_name_and_value_cannot_be_empty": 2.4213992079603486, + "src/backend/tests/unit/api/v1/test_variable.py::test_create_variable__variable_name_cannot_be_empty": 2.686067708011251, + "src/backend/tests/unit/api/v1/test_variable.py::test_create_variable__variable_value_cannot_be_empty": 2.3703256670269184, + "src/backend/tests/unit/api/v1/test_variable.py::test_delete_variable": 1.8349529590341263, + "src/backend/tests/unit/api/v1/test_variable.py::test_delete_variable__Exception": 3.1565893749939278, + "src/backend/tests/unit/api/v1/test_variable.py::test_delete_variable__exception": 13.154085125017446, + "src/backend/tests/unit/api/v1/test_variable.py::test_read_variables": 2.757411584025249, + "src/backend/tests/unit/api/v1/test_variable.py::test_read_variables__": 3.637667417060584, + 
"src/backend/tests/unit/api/v1/test_variable.py::test_read_variables__empty": 3.7034704579855315, + "src/backend/tests/unit/api/v1/test_variable.py::test_update_variable": 2.858990293054376, + "src/backend/tests/unit/api/v1/test_variable.py::test_update_variable__Exception": 3.202228542009834, + "src/backend/tests/unit/api/v1/test_variable.py::test_update_variable__exception": 4.347305501054507, + "src/backend/tests/unit/base/load/test_load.py::test_run_flow_from_json_params": 0.0014367500552907586, + "src/backend/tests/unit/base/models/test_model_constants.py::test_provider_names": 0.024663168034749106, + "src/backend/tests/unit/base/tools/test_component_tool.py::test_component_tool": 0.04467487393412739, + "src/backend/tests/unit/base/tools/test_component_toolkit.py::test_component_tool": 0.1196131250471808, + "src/backend/tests/unit/components/agents/test_agent_events.py::test_chain_end_event": 0.0031461670296266675, + "src/backend/tests/unit/components/agents/test_agent_events.py::test_chain_start_event": 0.41959074995247647, + "src/backend/tests/unit/components/agents/test_agent_events.py::test_chain_stream_event": 0.003502250008750707, + "src/backend/tests/unit/components/agents/test_agent_events.py::test_handle_on_chain_end_empty_data": 0.003317043010611087, + "src/backend/tests/unit/components/agents/test_agent_events.py::test_handle_on_chain_end_no_output": 0.0031987499678507447, + "src/backend/tests/unit/components/agents/test_agent_events.py::test_handle_on_chain_end_with_empty_return_values": 0.0031257080263458192, + "src/backend/tests/unit/components/agents/test_agent_events.py::test_handle_on_chain_end_with_output": 0.002732874942012131, + "src/backend/tests/unit/components/agents/test_agent_events.py::test_handle_on_chain_start_no_input": 0.0035242490121163428, + "src/backend/tests/unit/components/agents/test_agent_events.py::test_handle_on_chain_start_with_input": 0.01647449895972386, + "src/backend/tests/unit/components/agents/test_agent_events.py::test_handle_on_chain_stream_no_output": 0.004321499960497022, + "src/backend/tests/unit/components/agents/test_agent_events.py::test_handle_on_chain_stream_with_output": 0.0033161240280605853, + "src/backend/tests/unit/components/agents/test_agent_events.py::test_handle_on_tool_end": 0.0034351240028627217, + "src/backend/tests/unit/components/agents/test_agent_events.py::test_handle_on_tool_error": 0.0045179169974289834, + "src/backend/tests/unit/components/agents/test_agent_events.py::test_handle_on_tool_start": 0.004680416022893041, + "src/backend/tests/unit/components/agents/test_agent_events.py::test_multiple_events": 0.003887208004016429, + "src/backend/tests/unit/components/agents/test_agent_events.py::test_tool_end_event": 0.002465625002514571, + "src/backend/tests/unit/components/agents/test_agent_events.py::test_tool_error_event": 0.00482704205205664, + "src/backend/tests/unit/components/agents/test_agent_events.py::test_tool_start_event": 0.005946709017734975, + "src/backend/tests/unit/components/agents/test_agent_events.py::test_unknown_event": 0.002935541037004441, + "src/backend/tests/unit/components/helpers/test_structured_output_component.py::TestStructuredOutputComponent::test_correctly_builds_output_model": 0.0057195828994736075, + "src/backend/tests/unit/components/helpers/test_structured_output_component.py::TestStructuredOutputComponent::test_empty_output_schema": 0.0016070419223979115, + 
"src/backend/tests/unit/components/helpers/test_structured_output_component.py::TestStructuredOutputComponent::test_handles_multiple_outputs": 0.003055208013392985, + "src/backend/tests/unit/components/helpers/test_structured_output_component.py::TestStructuredOutputComponent::test_invalid_llm_config": 0.0011500419932417572, + "src/backend/tests/unit/components/helpers/test_structured_output_component.py::TestStructuredOutputComponent::test_invalid_output_schema_type": 0.0012654169695451856, + "src/backend/tests/unit/components/helpers/test_structured_output_component.py::TestStructuredOutputComponent::test_large_input_value": 0.0019090840360149741, + "src/backend/tests/unit/components/helpers/test_structured_output_component.py::TestStructuredOutputComponent::test_nested_output_schema": 0.0024119180161505938, + "src/backend/tests/unit/components/helpers/test_structured_output_component.py::TestStructuredOutputComponent::test_raises_value_error_for_unsupported_language_model": 0.0017231259844265878, + "src/backend/tests/unit/components/helpers/test_structured_output_component.py::TestStructuredOutputComponent::test_successful_structured_output_generation_with_patch_with_config": 0.009122917021159083, + "src/backend/tests/unit/components/models/test_ChatOllama_component.py::test_build_model": 0.0020211669616401196, + "src/backend/tests/unit/components/models/test_ChatOllama_component.py::test_get_model_failure": 0.0068002091138623655, + "src/backend/tests/unit/components/models/test_ChatOllama_component.py::test_get_model_success": 0.015780292043928057, + "src/backend/tests/unit/components/models/test_ChatOllama_component.py::test_update_build_config_keep_alive": 0.0008187499479390681, + "src/backend/tests/unit/components/models/test_ChatOllama_component.py::test_update_build_config_mirostat_disabled": 0.0013394170091487467, + "src/backend/tests/unit/components/models/test_ChatOllama_component.py::test_update_build_config_mirostat_enabled": 0.0016756660188548267, + "src/backend/tests/unit/components/models/test_ChatOllama_component.py::test_update_build_config_model_name": 0.0062951669679023325, + "src/backend/tests/unit/components/models/test_chatollama_component.py::test_build_model": 0.05815512494882569, + "src/backend/tests/unit/components/models/test_chatollama_component.py::test_get_model_failure": 0.023584623995702714, + "src/backend/tests/unit/components/models/test_chatollama_component.py::test_get_model_success": 0.021688584005460143, + "src/backend/tests/unit/components/models/test_chatollama_component.py::test_update_build_config_keep_alive": 0.003615666995756328, + "src/backend/tests/unit/components/models/test_chatollama_component.py::test_update_build_config_mirostat_disabled": 0.0030736250337213278, + "src/backend/tests/unit/components/models/test_chatollama_component.py::test_update_build_config_mirostat_enabled": 0.0017204579780809581, + "src/backend/tests/unit/components/models/test_chatollama_component.py::test_update_build_config_model_name": 0.018433375051245093, + "src/backend/tests/unit/components/models/test_huggingface.py::test_huggingface_inputs": 0.001714667014311999, + "src/backend/tests/unit/components/prompts/test_prompt_component.py::TestPromptComponent::test_post_code_processing": 0.0015109580126591027, + "src/backend/tests/unit/components/prototypes/test_create_data_component.py::test_build_data": 0.0016140410443767905, + "src/backend/tests/unit/components/prototypes/test_create_data_component.py::test_get_data": 0.0008367500267922878, + 
"src/backend/tests/unit/components/prototypes/test_create_data_component.py::test_update_build_config": 0.001162625034339726, + "src/backend/tests/unit/components/prototypes/test_create_data_component.py::test_update_build_config_exceed_limit": 0.005139043089002371, + "src/backend/tests/unit/components/prototypes/test_create_data_component.py::test_validate_text_key_invalid": 0.0006456660339608788, + "src/backend/tests/unit/components/prototypes/test_create_data_component.py::test_validate_text_key_valid": 0.0006595409940928221, + "src/backend/tests/unit/components/prototypes/test_update_data_component.py::test_build_data": 0.006239999027457088, + "src/backend/tests/unit/components/prototypes/test_update_data_component.py::test_get_data": 0.0011950000771321356, + "src/backend/tests/unit/components/prototypes/test_update_data_component.py::test_update_build_config": 0.0019992499728687108, + "src/backend/tests/unit/components/prototypes/test_update_data_component.py::test_update_build_config_exceed_limit": 0.002255084051284939, + "src/backend/tests/unit/components/prototypes/test_update_data_component.py::test_validate_text_key_invalid": 0.000936791009735316, + "src/backend/tests/unit/components/prototypes/test_update_data_component.py::test_validate_text_key_valid": 0.0009394589578732848, + "src/backend/tests/unit/components/tools/test_python_repl_tool.py::test_python_repl_tool_template": 0.011357625015079975, + "src/backend/tests/unit/components/tools/test_yfinance_tool.py::test_yfinance_tool_template": 0.011375251051504165, + "src/backend/tests/unit/custom/component/test_component_to_tool.py::test_component_to_tool": 0.019733334018383175, + "src/backend/tests/unit/custom/component/test_component_to_tool.py::test_component_to_tool_has_no_component_as_tool": 0.0017144169833045453, + "src/backend/tests/unit/custom/component/test_component_to_tool.py::test_component_to_toolkit": 0.004081916995346546, + "src/backend/tests/unit/custom/component/test_componet_set_functionality.py::test_set_with_message_text_input_list": 0.0017268330557271838, + "src/backend/tests/unit/custom/component/test_componet_set_functionality.py::test_set_with_mixed_list_input": 0.004446917038876563, + "src/backend/tests/unit/custom/custom_component/test_component.py::test_set_component": 0.0015936249983496964, + "src/backend/tests/unit/custom/custom_component/test_component.py::test_set_invalid_output": 0.0032142079435288906, + "src/backend/tests/unit/custom/custom_component/test_component.py::test_set_required_inputs": 0.0009162090136669576, + "src/backend/tests/unit/custom/custom_component/test_component.py::test_set_required_inputs_various_components": 0.009080707968678325, + "src/backend/tests/unit/custom/custom_component/test_component_events.py::test_component_build_results": 1.522718749998603, + "src/backend/tests/unit/custom/custom_component/test_component_events.py::test_component_error_handling": 4.2030872499453835, + "src/backend/tests/unit/custom/custom_component/test_component_events.py::test_component_logging": 1.9351595000480302, + "src/backend/tests/unit/custom/custom_component/test_component_events.py::test_component_message_sending": 11.93795562494779, + "src/backend/tests/unit/custom/custom_component/test_component_events.py::test_component_streaming_message": 1.8019379999605007, + "src/backend/tests/unit/custom/custom_component/test_component_events.py::test_component_tool_output": 1.8185450420132838, + 
"src/backend/tests/unit/custom/custom_component/test_update_outputs.py::TestComponentOutputs::test_run_and_validate_update_outputs_custom_update": 0.0008805410470813513, + "src/backend/tests/unit/custom/custom_component/test_update_outputs.py::TestComponentOutputs::test_run_and_validate_update_outputs_invalid_output": 0.000512125960085541, + "src/backend/tests/unit/custom/custom_component/test_update_outputs.py::TestComponentOutputs::test_run_and_validate_update_outputs_output_validation": 0.002503083029296249, + "src/backend/tests/unit/custom/custom_component/test_update_outputs.py::TestComponentOutputs::test_run_and_validate_update_outputs_tool_mode": 0.0013509579584933817, + "src/backend/tests/unit/custom/custom_component/test_update_outputs.py::TestComponentOutputs::test_run_and_validate_update_outputs_with_existing_tool_output": 0.0017645410262048244, + "src/backend/tests/unit/custom/custom_component/test_update_outputs.py::TestComponentOutputs::test_run_and_validate_update_outputs_with_multiple_outputs": 0.0005928330938331783, + "src/backend/tests/unit/events/test_event_manager.py::TestEventManager::test_accessing_non_registered_callback": 0.00044916599290445447, + "src/backend/tests/unit/events/test_event_manager.py::TestEventManager::test_accessing_non_registered_event_callback_with_recommended_fix": 0.0003390010679140687, + "src/backend/tests/unit/events/test_event_manager.py::TestEventManager::test_accessing_registered_event_callback": 0.00024991598911583424, + "src/backend/tests/unit/events/test_event_manager.py::TestEventManager::test_event_id_uniqueness_with_await": 0.0026247500209137797, + "src/backend/tests/unit/events/test_event_manager.py::TestEventManager::test_handling_large_number_of_events": 0.0006205420941114426, + "src/backend/tests/unit/events/test_event_manager.py::TestEventManager::test_performance_impact_frequent_registrations": 0.0012078340514563024, + "src/backend/tests/unit/events/test_event_manager.py::TestEventManager::test_queue_receives_correct_event_data_format": 0.00462833303026855, + "src/backend/tests/unit/events/test_event_manager.py::TestEventManager::test_register_event_with_empty_name": 0.00041325093479827046, + "src/backend/tests/unit/events/test_event_manager.py::TestEventManager::test_register_event_with_invalid_name_fixed": 0.00030162499751895666, + "src/backend/tests/unit/events/test_event_manager.py::TestEventManager::test_register_event_with_valid_name_and_callback_with_mock_callback": 0.0013502090005204082, + "src/backend/tests/unit/events/test_event_manager.py::TestEventManager::test_register_event_with_valid_name_and_no_callback": 0.0002652100520208478, + "src/backend/tests/unit/events/test_event_manager.py::TestEventManager::test_register_event_without_event_type_argument_fixed": 0.002235958003439009, + "src/backend/tests/unit/events/test_event_manager.py::TestEventManager::test_sending_event_with_complex_data": 0.004761000047437847, + "src/backend/tests/unit/events/test_event_manager.py::TestEventManager::test_sending_event_with_none_data": 0.0002628749352879822, + "src/backend/tests/unit/events/test_event_manager.py::TestEventManager::test_sending_event_with_valid_type_and_data_asyncio_plugin": 0.007096707937307656, + "src/backend/tests/unit/events/test_event_manager.py::TestEventManager::test_thread_safety_accessing_events_dictionary": 0.008751457964535803, + "src/backend/tests/unit/exceptions/test_api.py::test_api_exception": 0.004037750011775643, + "src/backend/tests/unit/exceptions/test_api.py::test_api_exception_no_flow": 
0.0003051239182241261, + "src/backend/tests/unit/graph/edge/test_edge_base.py::test_edge_raises_error_on_invalid_target_handle": 0.028969040955416858, + "src/backend/tests/unit/graph/graph/state/test_state_model.py::TestCreateStateModel::test_create_model_and_assign_values_fails": 0.002509708981961012, + "src/backend/tests/unit/graph/graph/state/test_state_model.py::TestCreateStateModel::test_create_model_with_fields_from_kwargs": 0.0015128339873626828, + "src/backend/tests/unit/graph/graph/state/test_state_model.py::TestCreateStateModel::test_create_model_with_invalid_callable": 0.001136915001552552, + "src/backend/tests/unit/graph/graph/state/test_state_model.py::TestCreateStateModel::test_create_model_with_valid_return_type_annotations": 0.003671207930892706, + "src/backend/tests/unit/graph/graph/state/test_state_model.py::TestCreateStateModel::test_create_with_multiple_components": 0.0030033339862711728, + "src/backend/tests/unit/graph/graph/state/test_state_model.py::TestCreateStateModel::test_create_with_pydantic_field": 0.0027249569538980722, + "src/backend/tests/unit/graph/graph/state/test_state_model.py::TestCreateStateModel::test_default_model_name_to_state": 0.0006715419585816562, + "src/backend/tests/unit/graph/graph/state/test_state_model.py::TestCreateStateModel::test_graph_functional_start_state_update": 0.0040702500264160335, + "src/backend/tests/unit/graph/graph/state/test_state_model.py::TestCreateStateModel::test_handle_empty_kwargs_gracefully": 0.0004774159751832485, + "src/backend/tests/unit/graph/graph/state/test_state_model.py::TestCreateStateModel::test_raise_typeerror_for_invalid_field_type_in_tuple": 0.0004116260097362101, + "src/backend/tests/unit/graph/graph/state/test_state_model.py::TestCreateStateModel::test_raise_valueerror_for_invalid_field_type_in_tuple": 0.00342700001783669, + "src/backend/tests/unit/graph/graph/state/test_state_model.py::TestCreateStateModel::test_raise_valueerror_for_unsupported_value_types": 0.00034600100480020046, + "src/backend/tests/unit/graph/graph/test_base.py::test_graph": 1.8664495000848547, + "src/backend/tests/unit/graph/graph/test_base.py::test_graph_functional": 0.07218241697410122, + "src/backend/tests/unit/graph/graph/test_base.py::test_graph_functional_async_start": 0.11505883297650144, + "src/backend/tests/unit/graph/graph/test_base.py::test_graph_functional_start": 1.4226566250436008, + "src/backend/tests/unit/graph/graph/test_base.py::test_graph_functional_start_end": 0.030356583010870963, + "src/backend/tests/unit/graph/graph/test_base.py::test_graph_not_prepared": 0.037500042002648115, + "src/backend/tests/unit/graph/graph/test_base.py::test_graph_set_with_invalid_component": 0.0009155830484814942, + "src/backend/tests/unit/graph/graph/test_base.py::test_graph_set_with_valid_component": 0.00019387598149478436, + "src/backend/tests/unit/graph/graph/test_base.py::test_graph_with_edge": 0.1054259579977952, + "src/backend/tests/unit/graph/graph/test_callback_graph.py::test_callback_graph": 0.00015587499365210533, + "src/backend/tests/unit/graph/graph/test_cycles.py::test_conditional_router_max_iterations": 0.041235500015318394, + "src/backend/tests/unit/graph/graph/test_cycles.py::test_cycle_in_graph": 0.000267457973677665, + "src/backend/tests/unit/graph/graph/test_cycles.py::test_cycle_in_graph_max_iterations": 0.037366332951933146, + "src/backend/tests/unit/graph/graph/test_cycles.py::test_that_outputs_cache_is_set_to_false_in_cycle": 0.010305042087566108, + 
"src/backend/tests/unit/graph/graph/test_graph_state_model.py::test_graph_functional_start_graph_state_update": 0.018378626031335443, + "src/backend/tests/unit/graph/graph/test_graph_state_model.py::test_graph_state_model": 0.02349175198469311, + "src/backend/tests/unit/graph/graph/test_graph_state_model.py::test_graph_state_model_json_schema": 0.00039483298314735293, + "src/backend/tests/unit/graph/graph/test_graph_state_model.py::test_graph_state_model_serialization": 0.018142791057471186, + "src/backend/tests/unit/graph/graph/test_runnable_vertices_manager.py::test_add_to_vertices_being_run": 0.0002476670197211206, + "src/backend/tests/unit/graph/graph/test_runnable_vertices_manager.py::test_are_all_predecessors_fulfilled": 0.0011267910012975335, + "src/backend/tests/unit/graph/graph/test_runnable_vertices_manager.py::test_are_all_predecessors_fulfilled__wrong": 0.0003554169670678675, + "src/backend/tests/unit/graph/graph/test_runnable_vertices_manager.py::test_build_run_map": 0.00026812596479430795, + "src/backend/tests/unit/graph/graph/test_runnable_vertices_manager.py::test_from_dict": 0.000963873986620456, + "src/backend/tests/unit/graph/graph/test_runnable_vertices_manager.py::test_from_dict_without_run_map__bad_case": 0.001472541014663875, + "src/backend/tests/unit/graph/graph/test_runnable_vertices_manager.py::test_from_dict_without_run_predecessors__bad_case": 0.0010348749347031116, + "src/backend/tests/unit/graph/graph/test_runnable_vertices_manager.py::test_from_dict_without_vertices_being_run__bad_case": 0.001993166981264949, + "src/backend/tests/unit/graph/graph/test_runnable_vertices_manager.py::test_from_dict_without_vertices_to_run__bad_case": 0.0003054160042665899, + "src/backend/tests/unit/graph/graph/test_runnable_vertices_manager.py::test_is_vertex_runnable": 0.0002876249491237104, + "src/backend/tests/unit/graph/graph/test_runnable_vertices_manager.py::test_is_vertex_runnable__wrong_is_active": 0.00024312501773238182, + "src/backend/tests/unit/graph/graph/test_runnable_vertices_manager.py::test_is_vertex_runnable__wrong_run_predecessors": 0.0005400410736910999, + "src/backend/tests/unit/graph/graph/test_runnable_vertices_manager.py::test_is_vertex_runnable__wrong_vertices_to_run": 0.0014861250529065728, + "src/backend/tests/unit/graph/graph/test_runnable_vertices_manager.py::test_pickle": 0.0011441680253483355, + "src/backend/tests/unit/graph/graph/test_runnable_vertices_manager.py::test_remove_from_predecessors": 0.00044087396236136556, + "src/backend/tests/unit/graph/graph/test_runnable_vertices_manager.py::test_remove_vertex_from_runnables": 0.0005044579738751054, + "src/backend/tests/unit/graph/graph/test_runnable_vertices_manager.py::test_to_dict": 0.0017160000279545784, + "src/backend/tests/unit/graph/graph/test_runnable_vertices_manager.py::test_update_run_state": 0.0003283339901827276, + "src/backend/tests/unit/graph/graph/test_runnable_vertices_manager.py::test_update_vertex_run_state": 0.0005591660155914724, + "src/backend/tests/unit/graph/graph/test_runnable_vertices_manager.py::test_update_vertex_run_state__bad_case": 0.0004551669699139893, + "src/backend/tests/unit/graph/graph/test_utils.py::TestFindAllCycleEdges::test_detects_cycles_in_simple_graph": 0.00026724993949756026, + "src/backend/tests/unit/graph/graph/test_utils.py::TestFindAllCycleEdges::test_disconnected_components": 0.0002821669913828373, + "src/backend/tests/unit/graph/graph/test_utils.py::TestFindAllCycleEdges::test_duplicate_edges": 0.00021912500960752368, + 
"src/backend/tests/unit/graph/graph/test_utils.py::TestFindAllCycleEdges::test_identifies_multiple_cycles": 0.00025812501553446054, + "src/backend/tests/unit/graph/graph/test_utils.py::TestFindAllCycleEdges::test_large_graphs_efficiency": 0.0007382910698652267, + "src/backend/tests/unit/graph/graph/test_utils.py::TestFindAllCycleEdges::test_mixed_data_types_in_edges": 0.00021845696028321981, + "src/backend/tests/unit/graph/graph/test_utils.py::TestFindAllCycleEdges::test_multiple_edges_between_same_nodes": 0.0002460830728523433, + "src/backend/tests/unit/graph/graph/test_utils.py::TestFindAllCycleEdges::test_no_cycles_present": 0.0002756660105660558, + "src/backend/tests/unit/graph/graph/test_utils.py::TestFindAllCycleEdges::test_nodes_with_no_incoming_edges": 0.00021979294251650572, + "src/backend/tests/unit/graph/graph/test_utils.py::TestFindAllCycleEdges::test_nodes_with_no_outgoing_edges": 0.0003090410609729588, + "src/backend/tests/unit/graph/graph/test_utils.py::TestFindAllCycleEdges::test_self_loops": 0.000253000995144248, + "src/backend/tests/unit/graph/graph/test_utils.py::TestFindAllCycleEdges::test_single_node_no_edges": 0.00026000093203037977, + "src/backend/tests/unit/graph/graph/test_utils.py::TestFindCycleEdge::test_detects_cycle_in_simple_graph": 0.00045262498315423727, + "src/backend/tests/unit/graph/graph/test_utils.py::TestFindCycleEdge::test_disconnected_components": 0.00022062496282160282, + "src/backend/tests/unit/graph/graph/test_utils.py::TestFindCycleEdge::test_duplicate_edges": 0.0007739180000498891, + "src/backend/tests/unit/graph/graph/test_utils.py::TestFindCycleEdge::test_empty_edges_list": 0.0002501250128261745, + "src/backend/tests/unit/graph/graph/test_utils.py::TestFindCycleEdge::test_identifies_first_cycle": 0.000237041967920959, + "src/backend/tests/unit/graph/graph/test_utils.py::TestFindCycleEdge::test_large_graph_efficiency": 0.0009104579803533852, + "src/backend/tests/unit/graph/graph/test_utils.py::TestFindCycleEdge::test_multiple_cycles": 0.0002475010114721954, + "src/backend/tests/unit/graph/graph/test_utils.py::TestFindCycleEdge::test_multiple_edges_between_same_nodes": 0.00023878994397819042, + "src/backend/tests/unit/graph/graph/test_utils.py::TestFindCycleEdge::test_nodes_with_no_outgoing_edges": 0.00029970903415232897, + "src/backend/tests/unit/graph/graph/test_utils.py::TestFindCycleEdge::test_returns_none_when_no_cycle": 0.00024695793399587274, + "src/backend/tests/unit/graph/graph/test_utils.py::TestFindCycleEdge::test_self_loop_cycle": 0.000244333001319319, + "src/backend/tests/unit/graph/graph/test_utils.py::TestFindCycleEdge::test_single_node_no_edges": 0.00023858301574364305, + "src/backend/tests/unit/graph/graph/test_utils.py::TestFindCycleVertices::test_correctly_identify_and_return_vertices_in_single_cycle": 0.00028429104713723063, + "src/backend/tests/unit/graph/graph/test_utils.py::TestFindCycleVertices::test_detect_cycles_simple_graph": 0.0007923329831101, + "src/backend/tests/unit/graph/graph/test_utils.py::TestFindCycleVertices::test_handle_duplicate_edges_fixed_fixed": 0.000366666994523257, + "src/backend/tests/unit/graph/graph/test_utils.py::TestFindCycleVertices::test_handle_empty_edges": 0.00023050099844112992, + "src/backend/tests/unit/graph/graph/test_utils.py::TestFindCycleVertices::test_handle_large_graphs_efficiently": 0.0004664169973693788, + "src/backend/tests/unit/graph/graph/test_utils.py::TestFindCycleVertices::test_handle_no_outgoing_edges": 0.0002648750669322908, + 
"src/backend/tests/unit/graph/graph/test_utils.py::TestFindCycleVertices::test_handle_self_loops": 0.0009492079843766987, + "src/backend/tests/unit/graph/graph/test_utils.py::TestFindCycleVertices::test_handle_single_cycle": 0.0002994999522343278, + "src/backend/tests/unit/graph/graph/test_utils.py::TestFindCycleVertices::test_handle_two_inputs_in_cycle[0]": 0.00038199900882318616, + "src/backend/tests/unit/graph/graph/test_utils.py::TestFindCycleVertices::test_handle_two_inputs_in_cycle[1]": 0.0008092499920167029, + "src/backend/tests/unit/graph/graph/test_utils.py::TestFindCycleVertices::test_handle_two_inputs_in_cycle[2]": 0.0015593329444527626, + "src/backend/tests/unit/graph/graph/test_utils.py::TestFindCycleVertices::test_handle_two_inputs_in_cycle[3]": 0.0006993749993853271, + "src/backend/tests/unit/graph/graph/test_utils.py::TestFindCycleVertices::test_handle_two_inputs_in_cycle[4]": 0.00036212499253451824, + "src/backend/tests/unit/graph/graph/test_utils.py::TestFindCycleVertices::test_no_cycles_empty_list": 0.00025908404495567083, + "src/backend/tests/unit/graph/graph/test_utils.py::TestFindCycleVertices::test_no_modification_of_input_edges_list": 0.0003286660648882389, + "src/backend/tests/unit/graph/graph/test_utils.py::TestFindCycleVertices::test_non_string_vertex_ids": 0.00036191800609230995, + "src/backend/tests/unit/graph/graph/test_utils.py::TestFindCycleVertices::test_process_disconnected_components": 0.0005638339789584279, + "src/backend/tests/unit/graph/graph/test_utils.py::TestFindCycleVertices::test_return_vertices_involved_in_multiple_cycles": 0.0003491259994916618, + "src/backend/tests/unit/graph/graph/test_utils.py::TestFindCycleVertices::test_single_vertex_no_edges": 0.00023916596546769142, + "src/backend/tests/unit/graph/graph/test_utils.py::test_get_successors_a": 0.0002698740572668612, + "src/backend/tests/unit/graph/graph/test_utils.py::test_get_successors_z": 0.0019688329775817692, + "src/backend/tests/unit/graph/graph/test_utils.py::test_has_cycle": 0.0003182920045219362, + "src/backend/tests/unit/graph/graph/test_utils.py::test_sort_up_to_vertex_a": 0.0008160430006682873, + "src/backend/tests/unit/graph/graph/test_utils.py::test_sort_up_to_vertex_g": 0.001312917040195316, + "src/backend/tests/unit/graph/graph/test_utils.py::test_sort_up_to_vertex_h": 0.000272500969003886, + "src/backend/tests/unit/graph/graph/test_utils.py::test_sort_up_to_vertex_invalid_vertex": 0.0019030409748665988, + "src/backend/tests/unit/graph/graph/test_utils.py::test_sort_up_to_vertex_m": 0.0005449170130304992, + "src/backend/tests/unit/graph/graph/test_utils.py::test_sort_up_to_vertex_n_is_start": 0.0005516259698197246, + "src/backend/tests/unit/graph/graph/test_utils.py::test_sort_up_to_vertex_t": 0.0007446670206263661, + "src/backend/tests/unit/graph/graph/test_utils.py::test_sort_up_to_vertex_x": 0.00027004204457625747, + "src/backend/tests/unit/graph/graph/test_utils.py::test_sort_up_to_vertex_z": 0.00029762403573840857, + "src/backend/tests/unit/graph/test_graph.py::test_build_edges": 0.001086625037714839, + "src/backend/tests/unit/graph/test_graph.py::test_build_nodes": 0.0012113330303691328, + "src/backend/tests/unit/graph/test_graph.py::test_build_params": 0.00745550001738593, + "src/backend/tests/unit/graph/test_graph.py::test_circular_dependencies": 0.0011518750106915832, + "src/backend/tests/unit/graph/test_graph.py::test_find_last_node": 0.002307749993633479, + "src/backend/tests/unit/graph/test_graph.py::test_get_node": 3.6276886249543168, + 
"src/backend/tests/unit/graph/test_graph.py::test_get_node_neighbors_basic": 0.0015942919999361038, + "src/backend/tests/unit/graph/test_graph.py::test_get_root_vertex": 0.00336533400695771, + "src/backend/tests/unit/graph/test_graph.py::test_get_vertices_with_target": 0.0015001240535639226, + "src/backend/tests/unit/graph/test_graph.py::test_graph_structure": 3.660518125980161, + "src/backend/tests/unit/graph/test_graph.py::test_invalid_node_types": 0.026237833022605628, + "src/backend/tests/unit/graph/test_graph.py::test_matched_type": 0.0011828330461867154, + "src/backend/tests/unit/graph/test_graph.py::test_pickle_graph": 0.025576499931048602, + "src/backend/tests/unit/graph/test_graph.py::test_process_flow": 0.001180750085040927, + "src/backend/tests/unit/graph/test_graph.py::test_process_flow_one_group": 0.00234074896434322, + "src/backend/tests/unit/graph/test_graph.py::test_process_flow_vector_store_grouped": 0.0031264160061255097, + "src/backend/tests/unit/graph/test_graph.py::test_serialize_graph": 0.034727626014500856, + "src/backend/tests/unit/graph/test_graph.py::test_set_new_target_handle": 0.0003109159297309816, + "src/backend/tests/unit/graph/test_graph.py::test_ungroup_node": 0.0010227089514955878, + "src/backend/tests/unit/graph/test_graph.py::test_update_source_handle": 0.00022512488067150116, + "src/backend/tests/unit/graph/test_graph.py::test_update_target_handle_proxy": 0.0004344579647295177, + "src/backend/tests/unit/graph/test_graph.py::test_update_template": 0.0004319589934311807, + "src/backend/tests/unit/graph/test_graph.py::test_validate_edges": 0.0010510420543141663, + "src/backend/tests/unit/helpers/test_base_model_from_schema.py::TestBuildModelFromSchema::test_correctly_accesses_descriptions_recommended_fix": 0.0006701659876853228, + "src/backend/tests/unit/helpers/test_base_model_from_schema.py::TestBuildModelFromSchema::test_create_model_from_valid_schema": 0.0010890840785577893, + "src/backend/tests/unit/helpers/test_base_model_from_schema.py::TestBuildModelFromSchema::test_handle_empty_schema": 0.00046241702511906624, + "src/backend/tests/unit/helpers/test_base_model_from_schema.py::TestBuildModelFromSchema::test_handle_large_schemas_efficiently": 0.0006454579415731132, + "src/backend/tests/unit/helpers/test_base_model_from_schema.py::TestBuildModelFromSchema::test_handles_multiple_fields_fixed_with_instance_check": 0.0007467090617865324, + "src/backend/tests/unit/helpers/test_base_model_from_schema.py::TestBuildModelFromSchema::test_manages_unknown_field_types": 0.0004999999073334038, + "src/backend/tests/unit/helpers/test_base_model_from_schema.py::TestBuildModelFromSchema::test_nested_list_and_dict_types_handling": 0.000673624046612531, + "src/backend/tests/unit/helpers/test_base_model_from_schema.py::TestBuildModelFromSchema::test_no_duplicate_field_names_fixed_fixed": 0.001717083971016109, + "src/backend/tests/unit/helpers/test_base_model_from_schema.py::TestBuildModelFromSchema::test_process_schema_missing_optional_keys_updated": 0.0008960830164141953, + "src/backend/tests/unit/helpers/test_base_model_from_schema.py::TestBuildModelFromSchema::test_raises_error_for_invalid_input_different_exception_with_specific_exception": 0.0002627489739097655, + "src/backend/tests/unit/helpers/test_base_model_from_schema.py::TestBuildModelFromSchema::test_returns_valid_model_class": 0.0005503349239006639, + "src/backend/tests/unit/helpers/test_base_model_from_schema.py::TestBuildModelFromSchema::test_schema_fields_with_none_default": 0.00063270702958107, + 
"src/backend/tests/unit/helpers/test_base_model_from_schema.py::TestBuildModelFromSchema::test_supports_single_and_multiple_type_annotations": 0.0007200430845841765, + "src/backend/tests/unit/initial_setup/starter_projects/test_memory_chatbot.py::test_memory_chatbot": 12.820865958055947, + "src/backend/tests/unit/initial_setup/starter_projects/test_memory_chatbot.py::test_memory_chatbot_dump_components_and_edges": 0.012427791021764278, + "src/backend/tests/unit/initial_setup/starter_projects/test_memory_chatbot.py::test_memory_chatbot_dump_structure": 0.02846895798575133, + "src/backend/tests/unit/initial_setup/starter_projects/test_vector_store_rag.py::test_vector_store_rag": 1.9493195419781841, + "src/backend/tests/unit/initial_setup/starter_projects/test_vector_store_rag.py::test_vector_store_rag_add": 0.06917400000384077, + "src/backend/tests/unit/initial_setup/starter_projects/test_vector_store_rag.py::test_vector_store_rag_dump": 0.037611624982673675, + "src/backend/tests/unit/initial_setup/starter_projects/test_vector_store_rag.py::test_vector_store_rag_dump_components_and_edges": 0.03499108296819031, + "src/backend/tests/unit/inputs/test_inputs.py::test_bool_input_invalid": 0.0002666670479811728, + "src/backend/tests/unit/inputs/test_inputs.py::test_bool_input_valid": 0.0002622499596327543, + "src/backend/tests/unit/inputs/test_inputs.py::test_code_input_valid": 0.00045866600703448057, + "src/backend/tests/unit/inputs/test_inputs.py::test_data_input_valid": 0.0002644999767653644, + "src/backend/tests/unit/inputs/test_inputs.py::test_dict_input_invalid": 0.00023766793310642242, + "src/backend/tests/unit/inputs/test_inputs.py::test_dict_input_valid": 0.0002497490495443344, + "src/backend/tests/unit/inputs/test_inputs.py::test_dropdown_input_invalid": 0.0005181650049053133, + "src/backend/tests/unit/inputs/test_inputs.py::test_dropdown_input_valid": 0.0004622919950634241, + "src/backend/tests/unit/inputs/test_inputs.py::test_file_input_valid": 0.00024066702462732792, + "src/backend/tests/unit/inputs/test_inputs.py::test_float_input_invalid": 0.00033175002317875624, + "src/backend/tests/unit/inputs/test_inputs.py::test_float_input_valid": 0.0013439589529298246, + "src/backend/tests/unit/inputs/test_inputs.py::test_handle_input_invalid": 0.000246831972617656, + "src/backend/tests/unit/inputs/test_inputs.py::test_handle_input_valid": 0.0002542920410633087, + "src/backend/tests/unit/inputs/test_inputs.py::test_instantiate_input_comprehensive": 0.00035108200972899795, + "src/backend/tests/unit/inputs/test_inputs.py::test_instantiate_input_invalid": 0.00032504100818187, + "src/backend/tests/unit/inputs/test_inputs.py::test_instantiate_input_valid": 0.000264250033069402, + "src/backend/tests/unit/inputs/test_inputs.py::test_int_input_invalid": 0.0005710420082323253, + "src/backend/tests/unit/inputs/test_inputs.py::test_int_input_valid": 0.00026408396661281586, + "src/backend/tests/unit/inputs/test_inputs.py::test_message_text_input_invalid": 0.0003577919560484588, + "src/backend/tests/unit/inputs/test_inputs.py::test_message_text_input_valid": 0.00043400004506111145, + "src/backend/tests/unit/inputs/test_inputs.py::test_multiline_input_invalid": 0.00033308297861367464, + "src/backend/tests/unit/inputs/test_inputs.py::test_multiline_input_valid": 0.0003004170721396804, + "src/backend/tests/unit/inputs/test_inputs.py::test_multiline_secret_input_invalid": 0.00037404202157631516, + "src/backend/tests/unit/inputs/test_inputs.py::test_multiline_secret_input_valid": 0.00028283405117690563, + 
"src/backend/tests/unit/inputs/test_inputs.py::test_multiselect_input_invalid": 0.0002282499335706234, + "src/backend/tests/unit/inputs/test_inputs.py::test_multiselect_input_valid": 0.0002297920291312039, + "src/backend/tests/unit/inputs/test_inputs.py::test_nested_dict_input_invalid": 0.0003780000261031091, + "src/backend/tests/unit/inputs/test_inputs.py::test_nested_dict_input_valid": 0.0003223739913664758, + "src/backend/tests/unit/inputs/test_inputs.py::test_prompt_input_valid": 0.0006935019628144801, + "src/backend/tests/unit/inputs/test_inputs.py::test_secret_str_input_invalid": 0.00028462399495765567, + "src/backend/tests/unit/inputs/test_inputs.py::test_secret_str_input_valid": 0.00033654196886345744, + "src/backend/tests/unit/inputs/test_inputs.py::test_str_input_invalid": 0.0011346670216880739, + "src/backend/tests/unit/inputs/test_inputs.py::test_str_input_valid": 0.000880667008459568, + "src/backend/tests/unit/inputs/test_inputs.py::test_table_input_invalid": 0.00380275008501485, + "src/backend/tests/unit/inputs/test_inputs.py::test_table_input_valid": 0.001983581983949989, + "src/backend/tests/unit/io/test_io_schema.py::TestCreateInputSchema::test_complex_nested_structures_handling": 0.0030952919623814523, + "src/backend/tests/unit/io/test_io_schema.py::TestCreateInputSchema::test_default_values_assignment": 0.0005185420159250498, + "src/backend/tests/unit/io/test_io_schema.py::TestCreateInputSchema::test_default_values_for_non_required_fields": 0.0007797080907039344, + "src/backend/tests/unit/io/test_io_schema.py::TestCreateInputSchema::test_empty_list_of_inputs": 0.00041016493923962116, + "src/backend/tests/unit/io/test_io_schema.py::TestCreateInputSchema::test_field_types_conversion": 0.0004856659797951579, + "src/backend/tests/unit/io/test_io_schema.py::TestCreateInputSchema::test_fields_creation_with_correct_types_and_attributes": 0.000862583052366972, + "src/backend/tests/unit/io/test_io_schema.py::TestCreateInputSchema::test_invalid_field_types_handling": 0.0005195839912630618, + "src/backend/tests/unit/io/test_io_schema.py::TestCreateInputSchema::test_is_list_attribute_processing": 0.0005015420028939843, + "src/backend/tests/unit/io/test_io_schema.py::TestCreateInputSchema::test_is_list_handling": 0.0006114579737186432, + "src/backend/tests/unit/io/test_io_schema.py::TestCreateInputSchema::test_missing_attributes_handling": 0.0008052079356275499, + "src/backend/tests/unit/io/test_io_schema.py::TestCreateInputSchema::test_missing_optional_attributes": 0.000468374986667186, + "src/backend/tests/unit/io/test_io_schema.py::TestCreateInputSchema::test_mixed_required_optional_fields_processing": 0.002099290897604078, + "src/backend/tests/unit/io/test_io_schema.py::TestCreateInputSchema::test_multiple_input_types": 0.0024241250357590616, + "src/backend/tests/unit/io/test_io_schema.py::TestCreateInputSchema::test_non_standard_field_types_handling": 0.008301167981699109, + "src/backend/tests/unit/io/test_io_schema.py::TestCreateInputSchema::test_none_default_value_handling": 0.0005369589780457318, + "src/backend/tests/unit/io/test_io_schema.py::TestCreateInputSchema::test_options_attribute_processing": 0.0006440419820137322, + "src/backend/tests/unit/io/test_io_schema.py::TestCreateInputSchema::test_options_handling": 0.0006011250079609454, + "src/backend/tests/unit/io/test_io_schema.py::TestCreateInputSchema::test_passing_input_type_directly": 0.00027875008527189493, + "src/backend/tests/unit/io/test_io_schema.py::TestCreateInputSchema::test_schema_model_creation": 
0.0005466659786179662, + "src/backend/tests/unit/io/test_io_schema.py::TestCreateInputSchema::test_single_input_type_conversion": 0.0004978319630026817, + "src/backend/tests/unit/io/test_io_schema.py::TestCreateInputSchema::test_single_input_type_replica": 0.0006945840432308614, + "src/backend/tests/unit/io/test_io_schema.py::TestCreateInputSchema::test_special_characters_in_names_handling": 0.0006890429649502039, + "src/backend/tests/unit/io/test_io_schema.py::test_create_input_schema": 0.001329874969087541, + "src/backend/tests/unit/io/test_table_schema.py::TestColumn::test_create_column_with_valid_formatter": 0.00028562499210238457, + "src/backend/tests/unit/io/test_table_schema.py::TestColumn::test_create_column_without_display_name": 0.0002880429965443909, + "src/backend/tests/unit/io/test_table_schema.py::TestColumn::test_create_with_type_instead_of_formatter": 0.00029345706570893526, + "src/backend/tests/unit/io/test_table_schema.py::TestColumn::test_default_sortable_filterable": 0.0007682930445298553, + "src/backend/tests/unit/io/test_table_schema.py::TestColumn::test_description_and_default": 0.0002763750380836427, + "src/backend/tests/unit/io/test_table_schema.py::TestColumn::test_formatter_explicitly_set_to_enum": 0.00034366699401289225, + "src/backend/tests/unit/io/test_table_schema.py::TestColumn::test_formatter_none_when_not_provided": 0.00028391601517796516, + "src/backend/tests/unit/io/test_table_schema.py::TestColumn::test_formatter_set_based_on_value": 0.00027029094053432345, + "src/backend/tests/unit/io/test_table_schema.py::TestColumn::test_invalid_formatter_raises_value_error": 0.0004301670123822987, + "src/backend/tests/unit/schema/test_content_block.py::TestContentBlock::test_allow_markdown_override": 0.0002733329893089831, + "src/backend/tests/unit/schema/test_content_block.py::TestContentBlock::test_initialize_with_empty_contents": 0.00028624996775761247, + "src/backend/tests/unit/schema/test_content_block.py::TestContentBlock::test_initialize_with_valid_title_and_contents": 0.0007044579833745956, + "src/backend/tests/unit/schema/test_content_block.py::TestContentBlock::test_invalid_contents_type": 0.0004489150596782565, + "src/backend/tests/unit/schema/test_content_block.py::TestContentBlock::test_media_url_handling": 0.0002673749695532024, + "src/backend/tests/unit/schema/test_content_block.py::TestContentBlock::test_serialize_contents": 0.0003351250197738409, + "src/backend/tests/unit/schema/test_content_block.py::TestContentBlock::test_single_content_conversion": 0.0008065829752013087, + "src/backend/tests/unit/schema/test_content_block.py::TestContentBlock::test_validate_different_content_types": 0.00029950099997222424, + "src/backend/tests/unit/schema/test_content_types.py::TestBaseContent::test_base_content_serialization": 0.0002906660665757954, + "src/backend/tests/unit/schema/test_content_types.py::TestBaseContent::test_base_content_with_duration": 0.00024025002494454384, + "src/backend/tests/unit/schema/test_content_types.py::TestBaseContent::test_base_content_with_header": 0.000263167021330446, + "src/backend/tests/unit/schema/test_content_types.py::TestCodeContent::test_code_content_creation": 0.0003462510067038238, + "src/backend/tests/unit/schema/test_content_types.py::TestCodeContent::test_code_content_without_title": 0.0004203749122098088, + "src/backend/tests/unit/schema/test_content_types.py::TestErrorContent::test_error_content_creation": 0.0007512079901061952, + 
"src/backend/tests/unit/schema/test_content_types.py::TestErrorContent::test_error_content_optional_fields": 0.0004739170544780791, + "src/backend/tests/unit/schema/test_content_types.py::TestJSONContent::test_json_content_complex_data": 0.00026983401039615273, + "src/backend/tests/unit/schema/test_content_types.py::TestJSONContent::test_json_content_creation": 0.0002479159738868475, + "src/backend/tests/unit/schema/test_content_types.py::TestMediaContent::test_media_content_creation": 0.0003469169605523348, + "src/backend/tests/unit/schema/test_content_types.py::TestMediaContent::test_media_content_without_caption": 0.0003001249278895557, + "src/backend/tests/unit/schema/test_content_types.py::TestTextContent::test_text_content_creation": 0.00026616599643602967, + "src/backend/tests/unit/schema/test_content_types.py::TestTextContent::test_text_content_with_duration": 0.0002679169992916286, + "src/backend/tests/unit/schema/test_content_types.py::TestToolContent::test_tool_content_creation": 0.001486831926740706, + "src/backend/tests/unit/schema/test_content_types.py::TestToolContent::test_tool_content_minimal": 0.00028129201382398605, + "src/backend/tests/unit/schema/test_content_types.py::TestToolContent::test_tool_content_with_error": 0.0016390830860473216, + "src/backend/tests/unit/schema/test_content_types.py::test_content_type_discrimination": 0.00027854100335389376, + "src/backend/tests/unit/schema/test_schema_data.py::TestDataSchema::test_data_to_message_ai_response": 0.0002786249970085919, + "src/backend/tests/unit/schema/test_schema_data.py::TestDataSchema::test_data_to_message_invalid_image_path": 0.0025373749085702, + "src/backend/tests/unit/schema/test_schema_data.py::TestDataSchema::test_data_to_message_missing_required_keys": 0.0003245419356971979, + "src/backend/tests/unit/schema/test_schema_data.py::TestDataSchema::test_data_to_message_with_image": 0.0013750839862041175, + "src/backend/tests/unit/schema/test_schema_data.py::TestDataSchema::test_data_to_message_with_multiple_images": 0.001365667034406215, + "src/backend/tests/unit/schema/test_schema_data.py::TestDataSchema::test_data_to_message_with_text_only": 0.00027658400358632207, + "src/backend/tests/unit/schema/test_schema_message.py::test_message_async_prompt_serialization": 0.00209424999775365, + "src/backend/tests/unit/schema/test_schema_message.py::test_message_from_ai_text": 0.00034754094667732716, + "src/backend/tests/unit/schema/test_schema_message.py::test_message_from_human_text": 0.0012164590298198164, + "src/backend/tests/unit/schema/test_schema_message.py::test_message_prompt_serialization": 1.9019360410165973, + "src/backend/tests/unit/schema/test_schema_message.py::test_message_serialization": 0.0019001259934157133, + "src/backend/tests/unit/schema/test_schema_message.py::test_message_to_lc_without_sender": 0.0017389999702572823, + "src/backend/tests/unit/schema/test_schema_message.py::test_message_with_invalid_image_path": 0.0007534589967690408, + "src/backend/tests/unit/schema/test_schema_message.py::test_message_with_multiple_images": 0.016659082961268723, + "src/backend/tests/unit/schema/test_schema_message.py::test_message_with_single_image": 0.025450084009207785, + "src/backend/tests/unit/schema/test_schema_message.py::test_message_without_sender": 0.0003494169795885682, + "src/backend/tests/unit/services/variable/test_service.py::test_create_variable": 0.02396712393965572, + "src/backend/tests/unit/services/variable/test_service.py::test_delete_varaible_by_id": 0.0060262500192038715, + 
"src/backend/tests/unit/services/variable/test_service.py::test_delete_variable": 0.00510391709394753, + "src/backend/tests/unit/services/variable/test_service.py::test_delete_variable__ValueError": 0.0035743750049732625, + "src/backend/tests/unit/services/variable/test_service.py::test_delete_variable__valueerror": 0.0073865000158548355, + "src/backend/tests/unit/services/variable/test_service.py::test_delete_variable_by_id": 0.005353167012799531, + "src/backend/tests/unit/services/variable/test_service.py::test_delete_variable_by_id__ValueError": 0.27340612601256, + "src/backend/tests/unit/services/variable/test_service.py::test_delete_variable_by_id__valueerror": 0.0034554169978946447, + "src/backend/tests/unit/services/variable/test_service.py::test_get_variable": 0.006138333003036678, + "src/backend/tests/unit/services/variable/test_service.py::test_get_variable__TypeError": 0.00458791694836691, + "src/backend/tests/unit/services/variable/test_service.py::test_get_variable__ValueError": 0.003811584028881043, + "src/backend/tests/unit/services/variable/test_service.py::test_get_variable__typeerror": 0.008321874018292874, + "src/backend/tests/unit/services/variable/test_service.py::test_get_variable__valueerror": 0.006569876044522971, + "src/backend/tests/unit/services/variable/test_service.py::test_initialize_user_variables__create_and_update": 0.0552749169874005, + "src/backend/tests/unit/services/variable/test_service.py::test_initialize_user_variables__donkey": 0.0002315010060556233, + "src/backend/tests/unit/services/variable/test_service.py::test_initialize_user_variables__not_found_variable": 0.04962304187938571, + "src/backend/tests/unit/services/variable/test_service.py::test_initialize_user_variables__skipping_environment_variable_storage": 0.0034132919972762465, + "src/backend/tests/unit/services/variable/test_service.py::test_list_variables": 0.008342625049408525, + "src/backend/tests/unit/services/variable/test_service.py::test_list_variables__empty": 0.0035083330585621297, + "src/backend/tests/unit/services/variable/test_service.py::test_update_variable": 0.03663441602839157, + "src/backend/tests/unit/services/variable/test_service.py::test_update_variable__ValueError": 0.0036237920285202563, + "src/backend/tests/unit/services/variable/test_service.py::test_update_variable__valueerror": 0.004020916006993502, + "src/backend/tests/unit/services/variable/test_service.py::test_update_variable_fields": 0.005824209074489772, + "src/backend/tests/unit/test_api_key.py::test_create_api_key": 2.760331876052078, + "src/backend/tests/unit/test_api_key.py::test_delete_api_key": 4.250179124006536, + "src/backend/tests/unit/test_api_key.py::test_get_api_keys": 11.92522079194896, "src/backend/tests/unit/test_cache.py::test_build_graph": 1.1988659180001378, - "src/backend/tests/unit/test_chat_endpoint.py::test_build_flow": 5.618974041000001, - "src/backend/tests/unit/test_chat_endpoint.py::test_build_flow_from_request_data": 5.709847209000145, - "src/backend/tests/unit/test_chat_endpoint.py::test_build_flow_with_frozen_path": 6.376642749999974, - "src/backend/tests/unit/test_cli.py::test_components_path": 1.2666867920006553, - "src/backend/tests/unit/test_cli.py::test_superuser": 1.6751555829996505, - "src/backend/tests/unit/test_custom_component.py::test_build_config_field_keys": 0.2042501250002715, - "src/backend/tests/unit/test_custom_component.py::test_build_config_field_value_keys": 0.19506029200010744, - 
"src/backend/tests/unit/test_custom_component.py::test_build_config_field_values_dict": 0.1966598340004566, - "src/backend/tests/unit/test_custom_component.py::test_build_config_fields_dict": 0.20268266700031745, - "src/backend/tests/unit/test_custom_component.py::test_build_config_has_fields": 0.1948232069998994, - "src/backend/tests/unit/test_custom_component.py::test_build_config_no_code": 0.000207583999781491, - "src/backend/tests/unit/test_custom_component.py::test_build_config_return_type": 0.19622991700043713, - "src/backend/tests/unit/test_custom_component.py::test_code_parser_get_tree": 0.00027658399994834326, - "src/backend/tests/unit/test_custom_component.py::test_code_parser_init": 0.0002327079996575776, - "src/backend/tests/unit/test_custom_component.py::test_code_parser_parse_ann_assign": 0.00019754200002353173, - "src/backend/tests/unit/test_custom_component.py::test_code_parser_parse_arg_no_annotation": 0.00018179199969381443, - "src/backend/tests/unit/test_custom_component.py::test_code_parser_parse_arg_with_annotation": 0.0001805840006454673, - "src/backend/tests/unit/test_custom_component.py::test_code_parser_parse_assign": 0.0002079589999084419, - "src/backend/tests/unit/test_custom_component.py::test_code_parser_parse_callable_details_no_args": 0.0001973339999494783, - "src/backend/tests/unit/test_custom_component.py::test_code_parser_parse_classes": 0.00032654200003889855, - "src/backend/tests/unit/test_custom_component.py::test_code_parser_parse_classes_raises": 0.0002649169996402634, - "src/backend/tests/unit/test_custom_component.py::test_code_parser_parse_function_def_init": 0.00018600100020194077, - "src/backend/tests/unit/test_custom_component.py::test_code_parser_parse_function_def_not_init": 0.00019508200011841836, - "src/backend/tests/unit/test_custom_component.py::test_code_parser_parse_functions": 0.00021508300005734782, - "src/backend/tests/unit/test_custom_component.py::test_code_parser_parse_global_vars": 0.00020745999972859863, - "src/backend/tests/unit/test_custom_component.py::test_code_parser_parse_imports_import": 0.00025808299960772274, - "src/backend/tests/unit/test_custom_component.py::test_code_parser_parse_imports_importfrom": 0.00018808400045600138, - "src/backend/tests/unit/test_custom_component.py::test_code_parser_syntax_error": 0.001801750000595348, - "src/backend/tests/unit/test_custom_component.py::test_component_code_null_error": 0.0001859579997471883, - "src/backend/tests/unit/test_custom_component.py::test_component_get_code_tree": 0.0006688740004392457, - "src/backend/tests/unit/test_custom_component.py::test_component_get_code_tree_syntax_error": 0.0002742089995990682, - "src/backend/tests/unit/test_custom_component.py::test_component_get_function_valid": 0.00021645800006808713, - "src/backend/tests/unit/test_custom_component.py::test_component_init": 0.00018979200012836372, - "src/backend/tests/unit/test_custom_component.py::test_custom_component_build_not_implemented": 0.00019016700025531463, - "src/backend/tests/unit/test_custom_component.py::test_custom_component_build_template_config": 0.00036833300055150175, - "src/backend/tests/unit/test_custom_component.py::test_custom_component_class_template_validation_no_code": 0.00018979200012836372, - "src/backend/tests/unit/test_custom_component.py::test_custom_component_get_code_tree_syntax_error": 0.0002411679997749161, - "src/backend/tests/unit/test_custom_component.py::test_custom_component_get_function": 0.00022095800022725598, - 
"src/backend/tests/unit/test_custom_component.py::test_custom_component_get_function_entrypoint_args": 0.0006820000003244786, - "src/backend/tests/unit/test_custom_component.py::test_custom_component_get_function_entrypoint_args_no_args": 0.0003158340000481985, - "src/backend/tests/unit/test_custom_component.py::test_custom_component_get_function_entrypoint_return_type": 0.0005181660003472643, - "src/backend/tests/unit/test_custom_component.py::test_custom_component_get_function_entrypoint_return_type_no_return_type": 0.00031354199927591253, - "src/backend/tests/unit/test_custom_component.py::test_custom_component_get_function_valid": 0.00019329300039316877, - "src/backend/tests/unit/test_custom_component.py::test_custom_component_get_main_class_name": 0.0005222909999247349, - "src/backend/tests/unit/test_custom_component.py::test_custom_component_get_main_class_name_no_main_class": 0.00021295800024745404, - "src/backend/tests/unit/test_custom_component.py::test_custom_component_init": 0.0001823750003495661, - "src/backend/tests/unit/test_custom_component.py::test_custom_component_multiple_outputs": 0.19756641600042713, + "src/backend/tests/unit/test_chat_endpoint.py::test_build_flow": 8.551071708032396, + "src/backend/tests/unit/test_chat_endpoint.py::test_build_flow_from_request_data": 5.552124626003206, + "src/backend/tests/unit/test_chat_endpoint.py::test_build_flow_with_frozen_path": 5.318790583114605, + "src/backend/tests/unit/test_cli.py::test_components_path": 0.1909987919498235, + "src/backend/tests/unit/test_cli.py::test_superuser": 0.10571425000671297, + "src/backend/tests/unit/test_custom_component.py::test_build_config_field_keys": 0.0003112919512204826, + "src/backend/tests/unit/test_custom_component.py::test_build_config_field_value_keys": 0.00025070906849578023, + "src/backend/tests/unit/test_custom_component.py::test_build_config_field_values_dict": 0.0002822090173140168, + "src/backend/tests/unit/test_custom_component.py::test_build_config_fields_dict": 0.0007648749742656946, + "src/backend/tests/unit/test_custom_component.py::test_build_config_has_fields": 0.00025216699577867985, + "src/backend/tests/unit/test_custom_component.py::test_build_config_no_code": 0.00024991604732349515, + "src/backend/tests/unit/test_custom_component.py::test_build_config_return_type": 0.00036433403147384524, + "src/backend/tests/unit/test_custom_component.py::test_code_parser_get_tree": 0.0003267080173827708, + "src/backend/tests/unit/test_custom_component.py::test_code_parser_init": 0.00029012502636760473, + "src/backend/tests/unit/test_custom_component.py::test_code_parser_parse_ann_assign": 0.00023275002604350448, + "src/backend/tests/unit/test_custom_component.py::test_code_parser_parse_arg_no_annotation": 0.00024983298499137163, + "src/backend/tests/unit/test_custom_component.py::test_code_parser_parse_arg_with_annotation": 0.0003472079406492412, + "src/backend/tests/unit/test_custom_component.py::test_code_parser_parse_assign": 0.0002852080506272614, + "src/backend/tests/unit/test_custom_component.py::test_code_parser_parse_callable_details_no_args": 0.0002649589441716671, + "src/backend/tests/unit/test_custom_component.py::test_code_parser_parse_classes": 0.0004057500045746565, + "src/backend/tests/unit/test_custom_component.py::test_code_parser_parse_classes_raises": 0.0002842499525286257, + "src/backend/tests/unit/test_custom_component.py::test_code_parser_parse_function_def_init": 0.00023675098782405257, + 
"src/backend/tests/unit/test_custom_component.py::test_code_parser_parse_function_def_not_init": 0.0002423340338282287, + "src/backend/tests/unit/test_custom_component.py::test_code_parser_parse_functions": 0.0003364600124768913, + "src/backend/tests/unit/test_custom_component.py::test_code_parser_parse_global_vars": 0.0002477499656379223, + "src/backend/tests/unit/test_custom_component.py::test_code_parser_parse_imports_import": 0.0003402500879019499, + "src/backend/tests/unit/test_custom_component.py::test_code_parser_parse_imports_importfrom": 0.0003161249333061278, + "src/backend/tests/unit/test_custom_component.py::test_code_parser_syntax_error": 0.0005928329774178565, + "src/backend/tests/unit/test_custom_component.py::test_component_code_null_error": 0.00034683302510529757, + "src/backend/tests/unit/test_custom_component.py::test_component_get_code_tree": 0.002789582998957485, + "src/backend/tests/unit/test_custom_component.py::test_component_get_code_tree_syntax_error": 0.0016098330379463732, + "src/backend/tests/unit/test_custom_component.py::test_component_get_function_valid": 0.0002673339331522584, + "src/backend/tests/unit/test_custom_component.py::test_component_init": 0.0002568330382928252, + "src/backend/tests/unit/test_custom_component.py::test_custom_component_build_not_implemented": 0.0002595419646240771, + "src/backend/tests/unit/test_custom_component.py::test_custom_component_build_template_config": 0.002081999904476106, + "src/backend/tests/unit/test_custom_component.py::test_custom_component_class_template_validation_no_code": 0.0007612079498358071, + "src/backend/tests/unit/test_custom_component.py::test_custom_component_get_code_tree_syntax_error": 0.0007237909594550729, + "src/backend/tests/unit/test_custom_component.py::test_custom_component_get_function": 0.001917583984322846, + "src/backend/tests/unit/test_custom_component.py::test_custom_component_get_function_entrypoint_args": 0.000828583026304841, + "src/backend/tests/unit/test_custom_component.py::test_custom_component_get_function_entrypoint_args_no_args": 0.0004628330352716148, + "src/backend/tests/unit/test_custom_component.py::test_custom_component_get_function_entrypoint_return_type": 0.0007794589619152248, + "src/backend/tests/unit/test_custom_component.py::test_custom_component_get_function_entrypoint_return_type_no_return_type": 0.0003956669825129211, + "src/backend/tests/unit/test_custom_component.py::test_custom_component_get_function_valid": 0.0002475408837199211, + "src/backend/tests/unit/test_custom_component.py::test_custom_component_get_main_class_name": 0.0006563340430147946, + "src/backend/tests/unit/test_custom_component.py::test_custom_component_get_main_class_name_no_main_class": 0.00037370907375589013, + "src/backend/tests/unit/test_custom_component.py::test_custom_component_init": 0.00027370802126824856, + "src/backend/tests/unit/test_custom_component.py::test_custom_component_multiple_outputs": 0.00709158304380253, + "src/backend/tests/unit/test_custom_component.py::test_custom_component_subclass_from_lctoolcomponent": 0.0016188329900614917, "src/backend/tests/unit/test_custom_component.py::test_list_flows_flow_objects": 1.981454541994026, - "src/backend/tests/unit/test_custom_component.py::test_list_flows_return_type": 0.2607629169997381, - "src/backend/tests/unit/test_custom_component_with_client.py::test_list_flows_flow_objects": 1.306553499999609, - "src/backend/tests/unit/test_data_class.py::test_add_method_for_integers": 1.1870880830006172, - 
"src/backend/tests/unit/test_data_class.py::test_add_method_for_strings": 1.1892704990000311, - "src/backend/tests/unit/test_data_class.py::test_add_method_with_non_overlapping_keys": 1.1890747499996905, - "src/backend/tests/unit/test_data_class.py::test_conversion_from_document": 1.2196368339996297, - "src/backend/tests/unit/test_data_class.py::test_conversion_to_document": 1.1908134569998765, - "src/backend/tests/unit/test_data_class.py::test_custom_attribute_get_set_del": 1.1929801669998596, - "src/backend/tests/unit/test_data_class.py::test_custom_attribute_setting_and_getting": 1.1900344170003336, - "src/backend/tests/unit/test_data_class.py::test_data_initialization": 1.2146948340000563, - "src/backend/tests/unit/test_data_class.py::test_deep_copy": 1.1954752910000934, - "src/backend/tests/unit/test_data_class.py::test_dir_includes_data_keys": 2.760468000000401, - "src/backend/tests/unit/test_data_class.py::test_dir_reflects_attribute_deletion": 1.2251055420001649, - "src/backend/tests/unit/test_data_class.py::test_get_text_with_empty_data": 1.2183382499997606, - "src/backend/tests/unit/test_data_class.py::test_get_text_with_none_data": 1.2144034150001062, - "src/backend/tests/unit/test_data_class.py::test_get_text_with_text_key": 1.2315653749997182, - "src/backend/tests/unit/test_data_class.py::test_get_text_without_text_key": 1.2123793340001612, - "src/backend/tests/unit/test_data_class.py::test_str_and_dir_methods": 1.2003138760001093, - "src/backend/tests/unit/test_data_class.py::test_validate_data_with_extra_keys": 1.1931614580003043, - "src/backend/tests/unit/test_data_components.py::test_build_with_multiple_urls": 0.005447873999855801, - "src/backend/tests/unit/test_data_components.py::test_directory_component_build_with_multithreading": 0.0007867510003052303, - "src/backend/tests/unit/test_data_components.py::test_directory_without_mocks": 0.25596470800019233, - "src/backend/tests/unit/test_data_components.py::test_failed_request": 0.00482766699997228, - "src/backend/tests/unit/test_data_components.py::test_parse_curl": 0.0003262509999331087, - "src/backend/tests/unit/test_data_components.py::test_successful_get_request": 0.0054463749997921695, - "src/backend/tests/unit/test_data_components.py::test_timeout": 0.004433167000115645, - "src/backend/tests/unit/test_data_components.py::test_url_component": 0.4809711249999964, - "src/backend/tests/unit/test_database.py::test_create_flow": 1.6828834990005817, - "src/backend/tests/unit/test_database.py::test_create_flow_with_invalid_data": 1.6774957920001725, - "src/backend/tests/unit/test_database.py::test_create_flows": 1.673384124999302, - "src/backend/tests/unit/test_database.py::test_delete_flow": 1.6573062500006017, - "src/backend/tests/unit/test_database.py::test_delete_flows": 1.7055409169997802, - "src/backend/tests/unit/test_database.py::test_delete_flows_with_transaction_and_build": 1.7174484999995911, - "src/backend/tests/unit/test_database.py::test_delete_nonexistent_flow": 1.6730859590002183, - "src/backend/tests/unit/test_database.py::test_download_file": 3.4064119170002414, - "src/backend/tests/unit/test_database.py::test_get_nonexistent_flow": 1.6842376250001507, - "src/backend/tests/unit/test_database.py::test_load_flows": 1.3783337080003548, - "src/backend/tests/unit/test_database.py::test_migrate_transactions": 1.40214775000004, - "src/backend/tests/unit/test_database.py::test_migrate_transactions_no_duckdb": 1.364264834000096, - "src/backend/tests/unit/test_database.py::test_read_flow": 1.6665177079999012, - 
"src/backend/tests/unit/test_database.py::test_read_flows": 1.7203122079999957, - "src/backend/tests/unit/test_database.py::test_read_only_starter_projects": 1.7277780829995208, - "src/backend/tests/unit/test_database.py::test_sqlite_pragmas": 1.3023467509997317, - "src/backend/tests/unit/test_database.py::test_update_flow": 1.6924472499999865, - "src/backend/tests/unit/test_database.py::test_update_flow_idempotency": 1.7028064580003957, - "src/backend/tests/unit/test_database.py::test_update_nonexistent_flow": 1.7007274170000528, - "src/backend/tests/unit/test_database.py::test_upload_file": 1.6786055420002413, - "src/backend/tests/unit/test_experimental_components.py::test_python_function_component": 1.3152118330008307, - "src/backend/tests/unit/test_files.py::test_delete_file": 1.8698114990006616, - "src/backend/tests/unit/test_files.py::test_download_file": 1.8618997500002479, - "src/backend/tests/unit/test_files.py::test_file_operations": 1.8774095419998957, - "src/backend/tests/unit/test_files.py::test_list_files": 1.8360633750003217, - "src/backend/tests/unit/test_files.py::test_upload_file": 1.8361791669995, - "src/backend/tests/unit/test_frontend_nodes.py::test_frontend_node_to_dict": 1.2789837089994762, - "src/backend/tests/unit/test_frontend_nodes.py::test_template_field_defaults": 1.4903630839994548, - "src/backend/tests/unit/test_frontend_nodes.py::test_template_to_dict": 3.1915530399996896, - "src/backend/tests/unit/test_helper_components.py::test_data_as_text_component": 1.2936121669995373, - "src/backend/tests/unit/test_helper_components.py::test_uuid_generator_component": 1.2925201660000312, - "src/backend/tests/unit/test_initial_setup.py::test_create_or_update_starter_projects": 1.3174052090002988, - "src/backend/tests/unit/test_initial_setup.py::test_get_project_data": 1.3585754989994712, - "src/backend/tests/unit/test_initial_setup.py::test_load_starter_projects": 1.2834318330001224, - "src/backend/tests/unit/test_initial_setup.py::test_refresh_starter_projects": 2.5242092499997852, - "src/backend/tests/unit/test_kubernetes_secrets.py::test_create_secret": 1.277097417000732, - "src/backend/tests/unit/test_kubernetes_secrets.py::test_delete_secret": 1.2887339569997494, - "src/backend/tests/unit/test_kubernetes_secrets.py::test_email_address": 1.3158049170001505, - "src/backend/tests/unit/test_kubernetes_secrets.py::test_encode_string": 1.2695311679999577, - "src/backend/tests/unit/test_kubernetes_secrets.py::test_encode_uuid": 1.259859792000043, - "src/backend/tests/unit/test_kubernetes_secrets.py::test_ends_with_non_alphanumeric": 1.302864874999159, - "src/backend/tests/unit/test_kubernetes_secrets.py::test_get_secret": 1.2660873319996426, - "src/backend/tests/unit/test_kubernetes_secrets.py::test_long_string": 1.2887962920003702, - "src/backend/tests/unit/test_kubernetes_secrets.py::test_starts_with_non_alphanumeric": 1.3552359580003213, - "src/backend/tests/unit/test_kubernetes_secrets.py::test_uuid_case_insensitivity": 1.299499248000302, - "src/backend/tests/unit/test_loading.py::test_load_flow_from_json": 0.0960882499998661, - "src/backend/tests/unit/test_loading.py::test_load_flow_from_json_object": 0.018880542000260903, - "src/backend/tests/unit/test_loading.py::test_load_flow_from_json_with_tweaks": 0.016509083999608265, - "src/backend/tests/unit/test_logger.py::test_enabled": 1.0503544179996425, - "src/backend/tests/unit/test_logger.py::test_get_after_timestamp": 1.057395667000037, - "src/backend/tests/unit/test_logger.py::test_get_before_timestamp": 
1.069635374000427, - "src/backend/tests/unit/test_logger.py::test_get_last_n": 1.0638129170001775, - "src/backend/tests/unit/test_logger.py::test_init_default": 3.221781582999938, - "src/backend/tests/unit/test_logger.py::test_init_with_env_variable": 1.1272078760002842, - "src/backend/tests/unit/test_logger.py::test_len": 1.0672918339996613, - "src/backend/tests/unit/test_logger.py::test_max_size": 1.0532803749997584, - "src/backend/tests/unit/test_logger.py::test_write": 1.083741292999548, - "src/backend/tests/unit/test_logger.py::test_write_overflow": 1.0727743750007903, - "src/backend/tests/unit/test_login.py::test_login_successful": 1.495486374999473, - "src/backend/tests/unit/test_login.py::test_login_unsuccessful_wrong_password": 1.2617856669999128, - "src/backend/tests/unit/test_login.py::test_login_unsuccessful_wrong_username": 1.0718942079993212, - "src/backend/tests/unit/test_messages.py::test_add_messages": 1.0813413340001716, - "src/backend/tests/unit/test_messages.py::test_add_messagetables": 1.064473082999939, - "src/backend/tests/unit/test_messages.py::test_convert_to_langchain[convert_to_langchain_type]": 1.0604117919997407, - "src/backend/tests/unit/test_messages.py::test_convert_to_langchain[message]": 1.0689540419998593, - "src/backend/tests/unit/test_messages.py::test_delete_messages": 1.1744598340001176, - "src/backend/tests/unit/test_messages.py::test_get_messages": 1.0707767070002774, - "src/backend/tests/unit/test_messages.py::test_store_message": 1.0799672490002195, - "src/backend/tests/unit/test_process.py::test_load_langchain_object_with_cached_session": 1.0705989160001081, - "src/backend/tests/unit/test_process.py::test_load_langchain_object_with_no_cached_session": 1.0660591649993876, - "src/backend/tests/unit/test_process.py::test_load_langchain_object_without_session_id": 1.0685117090001768, - "src/backend/tests/unit/test_process.py::test_multiple_tweaks": 3.4656270829996174, - "src/backend/tests/unit/test_process.py::test_no_tweaks": 1.0528393340005096, - "src/backend/tests/unit/test_process.py::test_single_tweak": 1.0588130830001319, - "src/backend/tests/unit/test_process.py::test_tweak_no_node_id": 1.0907960420004201, - "src/backend/tests/unit/test_process.py::test_tweak_not_in_template": 1.065006457999516, - "src/backend/tests/unit/test_setup_superuser.py::test_teardown_superuser_default_superuser": 1.0545740010002191, - "src/backend/tests/unit/test_setup_superuser.py::test_teardown_superuser_no_default_superuser": 1.054325458999756, - "src/backend/tests/unit/test_telemetry.py::test_gauge": 1.081740833999902, - "src/backend/tests/unit/test_telemetry.py::test_gauge_with_counter_method": 1.056230042000152, - "src/backend/tests/unit/test_telemetry.py::test_gauge_with_historgram_method": 1.110880250000264, - "src/backend/tests/unit/test_telemetry.py::test_gauge_with_up_down_counter_method": 1.1089114589999554, - "src/backend/tests/unit/test_telemetry.py::test_increment_counter": 1.091222834000746, - "src/backend/tests/unit/test_telemetry.py::test_increment_counter_empty_label": 1.0598395000001801, - "src/backend/tests/unit/test_telemetry.py::test_increment_counter_missing_mandatory_label": 1.0523453739997422, - "src/backend/tests/unit/test_telemetry.py::test_increment_counter_unregisted_metric": 1.060642084000392, - "src/backend/tests/unit/test_telemetry.py::test_init": 1.0586442910002916, - "src/backend/tests/unit/test_telemetry.py::test_missing_labels": 1.0542013740000584, - "src/backend/tests/unit/test_telemetry.py::test_multithreaded_singleton": 
1.0596915410001202, - "src/backend/tests/unit/test_telemetry.py::test_multithreaded_singleton_race_condition": 1.078360167000028, - "src/backend/tests/unit/test_telemetry.py::test_opentelementry_singleton": 1.0586542500004725, - "src/backend/tests/unit/test_template.py::test_build_template_from_function": 1.0667507910002314, - "src/backend/tests/unit/test_template.py::test_get_base_classes": 1.0488440839994837, - "src/backend/tests/unit/test_template.py::test_get_default_factory": 1.061691208999946, - "src/backend/tests/unit/test_validate_code.py::test_create_function": 3.7202681249996203, - "src/backend/tests/unit/test_validate_code.py::test_execute_function_missing_function": 1.0697480410003664, - "src/backend/tests/unit/test_validate_code.py::test_execute_function_missing_module": 1.0638055839999652, - "src/backend/tests/unit/test_validate_code.py::test_execute_function_missing_schema": 1.0542077080003764, - "src/backend/tests/unit/test_validate_code.py::test_execute_function_success": 1.0714820839998538, - "src/backend/tests/unit/test_validate_code.py::test_validate_code": 1.0665104990002874, - "src/backend/tests/unit/test_version.py::test_compute_main": 1.0618379169995933, - "src/backend/tests/unit/test_version.py::test_version": 1.0611935000001722, - "src/backend/tests/unit/utils/test_connection_string_parser.py::test_transform_connection_string[protocol::password@host-protocol::password@host]": 0.0002089159997922252, - "src/backend/tests/unit/utils/test_connection_string_parser.py::test_transform_connection_string[protocol:user:pa:ss:word@host-protocol:user:pa:ss:word@host]": 0.0002251249998153071, - "src/backend/tests/unit/utils/test_connection_string_parser.py::test_transform_connection_string[protocol:user:pa@ss@word@host-protocol:user:pa%40ss%40word@host]": 0.0002600429997983156, - "src/backend/tests/unit/utils/test_connection_string_parser.py::test_transform_connection_string[protocol:user:pass@word@host-protocol:user:pass%40word@host]": 0.0002567910000834672, - "src/backend/tests/unit/utils/test_connection_string_parser.py::test_transform_connection_string[protocol:user:password@-protocol:user:password@]": 0.0002196240002376726, - "src/backend/tests/unit/utils/test_connection_string_parser.py::test_transform_connection_string[protocol:user:password@host-protocol:user:password@host]": 0.0002557500001785229, - "src/backend/tests/unit/utils/test_connection_string_parser.py::test_transform_connection_string[protocol:user@host-protocol:user@host]": 0.00021862499943381408, - "src/backend/tests/unit/utils/test_connection_string_parser.py::test_transform_connection_string[user:password@host-user:password@host]": 0.0002157910003006691 + "src/backend/tests/unit/test_custom_component.py::test_list_flows_return_type": 0.36947908403817564, + "src/backend/tests/unit/test_custom_component_with_client.py::test_feature_flags_add_toolkit_output": 2.7484489580092486, + "src/backend/tests/unit/test_custom_component_with_client.py::test_list_flows_flow_objects": 15.2948470840347, + "src/backend/tests/unit/test_custom_component_with_client.py::test_list_flows_return_type": 2.0909082910511643, + "src/backend/tests/unit/test_data_class.py::test_add_method_for_integers": 0.00032416597241535783, + "src/backend/tests/unit/test_data_class.py::test_add_method_for_strings": 0.00024437595857307315, + "src/backend/tests/unit/test_data_class.py::test_add_method_with_non_overlapping_keys": 0.0015005830209702253, + "src/backend/tests/unit/test_data_class.py::test_conversion_from_document": 
0.0002870820462703705, + "src/backend/tests/unit/test_data_class.py::test_conversion_to_document": 0.006023500929586589, + "src/backend/tests/unit/test_data_class.py::test_custom_attribute_get_set_del": 0.00029191700741648674, + "src/backend/tests/unit/test_data_class.py::test_custom_attribute_setting_and_getting": 0.0002585409674793482, + "src/backend/tests/unit/test_data_class.py::test_data_initialization": 0.0036302070366218686, + "src/backend/tests/unit/test_data_class.py::test_deep_copy": 0.0005384170217439532, + "src/backend/tests/unit/test_data_class.py::test_dir_includes_data_keys": 0.0003018759307451546, + "src/backend/tests/unit/test_data_class.py::test_dir_reflects_attribute_deletion": 0.00028354296227917075, + "src/backend/tests/unit/test_data_class.py::test_get_text_with_empty_data": 0.0002786670229397714, + "src/backend/tests/unit/test_data_class.py::test_get_text_with_none_data": 0.000253666948992759, + "src/backend/tests/unit/test_data_class.py::test_get_text_with_text_key": 0.00026395899476483464, + "src/backend/tests/unit/test_data_class.py::test_get_text_without_text_key": 0.0003883759491145611, + "src/backend/tests/unit/test_data_class.py::test_str_and_dir_methods": 0.00035749899689108133, + "src/backend/tests/unit/test_data_class.py::test_validate_data_with_extra_keys": 0.00032462505623698235, + "src/backend/tests/unit/test_data_components.py::test_build_with_multiple_urls": 0.3398302910500206, + "src/backend/tests/unit/test_data_components.py::test_directory_component_build_with_multithreading": 0.0018548330408520997, + "src/backend/tests/unit/test_data_components.py::test_directory_without_mocks": 0.22499099909327924, + "src/backend/tests/unit/test_data_components.py::test_failed_request": 2.583180084009655, + "src/backend/tests/unit/test_data_components.py::test_parse_curl": 0.0027002080460079014, + "src/backend/tests/unit/test_data_components.py::test_successful_get_request": 0.018577999027911574, + "src/backend/tests/unit/test_data_components.py::test_timeout": 0.022905833029653877, + "src/backend/tests/unit/test_data_components.py::test_url_component": 0.16144524893024936, + "src/backend/tests/unit/test_database.py::test_create_flow": 12.229048417066224, + "src/backend/tests/unit/test_database.py::test_create_flow_with_invalid_data": 2.6868192089605145, + "src/backend/tests/unit/test_database.py::test_create_flows": 4.125035000033677, + "src/backend/tests/unit/test_database.py::test_delete_flow": 2.5442660829867236, + "src/backend/tests/unit/test_database.py::test_delete_flows": 3.0459045830066316, + "src/backend/tests/unit/test_database.py::test_delete_flows_with_transaction_and_build": 4.557027917006053, + "src/backend/tests/unit/test_database.py::test_delete_folder_with_flows_with_transaction_and_build": 5.18562575004762, + "src/backend/tests/unit/test_database.py::test_delete_nonexistent_flow": 2.7361094990046695, + "src/backend/tests/unit/test_database.py::test_download_file": 2.763290834031068, + "src/backend/tests/unit/test_database.py::test_get_flows_from_folder_pagination": 11.799599416030105, + "src/backend/tests/unit/test_database.py::test_get_flows_from_folder_pagination_with_params": 2.5585727910511196, + "src/backend/tests/unit/test_database.py::test_get_nonexistent_flow": 2.387806957005523, + "src/backend/tests/unit/test_database.py::test_load_flows": 2.0784470409998903, + "src/backend/tests/unit/test_database.py::test_migrate_transactions": 3.3142859160434455, + "src/backend/tests/unit/test_database.py::test_migrate_transactions_no_duckdb": 
4.5406213329406455, + "src/backend/tests/unit/test_database.py::test_read_flow": 2.6734563740319572, + "src/backend/tests/unit/test_database.py::test_read_flows": 11.382159418077208, + "src/backend/tests/unit/test_database.py::test_read_flows_components_only": 4.525152957008686, + "src/backend/tests/unit/test_database.py::test_read_flows_components_only_paginated": 5.195073916052934, + "src/backend/tests/unit/test_database.py::test_read_flows_custom_page_size": 3.262816000089515, + "src/backend/tests/unit/test_database.py::test_read_flows_invalid_page": 5.801138084032573, + "src/backend/tests/unit/test_database.py::test_read_flows_invalid_size": 11.92538437602343, + "src/backend/tests/unit/test_database.py::test_read_flows_no_pagination_params": 3.4370020409696735, + "src/backend/tests/unit/test_database.py::test_read_flows_pagination_with_flows": 3.6235305840382352, + "src/backend/tests/unit/test_database.py::test_read_flows_pagination_with_params": 4.564620042045135, + "src/backend/tests/unit/test_database.py::test_read_flows_pagination_without_params": 2.8355551669956185, + "src/backend/tests/unit/test_database.py::test_read_folder": 2.6198389580240473, + "src/backend/tests/unit/test_database.py::test_read_folder_with_component_filter": 2.297788125986699, + "src/backend/tests/unit/test_database.py::test_read_folder_with_flows": 3.7233133749687113, + "src/backend/tests/unit/test_database.py::test_read_folder_with_pagination": 2.360611206968315, + "src/backend/tests/unit/test_database.py::test_read_folder_with_search": 2.791071208019275, + "src/backend/tests/unit/test_database.py::test_read_nonexistent_folder": 3.172841250023339, + "src/backend/tests/unit/test_database.py::test_read_only_starter_projects": 2.7734275410766713, + "src/backend/tests/unit/test_database.py::test_sqlite_pragmas": 0.01063333306228742, + "src/backend/tests/unit/test_database.py::test_update_flow": 11.933393624029122, + "src/backend/tests/unit/test_database.py::test_update_flow_idempotency": 2.8060469159972854, + "src/backend/tests/unit/test_database.py::test_update_nonexistent_flow": 5.407779291970655, + "src/backend/tests/unit/test_database.py::test_upload_file": 2.4626421239809133, + "src/backend/tests/unit/test_endpoints.py::test_build_vertex_invalid_flow_id": 3.5158219590666704, + "src/backend/tests/unit/test_endpoints.py::test_build_vertex_invalid_vertex_id": 3.11827108298894, + "src/backend/tests/unit/test_endpoints.py::test_get_all": 2.8379626250825822, + "src/backend/tests/unit/test_endpoints.py::test_get_vertices": 6.432511791004799, + "src/backend/tests/unit/test_endpoints.py::test_get_vertices_flow_not_found": 3.0379495000233874, + "src/backend/tests/unit/test_endpoints.py::test_invalid_flow_id": 4.83262683294015, + "src/backend/tests/unit/test_endpoints.py::test_invalid_prompt": 1.6217343760072254, + "src/backend/tests/unit/test_endpoints.py::test_invalid_run_with_input_type_chat": 1.796533625049051, + "src/backend/tests/unit/test_endpoints.py::test_post_validate_code": 7.020954290986992, + "src/backend/tests/unit/test_endpoints.py::test_starter_projects": 4.749609249003697, + "src/backend/tests/unit/test_endpoints.py::test_successful_run_no_payload": 3.4047674169996753, + "src/backend/tests/unit/test_endpoints.py::test_successful_run_with_input_type_any": 2.6764538330608048, + "src/backend/tests/unit/test_endpoints.py::test_successful_run_with_input_type_chat": 6.699964084022213, + "src/backend/tests/unit/test_endpoints.py::test_successful_run_with_input_type_text": 5.525574458006304, + 
"src/backend/tests/unit/test_endpoints.py::test_successful_run_with_output_type_any": 4.655565874942113, + "src/backend/tests/unit/test_endpoints.py::test_successful_run_with_output_type_debug": 7.888722543022595, + "src/backend/tests/unit/test_endpoints.py::test_successful_run_with_output_type_text": 8.635741250065621, + "src/backend/tests/unit/test_endpoints.py::test_valid_prompt": 2.3432591260643676, + "src/backend/tests/unit/test_endpoints.py::test_various_prompts[The weather is {weather} today.-expected_input_variables1]": 1.8489304580143653, + "src/backend/tests/unit/test_endpoints.py::test_various_prompts[This prompt has no variables.-expected_input_variables2]": 2.6452161250053905, + "src/backend/tests/unit/test_endpoints.py::test_various_prompts[{a}, {b}, and {c} are variables.-expected_input_variables3]": 4.222045834001619, + "src/backend/tests/unit/test_endpoints.py::test_various_prompts[{color} is my favorite color.-expected_input_variables0]": 3.909155082947109, + "src/backend/tests/unit/test_experimental_components.py::test_python_function_component": 0.004152042034547776, + "src/backend/tests/unit/test_files.py::test_delete_file": 5.177216041018255, + "src/backend/tests/unit/test_files.py::test_download_file": 6.059420874982607, + "src/backend/tests/unit/test_files.py::test_file_operations": 2.4048440000624396, + "src/backend/tests/unit/test_files.py::test_list_files": 5.881258791021537, + "src/backend/tests/unit/test_files.py::test_upload_file": 4.043703542964067, + "src/backend/tests/unit/test_frontend_nodes.py::test_frontend_node_to_dict": 0.0005665839998982847, + "src/backend/tests/unit/test_frontend_nodes.py::test_template_field_defaults": 0.0004388759261928499, + "src/backend/tests/unit/test_frontend_nodes.py::test_template_to_dict": 0.0014697909355163574, + "src/backend/tests/unit/test_helper_components.py::test_data_as_text_component": 0.001189751026686281, + "src/backend/tests/unit/test_helper_components.py::test_uuid_generator_component": 0.006734457972925156, + "src/backend/tests/unit/test_initial_setup.py::test_create_or_update_starter_projects": 2.297661916934885, + "src/backend/tests/unit/test_initial_setup.py::test_get_project_data": 0.01816708402475342, + "src/backend/tests/unit/test_initial_setup.py::test_load_starter_projects": 0.01591095793992281, + "src/backend/tests/unit/test_initial_setup.py::test_refresh_starter_projects": 10.713609290949535, + "src/backend/tests/unit/test_kubernetes_secrets.py::test_create_secret": 0.0038709159125573933, + "src/backend/tests/unit/test_kubernetes_secrets.py::test_delete_secret": 0.0016375830164179206, + "src/backend/tests/unit/test_kubernetes_secrets.py::test_email_address": 0.00025904097128659487, + "src/backend/tests/unit/test_kubernetes_secrets.py::test_encode_string": 0.00022795796394348145, + "src/backend/tests/unit/test_kubernetes_secrets.py::test_encode_uuid": 0.000240708002820611, + "src/backend/tests/unit/test_kubernetes_secrets.py::test_ends_with_non_alphanumeric": 0.001827583007980138, + "src/backend/tests/unit/test_kubernetes_secrets.py::test_get_secret": 0.001762000028975308, + "src/backend/tests/unit/test_kubernetes_secrets.py::test_long_string": 0.00023833394516259432, + "src/backend/tests/unit/test_kubernetes_secrets.py::test_starts_with_non_alphanumeric": 0.0012475419789552689, + "src/backend/tests/unit/test_kubernetes_secrets.py::test_uuid_case_insensitivity": 0.00028887594817206264, + "src/backend/tests/unit/test_loading.py::test_load_flow_from_json": 1.2976477909833193, + 
"src/backend/tests/unit/test_loading.py::test_load_flow_from_json_object": 0.040529041027184576, + "src/backend/tests/unit/test_loading.py::test_load_flow_from_json_with_tweaks": 0.005636290996335447, + "src/backend/tests/unit/test_logger.py::test_enabled": 0.0002785429824143648, + "src/backend/tests/unit/test_logger.py::test_get_after_timestamp": 0.0006505009951069951, + "src/backend/tests/unit/test_logger.py::test_get_before_timestamp": 0.0003599180490709841, + "src/backend/tests/unit/test_logger.py::test_get_last_n": 0.00045024894643574953, + "src/backend/tests/unit/test_logger.py::test_init_default": 0.0005865829880349338, + "src/backend/tests/unit/test_logger.py::test_init_with_env_variable": 0.0005718329921364784, + "src/backend/tests/unit/test_logger.py::test_len": 0.0002712510759010911, + "src/backend/tests/unit/test_logger.py::test_max_size": 0.0002471670741215348, + "src/backend/tests/unit/test_logger.py::test_write": 0.0003470420488156378, + "src/backend/tests/unit/test_logger.py::test_write_overflow": 0.0005332900327630341, + "src/backend/tests/unit/test_login.py::test_login_successful": 1.5611454580212012, + "src/backend/tests/unit/test_login.py::test_login_unsuccessful_wrong_password": 4.032776457956061, + "src/backend/tests/unit/test_login.py::test_login_unsuccessful_wrong_username": 0.879311416996643, + "src/backend/tests/unit/test_messages.py::test_add_messages": 3.309187417034991, + "src/backend/tests/unit/test_messages.py::test_add_messagetables": 2.677147793059703, + "src/backend/tests/unit/test_messages.py::test_convert_to_langchain[convert_to_langchain_type]": 0.001319041009992361, + "src/backend/tests/unit/test_messages.py::test_convert_to_langchain[message]": 0.0014611250371672213, + "src/backend/tests/unit/test_messages.py::test_delete_messages": 1.7512655410100706, + "src/backend/tests/unit/test_messages.py::test_get_messages": 2.679642292088829, + "src/backend/tests/unit/test_messages.py::test_store_message": 2.0734372919541784, + "src/backend/tests/unit/test_messages.py::test_update_message_with_content_blocks": 2.0395035409601405, + "src/backend/tests/unit/test_messages.py::test_update_message_with_nested_properties": 1.6801010410417803, + "src/backend/tests/unit/test_messages.py::test_update_message_with_timestamp": 1.7369747090269811, + "src/backend/tests/unit/test_messages.py::test_update_mixed_messages": 0.7533785830019042, + "src/backend/tests/unit/test_messages.py::test_update_multiple_messages": 0.7652632489916869, + "src/backend/tests/unit/test_messages.py::test_update_multiple_messages_with_timestamps": 1.7809107069624588, + "src/backend/tests/unit/test_messages.py::test_update_nonexistent_message": 0.9113652509986423, + "src/backend/tests/unit/test_messages.py::test_update_single_message": 0.8359092500177212, + "src/backend/tests/unit/test_messages_endpoints.py::test_delete_messages": 3.083023541024886, + "src/backend/tests/unit/test_messages_endpoints.py::test_delete_messages_session": 2.9022462490247563, + "src/backend/tests/unit/test_messages_endpoints.py::test_no_messages_found_with_given_session_id": 2.7207199159893207, + "src/backend/tests/unit/test_messages_endpoints.py::test_successfully_update_session_id": 2.559589582902845, + "src/backend/tests/unit/test_messages_endpoints.py::test_update_message": 2.7309321249485947, + "src/backend/tests/unit/test_messages_endpoints.py::test_update_message_not_found": 2.71192433295073, + "src/backend/tests/unit/test_process.py::test_load_langchain_object_with_cached_session": 0.009255001961719245, + 
"src/backend/tests/unit/test_process.py::test_load_langchain_object_with_no_cached_session": 2.9178847920848057, + "src/backend/tests/unit/test_process.py::test_load_langchain_object_without_session_id": 2.8941064990358427, + "src/backend/tests/unit/test_process.py::test_multiple_tweaks": 0.00024433404905721545, + "src/backend/tests/unit/test_process.py::test_no_tweaks": 0.0008997919503599405, + "src/backend/tests/unit/test_process.py::test_single_tweak": 0.00036458304384723306, + "src/backend/tests/unit/test_process.py::test_tweak_no_node_id": 0.0010478749754838645, + "src/backend/tests/unit/test_process.py::test_tweak_not_in_template": 0.0002571250079199672, + "src/backend/tests/unit/test_schema.py::TestInput::test_field_type_str": 0.0015098329749889672, + "src/backend/tests/unit/test_schema.py::TestInput::test_field_type_type": 0.0007612500339746475, + "src/backend/tests/unit/test_schema.py::TestInput::test_input_to_dict": 0.0018923740135505795, + "src/backend/tests/unit/test_schema.py::TestInput::test_invalid_field_type": 0.0006862920126877725, + "src/backend/tests/unit/test_schema.py::TestInput::test_post_process_type_function": 0.0023312500561587512, + "src/backend/tests/unit/test_schema.py::TestInput::test_serialize_field_type": 0.004855999955907464, + "src/backend/tests/unit/test_schema.py::TestInput::test_validate_type_class": 0.00027591700199991465, + "src/backend/tests/unit/test_schema.py::TestInput::test_validate_type_string": 0.00278158305445686, + "src/backend/tests/unit/test_schema.py::TestOutput::test_output_add_types": 0.0010019989567808807, + "src/backend/tests/unit/test_schema.py::TestOutput::test_output_default": 0.00032329204259440303, + "src/backend/tests/unit/test_schema.py::TestOutput::test_output_set_selected": 0.0015942909521982074, + "src/backend/tests/unit/test_schema.py::TestOutput::test_output_to_dict": 0.0022627910366281867, + "src/backend/tests/unit/test_schema.py::TestOutput::test_output_validate_display_name": 0.00040600099600851536, + "src/backend/tests/unit/test_schema.py::TestOutput::test_output_validate_model": 0.0002532079815864563, + "src/backend/tests/unit/test_schema.py::TestPostProcessType::test_custom_type": 0.0019193329499103129, + "src/backend/tests/unit/test_schema.py::TestPostProcessType::test_int_type": 0.00021583307534456253, + "src/backend/tests/unit/test_schema.py::TestPostProcessType::test_list_custom_type": 0.0003355839871801436, + "src/backend/tests/unit/test_schema.py::TestPostProcessType::test_list_int_type": 0.0020573349902406335, + "src/backend/tests/unit/test_schema.py::TestPostProcessType::test_union_custom_type": 0.00026191596407443285, + "src/backend/tests/unit/test_schema.py::TestPostProcessType::test_union_type": 0.0024350410094484687, + "src/backend/tests/unit/test_setup_superuser.py::test_teardown_superuser_default_superuser": 0.006652375042904168, + "src/backend/tests/unit/test_setup_superuser.py::test_teardown_superuser_no_default_superuser": 0.006145125022158027, + "src/backend/tests/unit/test_telemetry.py::test_gauge": 0.003054290951695293, + "src/backend/tests/unit/test_telemetry.py::test_gauge_with_counter_method": 0.0023362910142168403, + "src/backend/tests/unit/test_telemetry.py::test_gauge_with_historgram_method": 0.004677250923123211, + "src/backend/tests/unit/test_telemetry.py::test_gauge_with_up_down_counter_method": 0.0018417509854771197, + "src/backend/tests/unit/test_telemetry.py::test_increment_counter": 0.0003165420494042337, + "src/backend/tests/unit/test_telemetry.py::test_increment_counter_empty_label": 
0.001291500055231154, + "src/backend/tests/unit/test_telemetry.py::test_increment_counter_missing_mandatory_label": 0.0019142500241287053, + "src/backend/tests/unit/test_telemetry.py::test_increment_counter_unregisted_metric": 0.010540709015913308, + "src/backend/tests/unit/test_telemetry.py::test_init": 0.0003274579648859799, + "src/backend/tests/unit/test_telemetry.py::test_missing_labels": 0.00035625003511086106, + "src/backend/tests/unit/test_telemetry.py::test_multithreaded_singleton": 0.0015401250566355884, + "src/backend/tests/unit/test_telemetry.py::test_multithreaded_singleton_race_condition": 0.020977791980840266, + "src/backend/tests/unit/test_telemetry.py::test_opentelementry_singleton": 0.0002914160140790045, + "src/backend/tests/unit/test_template.py::test_build_template_from_function": 0.0019054170115850866, + "src/backend/tests/unit/test_template.py::test_get_base_classes": 0.00047154095955193043, + "src/backend/tests/unit/test_template.py::test_get_default_factory": 0.0006982500781305134, + "src/backend/tests/unit/test_user.py::test_add_user": 3.429326084034983, + "src/backend/tests/unit/test_user.py::test_data_consistency_after_delete": 3.084409792034421, + "src/backend/tests/unit/test_user.py::test_data_consistency_after_update": 4.112100625992753, + "src/backend/tests/unit/test_user.py::test_deactivated_user_cannot_access": 1.6135848330450244, + "src/backend/tests/unit/test_user.py::test_deactivated_user_cannot_login": 2.550756209064275, + "src/backend/tests/unit/test_user.py::test_delete_user": 3.7109769160160795, + "src/backend/tests/unit/test_user.py::test_delete_user_wrong_id": 3.291543999046553, + "src/backend/tests/unit/test_user.py::test_inactive_user": 3.2723513320670463, + "src/backend/tests/unit/test_user.py::test_normal_user_cant_delete_user": 3.7365196659811772, + "src/backend/tests/unit/test_user.py::test_normal_user_cant_read_all_users": 3.938592832942959, + "src/backend/tests/unit/test_user.py::test_patch_reset_password": 4.011823332984932, + "src/backend/tests/unit/test_user.py::test_patch_user": 3.110160624026321, + "src/backend/tests/unit/test_user.py::test_patch_user_wrong_id": 3.0659845010377467, + "src/backend/tests/unit/test_user.py::test_read_all_users": 2.8889535000780597, + "src/backend/tests/unit/test_user.py::test_user_waiting_for_approval": 1.0094337909831665, + "src/backend/tests/unit/test_validate_code.py::test_create_class": 0.002269042015541345, + "src/backend/tests/unit/test_validate_code.py::test_create_class_with_external_variables_and_functions": 0.0017634579562582076, + "src/backend/tests/unit/test_validate_code.py::test_create_class_with_multiple_external_classes": 0.002165457990486175, + "src/backend/tests/unit/test_validate_code.py::test_create_function": 0.000596500001847744, + "src/backend/tests/unit/test_validate_code.py::test_execute_function_missing_function": 0.0006714170449413359, + "src/backend/tests/unit/test_validate_code.py::test_execute_function_missing_module": 0.00045245798537507653, + "src/backend/tests/unit/test_validate_code.py::test_execute_function_missing_schema": 0.001392040983773768, + "src/backend/tests/unit/test_validate_code.py::test_execute_function_success": 0.0015526249771937728, + "src/backend/tests/unit/test_validate_code.py::test_validate_code": 0.008368333976250142, + "src/backend/tests/unit/test_version.py::test_compute_main": 0.0010042919893749058, + "src/backend/tests/unit/test_version.py::test_version": 0.001986499992199242, + "src/backend/tests/unit/test_webhook.py::test_webhook_endpoint": 
14.715468042064458, + "src/backend/tests/unit/test_webhook.py::test_webhook_flow_on_run_endpoint": 3.046824499964714, + "src/backend/tests/unit/test_webhook.py::test_webhook_with_random_payload": 11.656425916939043, + "src/backend/tests/unit/utils/test_connection_string_parser.py::test_transform_connection_string[protocol::password@host-protocol::password@host]": 0.0016742500010877848, + "src/backend/tests/unit/utils/test_connection_string_parser.py::test_transform_connection_string[protocol:user:pa:ss:word@host-protocol:user:pa:ss:word@host]": 0.0009482500026933849, + "src/backend/tests/unit/utils/test_connection_string_parser.py::test_transform_connection_string[protocol:user:pa@ss@word@host-protocol:user:pa%40ss%40word@host]": 0.0008808330749161541, + "src/backend/tests/unit/utils/test_connection_string_parser.py::test_transform_connection_string[protocol:user:pass@word@host-protocol:user:pass%40word@host]": 0.0029695829143747687, + "src/backend/tests/unit/utils/test_connection_string_parser.py::test_transform_connection_string[protocol:user:password@-protocol:user:password@]": 0.001982749963644892, + "src/backend/tests/unit/utils/test_connection_string_parser.py::test_transform_connection_string[protocol:user:password@host-protocol:user:password@host]": 0.0005745830712839961, + "src/backend/tests/unit/utils/test_connection_string_parser.py::test_transform_connection_string[protocol:user@host-protocol:user@host]": 0.0007132510654628277, + "src/backend/tests/unit/utils/test_connection_string_parser.py::test_transform_connection_string[user:password@host-user:password@host]": 0.0009918760042637587, + "src/backend/tests/unit/utils/test_format_directory_path.py::test_format_directory_path[-]": 0.0019683760474435985, + "src/backend/tests/unit/utils/test_format_directory_path.py::test_format_directory_path[/home/user/\\ndocu\\nments/file.txt-/home/user/\\\\ndocu\\\\nments/file.txt]": 0.0036366250133141875, + "src/backend/tests/unit/utils/test_format_directory_path.py::test_format_directory_path[/home/user/docu\\n\\nments/file.txt-/home/user/docu\\\\n\\\\nments/file.txt]": 0.00254416698589921, + "src/backend/tests/unit/utils/test_format_directory_path.py::test_format_directory_path[/home/user/docu\\nments/file.txt-/home/user/docu\\\\nments/file.txt]": 0.0009456239640712738, + "src/backend/tests/unit/utils/test_format_directory_path.py::test_format_directory_path[/home/user/documents/\\n-/home/user/documents/\\\\n]": 0.025744416925590485, + "src/backend/tests/unit/utils/test_format_directory_path.py::test_format_directory_path[/home/user/documents/file.txt-/home/user/documents/file.txt]": 0.0015144990175031126, + "src/backend/tests/unit/utils/test_format_directory_path.py::test_format_directory_path[/home/user/my-\\ndocs/special_file!.pdf-/home/user/my-\\\\ndocs/special_file!.pdf]": 0.001220542995724827, + "src/backend/tests/unit/utils/test_format_directory_path.py::test_format_directory_path[C:/Users\\\\Documents/file.txt-C:/Users\\\\Documents/file.txt]": 0.0012114160344935954, + "src/backend/tests/unit/utils/test_format_directory_path.py::test_format_directory_path[C:\\\\Users\\\\Documents\\\\-C:\\\\Users\\\\Documents\\\\]": 0.0023360829218290746, + "src/backend/tests/unit/utils/test_format_directory_path.py::test_format_directory_path[C:\\\\Users\\\\Documents\\\\file.txt-C:\\\\Users\\\\Documents\\\\file.txt]": 0.0007423330680467188, + 
"src/backend/tests/unit/utils/test_format_directory_path.py::test_format_directory_path[C:\\\\Users\\\\\\nDocuments\\\\file.txt-C:\\\\Users\\\\\\\\nDocuments\\\\file.txt]": 0.0023515840875916183, + "src/backend/tests/unit/utils/test_format_directory_path.py::test_format_directory_path[\\\\\\\\server\\\\share\\\\file.txt-\\\\\\\\server\\\\share\\\\file.txt]": 0.001738708931952715, + "src/backend/tests/unit/utils/test_format_directory_path.py::test_format_directory_path[\\n/home/user/documents/-\\\\n/home/user/documents/]": 0.0021785409771837294, + "src/backend/tests/unit/utils/test_format_directory_path.py::test_format_directory_path[\\n\\n\\n-\\\\n\\\\n\\\\n]": 0.0007877919706515968, + "src/backend/tests/unit/utils/test_format_directory_path.py::test_format_directory_path_type": 0.004260540998075157, + "src/backend/tests/unit/utils/test_image_utils.py::TestImageUtils::test_convert_image_to_base64_directory": 0.0007352500106208026, + "src/backend/tests/unit/utils/test_image_utils.py::TestImageUtils::test_convert_image_to_base64_empty_path": 0.0010516680194996297, + "src/backend/tests/unit/utils/test_image_utils.py::TestImageUtils::test_convert_image_to_base64_nonexistent_file": 0.0009393750224262476, + "src/backend/tests/unit/utils/test_image_utils.py::TestImageUtils::test_convert_image_to_base64_success": 0.0010400419705547392, + "src/backend/tests/unit/utils/test_image_utils.py::TestImageUtils::test_create_data_url_invalid_file": 0.0012756659998558462, + "src/backend/tests/unit/utils/test_image_utils.py::TestImageUtils::test_create_data_url_success": 0.00212958303745836, + "src/backend/tests/unit/utils/test_image_utils.py::TestImageUtils::test_create_data_url_unrecognized_extension": 0.0019956670003011823, + "src/backend/tests/unit/utils/test_image_utils.py::TestImageUtils::test_create_data_url_with_custom_mime": 0.003665584954433143, + "src/backend/tests/unit/utils/test_rewrite_file_path.py::test_format_directory_path[-]": 0.0006267070421017706, + "src/backend/tests/unit/utils/test_rewrite_file_path.py::test_format_directory_path[/home/user/\\ndocu\\nments/file.txt-/home/user/\\\\ndocu\\\\nments/file.txt]": 0.0007902910001575947, + "src/backend/tests/unit/utils/test_rewrite_file_path.py::test_format_directory_path[/home/user/docu\\n\\nments/file.txt-/home/user/docu\\\\n\\\\nments/file.txt]": 0.0003085819771513343, + "src/backend/tests/unit/utils/test_rewrite_file_path.py::test_format_directory_path[/home/user/docu\\nments/file.txt-/home/user/docu\\\\nments/file.txt]": 0.00034262501867488027, + "src/backend/tests/unit/utils/test_rewrite_file_path.py::test_format_directory_path[/home/user/documents/\\n-/home/user/documents/\\\\n]": 0.0003343760035932064, + "src/backend/tests/unit/utils/test_rewrite_file_path.py::test_format_directory_path[/home/user/documents/file.txt-/home/user/documents/file.txt]": 0.0003256250056438148, + "src/backend/tests/unit/utils/test_rewrite_file_path.py::test_format_directory_path[/home/user/my-\\ndocs/special_file!.pdf-/home/user/my-\\\\ndocs/special_file!.pdf]": 0.0012586249504238367, + "src/backend/tests/unit/utils/test_rewrite_file_path.py::test_format_directory_path[C:\\\\Users\\\\\\nDocuments\\\\file.txt-C:\\\\Users\\\\\\\\nDocuments\\\\file.txt]": 0.0008411250310018659, + "src/backend/tests/unit/utils/test_rewrite_file_path.py::test_format_directory_path[\\n/home/user/documents/-\\\\n/home/user/documents/]": 0.0009818340186029673, + "src/backend/tests/unit/utils/test_rewrite_file_path.py::test_format_directory_path[\\n\\n\\n-\\\\n\\\\n\\\\n]": 
0.0006172500434331596, + "src/backend/tests/unit/utils/test_rewrite_file_path.py::test_format_directory_path_type": 0.0002429169835522771, + "src/backend/tests/unit/utils/test_truncate_long_strings.py::test_truncate_long_strings_negative_max_length": 0.00025949894916266203, + "src/backend/tests/unit/utils/test_truncate_long_strings.py::test_truncate_long_strings_non_dict_list[-5-]": 0.0003167500835843384, + "src/backend/tests/unit/utils/test_truncate_long_strings.py::test_truncate_long_strings_non_dict_list[12345-3-12345]": 0.00030608405359089375, + "src/backend/tests/unit/utils/test_truncate_long_strings.py::test_truncate_long_strings_non_dict_list[3.141592653589793-4-3.141592653589793]": 0.0003646670375019312, + "src/backend/tests/unit/utils/test_truncate_long_strings.py::test_truncate_long_strings_non_dict_list[None-5-None]": 0.0003825009916909039, + "src/backend/tests/unit/utils/test_truncate_long_strings.py::test_truncate_long_strings_non_dict_list[True-2-True]": 0.0005934149958193302, + "src/backend/tests/unit/utils/test_truncate_long_strings.py::test_truncate_long_strings_non_dict_list[\\u3053\\u3093\\u306b\\u3061\\u306f-3-\\u3053\\u3093\\u306b...]": 0.00038300000596791506, + "src/backend/tests/unit/utils/test_truncate_long_strings.py::test_truncate_long_strings_non_dict_list[a-1-a]": 0.0003126669325865805, + "src/backend/tests/unit/utils/test_truncate_long_strings.py::test_truncate_long_strings_non_dict_list[aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-10-aaaaaaaaaa...]": 0.00035062601091340184, + "src/backend/tests/unit/utils/test_truncate_long_strings.py::test_truncate_long_strings_non_dict_list[exact-5-exact]": 0.0002943320432677865, + "src/backend/tests/unit/utils/test_truncate_long_strings.py::test_truncate_long_strings_non_dict_list[long string-7-long st...]": 0.00034562498331069946, + "src/backend/tests/unit/utils/test_truncate_long_strings.py::test_truncate_long_strings_non_dict_list[short string-20-short string]": 0.00034145801328122616, + "src/backend/tests/unit/utils/test_truncate_long_strings.py::test_truncate_long_strings_none_max_length": 0.0005399579531513155, + "src/backend/tests/unit/utils/test_truncate_long_strings.py::test_truncate_long_strings_zero_max_length": 0.0002442080294713378, + "src/backend/tests/unit/utils/test_truncate_long_strings_on_objects.py::test_truncate_long_strings[input_data0-10-expected0]": 0.0003149179392494261, + "src/backend/tests/unit/utils/test_truncate_long_strings_on_objects.py::test_truncate_long_strings[input_data1-5-expected1]": 0.0003458330174908042, + 
"src/backend/tests/unit/utils/test_truncate_long_strings_on_objects.py::test_truncate_long_strings[input_data2-7-expected2]": 0.00031883292831480503, + "src/backend/tests/unit/utils/test_truncate_long_strings_on_objects.py::test_truncate_long_strings[input_data3-8-expected3]": 0.00030812493059784174, + "src/backend/tests/unit/utils/test_truncate_long_strings_on_objects.py::test_truncate_long_strings[input_data4-10-expected4]": 0.00028958305483683944, + "src/backend/tests/unit/utils/test_truncate_long_strings_on_objects.py::test_truncate_long_strings[input_data5-10-expected5]": 0.00033783295657485723, + "src/backend/tests/unit/utils/test_truncate_long_strings_on_objects.py::test_truncate_long_strings[input_data6-10-expected6]": 0.0004022919456474483, + "src/backend/tests/unit/utils/test_truncate_long_strings_on_objects.py::test_truncate_long_strings[input_data7-5-expected7]": 0.0003520839964039624, + "src/backend/tests/unit/utils/test_truncate_long_strings_on_objects.py::test_truncate_long_strings[input_data8-3-expected8]": 0.0003352909698151052, + "src/backend/tests/unit/utils/test_truncate_long_strings_on_objects.py::test_truncate_long_strings[input_data9-10-expected9]": 0.002482500043697655, + "src/backend/tests/unit/utils/test_truncate_long_strings_on_objects.py::test_truncate_long_strings_default_max_length": 0.00047945795813575387, + "src/backend/tests/unit/utils/test_truncate_long_strings_on_objects.py::test_truncate_long_strings_in_place_modification": 0.0003923350013792515, + "src/backend/tests/unit/utils/test_truncate_long_strings_on_objects.py::test_truncate_long_strings_invalid_input": 0.0008553340448997915, + "src/backend/tests/unit/utils/test_truncate_long_strings_on_objects.py::test_truncate_long_strings_negative_max_length": 0.0005078340182080865, + "src/backend/tests/unit/utils/test_truncate_long_strings_on_objects.py::test_truncate_long_strings_no_modification": 0.0006442510057240725, + "src/backend/tests/unit/utils/test_truncate_long_strings_on_objects.py::test_truncate_long_strings_small_max_length": 0.001186543027870357, + "src/backend/tests/unit/utils/test_truncate_long_strings_on_objects.py::test_truncate_long_strings_type_preservation": 0.00046733306953683496, + "src/backend/tests/unit/utils/test_truncate_long_strings_on_objects.py::test_truncate_long_strings_zero_max_length": 0.0007919160416349769 } \ No newline at end of file diff --git a/src/backend/tests/__init__.py b/src/backend/tests/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/src/backend/tests/api_keys.py b/src/backend/tests/api_keys.py new file mode 100644 index 000000000000..42260f744a51 --- /dev/null +++ b/src/backend/tests/api_keys.py @@ -0,0 +1,34 @@ +import os.path + +# we need to import tmpdir + + +def get_required_env_var(var: str) -> str: + """Get the value of the specified environment variable. + + Args: + var (str): The environment variable to get. + + Returns: + str: The value of the environment variable. + + Raises: + ValueError: If the environment variable is not set. 
+ """ + value = os.getenv(var) + if not value: + msg = f"Environment variable {var} is not set" + raise ValueError(msg) + return value + + +def get_openai_api_key() -> str: + return get_required_env_var("OPENAI_API_KEY") + + +def get_astradb_application_token() -> str: + return get_required_env_var("ASTRA_DB_APPLICATION_TOKEN") + + +def get_astradb_api_endpoint() -> str: + return get_required_env_var("ASTRA_DB_API_ENDPOINT") diff --git a/src/backend/tests/base.py b/src/backend/tests/base.py new file mode 100644 index 000000000000..3e463f9622d7 --- /dev/null +++ b/src/backend/tests/base.py @@ -0,0 +1,146 @@ +from typing import Any + +import pytest +from typing_extensions import TypedDict + +from tests.constants import SUPPORTED_VERSIONS +from tests.integration.utils import build_component_instance_for_tests + + +class VersionComponentMapping(TypedDict): + version: str + module: str + file_name: str + + +# Sentinel value to mark undefined test cases +DID_NOT_EXIST = object() + + +class ComponentTestBase: + @pytest.fixture(autouse=True) + def _validate_required_fixtures( + self, + component_class: type[Any], + default_kwargs: dict[str, Any], + file_names_mapping: list[VersionComponentMapping], + ) -> None: + """Validate that all required fixtures are implemented.""" + # If we get here, all fixtures exist + + @pytest.fixture + def component_class(self) -> type[Any]: + """Return the component class to test.""" + msg = f"{self.__class__.__name__} must implement the component_class fixture" + raise NotImplementedError(msg) + + @pytest.fixture + def default_kwargs(self) -> dict[str, Any]: + """Return the default kwargs for the component.""" + return {} + + @pytest.fixture + def file_names_mapping(self) -> list[VersionComponentMapping]: + """Return the file names mapping for different versions.""" + msg = f"{self.__class__.__name__} must implement the file_names_mapping fixture" + raise NotImplementedError(msg) + + def test_latest_version(self, component_class: type[Any], default_kwargs: dict[str, Any]) -> None: + """Test that the component works with the latest version.""" + result = component_class(**default_kwargs)() + assert result is not None, "Component returned None for the latest version." + + def test_all_versions_have_a_file_name_defined(self, file_names_mapping: list[VersionComponentMapping]) -> None: + """Ensure all supported versions have a file name defined.""" + if not file_names_mapping: + msg = ( + f"file_names_mapping is empty for {self.__class__.__name__}. " + "Please define the version mappings for your component." + ) + raise AssertionError(msg) + + version_mappings = {mapping["version"]: mapping for mapping in file_names_mapping} + + for version in SUPPORTED_VERSIONS: + if version not in version_mappings: + supported_versions = ", ".join(sorted(m["version"] for m in file_names_mapping)) + msg = ( + f"Version {version} not found in file_names_mapping for {self.__class__.__name__}.\n" + f"Currently defined versions: {supported_versions}\n" + "Please add this version to your component's file_names_mapping." + ) + raise AssertionError(msg) + + mapping = version_mappings[version] + if mapping["file_name"] is None: + msg = ( + f"file_name is None for version {version} in {self.__class__.__name__}.\n" + "Please provide a valid file_name in file_names_mapping or set it to DID_NOT_EXIST." 
+ ) + raise AssertionError(msg) + + if mapping["module"] is None: + msg = ( + f"module is None for version {version} in {self.__class__.__name__}.\n" + "Please provide a valid module name in file_names_mapping or set it to DID_NOT_EXIST." + ) + raise AssertionError(msg) + + @pytest.mark.parametrize("version", SUPPORTED_VERSIONS) + def test_component_versions( + self, + version: str, + default_kwargs: dict[str, Any], + file_names_mapping: list[VersionComponentMapping], + ) -> None: + """Test if the component works across different versions.""" + version_mappings = {mapping["version"]: mapping for mapping in file_names_mapping} + + mapping = version_mappings[version] + if mapping["file_name"] is DID_NOT_EXIST: + pytest.skip(f"Skipping version {version} as it does not have a file name defined.") + + try: + instance, component_code = build_component_instance_for_tests( + version, file_name=mapping["file_name"], module=mapping["module"], **default_kwargs + ) + except Exception as e: + msg = ( + f"Failed to build component instance for {self.__class__.__name__} " + f"version {version}:\n" + f"Module: {mapping['module']}\n" + f"File: {mapping['file_name']}\n" + f"Error: {e!s}" + ) + raise AssertionError(msg) from e + + try: + result = instance() + except Exception as e: + msg = ( + f"Failed to execute component {self.__class__.__name__} " + f"for version {version}:\n" + f"Module: {mapping['module']}\n" + f"File: {mapping['file_name']}\n" + f"Error: {e!s}\n" + f"Component Code: {component_code}" + ) + raise AssertionError(msg) from e + + if result is None: + msg = ( + f"Component {self.__class__.__name__} returned None " + f"for version {version}.\n" + f"Module: {mapping['module']}\n" + f"File: {mapping['file_name']}" + ) + raise AssertionError(msg) + + +@pytest.mark.usefixtures("client") +class ComponentTestBaseWithClient(ComponentTestBase): + pass + + +class ComponentTestBaseWithoutClient(ComponentTestBase): + pass diff --git a/src/backend/tests/blockbuster.py b/src/backend/tests/blockbuster.py new file mode 100644 index 000000000000..3a11201eb3f4 --- /dev/null +++ b/src/backend/tests/blockbuster.py @@ -0,0 +1,140 @@ +import asyncio +import inspect +import io +import os +import socket +import ssl +import sys +import time +from importlib.abc import FileLoader + +import forbiddenfruit + + +class BlockingError(Exception): ... 
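+# How the wrappers below behave, in brief: each one first calls
+# asyncio.get_running_loop(); if no loop is running, the original blocking
+# call proceeds untouched, but if a loop *is* running, the call is treated as
+# accidental blocking work inside async code and BlockingError is raised
+# instead. An illustrative sketch (editor's example, not part of this module)
+# of what code sees once init() below has installed the hooks:
+#
+#     import asyncio, time
+#
+#     async def main():
+#         time.sleep(0.1)  # raises BlockingError: a loop is running
+#
+#     asyncio.run(main())  # fails with BlockingError
+#     time.sleep(0.1)      # fine: no running loop in this thread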
+ + +def _blocking_error(func): + if inspect.isbuiltin(func): + msg = f"Blocking call to {func.__qualname__} ({func.__self__})" + elif inspect.ismethoddescriptor(func): + msg = f"Blocking call to {func}" + else: + msg = f"Blocking call to {func.__module__}.{func.__qualname__}" + return BlockingError(msg) + + +def _wrap_blocking(func): + def wrapper(*args, **kwargs): + try: + asyncio.get_running_loop() + except RuntimeError: + return func(*args, **kwargs) + raise _blocking_error(func) + + return wrapper + + +def _wrap_time_blocking(func): + def wrapper(*args, **kwargs): + try: + asyncio.get_running_loop() + except RuntimeError: + return func(*args, **kwargs) + for frame_info in inspect.stack(): + if frame_info.filename.endswith("pydev/pydevd.py") and frame_info.function == "_do_wait_suspend": + return func(*args, **kwargs) + + raise _blocking_error(func) + + return wrapper + + +def _wrap_os_blocking(func): + def os_op(fd, *args, **kwargs): + try: + asyncio.get_running_loop() + except RuntimeError: + return func(fd, *args, **kwargs) + if os.get_blocking(fd): + raise _blocking_error(func) + return func(fd, *args, **kwargs) + + return os_op + + +def _wrap_socket_blocking(func): + def socket_op(self, *args, **kwargs): + try: + asyncio.get_running_loop() + except RuntimeError: + return func(self, *args, **kwargs) + if self.getblocking(): + raise _blocking_error(func) + return func(self, *args, **kwargs) + + return socket_op + + +def _wrap_file_read_blocking(func): + def file_op(self, *args, **kwargs): + try: + asyncio.get_running_loop() + except RuntimeError: + return func(self, *args, **kwargs) + for frame_info in inspect.stack(): + if isinstance(frame_info.frame.f_locals.get("self"), FileLoader): + return func(self, *args, **kwargs) + if frame_info.filename.endswith("_pytest/assertion/rewrite.py") and frame_info.function in { + "_rewrite_test", + "_read_pyc", + }: + return func(self, *args, **kwargs) + raise _blocking_error(func) + + return file_op + + +def _wrap_file_write_blocking(func): + def file_op(self, *args, **kwargs): + try: + asyncio.get_running_loop() + except RuntimeError: + return func(self, *args, **kwargs) + for frame_info in inspect.stack(): + if frame_info.filename.endswith("_pytest/assertion/rewrite.py") and frame_info.function == "_write_pyc": + return func(self, *args, **kwargs) + if self not in {sys.stdout, sys.stderr}: + raise _blocking_error(func) + return func(self, *args, **kwargs) + + return file_op + + +def init(): + time.sleep = _wrap_time_blocking(time.sleep) + + os.read = _wrap_os_blocking(os.read) + os.write = _wrap_os_blocking(os.write) + + socket.socket.send = _wrap_socket_blocking(socket.socket.send) + socket.socket.sendall = _wrap_socket_blocking(socket.socket.sendall) + socket.socket.sendto = _wrap_socket_blocking(socket.socket.sendto) + socket.socket.recv = _wrap_socket_blocking(socket.socket.recv) + socket.socket.recv_into = _wrap_socket_blocking(socket.socket.recv_into) + socket.socket.recvfrom = _wrap_socket_blocking(socket.socket.recvfrom) + socket.socket.recvfrom_into = _wrap_socket_blocking(socket.socket.recvfrom_into) + socket.socket.recvmsg = _wrap_socket_blocking(socket.socket.recvmsg) + socket.socket.recvmsg_into = _wrap_socket_blocking(socket.socket.recvmsg_into) + + ssl.SSLSocket.write = _wrap_socket_blocking(ssl.SSLSocket.write) + ssl.SSLSocket.send = _wrap_socket_blocking(ssl.SSLSocket.send) + ssl.SSLSocket.read = _wrap_socket_blocking(ssl.SSLSocket.read) + ssl.SSLSocket.recv = _wrap_socket_blocking(ssl.SSLSocket.recv) + + 
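+    # The io buffer/text classes are C-implemented built-in types, so their
+    # methods cannot be replaced by plain attribute assignment the way the
+    # time/os/socket/ssl callables above are; forbiddenfruit.curse patches
+    # methods on such built-in types in place.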
forbiddenfruit.curse(io.BufferedReader, "read", _wrap_file_read_blocking(io.BufferedReader.read)) + forbiddenfruit.curse(io.BufferedWriter, "write", _wrap_file_write_blocking(io.BufferedWriter.write)) + forbiddenfruit.curse(io.BufferedRandom, "read", _wrap_blocking(io.BufferedRandom.read)) + forbiddenfruit.curse(io.BufferedRandom, "write", _wrap_file_write_blocking(io.BufferedRandom.write)) + forbiddenfruit.curse(io.TextIOWrapper, "read", _wrap_file_read_blocking(io.TextIOWrapper.read)) + forbiddenfruit.curse(io.TextIOWrapper, "write", _wrap_file_write_blocking(io.TextIOWrapper.write)) diff --git a/src/backend/tests/conftest.py b/src/backend/tests/conftest.py index 2dc4f5843422..b236a733f962 100644 --- a/src/backend/tests/conftest.py +++ b/src/backend/tests/conftest.py @@ -1,37 +1,46 @@ +import asyncio import json -import os.path import shutil # we need to import tmpdir import tempfile +from collections.abc import AsyncGenerator from contextlib import contextmanager, suppress from pathlib import Path -from typing import TYPE_CHECKING, AsyncGenerator +from typing import TYPE_CHECKING +from uuid import UUID import orjson import pytest +from asgi_lifespan import LifespanManager from dotenv import load_dotenv from fastapi.testclient import TestClient -from httpx import AsyncClient -from sqlmodel import Session, SQLModel, create_engine, select -from sqlmodel.pool import StaticPool -from typer.testing import CliRunner - -from langflow.graph.graph.base import Graph +from httpx import ASGITransport, AsyncClient +from langflow.graph import Graph from langflow.initial_setup.setup import STARTER_FOLDER_NAME from langflow.services.auth.utils import get_password_hash from langflow.services.database.models.api_key.model import ApiKey from langflow.services.database.models.flow.model import Flow, FlowCreate from langflow.services.database.models.folder.model import Folder -from langflow.services.database.models.user.model import User, UserCreate +from langflow.services.database.models.transactions.model import TransactionTable +from langflow.services.database.models.user.model import User, UserCreate, UserRead +from langflow.services.database.models.vertex_builds.crud import delete_vertex_builds_by_flow_id from langflow.services.database.utils import session_getter from langflow.services.deps import get_db_service +from loguru import logger +from sqlmodel import Session, SQLModel, create_engine, select +from sqlmodel.pool import StaticPool +from typer.testing import CliRunner + +from tests import blockbuster +from tests.api_keys import get_openai_api_key if TYPE_CHECKING: from langflow.services.database.service import DatabaseService load_dotenv() +blockbuster.init() def pytest_configure(config): @@ -76,12 +85,42 @@ def get_text(): assert path.exists(), f"File {path} does not exist. 
Available files: {list(data_path.iterdir())}" -@pytest.fixture() +def delete_transactions_by_flow_id(db: Session, flow_id: UUID): + stmt = select(TransactionTable).where(TransactionTable.flow_id == flow_id) + transactions = db.exec(stmt) + for transaction in transactions: + db.delete(transaction) + db.commit() + + +def _delete_transactions_and_vertex_builds(session, user: User): + flow_ids = [flow.id for flow in user.flows] + for flow_id in flow_ids: + if not flow_id: + continue + delete_vertex_builds_by_flow_id(session, flow_id) + delete_transactions_by_flow_id(session, flow_id) + + +@pytest.fixture +def caplog(caplog: pytest.LogCaptureFixture): + handler_id = logger.add( + caplog.handler, + format="{message}", + level=0, + filter=lambda record: record["level"].no >= caplog.handler.level, + enqueue=False, # Set to 'True' if your test is spawning child processes. + ) + yield caplog + logger.remove(handler_id) + + +@pytest.fixture async def async_client() -> AsyncGenerator: from langflow.main import create_app app = create_app() - async with AsyncClient(app=app, base_url="http://testserver") as client: + async with AsyncClient(app=app, base_url="http://testserver", http2=True) as client: yield client @@ -91,6 +130,7 @@ def session_fixture(): SQLModel.metadata.create_all(engine) with Session(engine) as session: yield session + SQLModel.metadata.drop_all(engine) # Add this line to clean up tables class Config: @@ -100,12 +140,12 @@ class Config: @pytest.fixture(name="load_flows_dir") def load_flows_dir(): - tempdir = tempfile.TemporaryDirectory() - yield tempdir.name + with tempfile.TemporaryDirectory() as tempdir: + yield tempdir @pytest.fixture(name="distributed_env") -def setup_env(monkeypatch): +def _setup_env(monkeypatch): monkeypatch.setenv("LANGFLOW_CACHE_TYPE", "redis") monkeypatch.setenv("LANGFLOW_REDIS_HOST", "result_backend") monkeypatch.setenv("LANGFLOW_REDIS_PORT", "6379") @@ -119,35 +159,41 @@ def setup_env(monkeypatch): @pytest.fixture(name="distributed_client") -def distributed_client_fixture(session: Session, monkeypatch, distributed_env): +def distributed_client_fixture( + session: Session, # noqa: ARG001 + monkeypatch, + distributed_env, # noqa: ARG001 +): # Here we load the .env from ../deploy/.env from langflow.core import celery_app db_dir = tempfile.mkdtemp() - db_path = Path(db_dir) / "test.db" - monkeypatch.setenv("LANGFLOW_DATABASE_URL", f"sqlite:///{db_path}") - monkeypatch.setenv("LANGFLOW_AUTO_LOGIN", "false") - # monkeypatch langflow.services.task.manager.USE_CELERY to True - # monkeypatch.setattr(manager, "USE_CELERY", True) - monkeypatch.setattr(celery_app, "celery_app", celery_app.make_celery("langflow", Config)) + try: + db_path = Path(db_dir) / "test.db" + monkeypatch.setenv("LANGFLOW_DATABASE_URL", f"sqlite:///{db_path}") + monkeypatch.setenv("LANGFLOW_AUTO_LOGIN", "false") + # monkeypatch langflow.services.task.manager.USE_CELERY to True + # monkeypatch.setattr(manager, "USE_CELERY", True) + monkeypatch.setattr(celery_app, "celery_app", celery_app.make_celery("langflow", Config)) - # def get_session_override(): - # return session + # def get_session_override(): + # return session - from langflow.main import create_app + from langflow.main import create_app - app = create_app() + app = create_app() - # app.dependency_overrides[get_session] = get_session_override - with TestClient(app) as client: - yield client + # app.dependency_overrides[get_session] = get_session_override + with TestClient(app) as client: + yield client + finally: + shutil.rmtree(db_dir) 
# Clean up the temporary directory app.dependency_overrides.clear() monkeypatch.undo() def get_graph(_type="basic"): - """Get a graph from a json file""" - + """Get a graph from a json file.""" if _type == "basic": path = pytest.BASIC_EXAMPLE_PATH elif _type == "complex": @@ -155,7 +201,7 @@ def get_graph(_type="basic"): elif _type == "openapi": path = pytest.OPENAPI_EXAMPLE_PATH - with open(path, "r") as f: + with path.open(encoding="utf-8") as f: flow_graph = json.load(f) data_graph = flow_graph["data"] nodes = data_graph["nodes"] @@ -167,7 +213,7 @@ def get_graph(_type="basic"): @pytest.fixture def basic_graph_data(): - with open(pytest.BASIC_EXAMPLE_PATH, "r") as f: + with pytest.BASIC_EXAMPLE_PATH.open(encoding="utf-8") as f: return json.load(f) @@ -188,81 +234,94 @@ def openapi_graph(): @pytest.fixture def json_flow(): - with open(pytest.BASIC_EXAMPLE_PATH, "r") as f: - return f.read() + return pytest.BASIC_EXAMPLE_PATH.read_text(encoding="utf-8") @pytest.fixture def grouped_chat_json_flow(): - with open(pytest.GROUPED_CHAT_EXAMPLE_PATH, "r") as f: - return f.read() + return pytest.GROUPED_CHAT_EXAMPLE_PATH.read_text(encoding="utf-8") @pytest.fixture def one_grouped_chat_json_flow(): - with open(pytest.ONE_GROUPED_CHAT_EXAMPLE_PATH, "r") as f: - return f.read() + return pytest.ONE_GROUPED_CHAT_EXAMPLE_PATH.read_text(encoding="utf-8") @pytest.fixture def vector_store_grouped_json_flow(): - with open(pytest.VECTOR_STORE_GROUPED_EXAMPLE_PATH, "r") as f: - return f.read() + return pytest.VECTOR_STORE_GROUPED_EXAMPLE_PATH.read_text(encoding="utf-8") @pytest.fixture def json_flow_with_prompt_and_history(): - with open(pytest.BASIC_CHAT_WITH_PROMPT_AND_HISTORY, "r") as f: - return f.read() + return pytest.BASIC_CHAT_WITH_PROMPT_AND_HISTORY.read_text(encoding="utf-8") @pytest.fixture def json_simple_api_test(): - with open(pytest.SIMPLE_API_TEST, "r") as f: - return f.read() + return pytest.SIMPLE_API_TEST.read_text(encoding="utf-8") @pytest.fixture def json_vector_store(): - with open(pytest.VECTOR_STORE_PATH, "r") as f: - return f.read() + return pytest.VECTOR_STORE_PATH.read_text(encoding="utf-8") @pytest.fixture def json_webhook_test(): - with open(pytest.WEBHOOK_TEST, "r") as f: - return f.read() + return pytest.WEBHOOK_TEST.read_text(encoding="utf-8") @pytest.fixture def json_memory_chatbot_no_llm(): - with open(pytest.MEMORY_CHATBOT_NO_LLM, "r") as f: - return f.read() + return pytest.MEMORY_CHATBOT_NO_LLM.read_text(encoding="utf-8") + + +@pytest.fixture(autouse=True) +def deactivate_tracing(monkeypatch): + monkeypatch.setenv("LANGFLOW_DEACTIVATE_TRACING", "true") + yield + monkeypatch.undo() -@pytest.fixture(name="client", autouse=True) -def client_fixture(session: Session, monkeypatch, request, load_flows_dir): +@pytest.fixture(name="client") +async def client_fixture( + session: Session, # noqa: ARG001 + monkeypatch, + request, + load_flows_dir, +): # Set the database url to a test database if "noclient" in request.keywords: yield else: - db_dir = tempfile.mkdtemp() - db_path = Path(db_dir) / "test.db" - monkeypatch.setenv("LANGFLOW_DATABASE_URL", f"sqlite:///{db_path}") - monkeypatch.setenv("LANGFLOW_AUTO_LOGIN", "false") - if "load_flows" in request.keywords: - shutil.copyfile( - pytest.BASIC_EXAMPLE_PATH, os.path.join(load_flows_dir, "c54f9130-f2fa-4a3e-b22a-3856d946351b.json") - ) - monkeypatch.setenv("LANGFLOW_LOAD_FLOWS_PATH", load_flows_dir) - monkeypatch.setenv("LANGFLOW_AUTO_LOGIN", "true") - - from langflow.main import create_app - - app = create_app() + def init_app(): + 
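+        # Note (inferred): create_app() and the tempdir/env setup below are
+        # blocking, so the caller awaits asyncio.to_thread(init_app) rather
+        # than running this on the event loop, where the blockbuster hooks
+        # installed in this conftest would flag the blocking calls.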
db_dir = tempfile.mkdtemp() + db_path = Path(db_dir) / "test.db" + monkeypatch.setenv("LANGFLOW_DATABASE_URL", f"sqlite:///{db_path}") + monkeypatch.setenv("LANGFLOW_AUTO_LOGIN", "false") + if "load_flows" in request.keywords: + shutil.copyfile( + pytest.BASIC_EXAMPLE_PATH, Path(load_flows_dir) / "c54f9130-f2fa-4a3e-b22a-3856d946351b.json" + ) + monkeypatch.setenv("LANGFLOW_LOAD_FLOWS_PATH", load_flows_dir) + monkeypatch.setenv("LANGFLOW_AUTO_LOGIN", "true") + + from langflow.main import create_app + + app = create_app() + db_service = get_db_service() + db_service.database_url = f"sqlite:///{db_path}" + db_service.reload_engine() + return app, db_path + + app, db_path = await asyncio.to_thread(init_app) # app.dependency_overrides[get_session] = get_session_override - with TestClient(app) as client: + async with ( + LifespanManager(app, startup_timeout=None, shutdown_timeout=None) as manager, + AsyncClient(transport=ASGITransport(app=manager.app), base_url="http://testserver/", http2=True) as client, + ): yield client # app.dependency_overrides.clear() monkeypatch.undo() @@ -273,13 +332,13 @@ def client_fixture(session: Session, monkeypatch, request, load_flows_dir): # create a fixture for session_getter above @pytest.fixture(name="session_getter") -def session_getter_fixture(client): +def session_getter_fixture(client): # noqa: ARG001 @contextmanager def blank_session_getter(db_service: "DatabaseService"): with Session(db_service.engine) as session: yield session - yield blank_session_getter + return blank_session_getter @pytest.fixture @@ -288,39 +347,89 @@ def runner(): @pytest.fixture -def test_user(client): +async def test_user(client): user_data = UserCreate( username="testuser", - password="testpassword", + password="testpassword", # noqa: S106 ) - response = client.post("/api/v1/users", json=user_data.model_dump()) + response = await client.post("api/v1/users/", json=user_data.model_dump()) assert response.status_code == 201 - return response.json() + user = response.json() + yield user + # Clean up + await client.delete(f"/api/v1/users/{user['id']}") -@pytest.fixture(scope="function") -def active_user(client): +@pytest.fixture +def active_user(client): # noqa: ARG001 db_manager = get_db_service() - with session_getter(db_manager) as session: + with db_manager.with_session() as session: user = User( username="activeuser", password=get_password_hash("testpassword"), is_active=True, is_superuser=False, ) - # check if user exists if active_user := session.exec(select(User).where(User.username == user.username)).first(): - return active_user - session.add(user) + user = active_user + else: + session.add(user) + session.commit() + session.refresh(user) + user = UserRead.model_validate(user, from_attributes=True) + yield user + # Clean up + # Now cleanup transactions, vertex_build + with db_manager.with_session() as session: + user = session.get(User, user.id) + _delete_transactions_and_vertex_builds(session, user) + session.delete(user) + session.commit() - session.refresh(user) - return user @pytest.fixture -def logged_in_headers(client, active_user): +async def logged_in_headers(client, active_user): login_data = {"username": active_user.username, "password": "testpassword"} - response = client.post("/api/v1/login", data=login_data) + response = await client.post("api/v1/login", data=login_data) + assert response.status_code == 200 + tokens = response.json() + a_token = tokens["access_token"] + return {"Authorization": f"Bearer {a_token}"} + + +@pytest.fixture +def 
active_super_user(client): # noqa: ARG001 + db_manager = get_db_service() + with db_manager.with_session() as session: + user = User( + username="activeuser", + password=get_password_hash("testpassword"), + is_active=True, + is_superuser=True, + ) + if active_user := session.exec(select(User).where(User.username == user.username)).first(): + user = active_user + else: + session.add(user) + session.commit() + session.refresh(user) + user = UserRead.model_validate(user, from_attributes=True) + yield user + # Clean up + # Now cleanup transactions, vertex_build + with db_manager.with_session() as session: + user = session.get(User, user.id) + _delete_transactions_and_vertex_builds(session, user) + session.delete(user) + + session.commit() + + +@pytest.fixture +async def logged_in_headers_super_user(client, active_super_user): + login_data = {"username": active_super_user.username, "password": "testpassword"} + response = await client.post("api/v1/login", data=login_data) assert response.status_code == 200 tokens = response.json() a_token = tokens["access_token"] @@ -328,7 +437,11 @@ def logged_in_headers(client, active_user): @pytest.fixture -def flow(client, json_flow: str, active_user): +def flow( + client, # noqa: ARG001 + json_flow: str, + active_user, +): from langflow.services.database.models.flow.model import FlowCreate loaded_json = json.loads(json_flow) @@ -339,82 +452,101 @@ def flow(client, json_flow: str, active_user): session.add(flow) session.commit() session.refresh(flow) - - return flow + yield flow + # Clean up + session.delete(flow) + session.commit() @pytest.fixture def json_chat_input(): - with open(pytest.CHAT_INPUT, "r") as f: - return f.read() + return pytest.CHAT_INPUT.read_text(encoding="utf-8") @pytest.fixture def json_two_outputs(): - with open(pytest.TWO_OUTPUTS, "r") as f: - return f.read() + return pytest.TWO_OUTPUTS.read_text(encoding="utf-8") @pytest.fixture -def added_flow_with_prompt_and_history(client, json_flow_with_prompt_and_history, logged_in_headers): - flow = orjson.loads(json_flow_with_prompt_and_history) +async def added_flow_webhook_test(client, json_webhook_test, logged_in_headers): + flow = orjson.loads(json_webhook_test) data = flow["data"] flow = FlowCreate(name="Basic Chat", description="description", data=data) - response = client.post("api/v1/flows/", json=flow.model_dump(), headers=logged_in_headers) + response = await client.post("api/v1/flows/", json=flow.model_dump(), headers=logged_in_headers) assert response.status_code == 201 assert response.json()["name"] == flow.name assert response.json()["data"] == flow.data - return response.json() + yield response.json() + await client.delete(f"api/v1/flows/{response.json()['id']}", headers=logged_in_headers) @pytest.fixture -def added_flow_chat_input(client, json_chat_input, logged_in_headers): +async def added_flow_chat_input(client, json_chat_input, logged_in_headers): flow = orjson.loads(json_chat_input) data = flow["data"] flow = FlowCreate(name="Chat Input", description="description", data=data) - response = client.post("api/v1/flows/", json=flow.model_dump(), headers=logged_in_headers) + response = await client.post("api/v1/flows/", json=flow.model_dump(), headers=logged_in_headers) assert response.status_code == 201 assert response.json()["name"] == flow.name assert response.json()["data"] == flow.data - return response.json() + yield response.json() + await client.delete(f"api/v1/flows/{response.json()['id']}", headers=logged_in_headers) @pytest.fixture -def added_flow_two_outputs(client, 
json_two_outputs, logged_in_headers): +async def added_flow_two_outputs(client, json_two_outputs, logged_in_headers): flow = orjson.loads(json_two_outputs) data = flow["data"] flow = FlowCreate(name="Two Outputs", description="description", data=data) - response = client.post("api/v1/flows/", json=flow.model_dump(), headers=logged_in_headers) + response = await client.post("api/v1/flows/", json=flow.model_dump(), headers=logged_in_headers) assert response.status_code == 201 assert response.json()["name"] == flow.name assert response.json()["data"] == flow.data - return response.json() + yield response.json() + await client.delete(f"api/v1/flows/{response.json()['id']}", headers=logged_in_headers) @pytest.fixture -def added_vector_store(client, json_vector_store, logged_in_headers): +async def added_vector_store(client, json_vector_store, logged_in_headers): vector_store = orjson.loads(json_vector_store) data = vector_store["data"] vector_store = FlowCreate(name="Vector Store", description="description", data=data) - response = client.post("api/v1/flows/", json=vector_store.model_dump(), headers=logged_in_headers) + response = await client.post("api/v1/flows/", json=vector_store.model_dump(), headers=logged_in_headers) assert response.status_code == 201 assert response.json()["name"] == vector_store.name assert response.json()["data"] == vector_store.data - return response.json() + yield response.json() + await client.delete(f"api/v1/flows/{response.json()['id']}", headers=logged_in_headers) @pytest.fixture -def added_webhook_test(client, json_webhook_test, logged_in_headers): +async def added_webhook_test(client, json_webhook_test, logged_in_headers): webhook_test = orjson.loads(json_webhook_test) data = webhook_test["data"] webhook_test = FlowCreate( name="Webhook Test", description="description", data=data, endpoint_name=webhook_test["endpoint_name"] ) - response = client.post("api/v1/flows/", json=webhook_test.model_dump(), headers=logged_in_headers) + response = await client.post("api/v1/flows/", json=webhook_test.model_dump(), headers=logged_in_headers) assert response.status_code == 201 assert response.json()["name"] == webhook_test.name assert response.json()["data"] == webhook_test.data - return response.json() + yield response.json() + await client.delete(f"api/v1/flows/{response.json()['id']}", headers=logged_in_headers) + + +@pytest.fixture +async def flow_component(client: AsyncClient, logged_in_headers): + from langflow.components.inputs import ChatInput + + chat_input = ChatInput() + graph = Graph(start=chat_input, end=chat_input) + graph_dict = graph.dump(name="Chat Input Component") + flow = FlowCreate(**graph_dict) + response = await client.post("api/v1/flows/", json=flow.model_dump(), headers=logged_in_headers) + assert response.status_code == 201 + yield response.json() + await client.delete(f"api/v1/flows/{response.json()['id']}", headers=logged_in_headers) @pytest.fixture @@ -429,25 +561,28 @@ def created_api_key(active_user): db_manager = get_db_service() with session_getter(db_manager) as session: if existing_api_key := session.exec(select(ApiKey).where(ApiKey.api_key == api_key.api_key)).first(): - return existing_api_key + yield existing_api_key + return session.add(api_key) session.commit() session.refresh(api_key) - return api_key + yield api_key + # Clean up + session.delete(api_key) + session.commit() @pytest.fixture(name="simple_api_test") -def get_simple_api_test(client, logged_in_headers, json_simple_api_test): +async def get_simple_api_test(client, 
logged_in_headers, json_simple_api_test): # Once the client is created, we can get the starter project # Just create a new flow with the simple api test flow = orjson.loads(json_simple_api_test) data = flow["data"] flow = FlowCreate(name="Simple API Test", data=data, description="Simple API Test") - response = client.post("api/v1/flows/", json=flow.model_dump(), headers=logged_in_headers) + response = await client.post("api/v1/flows/", json=flow.model_dump(), headers=logged_in_headers) assert response.status_code == 201 - assert response.json()["name"] == flow.name - assert response.json()["data"] == flow.data - return response.json() + yield response.json() + await client.delete(f"api/v1/flows/{response.json()['id']}", headers=logged_in_headers) @pytest.fixture(name="starter_project") @@ -460,8 +595,11 @@ def get_starter_project(active_user): .where(Flow.name == "Basic Prompting (Hello, World)") ).first() if not flow: - raise ValueError("No starter project found") + msg = "No starter project found" + raise ValueError(msg) + # ensure openai api key is set + get_openai_api_key() new_flow_create = FlowCreate( name=flow.name, description=flow.description, @@ -473,4 +611,7 @@ def get_starter_project(active_user): session.commit() session.refresh(new_flow) new_flow_dict = new_flow.model_dump() - return new_flow_dict + yield new_flow_dict + # Clean up + session.delete(new_flow) + session.commit() diff --git a/src/backend/tests/constants.py b/src/backend/tests/constants.py new file mode 100644 index 000000000000..cd7f004bcb35 --- /dev/null +++ b/src/backend/tests/constants.py @@ -0,0 +1 @@ +SUPPORTED_VERSIONS = ["1.0.17", "1.0.18", "1.0.19"] diff --git a/src/backend/tests/data/MemoryChatbotNoLLM.json b/src/backend/tests/data/MemoryChatbotNoLLM.json index 55b5e29169fb..14e0938a9a05 100644 --- a/src/backend/tests/data/MemoryChatbotNoLLM.json +++ b/src/backend/tests/data/MemoryChatbotNoLLM.json @@ -1 +1,886 @@ -{"id":"26c412c9-9e4a-406d-aadb-ef9a81badb3f","data":{"nodes":[{"data":{"description":"Create a prompt template with dynamic variables.","display_name":"Prompt","id":"Prompt-iWbCC","node":{"base_classes":["Message"],"beta":false,"conditional_paths":[],"custom_fields":{"template":["context","user_message"]},"description":"Create a prompt template with dynamic variables.","display_name":"Prompt","documentation":"","edited":false,"field_order":["template"],"frozen":false,"icon":"prompts","output_types":[],"outputs":[{"cache":true,"display_name":"Prompt Message","method":"build_prompt","name":"prompt","selected":"Message","types":["Message"],"value":"__UNDEFINED__"}],"pinned":false,"template":{"_type":"Component","code":{"advanced":true,"dynamic":true,"fileTypes":[],"file_path":"","info":"","list":false,"load_from_db":false,"multiline":true,"name":"code","password":false,"placeholder":"","required":true,"show":true,"title_case":false,"type":"code","value":"from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n 
]\n\n async def build_prompt(\n self,\n ) -> Message:\n prompt = await Message.from_template_and_variables(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"\n This function is called after the code validation is done.\n \"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n"},"context":{"advanced":false,"display_name":"context","dynamic":false,"field_type":"str","fileTypes":[],"file_path":"","info":"","input_types":["Message","Text"],"list":false,"load_from_db":false,"multiline":true,"name":"context","password":false,"placeholder":"","required":false,"show":true,"title_case":false,"type":"str","value":""},"template":{"advanced":false,"display_name":"Template","dynamic":false,"info":"","list":false,"load_from_db":false,"name":"template","placeholder":"","required":false,"show":true,"title_case":false,"trace_as_input":true,"type":"prompt","value":"{context}\n\nUser: {user_message}\nAI: "},"user_message":{"advanced":false,"display_name":"user_message","dynamic":false,"field_type":"str","fileTypes":[],"file_path":"","info":"","input_types":["Message","Text"],"list":false,"load_from_db":false,"multiline":true,"name":"user_message","password":false,"placeholder":"","required":false,"show":true,"title_case":false,"type":"str","value":""}}},"type":"Prompt"},"dragging":false,"height":494,"id":"Prompt-iWbCC","position":{"x":1880.8227904110583,"y":625.8049209882275},"positionAbsolute":{"x":1880.8227904110583,"y":625.8049209882275},"selected":false,"type":"genericNode","width":384},{"data":{"description":"Get chat inputs from the Playground.","display_name":"Chat Input","id":"ChatInput-CIGht","node":{"template":{"_type":"Component","files":{"trace_as_metadata":true,"file_path":"","fileTypes":["txt","md","mdx","csv","json","yaml","yml","xml","html","htm","pdf","docx","py","sh","sql","js","ts","tsx","jpg","jpeg","png","bmp","image"],"list":true,"required":false,"placeholder":"","show":true,"value":"","name":"files","display_name":"Files","advanced":true,"dynamic":false,"info":"Files to be sent with the message.","title_case":false,"type":"file"},"code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n 
name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[\"Machine\", \"User\"],\n value=\"User\",\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=\"User\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\", display_name=\"Session ID\", info=\"Session ID for the message.\", advanced=True\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n )\n\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"input_value":{"trace_as_input":true,"multiline":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"input_value","display_name":"Text","advanced":false,"input_types":["Message"],"dynamic":false,"info":"Message to be passed as input.","title_case":false,"type":"str"},"sender":{"trace_as_metadata":true,"options":["Machine","User"],"required":false,"placeholder":"","show":true,"value":"User","name":"sender","display_name":"Sender Type","advanced":true,"dynamic":false,"info":"Type of sender.","title_case":false,"type":"str"},"sender_name":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"User","name":"sender_name","display_name":"Sender Name","advanced":true,"input_types":["Message"],"dynamic":false,"info":"Name of the sender.","title_case":false,"type":"str"},"session_id":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"session_id","display_name":"Session ID","advanced":true,"input_types":["Message"],"dynamic":false,"info":"Session ID for the message.","title_case":false,"type":"str"},"should_store_message":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":true,"name":"should_store_message","display_name":"Store Messages","advanced":true,"dynamic":false,"info":"Store the message in the history.","title_case":false,"type":"bool"}},"description":"Get chat inputs from the Playground.","icon":"ChatInput","base_classes":["Message"],"display_name":"Chat 
Input","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"message","display_name":"Message","method":"message_response","value":"__UNDEFINED__","cache":true}],"field_order":["input_value","should_store_message","sender","sender_name","session_id","files"],"beta":false,"edited":false},"type":"ChatInput"},"dragging":false,"height":294,"id":"ChatInput-CIGht","position":{"x":1275.9262193671882,"y":836.1228056896347},"positionAbsolute":{"x":1275.9262193671882,"y":836.1228056896347},"selected":false,"type":"genericNode","width":384},{"data":{"description":"Display a chat message in the Playground.","display_name":"Chat Output","id":"ChatOutput-QA7ej","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[\"Machine\", \"User\"],\n value=\"Machine\",\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\", display_name=\"Sender Name\", info=\"Name of the sender.\", value=\"AI\", advanced=True\n ),\n MessageTextInput(\n name=\"session_id\", display_name=\"Session ID\", info=\"Session ID for the message.\", advanced=True\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"data_template":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"{text}","name":"data_template","display_name":"Data Template","advanced":true,"input_types":["Message"],"dynamic":false,"info":"Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.","title_case":false,"type":"str"},"input_value":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"input_value","display_name":"Text","advanced":false,"input_types":["Message"],"dynamic":false,"info":"Message to be passed as output.","title_case":false,"type":"str"},"sender":{"trace_as_metadata":true,"options":["Machine","User"],"required":false,"placeholder":"","show":true,"value":"Machine","name":"sender","display_name":"Sender Type","advanced":true,"dynamic":false,"info":"Type of sender.","title_case":false,"type":"str"},"sender_name":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"AI","name":"sender_name","display_name":"Sender Name","advanced":true,"input_types":["Message"],"dynamic":false,"info":"Name of the sender.","title_case":false,"type":"str"},"session_id":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"value":"","name":"session_id","display_name":"Session ID","advanced":true,"input_types":["Message"],"dynamic":false,"info":"Session ID for the message.","title_case":false,"type":"str"},"should_store_message":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"value":true,"name":"should_store_message","display_name":"Store Messages","advanced":true,"dynamic":false,"info":"Store the message in the history.","title_case":false,"type":"bool"}},"description":"Display a chat message in the Playground.","icon":"ChatOutput","base_classes":["Message"],"display_name":"Chat Output","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"message","display_name":"Message","method":"message_response","value":"__UNDEFINED__","cache":true}],"field_order":["input_value","should_store_message","sender","sender_name","session_id","data_template"],"beta":false,"edited":false},"type":"ChatOutput"},"height":294,"id":"ChatOutput-QA7ej","position":{"x":2487.48936094892,"y":703.7197762654707},"selected":true,"type":"genericNode","width":384,"positionAbsolute":{"x":2487.48936094892,"y":703.7197762654707},"dragging":true},{"data":{"description":"Retrieves stored chat messages from Langflow tables or an external memory.","display_name":"Chat Memory","id":"Memory-amN4Z","node":{"base_classes":["BaseChatMemory","Data","Message"],"beta":false,"conditional_paths":[],"custom_fields":{},"description":"Retrieves stored chat messages from Langflow tables or an external memory.","display_name":"Chat Memory","documentation":"","edited":false,"field_order":["memory","sender","sender_name","n_messages","session_id","order","template"],"frozen":false,"icon":"message-square-more","output_types":[],"outputs":[{"cache":true,"display_name":"Messages (Data)","method":"retrieve_messages","name":"messages","selected":"Data","types":["Data"],"value":"__UNDEFINED__"},{"cache":true,"display_name":"Messages 
(Text)","method":"retrieve_messages_as_text","name":"messages_text","selected":"Message","types":["Message"],"value":"__UNDEFINED__"},{"cache":true,"display_name":"Memory","method":"build_lc_memory","name":"lc_memory","selected":"BaseChatMemory","types":["BaseChatMemory"],"value":"__UNDEFINED__"}],"pinned":false,"template":{"_type":"Component","code":{"advanced":true,"dynamic":true,"fileTypes":[],"file_path":"","info":"","list":false,"load_from_db":false,"multiline":true,"name":"code","password":false,"placeholder":"","required":true,"show":true,"title_case":false,"type":"code","value":"from langflow.custom import Component\nfrom langflow.helpers.data import data_to_text\nfrom langflow.inputs import HandleInput\nfrom langflow.io import DropdownInput, IntInput, MessageTextInput, MultilineInput, Output\nfrom langflow.memory import get_messages, LCBuiltinChatMemory\nfrom langflow.schema import Data\nfrom langflow.schema.message import Message\nfrom langflow.field_typing import BaseChatMemory\nfrom langchain.memory import ConversationBufferMemory\n\n\nclass MemoryComponent(Component):\n display_name = \"Chat Memory\"\n description = \"Retrieves stored chat messages from Langflow tables or an external memory.\"\n icon = \"message-square-more\"\n name = \"Memory\"\n\n inputs = [\n HandleInput(\n name=\"memory\",\n display_name=\"External Memory\",\n input_types=[\"BaseChatMessageHistory\"],\n info=\"Retrieve messages from an external memory. If empty, it will use the Langflow tables.\",\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[\"Machine\", \"User\", \"Machine and User\"],\n value=\"Machine and User\",\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n advanced=True,\n ),\n IntInput(\n name=\"n_messages\",\n display_name=\"Number of Messages\",\n value=100,\n info=\"Number of messages to retrieve.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"Session ID of the chat history.\",\n advanced=True,\n ),\n DropdownInput(\n name=\"order\",\n display_name=\"Order\",\n options=[\"Ascending\", \"Descending\"],\n value=\"Ascending\",\n info=\"Order of the messages.\",\n advanced=True,\n ),\n MultilineInput(\n name=\"template\",\n display_name=\"Template\",\n info=\"The template to use for formatting the data. 
It can contain the keys {text}, {sender} or any other key in the message data.\",\n value=\"{sender_name}: {text}\",\n advanced=True,\n ),\n ]\n\n outputs = [\n Output(display_name=\"Messages (Data)\", name=\"messages\", method=\"retrieve_messages\"),\n Output(display_name=\"Messages (Text)\", name=\"messages_text\", method=\"retrieve_messages_as_text\"),\n Output(display_name=\"Memory\", name=\"lc_memory\", method=\"build_lc_memory\"),\n ]\n\n def retrieve_messages(self) -> Data:\n sender = self.sender\n sender_name = self.sender_name\n session_id = self.session_id\n n_messages = self.n_messages\n order = \"DESC\" if self.order == \"Descending\" else \"ASC\"\n\n if sender == \"Machine and User\":\n sender = None\n\n if self.memory:\n # override session_id\n self.memory.session_id = session_id\n\n stored = self.memory.messages\n if sender:\n expected_type = \"Machine\" if sender == \"Machine\" else \"User\"\n stored = [m for m in stored if m.type == expected_type]\n if order == \"ASC\":\n stored = stored[::-1]\n if n_messages:\n stored = stored[:n_messages]\n stored = [Message.from_lc_message(m) for m in stored]\n else:\n stored = get_messages(\n sender=sender,\n sender_name=sender_name,\n session_id=session_id,\n limit=n_messages,\n order=order,\n )\n self.status = stored\n return stored\n\n def retrieve_messages_as_text(self) -> Message:\n stored_text = data_to_text(self.template, self.retrieve_messages())\n self.status = stored_text\n return Message(text=stored_text)\n\n def build_lc_memory(self) -> BaseChatMemory:\n if self.memory:\n chat_memory = self.memory\n else:\n chat_memory = LCBuiltinChatMemory(flow_id=self.graph.flow_id, session_id=self.session_id)\n return ConversationBufferMemory(chat_memory=chat_memory)\n"},"memory":{"advanced":false,"display_name":"External Memory","dynamic":false,"info":"Retrieve messages from an external memory. 
If empty, it will use the Langflow tables.","input_types":["BaseChatMessageHistory"],"list":false,"name":"memory","placeholder":"","required":false,"show":true,"title_case":false,"trace_as_metadata":true,"type":"other","value":""},"n_messages":{"advanced":true,"display_name":"Number of Messages","dynamic":false,"info":"Number of messages to retrieve.","list":false,"name":"n_messages","placeholder":"","required":false,"show":true,"title_case":false,"trace_as_metadata":true,"type":"int","value":100},"order":{"advanced":true,"display_name":"Order","dynamic":false,"info":"Order of the messages.","name":"order","options":["Ascending","Descending"],"placeholder":"","required":false,"show":true,"title_case":false,"trace_as_metadata":true,"type":"str","value":"Ascending"},"sender":{"advanced":true,"display_name":"Sender Type","dynamic":false,"info":"Type of sender.","name":"sender","options":["Machine","User","Machine and User"],"placeholder":"","required":false,"show":true,"title_case":false,"trace_as_metadata":true,"type":"str","value":"Machine and User"},"sender_name":{"advanced":true,"display_name":"Sender Name","dynamic":false,"info":"Name of the sender.","input_types":["Message"],"list":false,"load_from_db":false,"name":"sender_name","placeholder":"","required":false,"show":true,"title_case":false,"trace_as_input":true,"trace_as_metadata":true,"type":"str","value":""},"session_id":{"advanced":true,"display_name":"Session ID","dynamic":false,"info":"Session ID of the chat history.","input_types":["Message"],"list":false,"load_from_db":false,"name":"session_id","placeholder":"","required":false,"show":true,"title_case":false,"trace_as_input":true,"trace_as_metadata":true,"type":"str","value":""},"template":{"advanced":true,"display_name":"Template","dynamic":false,"info":"The template to use for formatting the data. 
It can contain the keys {text}, {sender} or any other key in the message data.","input_types":["Message"],"list":false,"load_from_db":false,"multiline":true,"name":"template","placeholder":"","required":false,"show":true,"title_case":false,"trace_as_input":true,"trace_as_metadata":true,"type":"str","value":"{sender_name}: {text}"}}},"type":"Memory"},"dragging":false,"height":366,"id":"Memory-amN4Z","position":{"x":1308.5775646859402,"y":406.95204412025845},"positionAbsolute":{"x":1308.5775646859402,"y":406.95204412025845},"selected":false,"type":"genericNode","width":384}],"edges":[{"className":"","data":{"sourceHandle":{"dataType":"ChatInput","id":"ChatInput-CIGht","name":"message","output_types":["Message"]},"targetHandle":{"fieldName":"user_message","id":"Prompt-iWbCC","inputTypes":["Message","Text"],"type":"str"}},"id":"reactflow__edge-ChatInput-CIGht{œdataTypeœ:œChatInputœ,œidœ:œChatInput-CIGhtœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Prompt-iWbCC{œfieldNameœ:œuser_messageœ,œidœ:œPrompt-iWbCCœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}","source":"ChatInput-CIGht","sourceHandle":"{œdataTypeœ:œChatInputœ,œidœ:œChatInput-CIGhtœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}","target":"Prompt-iWbCC","targetHandle":"{œfieldNameœ:œuser_messageœ,œidœ:œPrompt-iWbCCœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}"},{"className":"","data":{"sourceHandle":{"dataType":"Memory","id":"Memory-amN4Z","name":"messages_text","output_types":["Message"]},"targetHandle":{"fieldName":"context","id":"Prompt-iWbCC","inputTypes":["Message","Text"],"type":"str"}},"id":"reactflow__edge-Memory-amN4Z{œdataTypeœ:œMemoryœ,œidœ:œMemory-amN4Zœ,œnameœ:œmessages_textœ,œoutput_typesœ:[œMessageœ]}-Prompt-iWbCC{œfieldNameœ:œcontextœ,œidœ:œPrompt-iWbCCœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}","source":"Memory-amN4Z","sourceHandle":"{œdataTypeœ:œMemoryœ,œidœ:œMemory-amN4Zœ,œnameœ:œmessages_textœ,œoutput_typesœ:[œMessageœ]}","target":"Prompt-iWbCC","targetHandle":"{œfieldNameœ:œcontextœ,œidœ:œPrompt-iWbCCœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}"},{"source":"Prompt-iWbCC","sourceHandle":"{œdataTypeœ:œPromptœ,œidœ:œPrompt-iWbCCœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}","target":"ChatOutput-QA7ej","targetHandle":"{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-QA7ejœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","data":{"targetHandle":{"fieldName":"input_value","id":"ChatOutput-QA7ej","inputTypes":["Message"],"type":"str"},"sourceHandle":{"dataType":"Prompt","id":"Prompt-iWbCC","name":"prompt","output_types":["Message"]}},"id":"reactflow__edge-Prompt-iWbCC{œdataTypeœ:œPromptœ,œidœ:œPrompt-iWbCCœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-QA7ej{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-QA7ejœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}"}],"viewport":{"x":-417.45799796990354,"y":3.1615551909424653,"zoom":0.45494095964690673}},"description":"This project can be used as a starting point for building a Chat experience with user specific memory. 
You can set a different Session ID to start a new message history.","name":"MemoryChatbotNoLLM","last_tested_version":"1.0.12","endpoint_name":null,"is_component":false} \ No newline at end of file +{ + "id": "26c412c9-9e4a-406d-aadb-ef9a81badb3f", + "data": { + "nodes": [ + { + "data": { + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt", + "id": "Prompt-iWbCC", + "node": { + "base_classes": [ + "Message" + ], + "beta": false, + "conditional_paths": [], + "custom_fields": { + "template": [ + "context", + "user_message" + ] + }, + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt", + "documentation": "", + "edited": false, + "field_order": [ + "template" + ], + "frozen": false, + "icon": "prompts", + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Prompt Message", + "method": "build_prompt", + "name": "prompt", + "selected": "Message", + "types": [ + "Message" + ], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(\n self,\n ) -> Message:\n prompt = await Message.from_template_and_variables(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"\n This function is called after the code validation is done.\n \"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n" + }, + "context": { + "advanced": false, + "display_name": "context", + "dynamic": false, + "field_type": "str", + "fileTypes": [], + "file_path": "", + "info": "", + "input_types": [ + "Message", + "Text" + ], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "context", + "password": false, + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "" + }, + "template": { + "advanced": false, + "display_name": "Template", + "dynamic": false, + "info": "", 
+ "list": false, + "load_from_db": false, + "name": "template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "type": "prompt", + "value": "{context}\n\nUser: {user_message}\nAI: " + }, + "user_message": { + "advanced": false, + "display_name": "user_message", + "dynamic": false, + "field_type": "str", + "fileTypes": [], + "file_path": "", + "info": "", + "input_types": [ + "Message", + "Text" + ], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "user_message", + "password": false, + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "" + } + } + }, + "type": "Prompt" + }, + "dragging": false, + "height": 494, + "id": "Prompt-iWbCC", + "position": { + "x": 1880.8227904110583, + "y": 625.8049209882275 + }, + "positionAbsolute": { + "x": 1880.8227904110583, + "y": 625.8049209882275 + }, + "selected": false, + "type": "genericNode", + "width": 384 + }, + { + "data": { + "description": "Get chat inputs from the Playground.", + "display_name": "Chat Input", + "id": "ChatInput-CIGht", + "node": { + "template": { + "_type": "Component", + "files": { + "trace_as_metadata": true, + "file_path": "", + "fileTypes": [ + "txt", + "md", + "mdx", + "csv", + "json", + "yaml", + "yml", + "xml", + "html", + "htm", + "pdf", + "docx", + "py", + "sh", + "sql", + "js", + "ts", + "tsx", + "jpg", + "jpeg", + "png", + "bmp", + "image" + ], + "list": true, + "required": false, + "placeholder": "", + "show": true, + "value": "", + "name": "files", + "display_name": "Files", + "advanced": true, + "dynamic": false, + "info": "Files to be sent with the message.", + "title_case": false, + "type": "file" + }, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[\"Machine\", \"User\"],\n value=\"User\",\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=\"User\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\", display_name=\"Session ID\", info=\"Session ID for the message.\", advanced=True\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n 
sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n )\n\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "input_value": { + "trace_as_input": true, + "multiline": true, + "trace_as_metadata": true, + "load_from_db": false, + "list": false, + "required": false, + "placeholder": "", + "show": true, + "value": "", + "name": "input_value", + "display_name": "Text", + "advanced": false, + "input_types": [ + "Message" + ], + "dynamic": false, + "info": "Message to be passed as input.", + "title_case": false, + "type": "str" + }, + "sender": { + "trace_as_metadata": true, + "options": [ + "Machine", + "User" + ], + "required": false, + "placeholder": "", + "show": true, + "value": "User", + "name": "sender", + "display_name": "Sender Type", + "advanced": true, + "dynamic": false, + "info": "Type of sender.", + "title_case": false, + "type": "str" + }, + "sender_name": { + "trace_as_input": true, + "trace_as_metadata": true, + "load_from_db": false, + "list": false, + "required": false, + "placeholder": "", + "show": true, + "value": "User", + "name": "sender_name", + "display_name": "Sender Name", + "advanced": true, + "input_types": [ + "Message" + ], + "dynamic": false, + "info": "Name of the sender.", + "title_case": false, + "type": "str" + }, + "session_id": { + "trace_as_input": true, + "trace_as_metadata": true, + "load_from_db": false, + "list": false, + "required": false, + "placeholder": "", + "show": true, + "value": "", + "name": "session_id", + "display_name": "Session ID", + "advanced": true, + "input_types": [ + "Message" + ], + "dynamic": false, + "info": "Session ID for the message.", + "title_case": false, + "type": "str" + }, + "should_store_message": { + "trace_as_metadata": true, + "list": false, + "required": false, + "placeholder": "", + "show": true, + "value": true, + "name": "should_store_message", + "display_name": "Store Messages", + "advanced": true, + "dynamic": false, + "info": "Store the message in the history.", + "title_case": false, + "type": "bool" + } + }, + "description": "Get chat inputs from the Playground.", + "icon": "ChatInput", + "base_classes": [ + "Message" + ], + "display_name": "Chat Input", + "documentation": "", + "custom_fields": {}, + "output_types": [], + "pinned": false, + "conditional_paths": [], + "frozen": false, + "outputs": [ + { + "types": [ + "Message" + ], + "selected": "Message", + "name": "message", + "display_name": "Message", + "method": "message_response", + "value": "__UNDEFINED__", + "cache": true + } + ], + "field_order": [ + "input_value", + "should_store_message", + "sender", + "sender_name", + "session_id", + "files" + ], + "beta": false, + "edited": false + }, + "type": "ChatInput" + }, + "dragging": false, + "height": 294, + "id": "ChatInput-CIGht", + "position": { + "x": 1275.9262193671882, + "y": 836.1228056896347 + }, + "positionAbsolute": { + "x": 1275.9262193671882, + "y": 836.1228056896347 + }, + "selected": false, + "type": "genericNode", + "width": 384 + }, + { + "data": { + "description": "Display a chat message in the Playground.", + "display_name": "Chat 
Output", + "id": "ChatOutput-QA7ej", + "node": { + "template": { + "_type": "Component", + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[\"Machine\", \"User\"],\n value=\"Machine\",\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\", display_name=\"Sender Name\", info=\"Name of the sender.\", value=\"AI\", advanced=True\n ),\n MessageTextInput(\n name=\"session_id\", display_name=\"Session ID\", info=\"Session ID for the message.\", advanced=True\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "data_template": { + "trace_as_input": true, + "trace_as_metadata": true, + "load_from_db": false, + "list": false, + "required": false, + "placeholder": "", + "show": true, + "value": "{text}", + "name": "data_template", + "display_name": "Data Template", + "advanced": true, + "input_types": [ + "Message" + ], + "dynamic": false, + "info": "Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.", + "title_case": false, + "type": "str" + }, + "input_value": { + "trace_as_input": true, + "trace_as_metadata": true, + "load_from_db": false, + "list": false, + "required": false, + "placeholder": "", + "show": true, + "value": "", + "name": "input_value", + "display_name": "Text", + "advanced": false, + "input_types": [ + "Message" + ], + "dynamic": false, + "info": "Message to be passed as output.", + "title_case": false, + "type": "str" + }, + "sender": { + "trace_as_metadata": true, + "options": [ + "Machine", + "User" + ], + "required": false, + "placeholder": "", + "show": true, + "value": "Machine", + "name": "sender", + "display_name": "Sender Type", + "advanced": true, + "dynamic": false, + "info": "Type of sender.", + "title_case": false, + "type": "str" + }, + "sender_name": { + "trace_as_input": true, + "trace_as_metadata": true, + "load_from_db": false, + "list": false, + "required": false, + "placeholder": "", + "show": true, + "value": "AI", + "name": "sender_name", + "display_name": "Sender Name", + "advanced": true, + "input_types": [ + "Message" + ], + "dynamic": false, + "info": "Name of the sender.", + "title_case": false, + "type": "str" + }, + "session_id": { + "trace_as_input": true, + "trace_as_metadata": true, + "load_from_db": false, + "list": false, + "required": false, + "placeholder": "", + "show": true, + "value": "", + "name": "session_id", + "display_name": "Session ID", + "advanced": true, + "input_types": [ + "Message" + ], + "dynamic": false, + "info": "Session ID for the message.", + "title_case": false, + "type": "str" + }, + "should_store_message": { + "trace_as_metadata": true, + "list": false, + "required": false, + "placeholder": "", + "show": true, + "value": true, + "name": "should_store_message", + "display_name": "Store Messages", + "advanced": true, + "dynamic": false, + "info": "Store the message in the history.", + "title_case": false, + "type": "bool" + } + }, + "description": "Display a chat message in the Playground.", + "icon": "ChatOutput", + "base_classes": [ + "Message" + ], + "display_name": "Chat Output", + "documentation": "", + "custom_fields": {}, + "output_types": [], + "pinned": false, + "conditional_paths": [], + "frozen": false, + "outputs": [ + { + "types": [ + "Message" + ], + "selected": "Message", + "name": "message", + "display_name": "Message", + "method": "message_response", + "value": "__UNDEFINED__", + "cache": true + } + ], + "field_order": [ + "input_value", + "should_store_message", + "sender", + "sender_name", + "session_id", + "data_template" + ], + "beta": false, + "edited": false + }, + "type": "ChatOutput" + }, + "height": 294, + "id": "ChatOutput-QA7ej", + "position": { + "x": 2487.48936094892, + "y": 703.7197762654707 + }, + "selected": true, + "type": "genericNode", + "width": 384, + "positionAbsolute": { + "x": 2487.48936094892, + "y": 703.7197762654707 + }, + "dragging": true + }, + { + "data": { + "description": "Retrieves stored chat messages from Langflow tables or an external memory.", + "display_name": "Chat Memory", + "id": "Memory-amN4Z", + "node": { + "base_classes": [ + "BaseChatMemory", + "Data", + "Message" + ], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Retrieves stored chat messages from Langflow tables or an external memory.", + "display_name": "Chat Memory", + "documentation": "", + "edited": false, + "field_order": [ + "memory", + "sender", + "sender_name", + "n_messages", + 
"session_id", + "order", + "template" + ], + "frozen": false, + "icon": "message-square-more", + "output_types": [], + "outputs": [ + { + "cache": true, + "display_name": "Messages (Data)", + "method": "retrieve_messages", + "name": "messages", + "selected": "Data", + "types": [ + "Data" + ], + "value": "__UNDEFINED__" + }, + { + "cache": true, + "display_name": "Messages (Text)", + "method": "retrieve_messages_as_text", + "name": "messages_text", + "selected": "Message", + "types": [ + "Message" + ], + "value": "__UNDEFINED__" + }, + { + "cache": true, + "display_name": "Memory", + "method": "build_lc_memory", + "name": "lc_memory", + "selected": "BaseChatMemory", + "types": [ + "BaseChatMemory" + ], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.custom import Component\nfrom langflow.helpers.data import data_to_text\nfrom langflow.inputs import HandleInput\nfrom langflow.io import DropdownInput, IntInput, MessageTextInput, MultilineInput, Output\nfrom langflow.memory import get_messages, LCBuiltinChatMemory\nfrom langflow.schema import Data\nfrom langflow.schema.message import Message\nfrom langflow.field_typing import BaseChatMemory\nfrom langchain.memory import ConversationBufferMemory\n\n\nclass MemoryComponent(Component):\n display_name = \"Chat Memory\"\n description = \"Retrieves stored chat messages from Langflow tables or an external memory.\"\n icon = \"message-square-more\"\n name = \"Memory\"\n\n inputs = [\n HandleInput(\n name=\"memory\",\n display_name=\"External Memory\",\n input_types=[\"BaseChatMessageHistory\"],\n info=\"Retrieve messages from an external memory. If empty, it will use the Langflow tables.\",\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[\"Machine\", \"User\", \"Machine and User\"],\n value=\"Machine and User\",\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n advanced=True,\n ),\n IntInput(\n name=\"n_messages\",\n display_name=\"Number of Messages\",\n value=100,\n info=\"Number of messages to retrieve.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"Session ID of the chat history.\",\n advanced=True,\n ),\n DropdownInput(\n name=\"order\",\n display_name=\"Order\",\n options=[\"Ascending\", \"Descending\"],\n value=\"Ascending\",\n info=\"Order of the messages.\",\n advanced=True,\n ),\n MultilineInput(\n name=\"template\",\n display_name=\"Template\",\n info=\"The template to use for formatting the data. 
It can contain the keys {text}, {sender} or any other key in the message data.\",\n value=\"{sender_name}: {text}\",\n advanced=True,\n ),\n ]\n\n outputs = [\n Output(display_name=\"Messages (Data)\", name=\"messages\", method=\"retrieve_messages\"),\n Output(display_name=\"Messages (Text)\", name=\"messages_text\", method=\"retrieve_messages_as_text\"),\n Output(display_name=\"Memory\", name=\"lc_memory\", method=\"build_lc_memory\"),\n ]\n\n def retrieve_messages(self) -> Data:\n sender = self.sender\n sender_name = self.sender_name\n session_id = self.session_id\n n_messages = self.n_messages\n order = \"DESC\" if self.order == \"Descending\" else \"ASC\"\n\n if sender == \"Machine and User\":\n sender = None\n\n if self.memory:\n # override session_id\n self.memory.session_id = session_id\n\n stored = self.memory.messages\n if sender:\n expected_type = \"Machine\" if sender == \"Machine\" else \"User\"\n stored = [m for m in stored if m.type == expected_type]\n if order == \"ASC\":\n stored = stored[::-1]\n if n_messages:\n stored = stored[:n_messages]\n stored = [Message.from_lc_message(m) for m in stored]\n else:\n stored = get_messages(\n sender=sender,\n sender_name=sender_name,\n session_id=session_id,\n limit=n_messages,\n order=order,\n )\n self.status = stored\n return stored\n\n def retrieve_messages_as_text(self) -> Message:\n stored_text = data_to_text(self.template, self.retrieve_messages())\n self.status = stored_text\n return Message(text=stored_text)\n\n def build_lc_memory(self) -> BaseChatMemory:\n if self.memory:\n chat_memory = self.memory\n else:\n chat_memory = LCBuiltinChatMemory(flow_id=self.graph.flow_id, session_id=self.session_id)\n return ConversationBufferMemory(chat_memory=chat_memory)\n" + }, + "memory": { + "advanced": false, + "display_name": "External Memory", + "dynamic": false, + "info": "Retrieve messages from an external memory. 
If empty, it will use the Langflow tables.", + "input_types": [ + "BaseChatMessageHistory" + ], + "list": false, + "name": "memory", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "n_messages": { + "advanced": true, + "display_name": "Number of Messages", + "dynamic": false, + "info": "Number of messages to retrieve.", + "list": false, + "name": "n_messages", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "int", + "value": 100 + }, + "order": { + "advanced": true, + "display_name": "Order", + "dynamic": false, + "info": "Order of the messages.", + "name": "order", + "options": [ + "Ascending", + "Descending" + ], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "str", + "value": "Ascending" + }, + "sender": { + "advanced": true, + "display_name": "Sender Type", + "dynamic": false, + "info": "Type of sender.", + "name": "sender", + "options": [ + "Machine", + "User", + "Machine and User" + ], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "str", + "value": "Machine and User" + }, + "sender_name": { + "advanced": true, + "display_name": "Sender Name", + "dynamic": false, + "info": "Name of the sender.", + "input_types": [ + "Message" + ], + "list": false, + "load_from_db": false, + "name": "sender_name", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "session_id": { + "advanced": true, + "display_name": "Session ID", + "dynamic": false, + "info": "Session ID of the chat history.", + "input_types": [ + "Message" + ], + "list": false, + "load_from_db": false, + "name": "session_id", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "template": { + "advanced": true, + "display_name": "Template", + "dynamic": false, + "info": "The template to use for formatting the data. 
It can contain the keys {text}, {sender} or any other key in the message data.", + "input_types": [ + "Message" + ], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "{sender_name}: {text}" + } + } + }, + "type": "Memory" + }, + "dragging": false, + "height": 366, + "id": "Memory-amN4Z", + "position": { + "x": 1308.5775646859402, + "y": 406.95204412025845 + }, + "positionAbsolute": { + "x": 1308.5775646859402, + "y": 406.95204412025845 + }, + "selected": false, + "type": "genericNode", + "width": 384 + } + ], + "edges": [ + { + "className": "", + "data": { + "sourceHandle": { + "dataType": "ChatInput", + "id": "ChatInput-CIGht", + "name": "message", + "output_types": [ + "Message" + ] + }, + "targetHandle": { + "fieldName": "user_message", + "id": "Prompt-iWbCC", + "inputTypes": [ + "Message", + "Text" + ], + "type": "str" + } + }, + "id": "reactflow__edge-ChatInput-CIGht{œdataTypeœ:œChatInputœ,œidœ:œChatInput-CIGhtœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Prompt-iWbCC{œfieldNameœ:œuser_messageœ,œidœ:œPrompt-iWbCCœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", + "source": "ChatInput-CIGht", + "sourceHandle": "{œdataTypeœ:œChatInputœ,œidœ:œChatInput-CIGhtœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}", + "target": "Prompt-iWbCC", + "targetHandle": "{œfieldNameœ:œuser_messageœ,œidœ:œPrompt-iWbCCœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}" + }, + { + "className": "", + "data": { + "sourceHandle": { + "dataType": "Memory", + "id": "Memory-amN4Z", + "name": "messages_text", + "output_types": [ + "Message" + ] + }, + "targetHandle": { + "fieldName": "context", + "id": "Prompt-iWbCC", + "inputTypes": [ + "Message", + "Text" + ], + "type": "str" + } + }, + "id": "reactflow__edge-Memory-amN4Z{œdataTypeœ:œMemoryœ,œidœ:œMemory-amN4Zœ,œnameœ:œmessages_textœ,œoutput_typesœ:[œMessageœ]}-Prompt-iWbCC{œfieldNameœ:œcontextœ,œidœ:œPrompt-iWbCCœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", + "source": "Memory-amN4Z", + "sourceHandle": "{œdataTypeœ:œMemoryœ,œidœ:œMemory-amN4Zœ,œnameœ:œmessages_textœ,œoutput_typesœ:[œMessageœ]}", + "target": "Prompt-iWbCC", + "targetHandle": "{œfieldNameœ:œcontextœ,œidœ:œPrompt-iWbCCœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}" + }, + { + "source": "Prompt-iWbCC", + "sourceHandle": "{œdataTypeœ:œPromptœ,œidœ:œPrompt-iWbCCœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}", + "target": "ChatOutput-QA7ej", + "targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-QA7ejœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "data": { + "targetHandle": { + "fieldName": "input_value", + "id": "ChatOutput-QA7ej", + "inputTypes": [ + "Message" + ], + "type": "str" + }, + "sourceHandle": { + "dataType": "Prompt", + "id": "Prompt-iWbCC", + "name": "prompt", + "output_types": [ + "Message" + ] + } + }, + "id": "reactflow__edge-Prompt-iWbCC{œdataTypeœ:œPromptœ,œidœ:œPrompt-iWbCCœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-QA7ej{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-QA7ejœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}" + } + ], + "viewport": { + "x": -417.45799796990354, + "y": 3.1615551909424653, + "zoom": 0.45494095964690673 + } + }, + "description": "This project can be used as a starting point for building a Chat experience with user specific memory. 
You can set a different Session ID to start a new message history.", + "name": "MemoryChatbotNoLLM", + "last_tested_version": "1.0.12", + "endpoint_name": null, + "is_component": false +} \ No newline at end of file diff --git a/src/backend/tests/data/__init__.py b/src/backend/tests/data/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/src/backend/tests/data/component.py b/src/backend/tests/data/component.py index 5de63fcd5f40..e16ea3b4d2d5 100644 --- a/src/backend/tests/data/component.py +++ b/src/backend/tests/data/component.py @@ -7,7 +7,7 @@ class TestComponent(CustomComponent): def refresh_values(self): # This is a function that will be called every time the component is updated # and should return a list of random strings - return [f"Random {random.randint(1, 100)}" for _ in range(5)] + return [f"Random {random.randint(1, 100)}" for _ in range(5)] # noqa: S311 def build_config(self): return {"param": {"display_name": "Param", "options": self.refresh_values}} diff --git a/src/backend/tests/data/component_nested_call.py b/src/backend/tests/data/component_nested_call.py index 5dd61c2bab33..526d7cc88f9f 100644 --- a/src/backend/tests/data/component_nested_call.py +++ b/src/backend/tests/data/component_nested_call.py @@ -16,7 +16,7 @@ class MultipleOutputsComponent(Component): ] def certain_output(self) -> int: - return randint(0, self.number) + return randint(0, self.number) # noqa: S311 def other_output(self) -> int: return self.certain_output() diff --git a/src/backend/tests/data/component_with_templatefield.py b/src/backend/tests/data/component_with_templatefield.py index bc79c80d2ef1..cde77f717557 100644 --- a/src/backend/tests/data/component_with_templatefield.py +++ b/src/backend/tests/data/component_with_templatefield.py @@ -8,7 +8,7 @@ class TestComponent(CustomComponent): def refresh_values(self): # This is a function that will be called every time the component is updated # and should return a list of random strings - return [f"Random {random.randint(1, 100)}" for _ in range(5)] + return [f"Random {random.randint(1, 100)}" for _ in range(5)] # noqa: S311 def build_config(self): return {"param": Input(display_name="Param", options=self.refresh_values)} diff --git a/src/backend/tests/data/dynamic_output_component.py b/src/backend/tests/data/dynamic_output_component.py new file mode 100644 index 000000000000..b2a23aa37b18 --- /dev/null +++ b/src/backend/tests/data/dynamic_output_component.py @@ -0,0 +1,41 @@ +# from langflow.field_typing import Data +from typing import Any + +from langflow.custom import Component +from langflow.io import BoolInput, MessageTextInput, Output +from langflow.schema import Data + + +class DynamicOutputComponent(Component): + display_name = "Dynamic Output Component" + description = "Use as a template to create your own component." 
+ documentation: str = "http://docs.langflow.org/components/custom" + icon = "custom_components" + name = "DynamicOutputComponent" + + inputs = [ + MessageTextInput(name="input_value", display_name="Input Value", value="Hello, World!"), + BoolInput(name="show_output", display_name="Show Output", value=True, real_time_refresh=True), + ] + + outputs = [ + Output(display_name="Output", name="output", method="build_output"), + ] + + def update_outputs(self, frontend_node: dict, field_name: str, field_value: Any): + if field_name == "show_output": + if field_value: + frontend_node["outputs"].append( + Output(display_name="Tool Output", name="tool_output", method="build_output") + ) + else: + # remove the output + frontend_node["outputs"] = [ + output for output in frontend_node["outputs"] if output["name"] != "tool_output" + ] + return frontend_node + + def build_output(self) -> Data: + data = Data(value=self.input_value) + self.status = data + return data diff --git a/src/backend/tests/integration/astra/test_astra_component.py b/src/backend/tests/integration/astra/test_astra_component.py deleted file mode 100644 index 2742b0e3bd2c..000000000000 --- a/src/backend/tests/integration/astra/test_astra_component.py +++ /dev/null @@ -1,240 +0,0 @@ -import os - -import pytest -from integration.utils import MockEmbeddings, check_env_vars, valid_nvidia_vectorize_region -from langchain_core.documents import Document - -# from langflow.components.memories.AstraDBMessageReader import AstraDBMessageReaderComponent -# from langflow.components.memories.AstraDBMessageWriter import AstraDBMessageWriterComponent -from langflow.components.vectorstores.AstraDB import AstraVectorStoreComponent -from langflow.schema.data import Data - -COLLECTION = "test_basic" -SEARCH_COLLECTION = "test_search" -# MEMORY_COLLECTION = "test_memory" -VECTORIZE_COLLECTION = "test_vectorize" -VECTORIZE_COLLECTION_OPENAI = "test_vectorize_openai" -VECTORIZE_COLLECTION_OPENAI_WITH_AUTH = "test_vectorize_openai_auth" - - -@pytest.fixture() -def astra_fixture(request): - """ - Sets up the astra collection and cleans up after - """ - try: - from langchain_astradb import AstraDBVectorStore - except ImportError: - raise ImportError( - "Could not import langchain Astra DB integration package. Please install it with `pip install langchain-astradb`." 
- ) - - store = AstraDBVectorStore( - collection_name=request.param, - embedding=MockEmbeddings(), - api_endpoint=os.getenv("ASTRA_DB_API_ENDPOINT"), - token=os.getenv("ASTRA_DB_APPLICATION_TOKEN"), - ) - - yield - - store.delete_collection() - - -@pytest.mark.skipif( - not check_env_vars("ASTRA_DB_APPLICATION_TOKEN", "ASTRA_DB_API_ENDPOINT"), - reason="missing astra env vars", -) -@pytest.mark.parametrize("astra_fixture", [COLLECTION], indirect=True) -def test_astra_setup(astra_fixture): - application_token = os.getenv("ASTRA_DB_APPLICATION_TOKEN") - api_endpoint = os.getenv("ASTRA_DB_API_ENDPOINT") - embedding = MockEmbeddings() - - component = AstraVectorStoreComponent() - component.build( - token=application_token, - api_endpoint=api_endpoint, - collection_name=COLLECTION, - embedding=embedding, - ) - component.build_vector_store() - - -@pytest.mark.skipif( - not check_env_vars("ASTRA_DB_APPLICATION_TOKEN", "ASTRA_DB_API_ENDPOINT"), - reason="missing astra env vars", -) -@pytest.mark.parametrize("astra_fixture", [SEARCH_COLLECTION], indirect=True) -def test_astra_embeds_and_search(astra_fixture): - application_token = os.getenv("ASTRA_DB_APPLICATION_TOKEN") - api_endpoint = os.getenv("ASTRA_DB_API_ENDPOINT") - embedding = MockEmbeddings() - - documents = [Document(page_content="test1"), Document(page_content="test2")] - records = [Data.from_document(d) for d in documents] - - component = AstraVectorStoreComponent() - component.build( - token=application_token, - api_endpoint=api_endpoint, - collection_name=SEARCH_COLLECTION, - embedding=embedding, - ingest_data=records, - search_input="test1", - number_of_results=1, - ) - component.build_vector_store() - records = component.search_documents() - - assert len(records) == 1 - - -@pytest.mark.skipif( - not check_env_vars("ASTRA_DB_APPLICATION_TOKEN", "ASTRA_DB_API_ENDPOINT") - or not valid_nvidia_vectorize_region(os.getenv("ASTRA_DB_API_ENDPOINT")), - reason="missing env vars or invalid region for nvidia vectorize", -) -def test_astra_vectorize(): - from langchain_astradb import AstraDBVectorStore, CollectionVectorServiceOptions - - from langflow.components.embeddings.AstraVectorize import AstraVectorizeComponent - - store = None - try: - options = {"provider": "nvidia", "modelName": "NV-Embed-QA"} - store = AstraDBVectorStore( - collection_name=VECTORIZE_COLLECTION, - api_endpoint=os.getenv("ASTRA_DB_API_ENDPOINT"), - token=os.getenv("ASTRA_DB_APPLICATION_TOKEN"), - collection_vector_service_options=CollectionVectorServiceOptions.from_dict(options), - ) - - application_token = os.getenv("ASTRA_DB_APPLICATION_TOKEN") - api_endpoint = os.getenv("ASTRA_DB_API_ENDPOINT") - - documents = [Document(page_content="test1"), Document(page_content="test2")] - records = [Data.from_document(d) for d in documents] - - vectorize = AstraVectorizeComponent() - vectorize.build(provider="NVIDIA", model_name="NV-Embed-QA") - vectorize_options = vectorize.build_options() - - component = AstraVectorStoreComponent() - component.build( - token=application_token, - api_endpoint=api_endpoint, - collection_name=VECTORIZE_COLLECTION, - ingest_data=records, - embedding=vectorize_options, - search_input="test", - number_of_results=2, - ) - component.build_vector_store() - records = component.search_documents() - - assert len(records) == 2 - finally: - if store is not None: - store.delete_collection() - - -@pytest.mark.skipif( - not check_env_vars("ASTRA_DB_APPLICATION_TOKEN", "ASTRA_DB_API_ENDPOINT", "OPENAI_API_KEY"), - reason="missing env vars", -) -def 
test_astra_vectorize_with_provider_api_key(): - """tests vectorize using an openai api key""" - from langchain_astradb import AstraDBVectorStore, CollectionVectorServiceOptions - - from langflow.components.embeddings.AstraVectorize import AstraVectorizeComponent - - store = None - try: - application_token = os.getenv("ASTRA_DB_APPLICATION_TOKEN") - api_endpoint = os.getenv("ASTRA_DB_API_ENDPOINT") - options = {"provider": "openai", "modelName": "text-embedding-3-small", "parameters": {}, "authentication": {}} - store = AstraDBVectorStore( - collection_name=VECTORIZE_COLLECTION_OPENAI, - api_endpoint=api_endpoint, - token=application_token, - collection_vector_service_options=CollectionVectorServiceOptions.from_dict(options), - collection_embedding_api_key=os.getenv("OPENAI_API_KEY"), - ) - documents = [Document(page_content="test1"), Document(page_content="test2")] - records = [Data.from_document(d) for d in documents] - - vectorize = AstraVectorizeComponent() - vectorize.build( - provider="OpenAI", model_name="text-embedding-3-small", provider_api_key=os.getenv("OPENAI_API_KEY") - ) - vectorize_options = vectorize.build_options() - - component = AstraVectorStoreComponent() - component.build( - token=application_token, - api_endpoint=api_endpoint, - collection_name=VECTORIZE_COLLECTION_OPENAI, - ingest_data=records, - embedding=vectorize_options, - search_input="test", - number_of_results=4, - ) - component.build_vector_store() - records = component.search_documents() - assert len(records) == 2 - finally: - if store is not None: - store.delete_collection() - - -@pytest.mark.skipif( - not check_env_vars("ASTRA_DB_APPLICATION_TOKEN", "ASTRA_DB_API_ENDPOINT"), - reason="missing env vars", -) -def test_astra_vectorize_passes_authentication(): - """tests vectorize using the authentication parameter""" - from langchain_astradb import AstraDBVectorStore, CollectionVectorServiceOptions - - from langflow.components.embeddings.AstraVectorize import AstraVectorizeComponent - - store = None - try: - application_token = os.getenv("ASTRA_DB_APPLICATION_TOKEN") - api_endpoint = os.getenv("ASTRA_DB_API_ENDPOINT") - options = { - "provider": "openai", - "modelName": "text-embedding-3-small", - "parameters": {}, - "authentication": {"providerKey": "apikey"}, - } - store = AstraDBVectorStore( - collection_name=VECTORIZE_COLLECTION_OPENAI_WITH_AUTH, - api_endpoint=api_endpoint, - token=application_token, - collection_vector_service_options=CollectionVectorServiceOptions.from_dict(options), - ) - documents = [Document(page_content="test1"), Document(page_content="test2")] - records = [Data.from_document(d) for d in documents] - - vectorize = AstraVectorizeComponent() - vectorize.build( - provider="OpenAI", model_name="text-embedding-3-small", authentication={"providerKey": "apikey"} - ) - vectorize_options = vectorize.build_options() - - component = AstraVectorStoreComponent() - component.build( - token=application_token, - api_endpoint=api_endpoint, - collection_name=VECTORIZE_COLLECTION_OPENAI_WITH_AUTH, - ingest_data=records, - embedding=vectorize_options, - search_input="test", - ) - component.build_vector_store() - records = component.search_documents() - assert len(records) == 2 - finally: - if store is not None: - store.delete_collection() diff --git a/src/backend/tests/integration/backward_compatibility/__init__.py b/src/backend/tests/integration/backward_compatibility/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git 
a/src/backend/tests/integration/backward_compatibility/test_starter_projects.py b/src/backend/tests/integration/backward_compatibility/test_starter_projects.py new file mode 100644 index 000000000000..a7e7d9059c0e --- /dev/null +++ b/src/backend/tests/integration/backward_compatibility/test_starter_projects.py @@ -0,0 +1,16 @@ +import pytest +from langflow.schema.message import Message + +from tests.api_keys import get_openai_api_key +from tests.integration.utils import download_flow_from_github, run_json_flow + + +@pytest.mark.api_key_required +async def test_1_0_15_basic_prompting(): + api_key = get_openai_api_key() + json_flow = download_flow_from_github("Basic Prompting (Hello, World)", "1.0.15") + json_flow.set_value(json_flow.get_component_by_type("OpenAIModel"), "api_key", api_key) + outputs = await run_json_flow(json_flow, run_input="my name is bob, say hello!") + assert isinstance(outputs["message"], Message) + response = outputs["message"].text.lower() + assert "arr" in response or "ahoy" in response diff --git a/src/backend/tests/integration/components/__init__.py b/src/backend/tests/integration/components/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/src/backend/tests/integration/components/assistants/__init__.py b/src/backend/tests/integration/components/assistants/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/src/backend/tests/integration/components/assistants/test_assistants_components.py b/src/backend/tests/integration/components/assistants/test_assistants_components.py new file mode 100644 index 000000000000..d808d3de6d4c --- /dev/null +++ b/src/backend/tests/integration/components/assistants/test_assistants_components.py @@ -0,0 +1,73 @@ +import pytest + +from tests.integration.utils import run_single_component + + +@pytest.mark.api_key_required +async def test_list_assistants(): + from langflow.components.astra_assistants import AssistantsListAssistants + + results = await run_single_component( + AssistantsListAssistants, + inputs={}, + ) + assert results["assistants"].text is not None + + +@pytest.mark.api_key_required +async def test_create_assistants(): + from langflow.components.astra_assistants import AssistantsCreateAssistant + + results = await run_single_component( + AssistantsCreateAssistant, + inputs={ + "assistant_name": "artist-bot", + "instructions": "reply only with ascii art", + "model": "gpt-4o-mini", + }, + ) + assistant_id = results["assistant_id"].text + assert assistant_id is not None + await test_list_assistants() + await get_assistant_name(assistant_id) + thread_id = await test_create_thread() + await run_assistant(assistant_id, thread_id) + + +@pytest.mark.api_key_required +async def test_create_thread(): + from langflow.components.astra_assistants import AssistantsCreateThread + + results = await run_single_component( + AssistantsCreateThread, + inputs={}, + ) + thread_id = results["thread_id"].text + assert thread_id is not None + return thread_id + + +async def get_assistant_name(assistant_id): + from langflow.components.astra_assistants import AssistantsGetAssistantName + + results = await run_single_component( + AssistantsGetAssistantName, + inputs={ + "assistant_id": assistant_id, + }, + ) + assert results["assistant_name"].text is not None + + +async def run_assistant(assistant_id, thread_id): + from langflow.components.astra_assistants import AssistantsRun + + results = await run_single_component( + AssistantsRun, + inputs={ + "assistant_id": assistant_id, + "user_message": 
"hello", + "thread_id": thread_id, + }, + ) + assert results["assistant_response"].text is not None diff --git a/src/backend/tests/integration/components/astra/__init__.py b/src/backend/tests/integration/components/astra/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/src/backend/tests/integration/components/astra/test_astra_component.py b/src/backend/tests/integration/components/astra/test_astra_component.py new file mode 100644 index 000000000000..866898ed6a99 --- /dev/null +++ b/src/backend/tests/integration/components/astra/test_astra_component.py @@ -0,0 +1,244 @@ +import os + +import pytest +from astrapy.db import AstraDB +from langchain_core.documents import Document +from langflow.components.embeddings import OpenAIEmbeddingsComponent +from langflow.components.vectorstores import AstraVectorStoreComponent +from langflow.schema.data import Data + +from tests.api_keys import get_astradb_api_endpoint, get_astradb_application_token, get_openai_api_key +from tests.integration.components.mock_components import TextToData +from tests.integration.utils import ComponentInputHandle, run_single_component + +BASIC_COLLECTION = "test_basic" +SEARCH_COLLECTION = "test_search" +# MEMORY_COLLECTION = "test_memory" +VECTORIZE_COLLECTION = "test_vectorize" +VECTORIZE_COLLECTION_OPENAI = "test_vectorize_openai" +VECTORIZE_COLLECTION_OPENAI_WITH_AUTH = "test_vectorize_openai_auth" +ALL_COLLECTIONS = [ + BASIC_COLLECTION, + SEARCH_COLLECTION, + # MEMORY_COLLECTION, + VECTORIZE_COLLECTION, + VECTORIZE_COLLECTION_OPENAI, + VECTORIZE_COLLECTION_OPENAI_WITH_AUTH, +] + + +@pytest.fixture +def astradb_client(): + client = AstraDB(api_endpoint=get_astradb_api_endpoint(), token=get_astradb_application_token()) + yield client + for collection in ALL_COLLECTIONS: + client.delete_collection(collection) + + +@pytest.mark.api_key_required +async def test_base(astradb_client: AstraDB): + from langflow.components.embeddings import OpenAIEmbeddingsComponent + + application_token = get_astradb_application_token() + api_endpoint = get_astradb_api_endpoint() + + results = await run_single_component( + AstraVectorStoreComponent, + inputs={ + "token": application_token, + "api_endpoint": api_endpoint, + "collection_name": BASIC_COLLECTION, + "embedding": ComponentInputHandle( + clazz=OpenAIEmbeddingsComponent, + inputs={"openai_api_key": get_openai_api_key()}, + output_name="embeddings", + ), + }, + ) + from langchain_core.vectorstores import VectorStoreRetriever + + assert isinstance(results["base_retriever"], VectorStoreRetriever) + assert results["vector_store"] is not None + assert results["search_results"] == [] + assert astradb_client.collection(BASIC_COLLECTION) + + +@pytest.mark.api_key_required +async def test_astra_embeds_and_search(): + application_token = get_astradb_application_token() + api_endpoint = get_astradb_api_endpoint() + + results = await run_single_component( + AstraVectorStoreComponent, + inputs={ + "token": application_token, + "api_endpoint": api_endpoint, + "collection_name": BASIC_COLLECTION, + "number_of_results": 1, + "search_input": "test1", + "ingest_data": ComponentInputHandle( + clazz=TextToData, inputs={"text_data": ["test1", "test2"]}, output_name="from_text" + ), + "embedding": ComponentInputHandle( + clazz=OpenAIEmbeddingsComponent, + inputs={"openai_api_key": get_openai_api_key()}, + output_name="embeddings", + ), + }, + ) + assert len(results["search_results"]) == 1 + + +@pytest.mark.api_key_required +def test_astra_vectorize(): + from 
langchain_astradb import AstraDBVectorStore, CollectionVectorServiceOptions + + application_token = get_astradb_application_token() + api_endpoint = get_astradb_api_endpoint() + + store = None + try: + options = {"provider": "nvidia", "modelName": "NV-Embed-QA"} + options_comp = {"embedding_provider": "nvidia", "model": "NV-Embed-QA"} + + store = AstraDBVectorStore( + collection_name=VECTORIZE_COLLECTION, + api_endpoint=api_endpoint, + token=application_token, + collection_vector_service_options=CollectionVectorServiceOptions.from_dict(options), + ) + + documents = [Document(page_content="test1"), Document(page_content="test2")] + records = [Data.from_document(d) for d in documents] + + component = AstraVectorStoreComponent() + vectorize_options = component.build_vectorize_options(**options_comp) + + component.build( + token=application_token, + api_endpoint=api_endpoint, + collection_name=VECTORIZE_COLLECTION, + ingest_data=records, + search_input="test", + number_of_results=2, + pre_delete_collection=True, + ) + vector_store = component.build_vector_store(vectorize_options) + records = component.search_documents(vector_store=vector_store) + + assert len(records) == 2 + finally: + if store is not None: + store.delete_collection() + + +@pytest.mark.api_key_required +def test_astra_vectorize_with_provider_api_key(): + """Tests vectorize using an openai api key.""" + from langchain_astradb import AstraDBVectorStore, CollectionVectorServiceOptions + + application_token = get_astradb_application_token() + api_endpoint = get_astradb_api_endpoint() + + store = None + try: + options = { + "provider": "openai", + "modelName": "text-embedding-3-small", + "parameters": {}, + "authentication": {"providerKey": "openai"}, + } + + options_comp = { + "embedding_provider": "openai", + "model": "text-embedding-3-small", + "z_01_model_parameters": {}, + "z_03_provider_api_key": "openai", + "z_04_authentication": {}, + } + + store = AstraDBVectorStore( + collection_name=VECTORIZE_COLLECTION_OPENAI, + api_endpoint=api_endpoint, + token=application_token, + collection_vector_service_options=CollectionVectorServiceOptions.from_dict(options), + collection_embedding_api_key=os.getenv("OPENAI_API_KEY"), + ) + documents = [Document(page_content="test1"), Document(page_content="test2")] + records = [Data.from_document(d) for d in documents] + + component = AstraVectorStoreComponent() + vectorize_options = component.build_vectorize_options(**options_comp) + + component.build( + token=application_token, + api_endpoint=api_endpoint, + collection_name=VECTORIZE_COLLECTION_OPENAI, + ingest_data=records, + search_input="test", + number_of_results=2, + pre_delete_collection=True, + ) + + vector_store = component.build_vector_store(vectorize_options) + records = component.search_documents(vector_store=vector_store) + + assert len(records) == 2 + finally: + if store is not None: + store.delete_collection() + + +@pytest.mark.api_key_required +def test_astra_vectorize_passes_authentication(): + """Tests vectorize using the authentication parameter.""" + from langchain_astradb import AstraDBVectorStore, CollectionVectorServiceOptions + + store = None + try: + application_token = get_astradb_application_token() + api_endpoint = get_astradb_api_endpoint() + + options = { + "provider": "openai", + "modelName": "text-embedding-3-small", + "parameters": {}, + "authentication": {"providerKey": "openai"}, + } + options_comp = { + "embedding_provider": "openai", + "model": "text-embedding-3-small", + "z_01_model_parameters": {}, + 
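+ # These keys must match the keyword parameters of AstraVectorStoreComponent.build_vectorize_options(), which receives **options_comp below.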
"z_04_authentication": {"providerKey": "openai"}, + } + + store = AstraDBVectorStore( + collection_name=VECTORIZE_COLLECTION_OPENAI_WITH_AUTH, + api_endpoint=api_endpoint, + token=application_token, + collection_vector_service_options=CollectionVectorServiceOptions.from_dict(options), + ) + + documents = [Document(page_content="test1"), Document(page_content="test2")] + records = [Data.from_document(d) for d in documents] + + component = AstraVectorStoreComponent() + vectorize_options = component.build_vectorize_options(**options_comp) + + component.build( + token=application_token, + api_endpoint=api_endpoint, + collection_name=VECTORIZE_COLLECTION_OPENAI_WITH_AUTH, + ingest_data=records, + search_input="test", + number_of_results=2, + pre_delete_collection=True, + ) + + vector_store = component.build_vector_store(vectorize_options) + records = component.search_documents(vector_store=vector_store) + + assert len(records) == 2 + finally: + if store is not None: + store.delete_collection() diff --git a/src/backend/tests/integration/components/helpers/__init__.py b/src/backend/tests/integration/components/helpers/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/src/backend/tests/integration/components/helpers/test_parse_json_data.py b/src/backend/tests/integration/components/helpers/test_parse_json_data.py new file mode 100644 index 000000000000..63820d28b622 --- /dev/null +++ b/src/backend/tests/integration/components/helpers/test_parse_json_data.py @@ -0,0 +1,54 @@ +from langflow.components.inputs import ChatInput +from langflow.components.processing.parse_json_data import ParseJSONDataComponent +from langflow.schema import Data + +from tests.integration.components.mock_components import TextToData +from tests.integration.utils import ComponentInputHandle, run_single_component + + +async def test_from_data(): + outputs = await run_single_component( + ParseJSONDataComponent, + inputs={ + "input_value": ComponentInputHandle( + clazz=TextToData, inputs={"text_data": ['{"key":"value1"}'], "is_json": True}, output_name="from_text" + ), + "query": ".[0].key", + }, + ) + assert outputs["filtered_data"] == [Data(text="value1")] + + outputs = await run_single_component( + ParseJSONDataComponent, + inputs={ + "input_value": ComponentInputHandle( + clazz=TextToData, + inputs={"text_data": ['{"key":[{"field1": 1, "field2": 2}]}'], "is_json": True}, + output_name="from_text", + ), + "query": ".[0].key.[0].field2", + }, + ) + assert outputs["filtered_data"] == [Data(text="2")] + + +async def test_from_message(): + outputs = await run_single_component( + ParseJSONDataComponent, + inputs={ + "input_value": ComponentInputHandle(clazz=ChatInput, inputs={}, output_name="message"), + "query": ".key", + }, + run_input="{'key':'value1'}", + ) + assert outputs["filtered_data"] == [Data(text="value1")] + + outputs = await run_single_component( + ParseJSONDataComponent, + inputs={ + "input_value": ComponentInputHandle(clazz=ChatInput, inputs={}, output_name="message"), + "query": ".key.[0].field2", + }, + run_input='{"key":[{"field1": 1, "field2": 2}]}', + ) + assert outputs["filtered_data"] == [Data(text="2")] diff --git a/src/backend/tests/integration/components/inputs/__init__.py b/src/backend/tests/integration/components/inputs/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/src/backend/tests/integration/components/inputs/test_chat_input.py b/src/backend/tests/integration/components/inputs/test_chat_input.py new file mode 100644 index 
000000000000..698e8d266878 --- /dev/null +++ b/src/backend/tests/integration/components/inputs/test_chat_input.py @@ -0,0 +1,51 @@ +from langflow.components.inputs import ChatInput +from langflow.memory import get_messages +from langflow.schema.message import Message + +from tests.integration.utils import run_single_component + + +async def test_default(): + outputs = await run_single_component(ChatInput, run_input="hello") + assert isinstance(outputs["message"], Message) + assert outputs["message"].text == "hello" + assert outputs["message"].sender == "User" + assert outputs["message"].sender_name == "User" + + outputs = await run_single_component(ChatInput, run_input="") + assert isinstance(outputs["message"], Message) + assert outputs["message"].text == "" + assert outputs["message"].sender == "User" + assert outputs["message"].sender_name == "User" + + +async def test_sender(): + outputs = await run_single_component( + ChatInput, inputs={"sender": "Machine", "sender_name": "AI"}, run_input="hello" + ) + assert isinstance(outputs["message"], Message) + assert outputs["message"].text == "hello" + assert outputs["message"].sender == "Machine" + assert outputs["message"].sender_name == "AI" + + +async def test_do_not_store_messages(): + session_id = "test-session-id" + outputs = await run_single_component( + ChatInput, inputs={"should_store_message": True}, run_input="hello", session_id=session_id + ) + assert isinstance(outputs["message"], Message) + assert outputs["message"].text == "hello" + assert outputs["message"].session_id == session_id + + assert len(get_messages(session_id=session_id)) == 1 + + session_id = "test-session-id-another" + outputs = await run_single_component( + ChatInput, inputs={"should_store_message": False}, run_input="hello", session_id=session_id + ) + assert isinstance(outputs["message"], Message) + assert outputs["message"].text == "hello" + assert outputs["message"].session_id == session_id + + assert len(get_messages(session_id=session_id)) == 0 diff --git a/src/backend/tests/integration/components/inputs/test_text_input.py b/src/backend/tests/integration/components/inputs/test_text_input.py new file mode 100644 index 000000000000..3668681392a3 --- /dev/null +++ b/src/backend/tests/integration/components/inputs/test_text_input.py @@ -0,0 +1,18 @@ +from langflow.components.inputs import TextInputComponent +from langflow.schema.message import Message + +from tests.integration.utils import run_single_component + + +async def test_text_input(): + outputs = await run_single_component(TextInputComponent, run_input="sample text", input_type="text") + assert isinstance(outputs["text"], Message) + assert outputs["text"].text == "sample text" + assert outputs["text"].sender is None + assert outputs["text"].sender_name is None + + outputs = await run_single_component(TextInputComponent, run_input="", input_type="text") + assert isinstance(outputs["text"], Message) + assert outputs["text"].text == "" + assert outputs["text"].sender is None + assert outputs["text"].sender_name is None diff --git a/src/backend/tests/integration/components/mock_components.py b/src/backend/tests/integration/components/mock_components.py new file mode 100644 index 000000000000..2bf304304e45 --- /dev/null +++ b/src/backend/tests/integration/components/mock_components.py @@ -0,0 +1,24 @@ +import json + +from langflow.custom import Component +from langflow.inputs import BoolInput, StrInput +from langflow.schema import Data +from langflow.template import Output + + +class TextToData(Component): 
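+ """Test-only helper: converts each string in text_data into a Data object, parsing it as JSON when is_json is set.""" + # Typical wiring in tests: ComponentInputHandle(clazz=TextToData, inputs={"text_data": ["test1", "test2"]}, output_name="from_text")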
+ inputs = [ + StrInput(name="text_data", is_list=True), + BoolInput(name="is_json", info="Parse text_data as json and fill the data object."), + ] + outputs = [ + Output(name="from_text", display_name="From text", method="create_data"), + ] + + def _to_data(self, text: str) -> Data: + if self.is_json: + return Data(data=json.loads(text)) + return Data(text=text) + + def create_data(self) -> list[Data]: + return [self._to_data(t) for t in self.text_data] diff --git a/src/backend/tests/integration/components/output_parsers/__init__.py b/src/backend/tests/integration/components/output_parsers/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/src/backend/tests/integration/components/output_parsers/test_output_parser.py b/src/backend/tests/integration/components/output_parsers/test_output_parser.py new file mode 100644 index 000000000000..57322a7dace2 --- /dev/null +++ b/src/backend/tests/integration/components/output_parsers/test_output_parser.py @@ -0,0 +1,40 @@ +import os + +import pytest +from langflow.components.helpers import OutputParserComponent +from langflow.components.models import OpenAIModelComponent +from langflow.components.prompts import PromptComponent + +from tests.integration.utils import ComponentInputHandle, run_single_component + + +@pytest.mark.api_key_required +async def test_csv_output_parser_openai(): + format_instructions = ComponentInputHandle( + clazz=OutputParserComponent, + inputs={}, + output_name="format_instructions", + ) + output_parser_handle = ComponentInputHandle( + clazz=OutputParserComponent, + inputs={}, + output_name="output_parser", + ) + prompt_handler = ComponentInputHandle( + clazz=PromptComponent, + inputs={ + "template": "List the first five positive integers.\n\n{format_instructions}", + "format_instructions": format_instructions, + }, + output_name="prompt", + ) + + outputs = await run_single_component( + OpenAIModelComponent, + inputs={ + "api_key": os.environ["OPENAI_API_KEY"], + "output_parser": output_parser_handle, + "input_value": prompt_handler, + }, + ) + assert outputs["text_output"] == "1, 2, 3, 4, 5" diff --git a/src/backend/tests/integration/components/outputs/__init__.py b/src/backend/tests/integration/components/outputs/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/src/backend/tests/integration/components/outputs/test_chat_output.py b/src/backend/tests/integration/components/outputs/test_chat_output.py new file mode 100644 index 000000000000..dfe113cc9890 --- /dev/null +++ b/src/backend/tests/integration/components/outputs/test_chat_output.py @@ -0,0 +1,41 @@ +from langflow.components.outputs import ChatOutput +from langflow.memory import get_messages +from langflow.schema.message import Message + +from tests.integration.utils import run_single_component + + +async def test_string(): + outputs = await run_single_component(ChatOutput, inputs={"input_value": "hello"}) + assert isinstance(outputs["message"], Message) + assert outputs["message"].text == "hello" + assert outputs["message"].sender == "Machine" + assert outputs["message"].sender_name == "AI" + + +async def test_message(): + outputs = await run_single_component(ChatOutput, inputs={"input_value": Message(text="hello")}) + assert isinstance(outputs["message"], Message) + assert outputs["message"].text == "hello" + assert outputs["message"].sender == "Machine" + assert outputs["message"].sender_name == "AI" + + +async def test_do_not_store_message(): + session_id = "test-session-id" + outputs = await 
run_single_component( + ChatOutput, inputs={"input_value": Message(text="hello"), "should_store_message": True}, session_id=session_id + ) + assert isinstance(outputs["message"], Message) + assert outputs["message"].text == "hello" + + assert len(get_messages(session_id=session_id)) == 1 + session_id = "test-session-id-another" + + outputs = await run_single_component( + ChatOutput, inputs={"input_value": Message(text="hello"), "should_store_message": False}, session_id=session_id + ) + assert isinstance(outputs["message"], Message) + assert outputs["message"].text == "hello" + + assert len(get_messages(session_id=session_id)) == 0 diff --git a/src/backend/tests/integration/components/outputs/test_text_output.py b/src/backend/tests/integration/components/outputs/test_text_output.py new file mode 100644 index 000000000000..a79977a74304 --- /dev/null +++ b/src/backend/tests/integration/components/outputs/test_text_output.py @@ -0,0 +1,20 @@ +from langflow.components.outputs import TextOutputComponent +from langflow.schema.message import Message + +from tests.integration.utils import run_single_component + + +async def test(): + outputs = await run_single_component(TextOutputComponent, inputs={"input_value": "hello"}) + assert isinstance(outputs["text"], Message) + assert outputs["text"].text == "hello" + assert outputs["text"].sender is None + assert outputs["text"].sender_name is None + + +async def test_message(): + outputs = await run_single_component(TextOutputComponent, inputs={"input_value": Message(text="hello")}) + assert isinstance(outputs["text"], Message) + assert outputs["text"].text == "hello" + assert outputs["text"].sender is None + assert outputs["text"].sender_name is None diff --git a/src/backend/tests/integration/components/prompts/__init__.py b/src/backend/tests/integration/components/prompts/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/src/backend/tests/integration/components/prompts/test_prompt.py b/src/backend/tests/integration/components/prompts/test_prompt.py new file mode 100644 index 000000000000..41c61c93f4bf --- /dev/null +++ b/src/backend/tests/integration/components/prompts/test_prompt.py @@ -0,0 +1,10 @@ +from langflow.components.prompts import PromptComponent +from langflow.schema.message import Message + +from tests.integration.utils import run_single_component + + +async def test(): + outputs = await run_single_component(PromptComponent, inputs={"template": "test {var1}", "var1": "from the var"}) + assert isinstance(outputs["prompt"], Message) + assert outputs["prompt"].text == "test from the var" diff --git a/src/backend/tests/integration/conftest.py b/src/backend/tests/integration/conftest.py new file mode 100644 index 000000000000..d8dcd30aaa26 --- /dev/null +++ b/src/backend/tests/integration/conftest.py @@ -0,0 +1,6 @@ +import pytest + + +@pytest.fixture(autouse=True) +def _start_app(client): + pass diff --git a/src/backend/tests/integration/flows/__init__.py b/src/backend/tests/integration/flows/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/src/backend/tests/integration/flows/test_basic_prompting.py b/src/backend/tests/integration/flows/test_basic_prompting.py new file mode 100644 index 000000000000..9f66e894b648 --- /dev/null +++ b/src/backend/tests/integration/flows/test_basic_prompting.py @@ -0,0 +1,20 @@ +from langflow.components.inputs import ChatInput +from langflow.components.outputs import ChatOutput +from langflow.components.prompts import PromptComponent +from langflow.graph 
import Graph +from langflow.schema.message import Message + +from tests.integration.utils import run_flow + + +async def test_simple_no_llm(): + graph = Graph() + flow_input = graph.add_component(ChatInput()) + flow_output = graph.add_component(ChatOutput()) + component = PromptComponent(template="This is the message: {var1}", var1="") + prompt = graph.add_component(component) + graph.add_component_edge(flow_input, ("message", "var1"), prompt) + graph.add_component_edge(prompt, ("prompt", "input_value"), flow_output) + outputs = await run_flow(graph, run_input="hello!") + assert isinstance(outputs["message"], Message) + assert outputs["message"].text == "This is the message: hello!" diff --git a/src/backend/tests/integration/langflow.py b/src/backend/tests/integration/langflow.py deleted file mode 100644 index 8b83bc1ca3f0..000000000000 --- a/src/backend/tests/integration/langflow.py +++ /dev/null @@ -1,88 +0,0 @@ -from uuid import uuid4 - -import pytest -from fastapi import status -from fastapi.testclient import TestClient - -from langflow.graph.schema import RunOutputs -from langflow.initial_setup.setup import load_starter_projects -from langflow.load import run_flow_from_json - - -@pytest.mark.api_key_required -def test_run_flow_with_caching_success(client: TestClient, starter_project, created_api_key): - flow_id = starter_project["id"] - headers = {"x-api-key": created_api_key.api_key} - payload = { - "input_value": "value1", - "input_type": "text", - "output_type": "text", - "tweaks": {"parameter_name": "value"}, - "stream": False, - } - response = client.post(f"/api/v1/run/{flow_id}", json=payload, headers=headers) - assert response.status_code == status.HTTP_200_OK - data = response.json() - assert "outputs" in data - assert "session_id" in data - - -@pytest.mark.api_key_required -def test_run_flow_with_caching_invalid_flow_id(client: TestClient, created_api_key): - invalid_flow_id = uuid4() - headers = {"x-api-key": created_api_key.api_key} - payload = {"input_value": "", "input_type": "text", "output_type": "text", "tweaks": {}, "stream": False} - response = client.post(f"/api/v1/run/{invalid_flow_id}", json=payload, headers=headers) - assert response.status_code == status.HTTP_404_NOT_FOUND - data = response.json() - assert "detail" in data - assert f"Flow identifier {invalid_flow_id} not found" in data["detail"] - - -@pytest.mark.api_key_required -def test_run_flow_with_caching_invalid_input_format(client: TestClient, starter_project, created_api_key): - flow_id = starter_project["id"] - headers = {"x-api-key": created_api_key.api_key} - payload = {"input_value": {"key": "value"}, "input_type": "text", "output_type": "text", "tweaks": {}} - response = client.post(f"/api/v1/run/{flow_id}", json=payload, headers=headers) - assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY - - -@pytest.mark.api_key_required -def test_run_flow_with_invalid_tweaks(client, starter_project, created_api_key): - headers = {"x-api-key": created_api_key.api_key} - flow_id = starter_project["id"] - payload = { - "input_value": "value1", - "input_type": "text", - "output_type": "text", - "tweaks": {"invalid_tweak": "value"}, - } - response = client.post(f"/api/v1/run/{flow_id}", json=payload, headers=headers) - assert response.status_code == status.HTTP_200_OK - - -@pytest.mark.api_key_required -def test_run_with_inputs_and_outputs(client, starter_project, created_api_key): - headers = {"x-api-key": created_api_key.api_key} - flow_id = starter_project["id"] - payload = { - "input_value": 
"value1", - "input_type": "text", - "output_type": "text", - "tweaks": {"parameter_name": "value"}, - "stream": False, - } - response = client.post(f"/api/v1/run/{flow_id}", json=payload, headers=headers) - assert response.status_code == status.HTTP_200_OK, response.text - - -@pytest.mark.noclient -@pytest.mark.api_key_required -def test_run_flow_from_json_object(): - """Test loading a flow from a json file and applying tweaks""" - _, projects = zip(*load_starter_projects()) - project = [project for project in projects if "Basic Prompting" in project["name"]][0] - results = run_flow_from_json(project, input_value="test", fallback_to_env_vars=True) - assert results is not None - assert all(isinstance(result, RunOutputs) for result in results) diff --git a/src/backend/tests/integration/test_misc.py b/src/backend/tests/integration/test_misc.py new file mode 100644 index 000000000000..28afbd19b317 --- /dev/null +++ b/src/backend/tests/integration/test_misc.py @@ -0,0 +1,86 @@ +from uuid import uuid4 + +import pytest +from fastapi import status +from httpx import AsyncClient +from langflow.graph.schema import RunOutputs +from langflow.initial_setup.setup import load_starter_projects +from langflow.load import run_flow_from_json + + +@pytest.mark.api_key_required +async def test_run_flow_with_caching_success(client: AsyncClient, starter_project, created_api_key): + flow_id = starter_project["id"] + headers = {"x-api-key": created_api_key.api_key} + payload = { + "input_value": "value1", + "input_type": "text", + "output_type": "text", + "tweaks": {"parameter_name": "value"}, + "stream": False, + } + response = await client.post(f"/api/v1/run/{flow_id}", json=payload, headers=headers) + assert response.status_code == status.HTTP_200_OK + data = response.json() + assert "outputs" in data + assert "session_id" in data + + +@pytest.mark.api_key_required +async def test_run_flow_with_caching_invalid_flow_id(client: AsyncClient, created_api_key): + invalid_flow_id = uuid4() + headers = {"x-api-key": created_api_key.api_key} + payload = {"input_value": "", "input_type": "text", "output_type": "text", "tweaks": {}, "stream": False} + response = await client.post(f"/api/v1/run/{invalid_flow_id}", json=payload, headers=headers) + assert response.status_code == status.HTTP_404_NOT_FOUND + data = response.json() + assert "detail" in data + assert f"Flow identifier {invalid_flow_id} not found" in data["detail"] + + +@pytest.mark.api_key_required +async def test_run_flow_with_caching_invalid_input_format(client: AsyncClient, starter_project, created_api_key): + flow_id = starter_project["id"] + headers = {"x-api-key": created_api_key.api_key} + payload = {"input_value": {"key": "value"}, "input_type": "text", "output_type": "text", "tweaks": {}} + response = await client.post(f"/api/v1/run/{flow_id}", json=payload, headers=headers) + assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY + + +@pytest.mark.api_key_required +async def test_run_flow_with_invalid_tweaks(client, starter_project, created_api_key): + headers = {"x-api-key": created_api_key.api_key} + flow_id = starter_project["id"] + payload = { + "input_value": "value1", + "input_type": "text", + "output_type": "text", + "tweaks": {"invalid_tweak": "value"}, + } + response = await client.post(f"/api/v1/run/{flow_id}", json=payload, headers=headers) + assert response.status_code == status.HTTP_200_OK + + +@pytest.mark.api_key_required +async def test_run_with_inputs_and_outputs(client, starter_project, created_api_key): + headers = 
{"x-api-key": created_api_key.api_key} + flow_id = starter_project["id"] + payload = { + "input_value": "value1", + "input_type": "text", + "output_type": "text", + "tweaks": {"parameter_name": "value"}, + "stream": False, + } + response = await client.post(f"/api/v1/run/{flow_id}", json=payload, headers=headers) + assert response.status_code == status.HTTP_200_OK, response.text + + +@pytest.mark.noclient +@pytest.mark.api_key_required +def test_run_flow_from_json_object(): + """Test loading a flow from a json file and applying tweaks.""" + project = next(project for _, project in load_starter_projects() if "Basic Prompting" in project["name"]) + results = run_flow_from_json(project, input_value="test", fallback_to_env_vars=True) + assert results is not None + assert all(isinstance(result, RunOutputs) for result in results) diff --git a/src/backend/tests/integration/utils.py b/src/backend/tests/integration/utils.py index 1389cd082fd9..53b803e312a0 100644 --- a/src/backend/tests/integration/utils.py +++ b/src/backend/tests/integration/utils.py @@ -1,37 +1,44 @@ +import dataclasses import os -from typing import List +import uuid +from typing import Any +import requests from astrapy.admin import parse_api_endpoint +from langflow.api.v1.schemas import InputValueRequest +from langflow.custom import Component +from langflow.custom.eval import eval_custom_component_code from langflow.field_typing import Embeddings +from langflow.graph import Graph +from langflow.processing.process import run_graph_internal -def check_env_vars(*vars): - """ - Check if all specified environment variables are set. +def check_env_vars(*env_vars): + """Check if all specified environment variables are set. Args: - *vars (str): The environment variables to check. + *env_vars (str): The environment variables to check. Returns: bool: True if all environment variables are set, False otherwise. """ - return all(os.getenv(var) for var in vars) + return all(os.getenv(var) for var in env_vars) def valid_nvidia_vectorize_region(api_endpoint: str) -> bool: - """ - Check if the specified region is valid. + """Check if the specified region is valid. Args: - region (str): The region to check. + api_endpoint: The API endpoint to check. Returns: - bool: True if the region is contains hosted nvidia models, False otherwise. + True if the region contains hosted nvidia models, False otherwise. 
""" parsed_endpoint = parse_api_endpoint(api_endpoint) if not parsed_endpoint: - raise ValueError("Invalid ASTRA_DB_API_ENDPOINT") - return parsed_endpoint.region in ["us-east-2"] + msg = "Invalid ASTRA_DB_API_ENDPOINT" + raise ValueError(msg) + return parsed_endpoint.region == "us-east-2" class MockEmbeddings(Embeddings): @@ -43,10 +50,140 @@ def __init__(self): def mock_embedding(text: str): return [len(text) / 2, len(text) / 5, len(text) / 10] - def embed_documents(self, texts: List[str]) -> List[List[float]]: + def embed_documents(self, texts: list[str]) -> list[list[float]]: self.embedded_documents = texts return [self.mock_embedding(text) for text in texts] - def embed_query(self, text: str) -> List[float]: + def embed_query(self, text: str) -> list[float]: self.embedded_query = text return self.mock_embedding(text) + + +@dataclasses.dataclass +class JSONFlow: + json: dict + + def get_components_by_type(self, component_type): + result = [node["id"] for node in self.json["data"]["nodes"] if node["data"]["type"] == component_type] + if not result: + msg = ( + f"Component of type {component_type} not found, " + f"available types: {', '.join({node['data']['type'] for node in self.json['data']['nodes']})}" + ) + raise ValueError(msg) + return result + + def get_component_by_type(self, component_type): + components = self.get_components_by_type(component_type) + if len(components) > 1: + msg = f"Multiple components of type {component_type} found" + raise ValueError(msg) + return components[0] + + def set_value(self, component_id, key, value): + done = False + for node in self.json["data"]["nodes"]: + if node["id"] == component_id: + if key not in node["data"]["node"]["template"]: + msg = f"Component {component_id} does not have input {key}" + raise ValueError(msg) + node["data"]["node"]["template"][key]["value"] = value + node["data"]["node"]["template"][key]["load_from_db"] = False + done = True + break + if not done: + msg = f"Component {component_id} not found" + raise ValueError(msg) + + +def download_flow_from_github(name: str, version: str) -> JSONFlow: + response = requests.get( + f"https://raw.githubusercontent.com/langflow-ai/langflow/v{version}/src/backend/base/langflow/initial_setup/starter_projects/{name}.json", + timeout=10, + ) + response.raise_for_status() + as_json = response.json() + return JSONFlow(json=as_json) + + +def download_component_from_github(module: str, file_name: str, version: str) -> Component: + version_string = f"v{version}" if version != "main" else version + response = requests.get( + f"https://raw.githubusercontent.com/langflow-ai/langflow/{version_string}/src/backend/base/langflow/components/{module}/{file_name}.py", + timeout=10, + ) + response.raise_for_status() + return Component(_code=response.text) + + +async def run_json_flow( + json_flow: JSONFlow, run_input: Any | None = None, session_id: str | None = None +) -> dict[str, Any]: + graph = Graph.from_payload(json_flow.json) + return await run_flow(graph, run_input, session_id) + + +async def run_flow(graph: Graph, run_input: Any | None = None, session_id: str | None = None) -> dict[str, Any]: + graph.prepare() + graph_run_inputs = [InputValueRequest(input_value=run_input, type="chat")] if run_input else [] + + flow_id = str(uuid.uuid4()) + + results, _ = await run_graph_internal(graph, flow_id, session_id=session_id, inputs=graph_run_inputs) + outputs = {} + for r in results: + for out in r.outputs: + outputs |= out.results + return outputs + + +@dataclasses.dataclass +class ComponentInputHandle: 
+ clazz: type + inputs: dict + output_name: str + + +async def run_single_component( + clazz: type, + inputs: dict | None = None, + run_input: Any | None = None, + session_id: str | None = None, + input_type: str | None = "chat", +) -> dict[str, Any]: + user_id = str(uuid.uuid4()) + flow_id = str(uuid.uuid4()) + graph = Graph(user_id=user_id, flow_id=flow_id) + + def _add_component(clazz: type, inputs: dict | None = None) -> str: + raw_inputs = {} + if inputs: + for key, value in inputs.items(): + if not isinstance(value, ComponentInputHandle): + raw_inputs[key] = value + if isinstance(value, Component): + msg = "Component inputs must be wrapped in ComponentInputHandle" + raise TypeError(msg) + component = clazz(**raw_inputs, _user_id=user_id) + component_id = graph.add_component(component) + if inputs: + for input_name, handle in inputs.items(): + if isinstance(handle, ComponentInputHandle): + handle_component_id = _add_component(handle.clazz, handle.inputs) + graph.add_component_edge(handle_component_id, (handle.output_name, input_name), component_id) + return component_id + + component_id = _add_component(clazz, inputs) + graph.prepare() + graph_run_inputs = [InputValueRequest(input_value=run_input, type=input_type)] if run_input else [] + + _, _ = await run_graph_internal( + graph, flow_id, session_id=session_id, inputs=graph_run_inputs, outputs=[component_id] + ) + return graph.get_vertex(component_id).built_object + + +def build_component_instance_for_tests(version: str, module: str, file_name: str, **kwargs): + component = download_component_from_github(module, file_name, version) + cc_class = eval_custom_component_code(component._code) + return cc_class(**kwargs), component._code diff --git a/src/backend/tests/locust/__init__.py b/src/backend/tests/locust/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/src/backend/tests/locust/locustfile.py b/src/backend/tests/locust/locustfile.py index 856e3f6dd33b..f7857e619e99 100644 --- a/src/backend/tests/locust/locustfile.py +++ b/src/backend/tests/locust/locustfile.py @@ -11,8 +11,8 @@ class NameTest(FastHttpUser): wait_time = between(1, 5) - with open("names.txt", "r") as file: - names = [line.strip() for line in file.readlines()] + with Path("names.txt").open(encoding="utf-8") as file: + names = [line.strip() for line in file] headers: dict = {} @@ -28,8 +28,9 @@ def poll_task(self, task_id, sleep_time=1): print(f"Poll Response: {response.js}") if status == "SUCCESS": return response.js.get("result") - elif status in ["FAILURE", "REVOKED"]: - raise ValueError(f"Task failed with status: {status}") + if status in {"FAILURE", "REVOKED"}: + msg = f"Task failed with status: {status}" + raise ValueError(msg) time.sleep(sleep_time) def process(self, name, flow_id, payload): @@ -45,7 +46,8 @@ def process(self, name, flow_id, payload): print(response.js) if response.status_code != 200: response.failure("Process call failed") - raise ValueError("Process call failed") + msg = "Process call failed" + raise ValueError(msg) task_id = response.js.get("id") session_id = response.js.get("session_id") assert task_id, "Inner Task ID not found" @@ -58,13 +60,13 @@ def process(self, name, flow_id, payload): @task def send_name_and_check(self): - name = random.choice(self.names) + name = random.choice(self.names) # noqa: S311 payload1 = { "inputs": {"text": f"Hello, My name is {name}"}, "sync": False, } - result1, session_id = self.process(name, self.flow_id, payload1) + _result1, session_id = self.process(name, self.flow_id, 
payload1) payload2 = { "inputs": {"text": "What is my name? Please, answer like this: Your name is "}, @@ -86,11 +88,9 @@ def on_start(self): a_token = tokens["access_token"] logged_in_headers = {"Authorization": f"Bearer {a_token}"} print("Logged in") - with open( - Path(__file__).parent.parent / "data" / "BasicChatwithPromptandHistory.json", - "r", - ) as f: - json_flow = f.read() + json_flow = (Path(__file__).parent.parent / "data" / "BasicChatwithPromptandHistory.json").read_text( + encoding="utf-8" + ) flow = orjson.loads(json_flow) data = flow["data"] # Create test data diff --git a/src/backend/tests/performance/__init__.py b/src/backend/tests/performance/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/src/backend/tests/performance/test_server_init.py b/src/backend/tests/performance/test_server_init.py new file mode 100644 index 000000000000..630efd17b0e9 --- /dev/null +++ b/src/backend/tests/performance/test_server_init.py @@ -0,0 +1,88 @@ +import asyncio +import os + +import pytest +from langflow.services.deps import get_settings_service + + +@pytest.fixture(autouse=True) +def setup_database_url(tmp_path, monkeypatch): + """Set up a temporary database URL for testing.""" + db_path = tmp_path / "test_performance.db" + original_value = os.environ.get("LANGFLOW_DATABASE_URL") # monkeypatch.delenv returns None, so read the value before deleting it + monkeypatch.delenv("LANGFLOW_DATABASE_URL", raising=False) + test_db_url = f"sqlite:///{db_path}" + monkeypatch.setenv("LANGFLOW_DATABASE_URL", test_db_url) + yield + # Restore original value if it existed + if original_value is not None: + monkeypatch.setenv("LANGFLOW_DATABASE_URL", original_value) + else: + monkeypatch.delenv("LANGFLOW_DATABASE_URL", raising=False) + + +@pytest.mark.benchmark +async def test_initialize_services(): + """Benchmark the initialization of services.""" + from langflow.services.utils import initialize_services + + await asyncio.to_thread(initialize_services, fix_migration=False) + settings_service = await asyncio.to_thread(get_settings_service) + assert "test_performance.db" in settings_service.settings.database_url + + +@pytest.mark.benchmark +async def test_setup_llm_caching(): + """Benchmark LLM caching setup.""" + from langflow.interface.utils import setup_llm_caching + + await asyncio.to_thread(setup_llm_caching) + settings_service = await asyncio.to_thread(get_settings_service) + assert "test_performance.db" in settings_service.settings.database_url + + +@pytest.mark.benchmark +async def test_initialize_super_user(): + """Benchmark super user initialization.""" + from langflow.initial_setup.setup import initialize_super_user_if_needed + from langflow.services.utils import initialize_services + + await asyncio.to_thread(initialize_services, fix_migration=False) + await asyncio.to_thread(initialize_super_user_if_needed) + settings_service = await asyncio.to_thread(get_settings_service) + assert "test_performance.db" in settings_service.settings.database_url + + +@pytest.mark.benchmark +async def test_get_and_cache_all_types_dict(): + """Benchmark get_and_cache_all_types_dict function.""" + from langflow.interface.types import get_and_cache_all_types_dict + + settings_service = await asyncio.to_thread(get_settings_service) + result = await asyncio.to_thread(get_and_cache_all_types_dict, settings_service) + assert result is not None + assert "test_performance.db" in settings_service.settings.database_url + + +@pytest.mark.benchmark +async def test_create_starter_projects(): + """Benchmark creation of starter projects.""" + from langflow.initial_setup.setup import create_or_update_starter_projects + from
langflow.interface.types import get_and_cache_all_types_dict + from langflow.services.utils import initialize_services + + await asyncio.to_thread(initialize_services, fix_migration=False) + settings_service = await asyncio.to_thread(get_settings_service) + types_dict = await get_and_cache_all_types_dict(settings_service) + await asyncio.to_thread(create_or_update_starter_projects, types_dict) + assert "test_performance.db" in settings_service.settings.database_url + + +@pytest.mark.benchmark +async def test_load_flows(): + """Benchmark loading flows from directory.""" + from langflow.initial_setup.setup import load_flows_from_directory + + await asyncio.to_thread(load_flows_from_directory) + settings_service = await asyncio.to_thread(get_settings_service) + assert "test_performance.db" in settings_service.settings.database_url diff --git a/src/backend/tests/test_endpoints.py b/src/backend/tests/test_endpoints.py deleted file mode 100644 index 130abed93d9b..000000000000 --- a/src/backend/tests/test_endpoints.py +++ /dev/null @@ -1,679 +0,0 @@ -import time -from uuid import UUID, uuid4 - -import pytest -from fastapi import status -from fastapi.testclient import TestClient - -from langflow.custom.directory_reader.directory_reader import DirectoryReader -from langflow.services.deps import get_settings_service - - -def run_post(client, flow_id, headers, post_data): - response = client.post( - f"api/v1/process/{flow_id}", - headers=headers, - json=post_data, - ) - assert response.status_code == 200, response.json() - return response.json() - - -# Helper function to poll task status -def poll_task_status(client, headers, href, max_attempts=20, sleep_time=1): - for _ in range(max_attempts): - task_status_response = client.get( - href, - headers=headers, - ) - if task_status_response.status_code == 200 and task_status_response.json()["status"] == "SUCCESS": - return task_status_response.json() - time.sleep(sleep_time) - return None # Return None if task did not complete in time - - -PROMPT_REQUEST = { - "name": "string", - "template": "string", - "frontend_node": { - "template": {}, - "description": "string", - "base_classes": ["string"], - "name": "", - "display_name": "", - "documentation": "", - "custom_fields": {}, - "output_types": [], - "field_formatters": { - "formatters": {"openai_api_key": {}}, - "base_formatters": { - "kwargs": {}, - "optional": {}, - "list": {}, - "dict": {}, - "union": {}, - "multiline": {}, - "show": {}, - "password": {}, - "default": {}, - "headers": {}, - "dict_code_file": {}, - "model_fields": { - "MODEL_DICT": { - "OpenAI": [ - "text-davinci-003", - "text-davinci-002", - "text-curie-001", - "text-babbage-001", - "text-ada-001", - ], - "ChatOpenAI": [ - "gpt-4-turbo-preview", - "gpt-4-0125-preview", - "gpt-4-1106-preview", - "gpt-4-vision-preview", - "gpt-3.5-turbo-0125", - "gpt-3.5-turbo-1106", - ], - "Anthropic": [ - "claude-v1", - "claude-v1-100k", - "claude-instant-v1", - "claude-instant-v1-100k", - "claude-v1.3", - "claude-v1.3-100k", - "claude-v1.2", - "claude-v1.0", - "claude-instant-v1.1", - "claude-instant-v1.1-100k", - "claude-instant-v1.0", - ], - "ChatAnthropic": [ - "claude-v1", - "claude-v1-100k", - "claude-instant-v1", - "claude-instant-v1-100k", - "claude-v1.3", - "claude-v1.3-100k", - "claude-v1.2", - "claude-v1.0", - "claude-instant-v1.1", - "claude-instant-v1.1-100k", - "claude-instant-v1.0", - ], - } - }, - }, - }, - }, -} - - -# def test_process_flow_invalid_api_key(client, flow, monkeypatch): -# # Mock de process_graph_cached -# from 
langflow.api.v1 import endpoints -# from langflow.services.database.models.api_key import crud - -# settings_service = get_settings_service() -# settings_service.auth_settings.AUTO_LOGIN = False - -# async def mock_process_graph_cached(*args, **kwargs): -# return Result(result={}, session_id="session_id_mock") - -# def mock_update_total_uses(*args, **kwargs): -# return created_api_key - -# monkeypatch.setattr(endpoints, "process_graph_cached", mock_process_graph_cached) -# monkeypatch.setattr(crud, "update_total_uses", mock_update_total_uses) - -# headers = {"x-api-key": "invalid_api_key"} - -# post_data = { -# "inputs": {"key": "value"}, -# "tweaks": None, -# "clear_cache": False, -# "session_id": None, -# } - -# response = client.post(f"api/v1/process/{flow.id}", headers=headers, json=post_data) - -# assert response.status_code == 403 -# assert response.json() == {"detail": "Invalid or missing API key"} - - -# def test_process_flow_invalid_id(client, monkeypatch, created_api_key): -# async def mock_process_graph_cached(*args, **kwargs): -# return Result(result={}, session_id="session_id_mock") - -# from langflow.api.v1 import endpoints - -# monkeypatch.setattr(endpoints, "process_graph_cached", mock_process_graph_cached) - -# api_key = created_api_key.api_key -# headers = {"x-api-key": api_key} - -# post_data = { -# "inputs": {"key": "value"}, -# "tweaks": None, -# "clear_cache": False, -# "session_id": None, -# } - -# invalid_id = uuid.uuid4() -# response = client.post(f"api/v1/process/{invalid_id}", headers=headers, json=post_data) - -# assert response.status_code == 404 -# assert f"Flow {invalid_id} not found" in response.json()["detail"] - - -# def test_process_flow_without_autologin(client, flow, monkeypatch, created_api_key): -# # Mock de process_graph_cached -# from langflow.api.v1 import endpoints -# from langflow.services.database.models.api_key import crud - -# settings_service = get_settings_service() -# settings_service.auth_settings.AUTO_LOGIN = False - -# async def mock_process_graph_cached(*args, **kwargs): -# return Result(result={}, session_id="session_id_mock") - -# def mock_process_graph_cached_task(*args, **kwargs): -# return Result(result={}, session_id="session_id_mock") - -# # The task function is ran like this: -# # if not self.use_celery: -# # return None, await task_func(*args, **kwargs) -# # if not hasattr(task_func, "apply"): -# # raise ValueError(f"Task function {task_func} does not have an apply method") -# # task = task_func.apply(args=args, kwargs=kwargs) -# # result = task.get() -# # return task.id, result -# # So we need to mock the task function to return a task object -# # and then mock the task object to return a result -# # maybe a named tuple would be better here -# task = namedtuple("task", ["id", "get"]) -# mock_process_graph_cached_task.apply = lambda *args, **kwargs: task( -# id="task_id_mock", get=lambda: Result(result={}, session_id="session_id_mock") -# ) - -# def mock_update_total_uses(*args, **kwargs): -# return created_api_key - -# monkeypatch.setattr(endpoints, "process_graph_cached", mock_process_graph_cached) -# monkeypatch.setattr(crud, "update_total_uses", mock_update_total_uses) -# monkeypatch.setattr(endpoints, "process_graph_cached_task", mock_process_graph_cached_task) - -# api_key = created_api_key.api_key -# headers = {"x-api-key": api_key} - -# # Dummy POST data -# post_data = { -# "inputs": {"input": "value"}, -# "tweaks": None, -# "clear_cache": False, -# "session_id": None, -# } - -# # Make the request to the FastAPI 
TestClient - -# response = client.post(f"api/v1/process/{flow.id}", headers=headers, json=post_data) - -# # Check the response -# assert response.status_code == 200, response.json() -# assert response.json()["result"] == {}, response.json() -# assert response.json()["session_id"] == "session_id_mock", response.json() - - -# def test_process_flow_fails_autologin_off(client, flow, monkeypatch): -# # Mock de process_graph_cached -# from langflow.api.v1 import endpoints -# from langflow.services.database.models.api_key import crud - -# settings_service = get_settings_service() -# settings_service.auth_settings.AUTO_LOGIN = False - -# async def mock_process_graph_cached(*args, **kwargs): -# return Result(result={}, session_id="session_id_mock") - -# async def mock_update_total_uses(*args, **kwargs): -# return created_api_key - -# monkeypatch.setattr(endpoints, "process_graph_cached", mock_process_graph_cached) -# monkeypatch.setattr(crud, "update_total_uses", mock_update_total_uses) - -# headers = {"x-api-key": "api_key"} - -# # Dummy POST data -# post_data = { -# "inputs": {"key": "value"}, -# "tweaks": None, -# "clear_cache": False, -# "session_id": None, -# } - -# # Make the request to the FastAPI TestClient - -# response = client.post(f"api/v1/process/{flow.id}", headers=headers, json=post_data) - -# # Check the response -# assert response.status_code == 403, response.json() -# assert response.json() == {"detail": "Invalid or missing API key"} - - -def test_get_all(client: TestClient, logged_in_headers): - response = client.get("api/v1/all", headers=logged_in_headers) - assert response.status_code == 200 - settings = get_settings_service().settings - dir_reader = DirectoryReader(settings.components_path[0]) - files = dir_reader.get_files() - # json_response is a dict of dicts - all_names = [component_name for _, components in response.json().items() for component_name in components] - json_response = response.json() - # We need to test the custom nodes - assert len(all_names) <= len( - files - ) # Less or equal because we might have some files that don't have the dependencies installed - assert "ChatInput" in json_response["inputs"] - assert "Prompt" in json_response["prompts"] - assert "ChatOutput" in json_response["outputs"] - - -def test_post_validate_code(client: TestClient): - # Test case with a valid import and function - code1 = """ -import math - -def square(x): - return x ** 2 -""" - response1 = client.post("api/v1/validate/code", json={"code": code1}) - assert response1.status_code == 200 - assert response1.json() == {"imports": {"errors": []}, "function": {"errors": []}} - - # Test case with an invalid import and valid function - code2 = """ -import non_existent_module - -def square(x): - return x ** 2 -""" - response2 = client.post("api/v1/validate/code", json={"code": code2}) - assert response2.status_code == 200 - assert response2.json() == { - "imports": {"errors": ["No module named 'non_existent_module'"]}, - "function": {"errors": []}, - } - - # Test case with a valid import and invalid function syntax - code3 = """ -import math - -def square(x) - return x ** 2 -""" - response3 = client.post("api/v1/validate/code", json={"code": code3}) - assert response3.status_code == 200 - assert response3.json() == { - "imports": {"errors": []}, - "function": {"errors": ["expected ':' (, line 4)"]}, - } - - # Test case with invalid JSON payload - response4 = client.post("api/v1/validate/code", json={"invalid_key": code1}) - assert response4.status_code == 422 - - # Test case with an 
empty code string - response5 = client.post("api/v1/validate/code", json={"code": ""}) - assert response5.status_code == 200 - assert response5.json() == {"imports": {"errors": []}, "function": {"errors": []}} - - # Test case with a syntax error in the code - code6 = """ -import math - -def square(x) - return x ** 2 -""" - response6 = client.post("api/v1/validate/code", json={"code": code6}) - assert response6.status_code == 200 - assert response6.json() == { - "imports": {"errors": []}, - "function": {"errors": ["expected ':' (, line 4)"]}, - } - - -VALID_PROMPT = """ -I want you to act as a naming consultant for new companies. - -Here are some examples of good company names: - -- search engine, Google -- social media, Facebook -- video sharing, YouTube - -The name should be short, catchy and easy to remember. - -What is a good name for a company that makes {product}? -""" - -INVALID_PROMPT = "This is an invalid prompt without any input variable." - - -def test_valid_prompt(client: TestClient): - PROMPT_REQUEST["template"] = VALID_PROMPT - response = client.post("api/v1/validate/prompt", json=PROMPT_REQUEST) - assert response.status_code == 200 - assert response.json()["input_variables"] == ["product"] - - -def test_invalid_prompt(client: TestClient): - PROMPT_REQUEST["template"] = INVALID_PROMPT - response = client.post( - "api/v1/validate/prompt", - json=PROMPT_REQUEST, - ) - assert response.status_code == 200 - assert response.json()["input_variables"] == [] - - -@pytest.mark.parametrize( - "prompt,expected_input_variables", - [ - ("{color} is my favorite color.", ["color"]), - ("The weather is {weather} today.", ["weather"]), - ("This prompt has no variables.", []), - ("{a}, {b}, and {c} are variables.", ["a", "b", "c"]), - ], -) -def test_various_prompts(client, prompt, expected_input_variables): - PROMPT_REQUEST["template"] = prompt - response = client.post("api/v1/validate/prompt", json=PROMPT_REQUEST) - assert response.status_code == 200 - assert response.json()["input_variables"] == expected_input_variables - - -def test_get_vertices_flow_not_found(client, logged_in_headers): - uuid = uuid4() - response = client.post(f"/api/v1/build/{uuid}/vertices", headers=logged_in_headers) - assert response.status_code == 500 - - -def test_get_vertices(client, added_flow_with_prompt_and_history, logged_in_headers): - flow_id = added_flow_with_prompt_and_history["id"] - response = client.post(f"/api/v1/build/{flow_id}/vertices", headers=logged_in_headers) - assert response.status_code == 200 - assert "ids" in response.json() - # The response should contain the list in this order - # ['ConversationBufferMemory-Lu2Nb', 'PromptTemplate-5Q0W8', 'ChatOpenAI-vy7fV', 'LLMChain-UjBh1'] - # The important part is before the - (ConversationBufferMemory, PromptTemplate, ChatOpenAI, LLMChain) - ids = [_id.split("-")[0] for _id in response.json()["ids"]] - - assert set(ids) == { - "ChatOpenAI", - "PromptTemplate", - "ConversationBufferMemory", - } - - -def test_build_vertex_invalid_flow_id(client, logged_in_headers): - uuid = uuid4() - response = client.post(f"/api/v1/build/{uuid}/vertices/vertex_id", headers=logged_in_headers) - assert response.status_code == 500 - - -def test_build_vertex_invalid_vertex_id(client, added_flow_with_prompt_and_history, logged_in_headers): - flow_id = added_flow_with_prompt_and_history["id"] - response = client.post(f"/api/v1/build/{flow_id}/vertices/invalid_vertex_id", headers=logged_in_headers) - assert response.status_code == 500 - - -@pytest.mark.api_key_required -def 
test_successful_run_no_payload(client, simple_api_test, created_api_key): - headers = {"x-api-key": created_api_key.api_key} - flow_id = simple_api_test["id"] - response = client.post(f"/api/v1/run/{flow_id}", headers=headers) - assert response.status_code == status.HTTP_200_OK, response.text - # Add more assertions here to validate the response content - json_response = response.json() - assert "session_id" in json_response - assert "outputs" in json_response - outer_outputs = json_response["outputs"] - assert len(outer_outputs) == 1 - outputs_dict = outer_outputs[0] - assert len(outputs_dict) == 2 - assert "inputs" in outputs_dict - assert "outputs" in outputs_dict - assert outputs_dict.get("inputs") == {"input_value": ""} - assert isinstance(outputs_dict.get("outputs"), list) - assert len(outputs_dict.get("outputs")) == 1 - ids = [output.get("component_id") for output in outputs_dict.get("outputs")] - assert all(["ChatOutput" in _id for _id in ids]) - display_names = [output.get("component_display_name") for output in outputs_dict.get("outputs")] - assert all([name in display_names for name in ["Chat Output"]]) - output_results_has_results = all("results" in output.get("results") for output in outputs_dict.get("outputs")) - inner_results = [output.get("results") for output in outputs_dict.get("outputs")] - - assert all([result is not None for result in inner_results]), (outputs_dict, output_results_has_results) - - -def test_successful_run_with_output_type_text(client, simple_api_test, created_api_key): - headers = {"x-api-key": created_api_key.api_key} - flow_id = simple_api_test["id"] - payload = { - "output_type": "text", - } - response = client.post(f"/api/v1/run/{flow_id}", headers=headers, json=payload) - assert response.status_code == status.HTTP_200_OK, response.text - # Add more assertions here to validate the response content - json_response = response.json() - assert "session_id" in json_response - assert "outputs" in json_response - outer_outputs = json_response["outputs"] - assert len(outer_outputs) == 1 - outputs_dict = outer_outputs[0] - assert len(outputs_dict) == 2 - assert "inputs" in outputs_dict - assert "outputs" in outputs_dict - assert outputs_dict.get("inputs") == {"input_value": ""} - assert isinstance(outputs_dict.get("outputs"), list) - assert len(outputs_dict.get("outputs")) == 1 - ids = [output.get("component_id") for output in outputs_dict.get("outputs")] - assert all(["ChatOutput" in _id for _id in ids]), ids - display_names = [output.get("component_display_name") for output in outputs_dict.get("outputs")] - assert all([name in display_names for name in ["Chat Output"]]), display_names - inner_results = [output.get("results") for output in outputs_dict.get("outputs")] - expected_keys = ["message"] - assert all([key in result for result in inner_results for key in expected_keys]), outputs_dict - - -def test_successful_run_with_output_type_any(client, simple_api_test, created_api_key): - # This one should have both the ChatOutput and TextOutput components - headers = {"x-api-key": created_api_key.api_key} - flow_id = simple_api_test["id"] - payload = { - "output_type": "any", - } - response = client.post(f"/api/v1/run/{flow_id}", headers=headers, json=payload) - assert response.status_code == status.HTTP_200_OK, response.text - # Add more assertions here to validate the response content - json_response = response.json() - assert "session_id" in json_response - assert "outputs" in json_response - outer_outputs = json_response["outputs"] - assert 
len(outer_outputs) == 1 - outputs_dict = outer_outputs[0] - assert len(outputs_dict) == 2 - assert "inputs" in outputs_dict - assert "outputs" in outputs_dict - assert outputs_dict.get("inputs") == {"input_value": ""} - assert isinstance(outputs_dict.get("outputs"), list) - assert len(outputs_dict.get("outputs")) == 1 - ids = [output.get("component_id") for output in outputs_dict.get("outputs")] - assert all(["ChatOutput" in _id or "TextOutput" in _id for _id in ids]), ids - display_names = [output.get("component_display_name") for output in outputs_dict.get("outputs")] - assert all([name in display_names for name in ["Chat Output"]]), display_names - inner_results = [output.get("results") for output in outputs_dict.get("outputs")] - expected_keys = ["message"] - assert all([key in result for result in inner_results for key in expected_keys]), outputs_dict - - -def test_successful_run_with_output_type_debug(client, simple_api_test, created_api_key): - # This one should return outputs for all components - # Let's just check the amount of outputs(there should be 7) - headers = {"x-api-key": created_api_key.api_key} - flow_id = simple_api_test["id"] - payload = { - "output_type": "debug", - } - response = client.post(f"/api/v1/run/{flow_id}", headers=headers, json=payload) - assert response.status_code == status.HTTP_200_OK, response.text - # Add more assertions here to validate the response content - json_response = response.json() - assert "session_id" in json_response - assert "outputs" in json_response - outer_outputs = json_response["outputs"] - assert len(outer_outputs) == 1 - outputs_dict = outer_outputs[0] - assert len(outputs_dict) == 2 - assert "inputs" in outputs_dict - assert "outputs" in outputs_dict - assert outputs_dict.get("inputs") == {"input_value": ""} - assert isinstance(outputs_dict.get("outputs"), list) - assert len(outputs_dict.get("outputs")) == 3 - - -def test_successful_run_with_input_type_text(client, simple_api_test, created_api_key): - headers = {"x-api-key": created_api_key.api_key} - flow_id = simple_api_test["id"] - payload = { - "input_type": "text", - "output_type": "debug", - "input_value": "value1", - } - response = client.post(f"/api/v1/run/{flow_id}", headers=headers, json=payload) - assert response.status_code == status.HTTP_200_OK, response.text - # Add more assertions here to validate the response content - json_response = response.json() - assert "session_id" in json_response - assert "outputs" in json_response - outer_outputs = json_response["outputs"] - assert len(outer_outputs) == 1 - outputs_dict = outer_outputs[0] - assert len(outputs_dict) == 2 - assert "inputs" in outputs_dict - assert "outputs" in outputs_dict - assert outputs_dict.get("inputs") == {"input_value": "value1"} - assert isinstance(outputs_dict.get("outputs"), list) - assert len(outputs_dict.get("outputs")) == 3 - # Now we get all components that contain TextInput in the component_id - text_input_outputs = [output for output in outputs_dict.get("outputs") if "TextInput" in output.get("component_id")] - assert len(text_input_outputs) == 1 - # Now we check if the input_value is correct - # We get text key twice because the output is now a Message - assert all( - [output.get("results").get("text").get("text") == "value1" for output in text_input_outputs] - ), text_input_outputs - - -def test_successful_run_with_input_type_chat(client, simple_api_test, created_api_key): - headers = {"x-api-key": created_api_key.api_key} - flow_id = simple_api_test["id"] - payload = { - "input_type": 
"chat", - "output_type": "debug", - "input_value": "value1", - } - response = client.post(f"/api/v1/run/{flow_id}", headers=headers, json=payload) - assert response.status_code == status.HTTP_200_OK, response.text - # Add more assertions here to validate the response content - json_response = response.json() - assert "session_id" in json_response - assert "outputs" in json_response - outer_outputs = json_response["outputs"] - assert len(outer_outputs) == 1 - outputs_dict = outer_outputs[0] - assert len(outputs_dict) == 2 - assert "inputs" in outputs_dict - assert "outputs" in outputs_dict - assert outputs_dict.get("inputs") == {"input_value": "value1"} - assert isinstance(outputs_dict.get("outputs"), list) - assert len(outputs_dict.get("outputs")) == 3 - # Now we get all components that contain TextInput in the component_id - chat_input_outputs = [output for output in outputs_dict.get("outputs") if "ChatInput" in output.get("component_id")] - assert len(chat_input_outputs) == 1 - # Now we check if the input_value is correct - assert all( - [output.get("results").get("message").get("text") == "value1" for output in chat_input_outputs] - ), chat_input_outputs - - -def test_invalid_run_with_input_type_chat(client, simple_api_test, created_api_key): - headers = {"x-api-key": created_api_key.api_key} - flow_id = simple_api_test["id"] - payload = { - "input_type": "chat", - "output_type": "debug", - "input_value": "value1", - "tweaks": {"Chat Input": {"input_value": "value2"}}, - } - response = client.post(f"/api/v1/run/{flow_id}", headers=headers, json=payload) - assert response.status_code == status.HTTP_400_BAD_REQUEST, response.text - assert "If you pass an input_value to the chat input, you cannot pass a tweak with the same name." in response.text - - -def test_successful_run_with_input_type_any(client, simple_api_test, created_api_key): - headers = {"x-api-key": created_api_key.api_key} - flow_id = simple_api_test["id"] - payload = { - "input_type": "any", - "output_type": "debug", - "input_value": "value1", - } - response = client.post(f"/api/v1/run/{flow_id}", headers=headers, json=payload) - assert response.status_code == status.HTTP_200_OK, response.text - # Add more assertions here to validate the response content - json_response = response.json() - assert "session_id" in json_response - assert "outputs" in json_response - outer_outputs = json_response["outputs"] - assert len(outer_outputs) == 1 - outputs_dict = outer_outputs[0] - assert len(outputs_dict) == 2 - assert "inputs" in outputs_dict - assert "outputs" in outputs_dict - assert outputs_dict.get("inputs") == {"input_value": "value1"} - assert isinstance(outputs_dict.get("outputs"), list) - assert len(outputs_dict.get("outputs")) == 3 - # Now we get all components that contain TextInput or ChatInput in the component_id - any_input_outputs = [ - output - for output in outputs_dict.get("outputs") - if "TextInput" in output.get("component_id") or "ChatInput" in output.get("component_id") - ] - assert len(any_input_outputs) == 2 - # Now we check if the input_value is correct - all_result_dicts = [output.get("results") for output in any_input_outputs] - all_message_or_text_dicts = [ - result_dict.get("message", result_dict.get("text")) for result_dict in all_result_dicts - ] - assert all( - [message_or_text_dict.get("text") == "value1" for message_or_text_dict in all_message_or_text_dicts] - ), any_input_outputs - - -def test_invalid_flow_id(client, created_api_key): - headers = {"x-api-key": created_api_key.api_key} - flow_id = 
"invalid-flow-id" - response = client.post(f"/api/v1/run/{flow_id}", headers=headers) - assert response.status_code == status.HTTP_404_NOT_FOUND, response.text - headers = {"x-api-key": created_api_key.api_key} - flow_id = UUID(int=0) - response = client.post(f"/api/v1/run/{flow_id}", headers=headers) - assert response.status_code == status.HTTP_404_NOT_FOUND, response.text - # Check if the error detail is as expected - - -def test_starter_projects(client, created_api_key): - headers = {"x-api-key": created_api_key.api_key} - response = client.get("/api/v1/starter-projects/", headers=headers) - assert response.status_code == status.HTTP_200_OK, response.text diff --git a/src/backend/tests/test_messages_endpoints.py b/src/backend/tests/test_messages_endpoints.py deleted file mode 100644 index ee4021784c60..000000000000 --- a/src/backend/tests/test_messages_endpoints.py +++ /dev/null @@ -1,76 +0,0 @@ -from uuid import UUID - -import pytest -from fastapi.testclient import TestClient - -from langflow.memory import add_messagetables - -# Assuming you have these imports available -from langflow.services.database.models.message import MessageCreate, MessageRead, MessageUpdate -from langflow.services.database.models.message.model import MessageTable -from langflow.services.deps import session_scope - - -@pytest.fixture() -def created_message(): - with session_scope() as session: - message = MessageCreate(text="Test message", sender="User", sender_name="User", session_id="session_id") - messagetable = MessageTable.model_validate(message, from_attributes=True) - messagetables = add_messagetables([messagetable], session) - message_read = MessageRead.model_validate(messagetables[0], from_attributes=True) - return message_read - - -@pytest.fixture() -def created_messages(session): - with session_scope() as session: - messages = [ - MessageCreate(text="Test message 1", sender="User", sender_name="User", session_id="session_id2"), - MessageCreate(text="Test message 2", sender="User", sender_name="User", session_id="session_id2"), - MessageCreate(text="Test message 3", sender="User", sender_name="User", session_id="session_id2"), - ] - messagetables = [MessageTable.model_validate(message, from_attributes=True) for message in messages] - message_list = add_messagetables(messagetables, session) - - return message_list - - -def test_delete_messages(client: TestClient, created_messages, logged_in_headers): - response = client.request( - "DELETE", "api/v1/monitor/messages", json=[str(msg.id) for msg in created_messages], headers=logged_in_headers - ) - assert response.status_code == 204, response.text - assert response.reason_phrase == "No Content" - - -def test_update_message(client: TestClient, logged_in_headers, created_message): - message_id = created_message.id - message_update = MessageUpdate(text="Updated content") - response = client.put( - f"api/v1/monitor/messages/{message_id}", json=message_update.model_dump(), headers=logged_in_headers - ) - assert response.status_code == 200, response.text - updated_message = MessageRead(**response.json()) - assert updated_message.text == "Updated content" - - -def test_update_message_not_found(client: TestClient, logged_in_headers): - non_existent_id = UUID("00000000-0000-0000-0000-000000000000") - message_update = MessageUpdate(text="Updated content") - response = client.put( - f"api/v1/monitor/messages/{non_existent_id}", json=message_update.model_dump(), headers=logged_in_headers - ) - assert response.status_code == 404, response.text - assert 
response.json()["detail"] == "Message not found" - - -def test_delete_messages_session(client: TestClient, created_messages, logged_in_headers): - session_id = "session_id2" - response = client.delete(f"api/v1/monitor/messages/session/{session_id}", headers=logged_in_headers) - assert response.status_code == 204 - assert response.reason_phrase == "No Content" - - assert len(created_messages) == 3 - response = client.get("api/v1/monitor/messages", headers=logged_in_headers) - assert response.status_code == 200 - assert len(response.json()) == 0 diff --git a/src/backend/tests/test_schema.py b/src/backend/tests/test_schema.py deleted file mode 100644 index 75c2002d615e..000000000000 --- a/src/backend/tests/test_schema.py +++ /dev/null @@ -1,131 +0,0 @@ -from typing import Sequence, Union - -import pytest -from pydantic import ValidationError - -from langflow.template import Input, Output -from langflow.template.field.base import UNDEFINED -from langflow.type_extraction.type_extraction import post_process_type - - -@pytest.fixture(name="client", autouse=True) -def client_fixture(): - pass - - -class TestInput: - def test_field_type_str(self): - input_obj = Input(field_type="str") - assert input_obj.field_type == "str" - - def test_field_type_type(self): - input_obj = Input(field_type=int) - assert input_obj.field_type == "int" - - def test_invalid_field_type(self): - with pytest.raises(ValidationError): - Input(field_type=123) - - def test_serialize_field_type(self): - input_obj = Input(field_type="str") - assert input_obj.serialize_field_type("str", None) == "str" - - def test_validate_type_string(self): - input_obj = Input(field_type="str") - assert input_obj.field_type == "str" - - def test_validate_type_class(self): - input_obj = Input(field_type=int) - assert input_obj.field_type == "int" - - def test_post_process_type_function(self): - assert post_process_type(int) == [int] - assert post_process_type(list[int]) == [int] - assert post_process_type(Union[int, str]) == [int, str] - assert post_process_type(Union[int, Sequence[str]]) == [int, str] - assert post_process_type(Union[int, Sequence[int]]) == [int] - - def test_input_to_dict(self): - input_obj = Input(field_type="str") - assert input_obj.to_dict() == { - "type": "str", - "required": False, - "placeholder": "", - "list": False, - "show": True, - "multiline": False, - "fileTypes": [], - "file_path": "", - "password": False, - "advanced": False, - "title_case": False, - "dynamic": False, - "info": "", - "input_types": ["Text"], - "load_from_db": False, - } - - -class TestOutput: - def test_output_default(self): - output_obj = Output(name="test_output") - assert output_obj.name == "test_output" - assert output_obj.value == UNDEFINED - assert output_obj.cache is True - - def test_output_add_types(self): - output_obj = Output(name="test_output") - output_obj.add_types(["str", "int"]) - assert output_obj.types == ["str", "int"] - - def test_output_set_selected(self): - output_obj = Output(name="test_output", types=["str", "int"]) - output_obj.set_selected() - assert output_obj.selected == "str" - - def test_output_to_dict(self): - output_obj = Output(name="test_output") - assert output_obj.to_dict() == { - "types": [], - "name": "test_output", - "display_name": "test_output", - "cache": True, - "value": "__UNDEFINED__", - } - - def test_output_validate_display_name(self): - output_obj = Output(name="test_output") - assert output_obj.display_name == "test_output" - - def test_output_validate_model(self): - output_obj = 
Output(name="test_output", value="__UNDEFINED__") - assert output_obj.validate_model() == output_obj - - -class TestPostProcessType: - def test_int_type(self): - assert post_process_type(int) == [int] - - def test_list_int_type(self): - assert post_process_type(list[int]) == [int] - - def test_union_type(self): - assert post_process_type(Union[int, str]) == [int, str] - - def test_custom_type(self): - class CustomType: - pass - - assert post_process_type(CustomType) == [CustomType] - - def test_list_custom_type(self): - class CustomType: - pass - - assert post_process_type(list[CustomType]) == [CustomType] - - def test_union_custom_type(self): - class CustomType: - pass - - assert set(post_process_type(Union[CustomType, int])) == {CustomType, int} diff --git a/src/backend/tests/test_user.py b/src/backend/tests/test_user.py deleted file mode 100644 index 86d4b18650f6..000000000000 --- a/src/backend/tests/test_user.py +++ /dev/null @@ -1,232 +0,0 @@ -from datetime import datetime - -import pytest - -from langflow.services.auth.utils import create_super_user, get_password_hash -from langflow.services.database.models.user import UserUpdate -from langflow.services.database.models.user.model import User -from langflow.services.database.utils import session_getter -from langflow.services.deps import get_db_service, get_settings_service - - -@pytest.fixture -def super_user(client): - settings_manager = get_settings_service() - auth_settings = settings_manager.auth_settings - with session_getter(get_db_service()) as session: - return create_super_user( - db=session, - username=auth_settings.SUPERUSER, - password=auth_settings.SUPERUSER_PASSWORD, - ) - - -@pytest.fixture -def super_user_headers(client, super_user): - settings_service = get_settings_service() - auth_settings = settings_service.auth_settings - login_data = { - "username": auth_settings.SUPERUSER, - "password": auth_settings.SUPERUSER_PASSWORD, - } - response = client.post("/api/v1/login", data=login_data) - assert response.status_code == 200 - tokens = response.json() - a_token = tokens["access_token"] - return {"Authorization": f"Bearer {a_token}"} - - -@pytest.fixture -def deactivated_user(): - with session_getter(get_db_service()) as session: - user = User( - username="deactivateduser", - password=get_password_hash("testpassword"), - is_active=False, - is_superuser=False, - last_login_at=datetime.now(), - ) - session.add(user) - session.commit() - session.refresh(user) - return user - - -def test_user_waiting_for_approval( - client, -): - # Create a user that is not active and has never logged in - with session_getter(get_db_service()) as session: - user = User( - username="waitingforapproval", - password=get_password_hash("testpassword"), - is_active=False, - last_login_at=None, - ) - session.add(user) - session.commit() - - login_data = {"username": "waitingforapproval", "password": "testpassword"} - response = client.post("/api/v1/login", data=login_data) - assert response.status_code == 400 - assert response.json()["detail"] == "Waiting for approval" - - -def test_deactivated_user_cannot_login(client, deactivated_user): - login_data = {"username": deactivated_user.username, "password": "testpassword"} - response = client.post("/api/v1/login", data=login_data) - assert response.status_code == 401, response.json() - assert response.json()["detail"] == "Inactive user", response.text - - -def test_deactivated_user_cannot_access(client, deactivated_user, logged_in_headers): - # Assuming the headers for deactivated_user - response = 
client.get("/api/v1/users", headers=logged_in_headers) - assert response.status_code == 403, response.json() - assert response.json()["detail"] == "The user doesn't have enough privileges", response.text - - -def test_data_consistency_after_update(client, active_user, logged_in_headers, super_user_headers): - user_id = active_user.id - update_data = UserUpdate(is_active=False) - - response = client.patch(f"/api/v1/users/{user_id}", json=update_data.model_dump(), headers=super_user_headers) - assert response.status_code == 200, response.json() - - # Fetch the updated user from the database - response = client.get("/api/v1/users/whoami", headers=logged_in_headers) - assert response.status_code == 401, response.json() - assert response.json()["detail"] == "User not found or is inactive." - - -def test_data_consistency_after_delete(client, test_user, super_user_headers): - user_id = test_user.get("id") - response = client.delete(f"/api/v1/users/{user_id}", headers=super_user_headers) - assert response.status_code == 200, response.json() - - # Attempt to fetch the deleted user from the database - response = client.get("/api/v1/users", headers=super_user_headers) - assert response.status_code == 200 - assert all(user["id"] != user_id for user in response.json()["users"]) - - -def test_inactive_user(client): - # Create a user that is not active and has a last_login_at value - with session_getter(get_db_service()) as session: - user = User( - username="inactiveuser", - password=get_password_hash("testpassword"), - is_active=False, - last_login_at=datetime(2023, 1, 1, 0, 0, 0), - ) - session.add(user) - session.commit() - - login_data = {"username": "inactiveuser", "password": "testpassword"} - response = client.post("/api/v1/login", data=login_data) - assert response.status_code == 401 - assert response.json()["detail"] == "Inactive user" - - -def test_add_user(client, test_user): - assert test_user["username"] == "testuser" - - -# This is not used in the Frontend at the moment -# def test_read_current_user(client: TestClient, active_user): -# # First we need to login to get the access token -# login_data = {"username": "testuser", "password": "testpassword"} -# response = client.post("/api/v1/login", data=login_data) -# assert response.status_code == 200 - -# headers = {"Authorization": f"Bearer {response.json()['access_token']}"} - -# response = client.get("/api/v1/user", headers=headers) -# assert response.status_code == 200, response.json() -# assert response.json()["username"] == "testuser" - - -def test_read_all_users(client, super_user_headers): - response = client.get("/api/v1/users", headers=super_user_headers) - assert response.status_code == 200, response.json() - assert isinstance(response.json()["users"], list) - - -def test_normal_user_cant_read_all_users(client, logged_in_headers): - response = client.get("/api/v1/users", headers=logged_in_headers) - assert response.status_code == 403, response.json() - assert response.json() == {"detail": "The user doesn't have enough privileges"} - - -def test_patch_user(client, active_user, logged_in_headers): - user_id = active_user.id - update_data = UserUpdate( - username="newname", - ) - - response = client.patch(f"/api/v1/users/{user_id}", json=update_data.model_dump(), headers=logged_in_headers) - assert response.status_code == 200, response.json() - update_data = UserUpdate( - profile_image="new_image", - ) - - response = client.patch(f"/api/v1/users/{user_id}", json=update_data.model_dump(), headers=logged_in_headers) - assert 
response.status_code == 200, response.json() - - -def test_patch_reset_password(client, active_user, logged_in_headers): - user_id = active_user.id - update_data = UserUpdate( - password="newpassword", - ) - - response = client.patch( - f"/api/v1/users/{user_id}/reset-password", - json=update_data.model_dump(), - headers=logged_in_headers, - ) - assert response.status_code == 200, response.json() - # Now we need to test if the new password works - login_data = {"username": active_user.username, "password": "newpassword"} - response = client.post("/api/v1/login", data=login_data) - assert response.status_code == 200 - - -def test_patch_user_wrong_id(client, active_user, logged_in_headers): - user_id = "wrong_id" - update_data = UserUpdate( - username="newname", - ) - - response = client.patch(f"/api/v1/users/{user_id}", json=update_data.model_dump(), headers=logged_in_headers) - assert response.status_code == 422, response.json() - json_response = response.json() - detail = json_response["detail"] - error = detail[0] - assert error["loc"] == ["path", "user_id"] - assert error["type"] == "uuid_parsing" - - -def test_delete_user(client, test_user, super_user_headers): - user_id = test_user["id"] - response = client.delete(f"/api/v1/users/{user_id}", headers=super_user_headers) - assert response.status_code == 200 - assert response.json() == {"detail": "User deleted"} - - -def test_delete_user_wrong_id(client, test_user, super_user_headers): - user_id = "wrong_id" - response = client.delete(f"/api/v1/users/{user_id}", headers=super_user_headers) - assert response.status_code == 422 - json_response = response.json() - detail = json_response["detail"] - error = detail[0] - assert error["loc"] == ["path", "user_id"] - assert error["type"] == "uuid_parsing" - - -def test_normal_user_cant_delete_user(client, test_user, logged_in_headers): - user_id = test_user["id"] - response = client.delete(f"/api/v1/users/{user_id}", headers=logged_in_headers) - assert response.status_code == 403 - assert response.json() == {"detail": "The user doesn't have enough privileges"} diff --git a/src/backend/tests/test_webhook.py b/src/backend/tests/test_webhook.py deleted file mode 100644 index ad1286ac3537..000000000000 --- a/src/backend/tests/test_webhook.py +++ /dev/null @@ -1,59 +0,0 @@ -import tempfile -from pathlib import Path - -import pytest - - -@pytest.fixture(autouse=True) -def check_openai_api_key_in_environment_variables(): - pass - - -def test_webhook_endpoint(client, added_webhook_test): - # The test is as follows: - # 1. The flow when run will get a "path" from the payload and save a file with the path as the name. - # We will create a temporary file path and send it to the webhook endpoint, then check if the file exists. - # 2. we will delete the file, then send an invalid payload to the webhook endpoint and check if the file exists. 
- endpoint_name = added_webhook_test["endpoint_name"] - endpoint = f"api/v1/webhook/{endpoint_name}" - # Create a temporary file - with tempfile.TemporaryDirectory() as tmp: - file_path = Path(tmp) / "test_file.txt" - - payload = {"path": str(file_path)} - - response = client.post(endpoint, json=payload) - assert response.status_code == 202 - assert file_path.exists() - - assert not file_path.exists() - - # Send an invalid payload - payload = {"invalid_key": "invalid_value"} - response = client.post(endpoint, json=payload) - assert response.status_code == 202 - assert not file_path.exists() - - -def test_webhook_flow_on_run_endpoint(client, added_webhook_test, created_api_key): - endpoint_name = added_webhook_test["endpoint_name"] - endpoint = f"api/v1/run/{endpoint_name}?stream=false" - # Just test that "Random Payload" returns 202 - # returns 202 - payload = { - "output_type": "any", - } - response = client.post(endpoint, headers={"x-api-key": created_api_key.api_key}, json=payload) - assert response.status_code == 200, response.json() - - -def test_webhook_with_random_payload(client, added_webhook_test): - endpoint_name = added_webhook_test["endpoint_name"] - endpoint = f"api/v1/webhook/{endpoint_name}" - # Just test that "Random Payload" returns 202 - # returns 202 - response = client.post( - endpoint, - json="Random Payload", - ) - assert response.status_code == 202 diff --git a/src/backend/tests/unit/api/__init__.py b/src/backend/tests/unit/api/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/src/backend/tests/unit/api/test_api_utils.py b/src/backend/tests/unit/api/test_api_utils.py index b21bc25d2b55..f87befdaf4f6 100644 --- a/src/backend/tests/unit/api/test_api_utils.py +++ b/src/backend/tests/unit/api/test_api_utils.py @@ -1,5 +1,6 @@ -from langflow.api.utils import get_suggestion_message from unittest.mock import patch + +from langflow.api.utils import get_suggestion_message from langflow.services.database.models.flow.utils import get_outdated_components from langflow.utils.version import get_version_info @@ -16,7 +17,10 @@ def test_get_suggestion_message(): # Test case 3: Multiple outdated components outdated_components = ["component1", "component2", "component3"] - expected_message = "The flow contains 3 outdated components. We recommend updating the following components: component1, component2, component3." + expected_message = ( + "The flow contains 3 outdated components. " + "We recommend updating the following components: component1, component2, component3." 
+ ) assert get_suggestion_message(outdated_components) == expected_message diff --git a/src/backend/tests/unit/api/v1/__init__.py b/src/backend/tests/unit/api/v1/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/src/backend/tests/unit/api/v1/test_api_key.py b/src/backend/tests/unit/api/v1/test_api_key.py new file mode 100644 index 000000000000..e934674394e1 --- /dev/null +++ b/src/backend/tests/unit/api/v1/test_api_key.py @@ -0,0 +1,64 @@ +from fastapi import status +from httpx import AsyncClient + + +async def test_get_api_keys(client: AsyncClient, logged_in_headers): + response = await client.get("api/v1/api_key/", headers=logged_in_headers) + result = response.json() + + assert response.status_code == status.HTTP_200_OK + assert isinstance(result, dict), "The result must be a dictionary" + assert "api_keys" in result, "The dictionary must contain a key called 'api_keys'" + assert "user_id" in result, "The dictionary must contain a key called 'user_id'" + assert "total_count" in result, "The dictionary must contain a key called 'total_count'" + + +async def test_create_api_key_route(client: AsyncClient, logged_in_headers, active_user): + basic_case = { + "name": "string", + "total_uses": 0, + "is_active": True, + "api_key": "string", + "user_id": str(active_user.id), + } + response = await client.post("api/v1/api_key/", json=basic_case, headers=logged_in_headers) + result = response.json() + + assert response.status_code == status.HTTP_200_OK + assert isinstance(result, dict), "The result must be a dictionary" + assert "api_key" in result, "The dictionary must contain a key called 'api_key'" + assert "id" in result, "The dictionary must contain a key called 'id'" + assert "is_active" in result, "The dictionary must contain a key called 'is_active'" + assert "last_used_at" in result, "The dictionary must contain a key called 'last_used_at'" + assert "name" in result, "The dictionary must contain a key called 'name'" + assert "total_uses" in result, "The dictionary must contain a key called 'total_uses'" + assert "user_id" in result, "The dictionary must contain a key called 'user_id'" + + +async def test_delete_api_key_route(client: AsyncClient, logged_in_headers, active_user): + basic_case = { + "name": "string", + "total_uses": 0, + "is_active": True, + "api_key": "string", + "user_id": str(active_user.id), + } + _response = await client.post("api/v1/api_key/", json=basic_case, headers=logged_in_headers) + _id = _response.json()["id"] + + response = await client.delete(f"api/v1/api_key/{_id}", headers=logged_in_headers) + result = response.json() + + assert response.status_code == status.HTTP_200_OK + assert isinstance(result, dict), "The result must be a dictionary" + assert "detail" in result, "The dictionary must contain a key called 'detail'" + + +async def test_save_store_api_key(client: AsyncClient, logged_in_headers): + basic_case = {"api_key": "string"} + response = await client.post("api/v1/api_key/store", json=basic_case, headers=logged_in_headers) + result = response.json() + + assert response.status_code == status.HTTP_200_OK + assert isinstance(result, dict), "The result must be a dictionary" + assert "detail" in result, "The dictionary must contain a key called 'detail'" diff --git a/src/backend/tests/unit/api/v1/test_endpoints.py b/src/backend/tests/unit/api/v1/test_endpoints.py new file mode 100644 index 000000000000..5003bc83803e --- /dev/null +++ b/src/backend/tests/unit/api/v1/test_endpoints.py @@ -0,0 +1,54 @@ +import asyncio +from
pathlib import Path +from typing import Any + +from fastapi import status +from httpx import AsyncClient +from langflow.api.v1.schemas import UpdateCustomComponentRequest + + +async def get_dynamic_output_component_code(): + return await asyncio.to_thread( + Path("src/backend/tests/data/dynamic_output_component.py").read_text, encoding="utf-8" + ) + + +async def test_get_version(client: AsyncClient): + response = await client.get("api/v1/version") + result = response.json() + + assert response.status_code == status.HTTP_200_OK + assert isinstance(result, dict), "The result must be a dictionary" + assert "version" in result, "The dictionary must contain a key called 'version'" + assert "main_version" in result, "The dictionary must contain a key called 'main_version'" + assert "package" in result, "The dictionary must contain a key called 'package'" + + +async def test_get_config(client: AsyncClient): + response = await client.get("api/v1/config") + result = response.json() + + assert response.status_code == status.HTTP_200_OK + assert isinstance(result, dict), "The result must be a dictionary" + assert "frontend_timeout" in result, "The dictionary must contain a key called 'frontend_timeout'" + assert "auto_saving" in result, "The dictionary must contain a key called 'auto_saving'" + assert "health_check_max_retries" in result, "The dictionary must contain a 'health_check_max_retries' key" + assert "max_file_size_upload" in result, "The dictionary must contain a key called 'max_file_size_upload'" + + +async def test_update_component_outputs(client: AsyncClient, logged_in_headers: dict): + code = await get_dynamic_output_component_code() + frontend_node: dict[str, Any] = {"outputs": []} + request = UpdateCustomComponentRequest( + code=code, + frontend_node=frontend_node, + field="show_output", + field_value=True, + template={}, + ) + response = await client.post("api/v1/custom_component/update", json=request.model_dump(), headers=logged_in_headers) + result = response.json() + + assert response.status_code == status.HTTP_200_OK + output_names = [output["name"] for output in result["outputs"]] + assert "tool_output" in output_names diff --git a/src/backend/tests/unit/api/v1/test_flows.py b/src/backend/tests/unit/api/v1/test_flows.py new file mode 100644 index 000000000000..27a9ab5d8749 --- /dev/null +++ b/src/backend/tests/unit/api/v1/test_flows.py @@ -0,0 +1,172 @@ +from fastapi import status +from httpx import AsyncClient + + +async def test_create_flow(client: AsyncClient, logged_in_headers): + basic_case = { + "name": "string", + "description": "string", + "icon": "string", + "icon_bg_color": "#ff00ff", + "gradient": "string", + "data": {}, + "is_component": False, + "webhook": False, + "endpoint_name": "string", + "tags": ["string"], + "user_id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "folder_id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", + } + response = await client.post("api/v1/flows/", json=basic_case, headers=logged_in_headers) + result = response.json() + + assert response.status_code == status.HTTP_201_CREATED + assert isinstance(result, dict), "The result must be a dictionary" + assert "data" in result, "The result must have a 'data' key" + assert "description" in result, "The result must have a 'description' key" + assert "endpoint_name" in result, "The result must have an 'endpoint_name' key" + assert "folder_id" in result, "The result must have a 'folder_id' key" + assert "gradient" in result, "The result must have a 'gradient' key" + assert "icon" in result, "The result must have an 'icon' key" + assert "icon_bg_color" in result, "The result must have an 'icon_bg_color' key" + assert "id" in result, "The result must have an 'id' key" + assert "is_component" in result, "The result must have an 'is_component' key" + assert "name" in result, "The result must have a 'name' key" + assert "tags" in result, "The result must have a 'tags' key" + assert "updated_at" in result, "The result must have an 'updated_at' key" + assert "user_id" in result, "The result must have a 'user_id' key" + assert "webhook" in result, "The result must have a 'webhook' key" + + +async def test_read_flows(client: AsyncClient, logged_in_headers): + params = { + "remove_example_flows": False, + "components_only": False, + "get_all": True, + "header_flows": False, + "page": 1, + "size": 50, + } + response = await client.get("api/v1/flows/", params=params, headers=logged_in_headers) + result = response.json() + + assert response.status_code == status.HTTP_200_OK + assert isinstance(result, list), "The result must be a list" + + +async def test_read_flow(client: AsyncClient, logged_in_headers): + basic_case = { + "name": "string", + "description": "string", + "icon": "string", + "icon_bg_color": "#ff00ff", + "gradient": "string", + "data": {}, + "is_component": False, + "webhook": False, + "endpoint_name": "string", + "tags": ["string"], + "user_id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "folder_id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", + } + _response = await client.post("api/v1/flows/", json=basic_case, headers=logged_in_headers) + _id = _response.json()["id"] + response = await client.get(f"api/v1/flows/{_id}", headers=logged_in_headers) + result = response.json() + + assert response.status_code == status.HTTP_200_OK + assert isinstance(result, dict), "The result must be a dictionary" + assert "data" in result, "The result must have a 'data' key" + assert "description" in result, "The result must have a 'description' key" + assert "endpoint_name" in result, "The result must have an 'endpoint_name' key" + assert "folder_id" in result, "The result must have a 'folder_id' key" + assert "gradient" in result, "The result must have a 'gradient' key" + assert "icon" in result, "The result must have an 'icon' key" + assert "icon_bg_color" in result, "The result must have an 'icon_bg_color' key" + assert "id" in result, "The result must have an 'id' key" + assert "is_component" in result, "The result must have an 'is_component' key" + assert "name" in result, "The result must have a 'name' key" + assert "tags" in result, "The result must have a 'tags' key" + assert "updated_at" in result, "The result must have an 'updated_at' key" + assert "user_id" in result, "The result must have a 'user_id' key" + assert "webhook" in result, "The result must have a 'webhook' key" + + +async def test_update_flow(client: AsyncClient, logged_in_headers): + name = "first_name" + updated_name = "second_name" + basic_case = { + "description": "string", + "icon": "string", + "icon_bg_color": "#ff00ff", + "gradient": "string", + "data": {}, + "is_component": False, + "webhook": False, + "endpoint_name": "string", + "tags": ["string"], + "user_id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "folder_id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", + } + basic_case["name"] = name + _response = await client.post("api/v1/flows/", json=basic_case, headers=logged_in_headers) + _id = _response.json()["id"] + + basic_case["name"] = updated_name + response = await client.patch(f"api/v1/flows/{_id}", json=basic_case, headers=logged_in_headers) + result = response.json() + + assert isinstance(result, dict), "The result must be a dictionary" + assert "data" in result, "The result must have a 'data' key" + assert "description" in result, "The result must have a 'description' key" + assert "endpoint_name" in result, "The result must have an 'endpoint_name' key" + assert "folder_id" in result, "The result must have a 'folder_id' key" + assert "gradient" in result, "The result must have a 'gradient' key" + assert "icon" in result, "The result must have an 'icon' key" + assert "icon_bg_color" in result, "The result must have an 'icon_bg_color' key" + assert "id" in result, "The result must have an 'id' key" + assert "is_component" in result, "The result must have an 'is_component' key" + assert "name" in result, "The result must have a 'name' key" + assert "tags" in result, "The result must have a 'tags' key" + assert "updated_at" in result, "The result must have an 'updated_at' key" + assert "user_id" in result, "The result must have a 'user_id' key" + assert "webhook" in result, "The result must have a 'webhook' key" + assert result["name"] == updated_name, "The name must be updated" + + +async def test_create_flows(client: AsyncClient, logged_in_headers): + amount_flows = 10 + basic_case = { + "description": "string", + "icon": "string", + "icon_bg_color": "#ff00ff", + "gradient": "string", + "data": {}, + "is_component": False, + "webhook": False, + "tags": ["string"], + "user_id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "folder_id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", + } + cases = [] + for i in range(amount_flows): + case = basic_case.copy() + case["name"] = f"string_{i}" + case["endpoint_name"] = f"string_{i}" + cases.append(case) + + response = await client.post("api/v1/flows/batch/", json={"flows": cases}, headers=logged_in_headers) + result = response.json() + + assert response.status_code == status.HTTP_201_CREATED + assert isinstance(result, list), "The result must be a list" + assert len(result) == amount_flows, "The result must have the same number of flows" + + +async def test_read_basic_examples(client: AsyncClient, logged_in_headers): + response = await client.get("api/v1/flows/basic_examples/", headers=logged_in_headers) + result = response.json() + + assert response.status_code == status.HTTP_200_OK + assert isinstance(result, list), "The result must be a list" + assert len(result) > 0, "The result must have at least one flow" diff --git a/src/backend/tests/unit/api/v1/test_folders.py b/src/backend/tests/unit/api/v1/test_folders.py new file mode 100644 index 000000000000..e19451623ea4 --- /dev/null +++ b/src/backend/tests/unit/api/v1/test_folders.py @@ -0,0 +1,64 @@ +import pytest +from fastapi import status +from httpx import AsyncClient + + +@pytest.fixture +def basic_case(): + return { + "name": "New Folder", + "description": "", + "flows_list": [], + "components_list": [], + } + + +async def test_create_folder(client: AsyncClient, logged_in_headers, basic_case): + response = await client.post("api/v1/folders/", json=basic_case, headers=logged_in_headers) + result = response.json() + + assert response.status_code == status.HTTP_201_CREATED + assert isinstance(result, dict), "The result must be a dictionary" + assert "name" in result, "The dictionary must contain a key called 'name'" + assert "description" in result, "The dictionary must contain a key called 'description'" + assert "id" in result, "The dictionary must contain a key called 'id'" + assert "parent_id" in result, "The
dictionary must contain a key called 'parent_id'" + + +async def test_read_folders(client: AsyncClient, logged_in_headers): + response = await client.get("api/v1/folders/", headers=logged_in_headers) + result = response.json() + + assert response.status_code == status.HTTP_200_OK + assert isinstance(result, list), "The result must be a list" + assert len(result) > 0, "The list must not be empty" + + +async def test_read_folder(client: AsyncClient, logged_in_headers, basic_case): + _response = await client.post("api/v1/folders/", json=basic_case, headers=logged_in_headers) + _id = _response.json()["id"] + response = await client.get(f"api/v1/folders/{_id}", headers=logged_in_headers) + result = response.json() + + assert response.status_code == status.HTTP_200_OK + assert isinstance(result, dict), "The result must be a dictionary" + assert "name" in result, "The dictionary must contain a key called 'name'" + assert "description" in result, "The dictionary must contain a key called 'description'" + assert "id" in result, "The dictionary must contain a key called 'id'" + assert "parent_id" in result, "The dictionary must contain a key called 'parent_id'" + + +async def test_update_folder(client: AsyncClient, logged_in_headers, basic_case): + update_case = basic_case.copy() + update_case["name"] = "Updated Folder" + _response = await client.post("api/v1/folders/", json=basic_case, headers=logged_in_headers) + _id = _response.json()["id"] + response = await client.patch(f"api/v1/folders/{_id}", json=update_case, headers=logged_in_headers) + result = response.json() + + assert response.status_code == status.HTTP_200_OK + assert isinstance(result, dict), "The result must be a dictionary" + assert "name" in result, "The dictionary must contain a key called 'name'" + assert "description" in result, "The dictionary must contain a key called 'description'" + assert "id" in result, "The dictionary must contain a key called 'id'" + assert "parent_id" in result, "The dictionary must contain a key called 'parent_id'" diff --git a/src/backend/tests/unit/api/v1/test_starter_projects.py b/src/backend/tests/unit/api/v1/test_starter_projects.py new file mode 100644 index 000000000000..9d319830f005 --- /dev/null +++ b/src/backend/tests/unit/api/v1/test_starter_projects.py @@ -0,0 +1,10 @@ +from fastapi import status +from httpx import AsyncClient + + +async def test_get_starter_projects(client: AsyncClient, logged_in_headers): + response = await client.get("api/v1/starter-projects/", headers=logged_in_headers) + result = response.json() + + assert response.status_code == status.HTTP_200_OK + assert isinstance(result, list), "The result must be a list" diff --git a/src/backend/tests/unit/api/v1/test_store.py b/src/backend/tests/unit/api/v1/test_store.py new file mode 100644 index 000000000000..ca17dc761815 --- /dev/null +++ b/src/backend/tests/unit/api/v1/test_store.py @@ -0,0 +1,12 @@ +from fastapi import status +from httpx import AsyncClient + + +async def test_check_if_store_is_enabled(client: AsyncClient): + response = await client.get("api/v1/store/check/") + result = response.json() + + assert response.status_code == status.HTTP_200_OK + assert isinstance(result, dict), "The variable must be a dictionary" + assert "enabled" in result, "The dictionary must contain a key called 'enabled'" + assert isinstance(result["enabled"], bool), "There must be a boolean value for the key 'enabled' in the dictionary" diff --git a/src/backend/tests/unit/api/v1/test_users.py b/src/backend/tests/unit/api/v1/test_users.py 
new file mode 100644 index 000000000000..87814507e09e --- /dev/null +++ b/src/backend/tests/unit/api/v1/test_users.py @@ -0,0 +1,98 @@ +from fastapi import status +from httpx import AsyncClient + + +async def test_add_user(client: AsyncClient): + basic_case = {"username": "string", "password": "string"} + response = await client.post("api/v1/users/", json=basic_case) + result = response.json() + + assert response.status_code == status.HTTP_201_CREATED + assert isinstance(result, dict), "The result must be a dictionary" + assert "id" in result, "The result must have an 'id' key" + assert "is_active" in result, "The result must have an 'is_active' key" + assert "is_superuser" in result, "The result must have an 'is_superuser' key" + assert "last_login_at" in result, "The result must have a 'last_login_at' key" + assert "profile_image" in result, "The result must have a 'profile_image' key" + assert "store_api_key" in result, "The result must have a 'store_api_key' key" + assert "updated_at" in result, "The result must have an 'updated_at' key" + assert "username" in result, "The result must have a 'username' key" + + +async def test_read_current_user(client: AsyncClient, logged_in_headers): + response = await client.get("api/v1/users/whoami", headers=logged_in_headers) + result = response.json() + + assert response.status_code == status.HTTP_200_OK + assert isinstance(result, dict), "The result must be a dictionary" + assert "id" in result, "The result must have an 'id' key" + assert "is_active" in result, "The result must have an 'is_active' key" + assert "is_superuser" in result, "The result must have an 'is_superuser' key" + assert "last_login_at" in result, "The result must have a 'last_login_at' key" + assert "profile_image" in result, "The result must have a 'profile_image' key" + assert "store_api_key" in result, "The result must have a 'store_api_key' key" + assert "updated_at" in result, "The result must have an 'updated_at' key" + assert "username" in result, "The result must have a 'username' key" + + +async def test_read_all_users(client: AsyncClient, logged_in_headers_super_user): + response = await client.get("api/v1/users/", headers=logged_in_headers_super_user) + result = response.json() + + assert response.status_code == status.HTTP_200_OK + assert isinstance(result, dict), "The result must be a dictionary" + assert "total_count" in result, "The result must have a 'total_count' key" + assert "users" in result, "The result must have a 'users' key" + + +async def test_patch_user(client: AsyncClient, logged_in_headers_super_user): + name = "string" + updated_name = "string2" + basic_case = {"username": name, "password": "string"} + _response = await client.post("api/v1/users/", json=basic_case) + _id = _response.json()["id"] + basic_case["username"] = updated_name + response = await client.patch(f"api/v1/users/{_id}", json=basic_case, headers=logged_in_headers_super_user) + result = response.json() + + assert response.status_code == status.HTTP_200_OK + assert isinstance(result, dict), "The result must be a dictionary" + assert "id" in result, "The result must have an 'id' key" + assert "is_active" in result, "The result must have an 'is_active' key" + assert "is_superuser" in result, "The result must have an 'is_superuser' key" + assert "last_login_at" in result, "The result must have a 'last_login_at' key" + assert "profile_image" in result, "The result must have a 'profile_image' key" + assert "store_api_key" in result, "The result must have a 'store_api_key' key" + assert "updated_at" in result, "The result must have an 'updated_at' key" + assert "username" in result, "The result must have a 'username' key" + assert result["username"] == updated_name, "The username must be updated" + + +async def test_reset_password(client: AsyncClient, logged_in_headers, active_user): + _id = str(active_user.id) + basic_case = {"username": "string", "password": "new_password"} + response = await client.patch(f"api/v1/users/{_id}/reset-password", json=basic_case, headers=logged_in_headers) + result = response.json() + + assert response.status_code == status.HTTP_200_OK + assert isinstance(result, dict), "The result must be a dictionary" + assert "id" in result, "The result must have an 'id' key" + assert "is_active" in result, "The result must have an 'is_active' key" + assert "is_superuser" in result, "The result must have an 'is_superuser' key" + assert "last_login_at" in result, "The result must have a 'last_login_at' key" + assert "profile_image" in result, "The result must have a 'profile_image' key" + assert "store_api_key" in result, "The result must have a 'store_api_key' key" + assert "updated_at" in result, "The result must have an 'updated_at' key" + assert "username" in result, "The result must have a 'username' key" + + +async def test_delete_user(client: AsyncClient, logged_in_headers_super_user): + basic_case = {"username": "string", "password": "string"} + _response = await client.post("api/v1/users/", json=basic_case) + _id = _response.json()["id"] + response = await client.delete(f"api/v1/users/{_id}", headers=logged_in_headers_super_user) + result = response.json() + + assert response.status_code == status.HTTP_200_OK + assert isinstance(result, dict), "The result must be a dictionary" + assert "detail" in result, "The result must have a 'detail' key" diff --git a/src/backend/tests/unit/api/v1/test_validate.py b/src/backend/tests/unit/api/v1/test_validate.py new file mode 100644 index 000000000000..56cee8a67a1d --- /dev/null +++ b/src/backend/tests/unit/api/v1/test_validate.py @@ -0,0 +1,56 @@ +from fastapi import status +from httpx import AsyncClient + + +async def test_post_validate_code(client: AsyncClient): + good_code = """ +from pprint import pprint +var = {"a": 1, "b": 2} +pprint(var) + """ + response = await client.post("api/v1/validate/code", json={"code": good_code}) + result = response.json() + + assert response.status_code == status.HTTP_200_OK + assert isinstance(result, dict), "The result must be a dictionary" + assert "imports" in result, "The result must have an 'imports' key" + assert "function" in result, "The result must have a 'function' key" + + +async def test_post_validate_prompt(client: AsyncClient): + basic_case = { + "name": "string", + "template": "string", + "custom_fields": {}, + "frontend_node": { + "template": {}, + "description": "string", + "icon": "string", + "is_input": True, + "is_output": True, + "is_composition": True, + "base_classes": ["string"], + "name": "", + "display_name": "", + "documentation": "", + "custom_fields": {}, + "output_types": [], + "full_path": "string", + "pinned": False, + "conditional_paths": [], + "frozen": False, + "outputs": [], + "field_order": [], + "beta": False, + "error": "string", + "edited": False, + "metadata": {}, + }, + } + response = await client.post("api/v1/validate/prompt", json=basic_case) + result = response.json() + + assert response.status_code == status.HTTP_200_OK + assert isinstance(result, dict), "The result must be a dictionary" + assert
"frontend_node" in result, "The result must have a 'frontend_node' key" + assert "input_variables" in result, "The result must have an 'input_variables' key" diff --git a/src/backend/tests/unit/api/v1/test_variable.py b/src/backend/tests/unit/api/v1/test_variable.py index 4445e7b10a8f..6c1fe08db5b3 100644 --- a/src/backend/tests/unit/api/v1/test_variable.py +++ b/src/backend/tests/unit/api/v1/test_variable.py @@ -1,8 +1,9 @@ -import pytest -from uuid import uuid4 from unittest import mock +from uuid import uuid4 -from fastapi import status, HTTPException +import pytest +from fastapi import HTTPException, status +from httpx import AsyncClient @pytest.fixture @@ -15,166 +16,173 @@ def body(): } -def test_create_variable(client, body, active_user, logged_in_headers): - response = client.post("api/v1/variables", json=body, headers=logged_in_headers) +@pytest.mark.usefixtures("active_user") +async def test_create_variable(client: AsyncClient, body, logged_in_headers): + response = await client.post("api/v1/variables/", json=body, headers=logged_in_headers) result = response.json() - assert status.HTTP_201_CREATED == response.status_code + assert response.status_code == status.HTTP_201_CREATED assert body["name"] == result["name"] assert body["type"] == result["type"] assert body["default_fields"] == result["default_fields"] - assert "id" in result.keys() - assert "value" not in result.keys() + assert "id" in result + assert body["value"] != result["value"] -def test_create_variable__variable_name_alread_exists(client, body, active_user, logged_in_headers): - client.post("api/v1/variables", json=body, headers=logged_in_headers) +@pytest.mark.usefixtures("active_user") +async def test_create_variable__variable_name_already_exists(client: AsyncClient, body, logged_in_headers): + await client.post("api/v1/variables/", json=body, headers=logged_in_headers) - response = client.post("api/v1/variables", json=body, headers=logged_in_headers) + response = await client.post("api/v1/variables/", json=body, headers=logged_in_headers) result = response.json() - assert status.HTTP_400_BAD_REQUEST == response.status_code + assert response.status_code == status.HTTP_400_BAD_REQUEST assert "Variable name already exists" in result["detail"] -def test_create_variable__variable_name_and_value_cannot_be_empty(client, body, active_user, logged_in_headers): +@pytest.mark.usefixtures("active_user") +async def test_create_variable__variable_name_and_value_cannot_be_empty(client: AsyncClient, body, logged_in_headers): body["name"] = "" body["value"] = "" - response = client.post("api/v1/variables", json=body, headers=logged_in_headers) + response = await client.post("api/v1/variables/", json=body, headers=logged_in_headers) result = response.json() - assert status.HTTP_400_BAD_REQUEST == response.status_code + assert response.status_code == status.HTTP_400_BAD_REQUEST assert "Variable name and value cannot be empty" in result["detail"] -def test_create_variable__variable_name_cannot_be_empty(client, body, active_user, logged_in_headers): +@pytest.mark.usefixtures("active_user") +async def test_create_variable__variable_name_cannot_be_empty(client: AsyncClient, body, logged_in_headers): body["name"] = "" - response = client.post("api/v1/variables", json=body, headers=logged_in_headers) + response = await client.post("api/v1/variables/", json=body, headers=logged_in_headers) result = response.json() - assert status.HTTP_400_BAD_REQUEST == response.status_code + assert response.status_code == status.HTTP_400_BAD_REQUEST 
assert "Variable name cannot be empty" in result["detail"] -def test_create_variable__variable_value_cannot_be_empty(client, body, active_user, logged_in_headers): +@pytest.mark.usefixtures("active_user") +async def test_create_variable__variable_value_cannot_be_empty(client: AsyncClient, body, logged_in_headers): body["value"] = "" - response = client.post("api/v1/variables", json=body, headers=logged_in_headers) + response = await client.post("api/v1/variables/", json=body, headers=logged_in_headers) result = response.json() - assert status.HTTP_400_BAD_REQUEST == response.status_code + assert response.status_code == status.HTTP_400_BAD_REQUEST assert "Variable value cannot be empty" in result["detail"] -def test_create_variable__HTTPException(client, body, active_user, logged_in_headers): +@pytest.mark.usefixtures("active_user") +async def test_create_variable__httpexception(client: AsyncClient, body, logged_in_headers): status_code = 418 generic_message = "I'm a teapot" with mock.patch("langflow.services.auth.utils.encrypt_api_key") as m: m.side_effect = HTTPException(status_code=status_code, detail=generic_message) - response = client.post("api/v1/variables", json=body, headers=logged_in_headers) + response = await client.post("api/v1/variables/", json=body, headers=logged_in_headers) result = response.json() - assert status.HTTP_418_IM_A_TEAPOT == response.status_code + assert response.status_code == status.HTTP_418_IM_A_TEAPOT assert generic_message in result["detail"] -def test_create_variable__Exception(client, body, active_user, logged_in_headers): +@pytest.mark.usefixtures("active_user") +async def test_create_variable__exception(client: AsyncClient, body, logged_in_headers): generic_message = "Generic error message" with mock.patch("langflow.services.auth.utils.encrypt_api_key") as m: m.side_effect = Exception(generic_message) - response = client.post("api/v1/variables", json=body, headers=logged_in_headers) + response = await client.post("api/v1/variables/", json=body, headers=logged_in_headers) result = response.json() - assert status.HTTP_500_INTERNAL_SERVER_ERROR == response.status_code + assert response.status_code == status.HTTP_500_INTERNAL_SERVER_ERROR assert generic_message in result["detail"] -def test_read_variables(client, body, active_user, logged_in_headers): +@pytest.mark.usefixtures("active_user") +async def test_read_variables(client: AsyncClient, body, logged_in_headers): names = ["test_variable1", "test_variable2", "test_variable3"] for name in names: body["name"] = name - client.post("api/v1/variables", json=body, headers=logged_in_headers) + await client.post("api/v1/variables/", json=body, headers=logged_in_headers) - response = client.get("api/v1/variables", headers=logged_in_headers) + response = await client.get("api/v1/variables/", headers=logged_in_headers) result = response.json() - assert status.HTTP_200_OK == response.status_code + assert response.status_code == status.HTTP_200_OK assert all(name in [r["name"] for r in result] for name in names) -def test_read_variables__empty(client, active_user, logged_in_headers): - all_variables = client.get("api/v1/variables", headers=logged_in_headers).json() +@pytest.mark.usefixtures("active_user") +async def test_read_variables__empty(client: AsyncClient, logged_in_headers): + all_variables = await client.get("api/v1/variables/", headers=logged_in_headers) + all_variables = all_variables.json() for variable in all_variables: - client.delete(f"api/v1/variables/{variable.get('id')}", 
headers=logged_in_headers) + await client.delete(f"api/v1/variables/{variable.get('id')}", headers=logged_in_headers) - response = client.get("api/v1/variables", headers=logged_in_headers) + response = await client.get("api/v1/variables/", headers=logged_in_headers) result = response.json() - assert status.HTTP_200_OK == response.status_code - assert [] == result + assert response.status_code == status.HTTP_200_OK + assert result == [] -def test_read_variables__(client, active_user, logged_in_headers): # TODO check if this is correct +@pytest.mark.usefixtures("active_user") +async def test_read_variables__(client: AsyncClient, logged_in_headers): generic_message = "Generic error message" - with pytest.raises(Exception) as exc: - with mock.patch("sqlmodel.Session.exec") as m: - m.side_effect = Exception(generic_message) - - response = client.get("api/v1/variables", headers=logged_in_headers) - result = response.json() - - assert status.HTTP_500_INTERNAL_SERVER_ERROR == response.status_code - assert generic_message in result["detail"] - - assert generic_message in str(exc.value) + with mock.patch("sqlmodel.Session.exec") as m: + m.side_effect = Exception(generic_message) + with pytest.raises(Exception, match=generic_message): + await client.get("api/v1/variables/", headers=logged_in_headers) -def test_update_variable(client, body, active_user, logged_in_headers): - saved = client.post("api/v1/variables", json=body, headers=logged_in_headers).json() +@pytest.mark.usefixtures("active_user") +async def test_update_variable(client: AsyncClient, body, logged_in_headers): + saved = await client.post("api/v1/variables/", json=body, headers=logged_in_headers) + saved = saved.json() body["id"] = saved.get("id") body["name"] = "new_name" body["value"] = "new_value" body["type"] = "new_type" body["default_fields"] = ["new_field"] - response = client.patch(f"api/v1/variables/{saved.get('id')}", json=body, headers=logged_in_headers) + response = await client.patch(f"api/v1/variables/{saved.get('id')}", json=body, headers=logged_in_headers) result = response.json() - assert status.HTTP_200_OK == response.status_code + assert response.status_code == status.HTTP_200_OK assert saved["id"] == result["id"] assert saved["name"] != result["name"] - # assert saved["type"] != result["type"] # TODO check if this is correct assert saved["default_fields"] != result["default_fields"] -def test_update_variable__Exception(client, body, active_user, logged_in_headers): +@pytest.mark.usefixtures("active_user") +async def test_update_variable__exception(client: AsyncClient, body, logged_in_headers): wrong_id = uuid4() body["id"] = str(wrong_id) - response = client.patch(f"api/v1/variables/{wrong_id}", json=body, headers=logged_in_headers) + response = await client.patch(f"api/v1/variables/{wrong_id}", json=body, headers=logged_in_headers) result = response.json() - # assert status.HTTP_404_NOT_FOUND == response.status_code # TODO check if this is correct + assert response.status_code == status.HTTP_404_NOT_FOUND assert "Variable not found" in result["detail"] -def test_delete_variable(client, body, active_user, logged_in_headers): - saved = client.post("api/v1/variables", json=body, headers=logged_in_headers).json() - - response = client.delete(f"api/v1/variables/{saved.get('id')}", headers=logged_in_headers) +@pytest.mark.usefixtures("active_user") +async def test_delete_variable(client: AsyncClient, body, logged_in_headers): + response = await client.post("api/v1/variables/", json=body, headers=logged_in_headers) + saved 
= response.json() + response = await client.delete(f"api/v1/variables/{saved.get('id')}", headers=logged_in_headers) - assert status.HTTP_204_NO_CONTENT == response.status_code + assert response.status_code == status.HTTP_204_NO_CONTENT -def test_delete_variable__Exception(client, active_user, logged_in_headers): +@pytest.mark.usefixtures("active_user") +async def test_delete_variable__exception(client: AsyncClient, logged_in_headers): wrong_id = uuid4() - response = client.delete(f"api/v1/variables/{wrong_id}", headers=logged_in_headers) + response = await client.delete(f"api/v1/variables/{wrong_id}", headers=logged_in_headers) - # assert status.HTTP_404_NOT_FOUND == response.status_code # TODO check if this is correct - assert status.HTTP_500_INTERNAL_SERVER_ERROR == response.status_code + assert response.status_code == status.HTTP_500_INTERNAL_SERVER_ERROR diff --git a/src/backend/tests/unit/base/__init__.py b/src/backend/tests/unit/base/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/src/backend/tests/unit/base/load/__init__.py b/src/backend/tests/unit/base/load/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/src/backend/tests/unit/base/load/test_load.py b/src/backend/tests/unit/base/load/test_load.py new file mode 100644 index 000000000000..4c7927a1044f --- /dev/null +++ b/src/backend/tests/unit/base/load/test_load.py @@ -0,0 +1,30 @@ +import inspect + +from langflow.load import run_flow_from_json + + +def test_run_flow_from_json_params(): + # Define the expected parameters + expected_params = { + "flow", + "input_value", + "session_id", + "tweaks", + "input_type", + "output_type", + "output_component", + "log_level", + "log_file", + "env_file", + "cache", + "disable_logs", + "fallback_to_env_vars", + } + + # Check if the function accepts all expected parameters + func_spec = inspect.getfullargspec(run_flow_from_json) + params = func_spec.args + func_spec.kwonlyargs + assert expected_params.issubset(params), "Not all expected parameters are present in run_flow_from_json" + + # TODO: Add tests that load a flow and run it with a fake LLM and check that it returns the + # correct output diff --git a/src/backend/tests/unit/base/tools/__init__.py b/src/backend/tests/unit/base/tools/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/src/backend/tests/unit/base/tools/test_component_tool.py b/src/backend/tests/unit/base/tools/test_component_tool.py deleted file mode 100644 index 25c547099a4d..000000000000 --- a/src/backend/tests/unit/base/tools/test_component_tool.py +++ /dev/null @@ -1,62 +0,0 @@ -import pytest - -from langflow.base.tools.component_tool import ComponentTool -from langflow.components.inputs.ChatInput import ChatInput - - -@pytest.fixture -def client(): - pass - - -def test_component_tool(): - chat_input = ChatInput() - component_tool = ComponentTool(component=chat_input) - assert component_tool.name == "ChatInput" - assert component_tool.description == chat_input.description - assert component_tool.args == { - "input_value": { - "default": "", - "description": "Message to be passed as input.", - "title": "Input Value", - "type": "string", - }, - "should_store_message": { - "default": True, - "description": "Store the message in the history.", - "title": "Should Store Message", - "type": "boolean", - }, - "sender": { - "default": "User", - "description": "Type of sender.", - "enum": ["Machine", "User"], - "title": "Sender", - "type": "string", - }, - "sender_name": { 
"default": "User", - "description": "Name of the sender.", - "title": "Sender Name", - "type": "string", - }, - "session_id": { - "default": "", - "description": "The session ID of the chat. If empty, the current session ID parameter will be used.", - "title": "Session Id", - "type": "string", - }, - "files": { - "default": "", - "description": "Files to be sent with the message.", - "items": {"type": "string"}, - "title": "Files", - "type": "array", - }, - } - assert component_tool.component == chat_input - - result = component_tool.invoke(input=dict(input_value="test")) - assert isinstance(result, dict) - assert hasattr(result["message"], "get_text") - assert result["message"].get_text() == "test" diff --git a/src/backend/tests/unit/base/tools/test_component_toolkit.py b/src/backend/tests/unit/base/tools/test_component_toolkit.py new file mode 100644 index 000000000000..fd05f3a4a102 --- /dev/null +++ b/src/backend/tests/unit/base/tools/test_component_toolkit.py @@ -0,0 +1,60 @@ +import os + +import pytest +from langflow.base.tools.component_tool import ComponentToolkit +from langflow.components.langchain_utilities import ToolCallingAgentComponent +from langflow.components.models import OpenAIModelComponent +from langflow.components.outputs import ChatOutput +from langflow.components.tools.calculator import CalculatorToolComponent +from langflow.graph import Graph +from langflow.schema.data import Data +from langflow.services.settings.feature_flags import FEATURE_FLAGS +from pydantic import BaseModel + + +@pytest.fixture +def _add_toolkit_output(): + FEATURE_FLAGS.add_toolkit_output = True + yield + FEATURE_FLAGS.add_toolkit_output = False + + +async def test_component_tool(): + calculator_component = CalculatorToolComponent() + component_toolkit = ComponentToolkit(component=calculator_component) + component_tool = component_toolkit.get_tools()[0] + assert component_tool.name == "CalculatorTool-run_model" + assert issubclass(component_tool.args_schema, BaseModel) + # TODO: fix this + # assert component_tool.args_schema.model_json_schema()["properties"] == { + # "input_value": { + # "default": "", + # "description": "Message to be passed as input.", + # "title": "Input Value", + # "type": "string", + # }, + # } + assert component_toolkit.component == calculator_component + + result = component_tool.invoke(input={"expression": "1+1"}) + assert isinstance(result[0], Data) + assert "result" in result[0].data + assert result[0].result == "2" + + +@pytest.mark.api_key_required +@pytest.mark.usefixtures("_add_toolkit_output") +def test_component_tool_with_api_key(): + chat_output = ChatOutput() + openai_llm = OpenAIModelComponent() + openai_llm.set(api_key=os.environ["OPENAI_API_KEY"]) + tool_calling_agent = ToolCallingAgentComponent() + tool_calling_agent.set( + llm=openai_llm.build_model, tools=[chat_output], input_value="Which tools are available? Please tell its name." 
+ ) + + g = Graph(start=tool_calling_agent, end=tool_calling_agent) + assert g is not None + results = list(g.start()) + assert len(results) == 4 + assert "message_response" in tool_calling_agent._outputs_map["response"].value.get_text() diff --git a/src/backend/tests/unit/components/__init__.py b/src/backend/tests/unit/components/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/src/backend/tests/unit/components/agents/__init__.py b/src/backend/tests/unit/components/agents/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/src/backend/tests/unit/components/agents/test_agent_component.py b/src/backend/tests/unit/components/agents/test_agent_component.py new file mode 100644 index 000000000000..d20297940126 --- /dev/null +++ b/src/backend/tests/unit/components/agents/test_agent_component.py @@ -0,0 +1,28 @@ +import os + +import pytest +from langflow.components.agents.agent import AgentComponent +from langflow.components.tools.calculator import CalculatorToolComponent + + +@pytest.mark.api_key_required +async def test_agent_component_with_calculator(): + # Mock inputs + tools = [CalculatorToolComponent().build_tool()] # Use the Calculator component as a tool + input_value = "What is 2 + 2?" + + api_key = os.environ["OPENAI_API_KEY"] + temperature = 0.1 + + # Initialize the AgentComponent with mocked inputs + agent = AgentComponent( + tools=tools, + input_value=input_value, + api_key=api_key, + model_name="gpt-4o", + llm_type="OpenAI", + temperature=temperature, + ) + + response = await agent.message_response() + assert "4" in response.data.get("text") diff --git a/src/backend/tests/unit/components/agents/test_agent_events.py b/src/backend/tests/unit/components/agents/test_agent_events.py new file mode 100644 index 000000000000..e1f76bafc20e --- /dev/null +++ b/src/backend/tests/unit/components/agents/test_agent_events.py @@ -0,0 +1,541 @@ +from collections.abc import AsyncIterator +from typing import Any +from unittest.mock import MagicMock + +from langchain_core.agents import AgentFinish +from langflow.base.agents.agent import process_agent_events +from langflow.base.agents.events import ( + handle_on_chain_end, + handle_on_chain_start, + handle_on_chain_stream, + handle_on_tool_end, + handle_on_tool_error, + handle_on_tool_start, +) +from langflow.schema.content_block import ContentBlock +from langflow.schema.content_types import ToolContent +from langflow.schema.message import Message +from langflow.utils.constants import MESSAGE_SENDER_AI + + +async def create_event_iterator(events: list[dict[str, Any]]) -> AsyncIterator[dict[str, Any]]: + """Helper function to create an async iterator from a list of events.""" + for event in events: + yield event + + +async def test_chain_start_event(): + """Test handling of on_chain_start event.""" + send_message = MagicMock(side_effect=lambda message: message) + + events = [ + {"event": "on_chain_start", "data": {"input": {"input": "test input", "chat_history": []}}, "start_time": 0} + ] + + # Initialize message with content blocks + agent_message = Message( + sender=MESSAGE_SENDER_AI, + sender_name="Agent", + properties={"icon": "Bot", "state": "partial"}, + content_blocks=[ContentBlock(title="Agent Steps", contents=[])], + session_id="test_session_id", + ) + send_message.return_value = agent_message + + result = await process_agent_events(create_event_iterator(events), agent_message, send_message) + + assert result.properties.icon == "Bot" + assert len(result.content_blocks) == 1 + assert 
result.content_blocks[0].title == "Agent Steps" + + +async def test_chain_end_event(): + """Test handling of on_chain_end event.""" + send_message = MagicMock(side_effect=lambda message: message) + + # Create a mock AgentFinish output + output = AgentFinish(return_values={"output": "final output"}, log="test log") + + events = [{"event": "on_chain_end", "data": {"output": output}, "start_time": 0}] + + # Initialize message with content blocks + agent_message = Message( + sender=MESSAGE_SENDER_AI, + sender_name="Agent", + properties={"icon": "Bot", "state": "partial"}, + content_blocks=[ContentBlock(title="Agent Steps", contents=[])], + session_id="test_session_id", + ) + send_message.return_value = agent_message + + result = await process_agent_events(create_event_iterator(events), agent_message, send_message) + + assert result.properties.icon == "Bot" + assert result.properties.state == "complete" + assert result.text == "final output" + + +async def test_tool_start_event(): + """Test handling of on_tool_start event.""" + send_message = MagicMock() + + # Set up the send_message mock to return the modified message + def update_message(message): + # Return a copy of the message to simulate real behavior + return Message(**message.model_dump()) + + send_message.side_effect = update_message + + events = [ + { + "event": "on_tool_start", + "name": "test_tool", + "run_id": "test_run", + "data": {"input": {"query": "tool input"}}, + "start_time": 0, + } + ] + agent_message = Message( + sender=MESSAGE_SENDER_AI, + sender_name="Agent", + properties={"icon": "Bot", "state": "partial"}, + content_blocks=[ContentBlock(title="Agent Steps", contents=[])], + session_id="test_session_id", + ) + result = await process_agent_events(create_event_iterator(events), agent_message, send_message) + + assert result.properties.icon == "Bot" + assert len(result.content_blocks) == 1 + assert result.content_blocks[0].title == "Agent Steps" + assert len(result.content_blocks[0].contents) > 0 + tool_content = result.content_blocks[0].contents[-1] + assert isinstance(tool_content, ToolContent) + assert tool_content.name == "test_tool" + assert tool_content.tool_input == {"query": "tool input"}, tool_content + + +async def test_tool_end_event(): + """Test handling of on_tool_end event.""" + send_message = MagicMock(side_effect=lambda message: message) + + events = [ + { + "event": "on_tool_start", + "name": "test_tool", + "run_id": "test_run", + "data": {"input": {"query": "tool input"}}, + "start_time": 0, + }, + { + "event": "on_tool_end", + "name": "test_tool", + "run_id": "test_run", + "data": {"output": "tool output"}, + "start_time": 0, + }, + ] + agent_message = Message( + sender=MESSAGE_SENDER_AI, + sender_name="Agent", + properties={"icon": "Bot", "state": "partial"}, + content_blocks=[ContentBlock(title="Agent Steps", contents=[])], + session_id="test_session_id", + ) + result = await process_agent_events(create_event_iterator(events), agent_message, send_message) + + assert len(result.content_blocks) == 1 + tool_content = result.content_blocks[0].contents[-1] + assert tool_content.name == "test_tool" + assert tool_content.output == "tool output" + + +async def test_tool_error_event(): + """Test handling of on_tool_error event.""" + send_message = MagicMock(side_effect=lambda message: message) + + events = [ + { + "event": "on_tool_start", + "name": "test_tool", + "run_id": "test_run", + "data": {"input": {"query": "tool input"}}, + "start_time": 0, + }, + { + "event": "on_tool_error", + "name": "test_tool", + 
"run_id": "test_run", + "data": {"error": "error message"}, + "start_time": 0, + }, + ] + agent_message = Message( + sender=MESSAGE_SENDER_AI, + sender_name="Agent", + properties={"icon": "Bot", "state": "partial"}, + content_blocks=[ContentBlock(title="Agent Steps", contents=[])], + session_id="test_session_id", + ) + + result = await process_agent_events(create_event_iterator(events), agent_message, send_message) + + tool_content = result.content_blocks[0].contents[-1] + assert tool_content.name == "test_tool" + assert tool_content.error == "error message" + assert tool_content.header["title"] == "Error using **test_tool**" + + +async def test_chain_stream_event(): + """Test handling of on_chain_stream event.""" + send_message = MagicMock(side_effect=lambda message: message) + + events = [{"event": "on_chain_stream", "data": {"chunk": {"output": "streamed output"}}, "start_time": 0}] + agent_message = Message( + sender=MESSAGE_SENDER_AI, + sender_name="Agent", + properties={"icon": "Bot", "state": "partial"}, + content_blocks=[ContentBlock(title="Agent Steps", contents=[])], + session_id="test_session_id", + ) + result = await process_agent_events(create_event_iterator(events), agent_message, send_message) + + assert result.properties.state == "complete" + assert result.text == "streamed output" + + +async def test_multiple_events(): + """Test handling of multiple events in sequence.""" + send_message = MagicMock(side_effect=lambda message: message) + + # Create a mock AgentFinish output instead of MockOutput + output = AgentFinish(return_values={"output": "final output"}, log="test log") + + events = [ + {"event": "on_chain_start", "data": {"input": {"input": "initial input", "chat_history": []}}, "start_time": 0}, + { + "event": "on_tool_start", + "name": "test_tool", + "run_id": "test_run", + "data": {"input": {"query": "tool input"}}, + "start_time": 0, + }, + { + "event": "on_tool_end", + "name": "test_tool", + "run_id": "test_run", + "data": {"output": "tool output"}, + "start_time": 0, + }, + {"event": "on_chain_end", "data": {"output": output}, "start_time": 0}, + ] + + # Initialize message with content blocks + agent_message = Message( + sender=MESSAGE_SENDER_AI, + sender_name="Agent", + properties={"icon": "Bot", "state": "partial"}, + content_blocks=[ContentBlock(title="Agent Steps", contents=[])], + ) + send_message.return_value = agent_message + + result = await process_agent_events(create_event_iterator(events), agent_message, send_message) + + assert result.properties.state == "complete" + assert result.properties.icon == "Bot" + assert len(result.content_blocks) == 1 + assert result.text == "final output" + + +async def test_unknown_event(): + """Test handling of unknown event type.""" + send_message = MagicMock(side_effect=lambda message: message) + agent_message = Message( + sender=MESSAGE_SENDER_AI, + sender_name="Agent", + properties={"icon": "Bot", "state": "partial"}, + content_blocks=[ContentBlock(title="Agent Steps", contents=[])], # Initialize with empty content block + ) + send_message.return_value = agent_message + + events = [{"event": "unknown_event", "data": {"some": "data"}, "start_time": 0}] + + result = await process_agent_events(create_event_iterator(events), agent_message, send_message) + + # Should complete without error and maintain default state + assert result.properties.state == "complete" + # Content blocks should be empty but present + assert len(result.content_blocks) == 1 + assert len(result.content_blocks[0].contents) == 0 + + +# Additional 
tests for individual handler functions + + +async def test_handle_on_chain_start_with_input(): + """Test handle_on_chain_start with input.""" + send_message = MagicMock(side_effect=lambda message: message) + agent_message = Message( + sender=MESSAGE_SENDER_AI, + sender_name="Agent", + properties={"icon": "Bot", "state": "partial"}, + content_blocks=[ContentBlock(title="Agent Steps", contents=[])], + ) + event = {"event": "on_chain_start", "data": {"input": {"input": "test input", "chat_history": []}}, "start_time": 0} + + updated_message, start_time = handle_on_chain_start(event, agent_message, send_message, 0.0) + + assert updated_message.properties.icon == "Bot" + assert len(updated_message.content_blocks) == 1 + assert updated_message.content_blocks[0].title == "Agent Steps" + assert isinstance(start_time, float) + + +async def test_handle_on_chain_start_no_input(): + """Test handle_on_chain_start without input.""" + send_message = MagicMock(side_effect=lambda message: message) + agent_message = Message( + sender=MESSAGE_SENDER_AI, + sender_name="Agent", + properties={"icon": "Bot", "state": "partial"}, + content_blocks=[ContentBlock(title="Agent Steps", contents=[])], + ) + event = {"event": "on_chain_start", "data": {}, "start_time": 0} + + updated_message, start_time = handle_on_chain_start(event, agent_message, send_message, 0.0) + + assert updated_message.properties.icon == "Bot" + assert len(updated_message.content_blocks) == 1 + assert len(updated_message.content_blocks[0].contents) == 0 + assert isinstance(start_time, float) + + +async def test_handle_on_chain_end_with_output(): + """Test handle_on_chain_end with output.""" + send_message = MagicMock(side_effect=lambda message: message) + agent_message = Message( + sender=MESSAGE_SENDER_AI, + sender_name="Agent", + properties={"icon": "Bot", "state": "partial"}, + content_blocks=[ContentBlock(title="Agent Steps", contents=[])], + ) + + output = AgentFinish(return_values={"output": "final output"}, log="test log") + event = {"event": "on_chain_end", "data": {"output": output}, "start_time": 0} + + updated_message, start_time = handle_on_chain_end(event, agent_message, send_message, 0.0) + + assert updated_message.properties.icon == "Bot" + assert updated_message.properties.state == "complete" + assert updated_message.text == "final output" + assert isinstance(start_time, float) + + +async def test_handle_on_chain_end_no_output(): + """Test handle_on_chain_end without output key in data.""" + send_message = MagicMock(side_effect=lambda message: message) + agent_message = Message( + sender=MESSAGE_SENDER_AI, + sender_name="Agent", + properties={"icon": "Bot", "state": "partial"}, + content_blocks=[ContentBlock(title="Agent Steps", contents=[])], + ) + event = {"event": "on_chain_end", "data": {}, "start_time": 0} + + updated_message, start_time = handle_on_chain_end(event, agent_message, send_message, 0.0) + + assert updated_message.properties.icon == "Bot" + assert updated_message.properties.state == "partial" + assert updated_message.text == "" + assert isinstance(start_time, float) + + +async def test_handle_on_chain_end_empty_data(): + """Test handle_on_chain_end with empty data.""" + send_message = MagicMock(side_effect=lambda message: message) + agent_message = Message( + sender=MESSAGE_SENDER_AI, + sender_name="Agent", + properties={"icon": "Bot", "state": "partial"}, + content_blocks=[ContentBlock(title="Agent Steps", contents=[])], + ) + event = {"event": "on_chain_end", "data": {"output": None}, "start_time": 0} + + 
updated_message, start_time = handle_on_chain_end(event, agent_message, send_message, 0.0) + + assert updated_message.properties.icon == "Bot" + assert updated_message.properties.state == "partial" + assert updated_message.text == "" + assert isinstance(start_time, float) + + +async def test_handle_on_chain_end_with_empty_return_values(): + """Test handle_on_chain_end with empty return_values.""" + send_message = MagicMock(side_effect=lambda message: message) + agent_message = Message( + sender=MESSAGE_SENDER_AI, + sender_name="Agent", + properties={"icon": "Bot", "state": "partial"}, + content_blocks=[ContentBlock(title="Agent Steps", contents=[])], + ) + + class MockOutputEmptyReturnValues: + def __init__(self): + self.return_values = {} + + event = {"event": "on_chain_end", "data": {"output": MockOutputEmptyReturnValues()}, "start_time": 0} + + updated_message, start_time = handle_on_chain_end(event, agent_message, send_message, 0.0) + + assert updated_message.properties.icon == "Bot" + assert updated_message.properties.state == "partial" + assert updated_message.text == "" + assert isinstance(start_time, float) + + +async def test_handle_on_tool_start(): + """Test handle_on_tool_start event.""" + send_message = MagicMock(side_effect=lambda message: message) + tool_blocks_map = {} + agent_message = Message( + sender=MESSAGE_SENDER_AI, + sender_name="Agent", + properties={"icon": "Bot", "state": "partial"}, + content_blocks=[ContentBlock(title="Agent Steps", contents=[])], + ) + event = { + "event": "on_tool_start", + "name": "test_tool", + "run_id": "test_run", + "data": {"input": {"query": "tool input"}}, + "start_time": 0, + } + + updated_message, start_time = handle_on_tool_start(event, agent_message, tool_blocks_map, send_message, 0.0) + + assert len(updated_message.content_blocks) == 1 + assert len(updated_message.content_blocks[0].contents) > 0 + tool_key = f"{event['name']}_{event['run_id']}" + tool_content = updated_message.content_blocks[0].contents[-1] + assert tool_content == tool_blocks_map.get(tool_key) + assert isinstance(tool_content, ToolContent) + assert tool_content.name == "test_tool" + assert tool_content.tool_input == {"query": "tool input"} + assert isinstance(tool_content.duration, int) + assert isinstance(start_time, float) + + +async def test_handle_on_tool_end(): + """Test handle_on_tool_end event.""" + send_message = MagicMock(side_effect=lambda message: message) + tool_blocks_map = {} + agent_message = Message( + sender=MESSAGE_SENDER_AI, + sender_name="Agent", + properties={"icon": "Bot", "state": "partial"}, + content_blocks=[ContentBlock(title="Agent Steps", contents=[])], + ) + + start_event = { + "event": "on_tool_start", + "name": "test_tool", + "run_id": "test_run", + "data": {"input": {"query": "tool input"}}, + } + agent_message, _ = handle_on_tool_start(start_event, agent_message, tool_blocks_map, send_message, 0.0) + + end_event = { + "event": "on_tool_end", + "name": "test_tool", + "run_id": "test_run", + "data": {"output": "tool output"}, + "start_time": 0, + } + + updated_message, start_time = handle_on_tool_end(end_event, agent_message, tool_blocks_map, send_message, 0.0) + + tool_content = updated_message.content_blocks[0].contents[-1] + assert tool_content.name == "test_tool" + assert tool_content.output == "tool output" + assert isinstance(tool_content.duration, int) + assert isinstance(start_time, float) + + +async def test_handle_on_tool_error(): + """Test handle_on_tool_error event.""" + 
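# handle_on_tool_error is expected to update the ToolContent that handle_on_tool_start + # registered in tool_blocks_map, so the start event is replayed first to populate the map. + 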
send_message = MagicMock(side_effect=lambda message: message) + tool_blocks_map = {} + agent_message = Message( + sender=MESSAGE_SENDER_AI, + sender_name="Agent", + properties={"icon": "Bot", "state": "partial"}, + content_blocks=[ContentBlock(title="Agent Steps", contents=[])], + ) + + start_event = { + "event": "on_tool_start", + "name": "test_tool", + "run_id": "test_run", + "data": {"input": {"query": "tool input"}}, + } + agent_message, _ = handle_on_tool_start(start_event, agent_message, tool_blocks_map, send_message, 0.0) + + error_event = { + "event": "on_tool_error", + "name": "test_tool", + "run_id": "test_run", + "data": {"error": "error message"}, + "start_time": 0, + } + + updated_message, start_time = handle_on_tool_error(error_event, agent_message, tool_blocks_map, send_message, 0.0) + + tool_content = updated_message.content_blocks[0].contents[-1] + assert tool_content.name == "test_tool" + assert tool_content.error == "error message" + assert tool_content.header["title"] == "Error using **test_tool**" + assert isinstance(tool_content.duration, int) + assert isinstance(start_time, float) + + +async def test_handle_on_chain_stream_with_output(): + """Test handle_on_chain_stream with output.""" + send_message = MagicMock(side_effect=lambda message: message) + agent_message = Message( + sender=MESSAGE_SENDER_AI, + sender_name="Agent", + properties={"icon": "Bot", "state": "partial"}, + content_blocks=[ContentBlock(title="Agent Steps", contents=[])], + ) + event = { + "event": "on_chain_stream", + "data": {"chunk": {"output": "streamed output"}}, + } + + updated_message, start_time = handle_on_chain_stream(event, agent_message, send_message, 0.0) + + assert updated_message.text == "streamed output" + assert updated_message.properties.state == "complete" + assert isinstance(start_time, float) + + +async def test_handle_on_chain_stream_no_output(): + """Test handle_on_chain_stream without output.""" + send_message = MagicMock(side_effect=lambda message: message) + agent_message = Message( + sender=MESSAGE_SENDER_AI, + sender_name="Agent", + properties={"icon": "Bot", "state": "partial"}, + content_blocks=[ContentBlock(title="Agent Steps", contents=[])], + session_id="test_session_id", + ) + event = { + "event": "on_chain_stream", + "data": {"chunk": {}}, + } + + updated_message, start_time = handle_on_chain_stream(event, agent_message, send_message, 0.0) + + assert updated_message.text == "" + assert updated_message.properties.state == "partial" + assert isinstance(start_time, float) diff --git a/src/backend/tests/unit/components/agents/test_tool_calling_agent.py b/src/backend/tests/unit/components/agents/test_tool_calling_agent.py new file mode 100644 index 000000000000..5affc0e576cd --- /dev/null +++ b/src/backend/tests/unit/components/agents/test_tool_calling_agent.py @@ -0,0 +1,29 @@ +import os + +import pytest +from langflow.components.langchain_utilities import ToolCallingAgentComponent +from langflow.components.models.openai import OpenAIModelComponent +from langflow.components.tools.calculator import CalculatorToolComponent + + +@pytest.mark.api_key_required +async def test_tool_calling_agent_component(): + tools = [CalculatorToolComponent().build_tool()] # Use the Calculator component as a tool + input_value = "What is 2 + 2?" 
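+ # Exercises the real OpenAI API: the api_key_required marker gates this test, so + # OPENAI_API_KEY must be set in the environment (it is read below).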
+ chat_history = [] + api_key = os.environ["OPENAI_API_KEY"] + temperature = 0.1 + + # Default OpenAI Model Component + llm_component = OpenAIModelComponent().set( + api_key=api_key, + temperature=temperature, + ) + llm = llm_component.build_model() + + agent = ToolCallingAgentComponent() + agent.set(llm=llm, tools=tools, chat_history=chat_history, input_value=input_value) + + # Chat output + response = await agent.message_response() + assert "4" in response.data.get("text") diff --git a/src/backend/tests/unit/components/helpers/__init__.py b/src/backend/tests/unit/components/helpers/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/src/backend/tests/unit/components/helpers/test_structured_output_component.py b/src/backend/tests/unit/components/helpers/test_structured_output_component.py new file mode 100644 index 000000000000..bce79b4b5fef --- /dev/null +++ b/src/backend/tests/unit/components/helpers/test_structured_output_component.py @@ -0,0 +1,265 @@ +from unittest.mock import MagicMock, patch + +import pytest +from langchain_core.language_models import BaseLanguageModel +from langflow.components.helpers.structured_output import StructuredOutputComponent +from langflow.schema.data import Data +from pydantic import BaseModel +from typing_extensions import override + + +class TestStructuredOutputComponent: + # Ensure that the structured output is successfully generated with the correct BaseModel instance returned by + # the mock function + def test_successful_structured_output_generation_with_patch_with_config(self): + class MockLanguageModel(BaseLanguageModel): + @override + def with_structured_output(self, *args, **kwargs): + return self + + @override + def with_config(self, *args, **kwargs): + return self + + @override + def invoke(self, *args, **kwargs): + return self + + @override + def generate_prompt(self, *args, **kwargs): + raise NotImplementedError + + @override + async def agenerate_prompt(self, *args, **kwargs): + raise NotImplementedError + + @override + def predict(self, *args, **kwargs): + raise NotImplementedError + + @override + def predict_messages(self, *args, **kwargs): + raise NotImplementedError + + @override + async def apredict(self, *args, **kwargs): + raise NotImplementedError + + @override + async def apredict_messages(self, *args, **kwargs): + raise NotImplementedError + + def mock_get_chat_result(runnable, input_value, config): # noqa: ARG001 + class MockBaseModel(BaseModel): + @override + def model_dump(self, **kwargs): + return {"field": "value"} + + return MockBaseModel() + + component = StructuredOutputComponent( + llm=MockLanguageModel(), + input_value="Test input", + schema_name="TestSchema", + output_schema=[{"name": "field", "type": "str", "description": "A test field"}], + multiple=False, + ) + + with patch("langflow.components.helpers.structured_output.get_chat_result", mock_get_chat_result): + result = component.build_structured_output() + assert isinstance(result, Data) + assert result.data == {"field": "value"} + + # Raises TypeError when the language model does not support structured output + def test_raises_type_error_for_unsupported_language_model(self): + # Mocking an incompatible language model + class MockLanguageModel: + pass + + # Creating an instance of StructuredOutputComponent + component = StructuredOutputComponent( + llm=MockLanguageModel(), + input_value="Test input", + schema_name="TestSchema", + output_schema=[{"name": "field", "type": "str", "description": "A 
test field"}], + multiple=False, + ) + + with pytest.raises(TypeError, match="Language model does not support structured output."): + component.build_structured_output() + + # Correctly builds the output model from the provided schema + def test_correctly_builds_output_model(self): + # Import internal organization modules, packages, and libraries + from langflow.helpers.base_model import build_model_from_schema + from langflow.inputs.inputs import TableInput + + # Setup + component = StructuredOutputComponent() + schema = [ + { + "name": "name", + "display_name": "Name", + "type": "str", + "description": "Specify the name of the output field.", + }, + { + "name": "description", + "display_name": "Description", + "type": "str", + "description": "Describe the purpose of the output field.", + }, + { + "name": "type", + "display_name": "Type", + "type": "str", + "description": ( + "Indicate the data type of the output field " "(e.g., str, int, float, bool, list, dict)." + ), + }, + { + "name": "multiple", + "display_name": "Multiple", + "type": "boolean", + "description": "Set to True if this output field should be a list of the specified type.", + }, + ] + component.output_schema = TableInput(name="output_schema", display_name="Output Schema", table_schema=schema) + + # Assertion + output_model = build_model_from_schema(schema) + assert isinstance(output_model, type) + + # Properly handles multiple outputs when 'multiple' is set to True + def test_handles_multiple_outputs(self): + # Import internal organization modules, packages, and libraries + from langflow.helpers.base_model import build_model_from_schema + from langflow.inputs.inputs import TableInput + + # Setup + component = StructuredOutputComponent() + schema = [ + { + "name": "name", + "display_name": "Name", + "type": "str", + "description": "Specify the name of the output field.", + }, + { + "name": "description", + "display_name": "Description", + "type": "str", + "description": "Describe the purpose of the output field.", + }, + { + "name": "type", + "display_name": "Type", + "type": "str", + "description": ( + "Indicate the data type of the output field " "(e.g., str, int, float, bool, list, dict)." 
+ ), + }, + { + "name": "multiple", + "display_name": "Multiple", + "type": "boolean", + "description": "Set to True if this output field should be a list of the specified type.", + }, + ] + component.output_schema = TableInput(name="output_schema", display_name="Output Schema", table_schema=schema) + component.multiple = True + + # Assertion + output_model = build_model_from_schema(schema) + assert isinstance(output_model, type) + + def test_empty_output_schema(self): + component = StructuredOutputComponent( + llm=MagicMock(), + input_value="Test input", + schema_name="EmptySchema", + output_schema=[], + multiple=False, + ) + + with pytest.raises(ValueError, match="Output schema cannot be empty"): + component.build_structured_output() + + def test_invalid_output_schema_type(self): + component = StructuredOutputComponent( + llm=MagicMock(), + input_value="Test input", + schema_name="InvalidSchema", + output_schema=[{"name": "field", "type": "invalid_type", "description": "Invalid field"}], + multiple=False, + ) + + with pytest.raises(ValueError, match="Invalid type: invalid_type"): + component.build_structured_output() + + @patch("langflow.components.helpers.structured_output.get_chat_result") + def test_nested_output_schema(self, mock_get_chat_result): + class ChildModel(BaseModel): + child: str = "value" + + class ParentModel(BaseModel): + parent: ChildModel = ChildModel() + + mock_llm = MagicMock() + mock_llm.with_structured_output.return_value = mock_llm + mock_get_chat_result.return_value = ParentModel(parent=ChildModel(child="value")) + + component = StructuredOutputComponent( + llm=mock_llm, + input_value="Test input", + schema_name="NestedSchema", + output_schema=[ + { + "name": "parent", + "type": "dict", + "description": "Parent field", + "fields": [{"name": "child", "type": "str", "description": "Child field"}], + } + ], + multiple=False, + ) + + result = component.build_structured_output() + assert isinstance(result, Data) + assert result.data == {"parent": {"child": "value"}} + + @patch("langflow.components.helpers.structured_output.get_chat_result") + def test_large_input_value(self, mock_get_chat_result): + large_input = "Test input " * 1000 + + class MockBaseModel(BaseModel): + field: str = "value" + + mock_get_chat_result.return_value = MockBaseModel(field="value") + + component = StructuredOutputComponent( + llm=MagicMock(), + input_value=large_input, + schema_name="LargeInputSchema", + output_schema=[{"name": "field", "type": "str", "description": "A test field"}], + multiple=False, + ) + + result = component.build_structured_output() + assert isinstance(result, Data) + assert result.data == {"field": "value"} + mock_get_chat_result.assert_called_once() + + def test_invalid_llm_config(self): + component = StructuredOutputComponent( + llm="invalid_llm", # Not a proper LLM instance + input_value="Test input", + schema_name="InvalidLLMSchema", + output_schema=[{"name": "field", "type": "str", "description": "A test field"}], + multiple=False, + ) + + with pytest.raises(TypeError, match="Language model does not support structured output."): + component.build_structured_output() diff --git a/src/backend/tests/unit/components/inputs/__init__.py b/src/backend/tests/unit/components/inputs/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/src/backend/tests/unit/components/inputs/test_input_components.py b/src/backend/tests/unit/components/inputs/test_input_components.py new file mode 100644 index 000000000000..d8dcc7fd741a --- /dev/null +++ 
b/src/backend/tests/unit/components/inputs/test_input_components.py @@ -0,0 +1,147 @@ +import pytest +from langflow.components.inputs import ChatInput, TextInputComponent +from langflow.schema.message import Message +from langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_USER, MESSAGE_SENDER_USER + +from tests.base import ComponentTestBaseWithClient, ComponentTestBaseWithoutClient + + +@pytest.mark.usefixtures("client") +class TestChatInput(ComponentTestBaseWithClient): + @pytest.fixture + def component_class(self): + return ChatInput + + @pytest.fixture + def default_kwargs(self): + return { + "input_value": "Hello, how are you?", + "should_store_message": True, + "sender": MESSAGE_SENDER_USER, + "sender_name": MESSAGE_SENDER_NAME_USER, + "session_id": "test_session_123", + "files": [], + "background_color": "#f0f0f0", + "chat_icon": "👤", + "text_color": "#000000", + } + + @pytest.fixture + def file_names_mapping(self): + return [ + {"version": "1.0.15", "module": "inputs", "file_name": "ChatInput"}, + {"version": "1.0.16", "module": "inputs", "file_name": "ChatInput"}, + {"version": "1.0.17", "module": "inputs", "file_name": "ChatInput"}, + {"version": "1.0.18", "module": "inputs", "file_name": "ChatInput"}, + {"version": "1.0.19", "module": "inputs", "file_name": "ChatInput"}, + ] + + def test_message_response(self, component_class, default_kwargs): + """Test that the message_response method returns a valid Message object.""" + component = component_class(**default_kwargs) + message = component.message_response() + + assert isinstance(message, Message) + assert message.text == default_kwargs["input_value"] + assert message.sender == default_kwargs["sender"] + assert message.sender_name == default_kwargs["sender_name"] + assert message.session_id == default_kwargs["session_id"] + assert message.files == default_kwargs["files"] + assert message.properties.model_dump() == { + "background_color": default_kwargs["background_color"], + "text_color": default_kwargs["text_color"], + "icon": default_kwargs["chat_icon"], + "edited": False, + "source": {"id": None, "display_name": None, "source": None}, + "allow_markdown": False, + "state": "complete", + "targets": [], + } + + def test_message_response_ai_sender(self, component_class): + """Test message response with AI sender type.""" + kwargs = { + "input_value": "I am an AI assistant", + "sender": MESSAGE_SENDER_AI, + "sender_name": "AI Assistant", + "session_id": "test_session_123", + } + component = component_class(**kwargs) + message = component.message_response() + + assert isinstance(message, Message) + assert message.sender == MESSAGE_SENDER_AI + assert message.sender_name == "AI Assistant" + + def test_message_response_without_session(self, component_class): + """Test message response without session ID.""" + kwargs = { + "input_value": "Test message", + "sender": MESSAGE_SENDER_USER, + "sender_name": MESSAGE_SENDER_NAME_USER, + "session_id": "", # Empty session ID + } + component = component_class(**kwargs) + message = component.message_response() + + assert isinstance(message, Message) + assert message.session_id == "" + + def test_message_response_with_files(self, component_class, tmp_path): + """Test message response with file attachments.""" + # Create a temporary test file + test_file = tmp_path / "test.txt" + test_file.write_text("Test content") + + kwargs = { + "input_value": "Message with file", + "sender": MESSAGE_SENDER_USER, + "sender_name": MESSAGE_SENDER_NAME_USER, + "session_id": "test_session_123", + 
"files": [str(test_file)], + } + component = component_class(**kwargs) + message = component.message_response() + + assert isinstance(message, Message) + assert len(message.files) == 1 + assert message.files[0] == str(test_file) + + def test_message_storage_disabled(self, component_class): + """Test message response when storage is disabled.""" + kwargs = { + "input_value": "Test message", + "should_store_message": False, + "sender": MESSAGE_SENDER_USER, + "sender_name": MESSAGE_SENDER_NAME_USER, + "session_id": "test_session_123", + } + component = component_class(**kwargs) + message = component.message_response() + + assert isinstance(message, Message) + # The message should still be created but not stored + assert message.text == "Test message" + + +class TestTextInputComponent(ComponentTestBaseWithoutClient): + @pytest.fixture + def component_class(self): + return TextInputComponent + + @pytest.fixture + def default_kwargs(self): + return { + "input_value": "Hello, world!", + "data_template": "{text}", + } + + @pytest.fixture + def file_names_mapping(self): + return [ + {"version": "1.0.15", "module": "inputs", "file_name": "TextInput"}, + {"version": "1.0.16", "module": "inputs", "file_name": "TextInput"}, + {"version": "1.0.17", "module": "inputs", "file_name": "TextInput"}, + {"version": "1.0.18", "module": "inputs", "file_name": "TextInput"}, + {"version": "1.0.19", "module": "inputs", "file_name": "TextInput"}, + ] diff --git a/src/backend/tests/unit/components/models/__init__.py b/src/backend/tests/unit/components/models/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/src/backend/tests/unit/components/models/test_chatollama_component.py b/src/backend/tests/unit/components/models/test_chatollama_component.py new file mode 100644 index 000000000000..0f5784de947f --- /dev/null +++ b/src/backend/tests/unit/components/models/test_chatollama_component.py @@ -0,0 +1,126 @@ +from unittest.mock import MagicMock, patch +from urllib.parse import urljoin + +import pytest +from langchain_ollama import ChatOllama +from langflow.components.models import ChatOllamaComponent + + +@pytest.fixture +def component(): + return ChatOllamaComponent() + + +@patch("httpx.Client.get") +def test_get_model_success(mock_get, component): + mock_response = MagicMock() + mock_response.json.return_value = {"models": [{"name": "model1"}, {"name": "model2"}]} + mock_response.raise_for_status.return_value = None + mock_get.return_value = mock_response + + base_url = "http://localhost:11434" + + model_names = component.get_model(base_url) + + expected_url = urljoin(base_url, "/api/tags") + + mock_get.assert_called_once_with(expected_url) + + assert model_names == ["model1", "model2"] + + +@patch("httpx.Client.get") +def test_get_model_failure(mock_get, component): + # Mock the response for the HTTP GET request to raise an exception + mock_get.side_effect = Exception("HTTP request failed") + + url = "http://localhost:11434/api/tags" + + # Assert that the ValueError is raised when an exception occurs + with pytest.raises(ValueError, match="Could not retrieve models"): + component.get_model(url) + + +def test_update_build_config_mirostat_disabled(component): + build_config = { + "mirostat_eta": {"advanced": False, "value": 0.1}, + "mirostat_tau": {"advanced": False, "value": 5}, + } + field_value = "Disabled" + field_name = "mirostat" + + updated_config = component.update_build_config(build_config, field_value, field_name) + + assert updated_config["mirostat_eta"]["advanced"] is True + 
assert updated_config["mirostat_tau"]["advanced"] is True + assert updated_config["mirostat_eta"]["value"] is None + assert updated_config["mirostat_tau"]["value"] is None + + +def test_update_build_config_mirostat_enabled(component): + build_config = { + "mirostat_eta": {"advanced": False, "value": None}, + "mirostat_tau": {"advanced": False, "value": None}, + } + field_value = "Mirostat 2.0" + field_name = "mirostat" + + updated_config = component.update_build_config(build_config, field_value, field_name) + + assert updated_config["mirostat_eta"]["advanced"] is False + assert updated_config["mirostat_tau"]["advanced"] is False + assert updated_config["mirostat_eta"]["value"] == 0.2 + assert updated_config["mirostat_tau"]["value"] == 10 + + +@patch("httpx.Client.get") +def test_update_build_config_model_name(mock_get, component): + # Mock the response for the HTTP GET request + mock_response = MagicMock() + mock_response.json.return_value = {"models": [{"name": "model1"}, {"name": "model2"}]} + mock_response.raise_for_status.return_value = None + mock_get.return_value = mock_response + + build_config = { + "base_url": {"load_from_db": False, "value": None}, + "model_name": {"options": []}, + } + field_value = None + field_name = "model_name" + + updated_config = component.update_build_config(build_config, field_value, field_name) + + assert updated_config["model_name"]["options"] == ["model1", "model2"] + + +def test_update_build_config_keep_alive(component): + build_config = {"keep_alive": {"value": None, "advanced": False}} + field_value = "Keep" + field_name = "keep_alive_flag" + + updated_config = component.update_build_config(build_config, field_value, field_name) + assert updated_config["keep_alive"]["value"] == "-1" + assert updated_config["keep_alive"]["advanced"] is True + + field_value = "Immediately" + updated_config = component.update_build_config(build_config, field_value, field_name) + assert updated_config["keep_alive"]["value"] == "0" + assert updated_config["keep_alive"]["advanced"] is True + + +@patch( + "langchain_community.chat_models.ChatOllama", + return_value=ChatOllama(base_url="http://localhost:11434", model="llama3.1"), +) +def test_build_model(_mock_chat_ollama, component): # noqa: PT019 + component.base_url = "http://localhost:11434" + component.model_name = "llama3.1" + component.mirostat = "Mirostat 2.0" + component.mirostat_eta = 0.2 # Ensure this is set as a float + component.mirostat_tau = 10.0 # Ensure this is set as a float + component.temperature = 0.2 + component.verbose = True + model = component.build_model() + assert isinstance(model, ChatOllama) + assert model.base_url == "http://localhost:11434" + assert model.model == "llama3.1" diff --git a/src/backend/tests/unit/components/models/test_huggingface.py b/src/backend/tests/unit/components/models/test_huggingface.py new file mode 100644 index 000000000000..b813c5b473a9 --- /dev/null +++ b/src/backend/tests/unit/components/models/test_huggingface.py @@ -0,0 +1,30 @@ +from langflow.components.models.huggingface import HuggingFaceEndpointsComponent +from langflow.inputs.inputs import DictInput, DropdownInput, FloatInput, HandleInput, IntInput, SecretStrInput, StrInput + + +def test_huggingface_inputs(): + component = HuggingFaceEndpointsComponent() + inputs = component.inputs + + # Define expected input types and their names + expected_inputs = { + "model_id": StrInput, + "max_new_tokens": IntInput, + "top_k": IntInput, + "top_p": FloatInput, + "typical_p": FloatInput, + "temperature": FloatInput, + 
"repetition_penalty": FloatInput, + "inference_endpoint": StrInput, + "task": DropdownInput, + "huggingfacehub_api_token": SecretStrInput, + "model_kwargs": DictInput, + "retry_attempts": IntInput, + "output_parser": HandleInput, + } + + # Check if all expected inputs are present + for name, input_type in expected_inputs.items(): + assert any( + isinstance(inp, input_type) and inp.name == name for inp in inputs + ), f"Missing or incorrect input: {name}" diff --git a/src/backend/tests/unit/components/outputs/__init__.py b/src/backend/tests/unit/components/outputs/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/src/backend/tests/unit/components/outputs/test_output_components.py b/src/backend/tests/unit/components/outputs/test_output_components.py new file mode 100644 index 000000000000..78a1bb1180ce --- /dev/null +++ b/src/backend/tests/unit/components/outputs/test_output_components.py @@ -0,0 +1,56 @@ +import pytest +from langflow.components.outputs import ChatOutput, TextOutputComponent +from langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI + +from tests.base import ComponentTestBaseWithClient, ComponentTestBaseWithoutClient + + +@pytest.mark.usefixtures("client") +class TestChatOutput(ComponentTestBaseWithClient): + @pytest.fixture + def component_class(self): + return ChatOutput + + @pytest.fixture + def default_kwargs(self): + return { + "input_value": "Hello, how are you?", + "should_store_message": True, + "sender": MESSAGE_SENDER_AI, + "sender_name": MESSAGE_SENDER_NAME_AI, + "session_id": "test_session_123", + "data_template": "{text}", + "background_color": "#f0f0f0", + "chat_icon": "🤖", + "text_color": "#000000", + } + + @pytest.fixture + def file_names_mapping(self): + return [ + {"version": "1.0.15", "module": "outputs", "file_name": "ChatOutput"}, + {"version": "1.0.16", "module": "outputs", "file_name": "ChatOutput"}, + {"version": "1.0.17", "module": "outputs", "file_name": "ChatOutput"}, + {"version": "1.0.18", "module": "outputs", "file_name": "ChatOutput"}, + {"version": "1.0.19", "module": "outputs", "file_name": "ChatOutput"}, + ] + + +class TestTextOutputComponent(ComponentTestBaseWithoutClient): + @pytest.fixture + def component_class(self): + return TextOutputComponent + + @pytest.fixture + def default_kwargs(self): + return { + "input_value": "Hello, world!", + } + + @pytest.fixture + def file_names_mapping(self): + return [ + {"version": "1.0.17", "module": "outputs", "file_name": "TextOutput"}, + {"version": "1.0.18", "module": "outputs", "file_name": "TextOutput"}, + {"version": "1.0.19", "module": "outputs", "file_name": "TextOutput"}, + ] diff --git a/src/backend/tests/unit/components/prompts/__init__.py b/src/backend/tests/unit/components/prompts/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/src/backend/tests/unit/components/prompts/test_prompt_component.py b/src/backend/tests/unit/components/prompts/test_prompt_component.py index 8f8494013817..ba3bbf772019 100644 --- a/src/backend/tests/unit/components/prompts/test_prompt_component.py +++ b/src/backend/tests/unit/components/prompts/test_prompt_component.py @@ -1,19 +1,38 @@ import pytest +from langflow.components.prompts import PromptComponent -from langflow.components.prompts.Prompt import PromptComponent # type: ignore +from tests.base import ComponentTestBaseWithClient -@pytest.fixture -def client(): - pass +@pytest.mark.usefixtures("client") +class TestPromptComponent(ComponentTestBaseWithClient): + @pytest.fixture + 
def component_class(self): + return PromptComponent + @pytest.fixture + def default_kwargs(self): + return {"template": "Hello {name}!", "name": "John", "_session_id": "123"} -class TestPromptComponent: - def test_post_code_processing(self): - component = PromptComponent(template="Hello {name}!", name="John") + @pytest.fixture + def file_names_mapping(self): + return [ + {"version": "1.0.15", "module": "prompts", "file_name": "Prompt"}, + {"version": "1.0.16", "module": "prompts", "file_name": "Prompt"}, + {"version": "1.0.17", "module": "prompts", "file_name": "Prompt"}, + {"version": "1.0.18", "module": "prompts", "file_name": "Prompt"}, + {"version": "1.0.19", "module": "prompts", "file_name": "Prompt"}, + ] + + def test_post_code_processing(self, component_class, default_kwargs): + component = component_class(**default_kwargs) frontend_node = component.to_frontend_node() node_data = frontend_node["data"]["node"] assert node_data["template"]["template"]["value"] == "Hello {name}!" assert "name" in node_data["custom_fields"]["template"] assert "name" in node_data["template"] assert node_data["template"]["name"]["value"] == "John" + + def test_prompt_component_latest(self, component_class, default_kwargs): + result = component_class(**default_kwargs)() + assert result is not None diff --git a/src/backend/tests/unit/components/prototypes/__init__.py b/src/backend/tests/unit/components/prototypes/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/src/backend/tests/unit/components/prototypes/test_create_data_component.py b/src/backend/tests/unit/components/prototypes/test_create_data_component.py new file mode 100644 index 000000000000..22fb8a23df54 --- /dev/null +++ b/src/backend/tests/unit/components/prototypes/test_create_data_component.py @@ -0,0 +1,110 @@ +import pytest +from langflow.components.processing import CreateDataComponent +from langflow.schema import Data + + +@pytest.fixture +def create_data_component(): + return CreateDataComponent() + + +def test_update_build_config(create_data_component): + build_config = { + "number_of_fields": { + "type": "int", + "value": 2, + }, + "text_key": { + "type": "str", + "value": "", + }, + "text_key_validator": { + "type": "bool", + "value": False, + }, + } + updated_config = create_data_component.update_build_config( + build_config=build_config, field_value=3, field_name="number_of_fields" + ) + + assert "field_1_key" in updated_config + assert "field_2_key" in updated_config + assert "field_3_key" in updated_config + assert updated_config["number_of_fields"]["value"] == 3 + + +def test_update_build_config_exceed_limit(create_data_component): + build_config = { + "number_of_fields": { + "type": "int", + "value": 2, + }, + "text_key": { + "type": "str", + "value": "", + }, + "text_key_validator": { + "type": "bool", + "value": False, + }, + } + with pytest.raises(ValueError, match="Number of fields cannot exceed 15."): + create_data_component.update_build_config(build_config, 16, "number_of_fields") + + +async def test_build_data(create_data_component): + create_data_component._attributes = { + "field_1_key": {"key1": "value1"}, + "field_2_key": {"key2": "value2"}, + } + create_data_component.text_key = "key1" + create_data_component.text_key_validator = False + + result = await create_data_component.build_data() + + assert isinstance(result, Data) + assert result.data == {"key1": "value1", "key2": "value2"} + assert result.text_key == "key1" + + +def test_get_data(create_data_component): + 
create_data_component._attributes = { + "field_1_key": {"key1": "value1"}, + "field_2_key": {"key2": "value2"}, + } + + result = create_data_component.get_data() + + assert result == {"key1": "value1", "key2": "value2"} + + +def test_validate_text_key_valid(create_data_component): + # Arrange + create_data_component._attributes = { + "field_1_key": {"key1": "value1"}, + "field_2_key": {"key2": "value2"}, + } + create_data_component.text_key = "key1" + + # Act & Assert + try: + create_data_component.validate_text_key() + except ValueError: + pytest.fail("validate_text_key() raised ValueError unexpectedly!") + + # Additional assertions + assert create_data_component.text_key == "key1" + assert "key1" in create_data_component.get_data() + + +def test_validate_text_key_invalid(create_data_component): + # Arrange + create_data_component._attributes = { + "field_1_key": {"key1": "value1"}, + "field_2_key": {"key2": "value2"}, + } + create_data_component.text_key = "invalid_key" + + # Act & Assert + with pytest.raises(ValueError, match="Text Key: 'invalid_key' not found in the Data keys: 'key1, key2'"): + create_data_component.validate_text_key() diff --git a/src/backend/tests/unit/components/prototypes/test_update_data_component.py b/src/backend/tests/unit/components/prototypes/test_update_data_component.py new file mode 100644 index 000000000000..747b37569f2c --- /dev/null +++ b/src/backend/tests/unit/components/prototypes/test_update_data_component.py @@ -0,0 +1,100 @@ +import pytest +from langflow.components.processing import UpdateDataComponent +from langflow.schema import Data + + +@pytest.fixture +def update_data_component(): + return UpdateDataComponent() + + +def test_update_build_config(update_data_component): + build_config = { + "number_of_fields": { + "type": "int", + "value": 2, + }, + "text_key": { + "type": "str", + "value": "", + }, + "text_key_validator": { + "type": "bool", + "value": False, + }, + } + updated_config = update_data_component.update_build_config( + build_config=build_config, field_value=3, field_name="number_of_fields" + ) + + assert "field_1_key" in updated_config + assert "field_2_key" in updated_config + assert "field_3_key" in updated_config + assert updated_config["number_of_fields"]["value"] == 3 + + +def test_update_build_config_exceed_limit(update_data_component): + build_config = { + "number_of_fields": { + "type": "int", + "value": 2, + }, + "text_key": { + "type": "str", + "value": "", + }, + "text_key_validator": { + "type": "bool", + "value": False, + }, + } + with pytest.raises(ValueError, match="Number of fields cannot exceed 15."): + update_data_component.update_build_config(build_config, 16, "number_of_fields") + + +async def test_build_data(update_data_component): + update_data_component._attributes = { + "field_1_key": {"key1": "new_value1"}, + "field_2_key": {"key3": "value3"}, + } + update_data_component.text_key = "key1" + update_data_component.text_key_validator = False + update_data_component.old_data = Data(data={"key1": "old_value1", "key2": "value2"}, text_key="key2") + + result = await update_data_component.build_data() + + assert isinstance(result, Data) + assert result.data == {"key1": "new_value1", "key2": "value2", "key3": "value3"} + assert result.text_key == "key1" + + +def test_get_data(update_data_component): + update_data_component._attributes = { + "field_1_key": {"key1": "value1"}, + "field_2_key": {"key2": "value2"}, + } + + result = update_data_component.get_data() + + assert result == {"key1": "value1", "key2": "value2"} 
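The CreateData/UpdateData tests above and below pin down one contract: each per-field dict is overlaid onto the existing data, and text_key must be one of the merged keys. A minimal stand-alone sketch of that contract (merge_fields and check_text_key are illustrative helpers, not Langflow APIs):

def merge_fields(old: dict, fields: dict) -> dict:
    # Overlay each field dict onto a copy of the old data, mirroring build_data.
    merged = dict(old)
    for field in fields.values():
        merged.update(field)
    return merged

def check_text_key(data: dict, text_key: str) -> None:
    # Mirrors the error message asserted in the validate_text_key tests.
    if text_key not in data:
        msg = f"Text Key: '{text_key}' not found in the Data keys: {', '.join(data)}"
        raise ValueError(msg)

merged = merge_fields(
    {"key1": "old_value1", "key2": "value2"},
    {"field_1_key": {"key1": "new_value1"}, "field_2_key": {"key3": "value3"}},
)
assert merged == {"key1": "new_value1", "key2": "value2", "key3": "value3"}
check_text_key(merged, "key1")  # "invalid_key" would raise ValueError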
+ + +def test_validate_text_key_valid(update_data_component): + data = Data(data={"key1": "value1", "key2": "value2"}, text_key="key1") + update_data_component.text_key = "key1" + + try: + update_data_component.validate_text_key(data) + except ValueError: + pytest.fail("validate_text_key() raised ValueError unexpectedly!") + + +def test_validate_text_key_invalid(update_data_component): + data = Data(data={"key1": "value1", "key2": "value2"}, text_key="key1") + update_data_component.text_key = "invalid_key" + with pytest.raises(ValueError) as exc_info: # noqa: PT011 + update_data_component.validate_text_key(data) + expected_error_message = ( + f"Text Key: '{update_data_component.text_key}' not found in the Data keys: {', '.join(data.data.keys())}" + ) + assert str(exc_info.value) == expected_error_message diff --git a/src/backend/tests/unit/components/tools/__init__.py b/src/backend/tests/unit/components/tools/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/src/backend/tests/unit/components/tools/test_python_repl_tool.py b/src/backend/tests/unit/components/tools/test_python_repl_tool.py new file mode 100644 index 000000000000..d0f0921b7caa --- /dev/null +++ b/src/backend/tests/unit/components/tools/test_python_repl_tool.py @@ -0,0 +1,30 @@ +from langflow.components.tools import PythonREPLToolComponent +from langflow.custom import Component +from langflow.custom.utils import build_custom_component_template + + +def test_python_repl_tool_template(): + python_repl_tool = PythonREPLToolComponent() + component = Component(_code=python_repl_tool._code) + frontend_node, _ = build_custom_component_template(component) + assert "outputs" in frontend_node + output_names = [output["name"] for output in frontend_node["outputs"]] + assert "api_run_model" in output_names + assert "api_build_tool" in output_names + assert all(output["types"] != [] for output in frontend_node["outputs"]) + + # Additional assertions specific to PythonREPLToolComponent + input_names = [input_["name"] for input_ in frontend_node["template"].values() if isinstance(input_, dict)] + # assert "input_value" in input_names + assert "name" in input_names + assert "description" in input_names + assert "global_imports" in input_names + + global_imports_input = next( + input_ + for input_ in frontend_node["template"].values() + if isinstance(input_, dict) and input_["name"] == "global_imports" + ) + assert global_imports_input["type"] == "str" + # assert global_imports_input["combobox"] is True + assert global_imports_input["value"] == "math" diff --git a/src/backend/tests/unit/components/tools/test_yfinance_tool.py b/src/backend/tests/unit/components/tools/test_yfinance_tool.py new file mode 100644 index 000000000000..29133929f83a --- /dev/null +++ b/src/backend/tests/unit/components/tools/test_yfinance_tool.py @@ -0,0 +1,14 @@ +from langflow.components.tools import YfinanceToolComponent +from langflow.custom import Component +from langflow.custom.utils import build_custom_component_template + + +def test_yfinance_tool_template(): + yf_tool = YfinanceToolComponent() + component = Component(_code=yf_tool._code) + frontend_node, _ = build_custom_component_template(component) + assert "outputs" in frontend_node + output_names = [output["name"] for output in frontend_node["outputs"]] + assert "api_run_model" in output_names + assert "api_build_tool" in output_names + assert all(output["types"] != [] for output in frontend_node["outputs"]) diff --git a/src/backend/tests/unit/custom/__init__.py 
b/src/backend/tests/unit/custom/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/src/backend/tests/unit/custom/component/__init__.py b/src/backend/tests/unit/custom/component/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/src/backend/tests/unit/custom/component/test_component_to_tool.py b/src/backend/tests/unit/custom/component/test_component_to_tool.py index 076240a4d39a..4b4caec45cb7 100644 --- a/src/backend/tests/unit/custom/component/test_component_to_tool.py +++ b/src/backend/tests/unit/custom/component/test_component_to_tool.py @@ -1,16 +1,21 @@ -import pytest +from collections.abc import Callable -from langflow.components.inputs.ChatInput import ChatInput +from langflow.base.agents.agent import DEFAULT_TOOLS_DESCRIPTION +from langflow.components.agents.agent import AgentComponent +from langflow.components.tools.calculator import CalculatorToolComponent -@pytest.fixture -def client(): - pass +def test_component_to_toolkit(): + calculator_component = CalculatorToolComponent() + agent_component = AgentComponent().set(tools=[calculator_component]) + tools = agent_component.to_toolkit() + assert len(tools) == 1 + tool = tools[0] -def test_component_to_tool(): - chat_input = ChatInput() - tool = chat_input.to_tool() - assert tool.name == "ChatInput" - assert tool.description == "Get chat inputs from the Playground." - assert tool.component._id == chat_input._id + assert tool.name == "Agent" + + assert tool.description == DEFAULT_TOOLS_DESCRIPTION, tool.description + + assert isinstance(tool.coroutine, Callable) + assert tool.args_schema is not None diff --git a/src/backend/tests/unit/custom/component/test_componet_set_functionality.py b/src/backend/tests/unit/custom/component/test_componet_set_functionality.py new file mode 100644 index 000000000000..d6591f40f3e9 --- /dev/null +++ b/src/backend/tests/unit/custom/component/test_componet_set_functionality.py @@ -0,0 +1,49 @@ +import pytest +from langflow.custom import Component +from langflow.inputs.inputs import MessageTextInput, StrInput + + +@pytest.fixture +def setup_component(): + # Create a sample component for testing + component = Component() + # Define inputs for the component + component.inputs = [ + MessageTextInput(name="list_message_input", is_list=True), # Input for a mock component + StrInput(name="mixed_input"), # Input for a mixed list + ] + return component + + +def test_set_with_mixed_list_input(setup_component): + component = setup_component + # Create a mock component to include in the list + mock_component = Component() + message_input_1 = "message data1" + message_input_2 = "message data2" + data = {"mixed_input": [message_input_1, message_input_2], "list_message_input": [message_input_1, mock_component]} + component.set(**data) + + # Assert that the mixed input was set correctly + assert hasattr(component, "mixed_input") + assert len(component.mixed_input) == 2 + assert component.mixed_input[0] == message_input_1 + assert component.mixed_input[1] == message_input_2 + assert component.list_message_input[0] == message_input_1 + assert component.list_message_input[1] == mock_component + + +def test_set_with_message_text_input_list(setup_component): + component = setup_component + # Create a list of MessageTextInput instances + message_input_1 = "message data1" + message_input_2 = "message data2" + data = {"mixed_input": [message_input_1, message_input_2], "list_message_input": [message_input_1, message_input_2]} + # Set a list containing MessageTextInput 
instances + component.set(**data) + + # Assert that the mixed input was set correctly + assert hasattr(component, "mixed_input") + assert len(component.list_message_input) == 2 + assert component.list_message_input[0] == message_input_1 + assert component.list_message_input[1] == message_input_2 diff --git a/src/backend/tests/unit/custom/custom_component/__init__.py b/src/backend/tests/unit/custom/custom_component/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/src/backend/tests/unit/custom/custom_component/test_component.py b/src/backend/tests/unit/custom/custom_component/test_component.py index b52d993e0e27..8a0fd11eeb12 100644 --- a/src/backend/tests/unit/custom/custom_component/test_component.py +++ b/src/backend/tests/unit/custom/custom_component/test_component.py @@ -1,28 +1,60 @@ import pytest - -from langflow.components.agents.CrewAIAgent import CrewAIAgentComponent -from langflow.components.helpers.SequentialTask import SequentialTaskComponent -from langflow.components.inputs.ChatInput import ChatInput +from langflow.components.agents import AgentComponent +from langflow.components.crewai import CrewAIAgentComponent, SequentialTaskComponent +from langflow.components.inputs import ChatInput +from langflow.components.models import OpenAIModelComponent from langflow.components.outputs import ChatOutput - - -@pytest.fixture -def client(): - pass +from langflow.template import Output def test_set_invalid_output(): chatinput = ChatInput() chatoutput = ChatOutput() - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Method build_config is not a valid output of ChatInput"): chatoutput.set(input_value=chatinput.build_config) -def test_set_invalid_input(): +def test_set_component(): crewai_agent = CrewAIAgentComponent() task = SequentialTaskComponent() - with pytest.raises( - ValueError, - match="You set CrewAI Agent as value for `agent`. 
You should pass one of the following: 'build_output'", - ): - task.set(agent=crewai_agent) + task.set(agent=crewai_agent) + assert task._edges[0]["source"] == crewai_agent._id + assert crewai_agent in task._components + + +def _output_required_inputs_are_in_inputs(output: Output, inputs: list[str]): + return all(input_type in inputs for input_type in output.required_inputs) + + +def _assert_all_outputs_have_different_required_inputs(outputs: list[Output]): + required_inputs = [tuple(output.required_inputs) for output in outputs] + assert len(required_inputs) == len(set(required_inputs)), "All outputs must have different required inputs" + return True + + +def test_set_required_inputs(): + chatinput = ChatInput() + + assert all(_output_required_inputs_are_in_inputs(output, chatinput._inputs) for output in chatinput.outputs) + assert _assert_all_outputs_have_different_required_inputs(chatinput.outputs) + + +def test_set_required_inputs_various_components(): + chatinput = ChatInput() + chatoutput = ChatOutput() + task = SequentialTaskComponent() + agent = AgentComponent() + openai_component = OpenAIModelComponent() + + assert all(_output_required_inputs_are_in_inputs(output, chatinput._inputs) for output in chatinput.outputs) + assert all(_output_required_inputs_are_in_inputs(output, chatoutput._inputs) for output in chatoutput.outputs) + assert all(_output_required_inputs_are_in_inputs(output, task._inputs) for output in task.outputs) + assert all(_output_required_inputs_are_in_inputs(output, agent._inputs) for output in agent.outputs) + assert all( + _output_required_inputs_are_in_inputs(output, openai_component._inputs) for output in openai_component.outputs + ) + + assert _assert_all_outputs_have_different_required_inputs(chatinput.outputs) + assert _assert_all_outputs_have_different_required_inputs(chatoutput.outputs) + assert _assert_all_outputs_have_different_required_inputs(task.outputs) + assert _assert_all_outputs_have_different_required_inputs(agent.outputs) diff --git a/src/backend/tests/unit/custom/custom_component/test_component_events.py b/src/backend/tests/unit/custom/custom_component/test_component_events.py new file mode 100644 index 000000000000..a987fd1af47e --- /dev/null +++ b/src/backend/tests/unit/custom/custom_component/test_component_events.py @@ -0,0 +1,244 @@ +import asyncio +from typing import Any +from unittest.mock import MagicMock + +import pytest +from langflow.custom.custom_component.component import Component +from langflow.events.event_manager import EventManager +from langflow.schema.content_block import ContentBlock +from langflow.schema.content_types import TextContent, ToolContent +from langflow.schema.message import Message +from langflow.schema.properties import Source +from langflow.template.field.base import Output + + +async def create_event_queue(): + """Create a queue for testing events.""" + return asyncio.Queue() + + +class ComponentForTesting(Component): + """Test component that implements basic functionality.""" + + def build(self) -> None: + pass + + def get_text(self) -> str: + """Return a simple text output.""" + return "test output" + + def get_tool(self) -> dict[str, Any]: + """Return a tool output.""" + return {"name": "test_tool", "description": "A test tool"} + + +@pytest.mark.usefixtures("client") +async def test_component_message_sending(): + """Test component's message sending functionality.""" + # Create event queue and manager + queue = await create_event_queue() + event_manager = EventManager(queue) + + # Create component + 
component = ComponentForTesting() + component.set_event_manager(event_manager) + + # Create a message + message = Message( + sender="test_sender", + session_id="test_session", + sender_name="test_sender_name", + content_blocks=[ContentBlock(title="Test Block", contents=[TextContent(type="text", text="Test message")])], + ) + + # Send the message + sent_message = await asyncio.to_thread(component.send_message, message) + + # Verify the message was sent + assert sent_message.id is not None + assert len(sent_message.content_blocks) == 1 + assert isinstance(sent_message.content_blocks[0].contents[0], TextContent) + + +@pytest.mark.usefixtures("client") +async def test_component_tool_output(): + """Test component's tool output functionality.""" + # Create event queue and manager + queue = await create_event_queue() + event_manager = EventManager(queue) + + # Create component + component = ComponentForTesting() + component.set_event_manager(event_manager) + + # Create a message with tool content + message = Message( + sender="test_sender", + session_id="test_session", + sender_name="test_sender_name", + content_blocks=[ + ContentBlock( + title="Tool Output", + contents=[ToolContent(type="tool_use", name="test_tool", tool_input={"query": "test input"})], + ) + ], + ) + + # Send the message + sent_message = await asyncio.to_thread(component.send_message, message) + + # Verify the message was stored and processed + assert sent_message.id is not None + assert len(sent_message.content_blocks) == 1 + assert isinstance(sent_message.content_blocks[0].contents[0], ToolContent) + + +@pytest.mark.usefixtures("client") +async def test_component_error_handling(): + """Test component's error handling.""" + # Create event queue and manager + queue = await create_event_queue() + event_manager = EventManager(queue) + + # Create component + component = ComponentForTesting() + component.set_event_manager(event_manager) + + # Trigger an error + class CustomError(Exception): + pass + + try: + msg = "Test error" + raise CustomError(msg) + except CustomError as e: + sent_message = await asyncio.to_thread( + component.send_error, + exception=e, + session_id="test_session", + trace_name="test_trace", + source=Source(id="test_id", display_name="Test Component", source="Test Component"), + ) + + # Verify error message + assert sent_message is not None + assert "Test error" in str(sent_message.text) + + +@pytest.mark.usefixtures("client") +async def test_component_build_results(): + """Test component's build_results functionality.""" + # Create event queue and manager + queue = await create_event_queue() + event_manager = EventManager(queue) + + # Create component + component = ComponentForTesting() + component.set_event_manager(event_manager) + + # Add outputs to the component + component._outputs_map = { + "text_output": Output(name="text_output", method="get_text"), + "tool_output": Output(name="tool_output", method="get_tool"), + } + + # Build results + results, artifacts = await component._build_results() + + # Verify results + assert "text_output" in results + assert results["text_output"] == "test output" + assert "tool_output" in results + assert results["tool_output"]["name"] == "test_tool" + + # Verify artifacts + assert "text_output" in artifacts + assert "tool_output" in artifacts + assert artifacts["text_output"]["type"] == "text" + + +@pytest.mark.usefixtures("client") +async def test_component_logging(): + """Test component's logging functionality.""" + # Create event queue and manager + queue = await 
create_event_queue() + event_manager = EventManager(queue) + + # Create component + component = ComponentForTesting() + component.set_event_manager(event_manager) + + # Set current output (required for logging) + component._current_output = "test_output" + component._id = "test_component_id" # Set component ID + + # Create a custom callback for logging + def log_callback(*, manager: EventManager, event_type: str, data: dict): # noqa: ARG001 + manager.send_event( + event_type="info", data={"message": data["message"], "id": data.get("component_id", "test_id")} + ) + + # Register the log event with custom callback + event_manager.register_event("on_log", "info", callback=log_callback) + + # Log a message + await asyncio.to_thread(component.log, "Test log message") + + # Get the event from the queue + event_id, event_data, _ = queue.get_nowait() + event = event_data.decode("utf-8") + + assert "Test log message" in event + assert event_id.startswith("info-") + + +@pytest.mark.usefixtures("client") +async def test_component_streaming_message(): + """Test component's streaming message functionality.""" + queue = await create_event_queue() + event_manager = EventManager(queue) + event_manager.register_event("on_token", "token") + + # Create a proper mock vertex with graph and flow_id + vertex = MagicMock() + mock_graph = MagicMock() + mock_graph.flow_id = "12345678-1234-5678-1234-567812345678" # Valid UUID string + vertex.graph = mock_graph + + component = ComponentForTesting(_vertex=vertex) + component.set_event_manager(event_manager) + + # Create a chunk class that mimics LangChain's streaming format + class StreamChunk: + def __init__(self, content: str): + self.content = content + + async def text_generator(): + chunks = ["Hello", " ", "World", "!"] + for chunk in chunks: + yield StreamChunk(chunk) + + # Create a streaming message + message = Message( + sender="test_sender", + session_id="test_session", + sender_name="test_sender_name", + text=text_generator(), + ) + + # Send the streaming message + sent_message = await asyncio.to_thread(component.send_message, message) + + # Verify the message + assert sent_message.id is not None + assert sent_message.text == "Hello World!" 
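+    # Each queued item is an (event_id, payload_bytes, timestamp) tuple; the payload
+    # is JSON of the form {"event": "token", ...}, so decoding it and matching on
+    # "token" below keeps only the streaming token events.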
+ + # Check tokens in queue + tokens = [] + while not queue.empty(): + _, event_data, _ = queue.get_nowait() + event = event_data.decode("utf-8") + if "token" in event: + tokens.append(event) + + assert len(tokens) > 0 diff --git a/src/backend/tests/unit/custom/custom_component/test_update_outputs.py b/src/backend/tests/unit/custom/custom_component/test_update_outputs.py new file mode 100644 index 000000000000..0de0de5cc19e --- /dev/null +++ b/src/backend/tests/unit/custom/custom_component/test_update_outputs.py @@ -0,0 +1,245 @@ +import pytest +from langflow.base.tools.constants import TOOL_OUTPUT_DISPLAY_NAME, TOOL_OUTPUT_NAME +from langflow.custom.custom_component.component import Component + + +class TestComponentOutputs: + def test_run_and_validate_update_outputs_tool_mode(self): + """Test run_and_validate_update_outputs with tool_mode field.""" + + class TestComponent(Component): + def build(self) -> None: + pass + + component = TestComponent() + + # Create a frontend node with regular outputs + original_outputs = [ + { + "name": "regular_output", + "type": "str", + "display_name": "Regular Output", + "method": "get_output", + "types": ["Any"], + "selected": "Any", + "value": "__UNDEFINED__", + "cache": True, + "required_inputs": None, + "hidden": None, + } + ] + frontend_node = { + "outputs": original_outputs.copy() # Make a copy to preserve original + } + + # Test enabling tool mode + updated_node = component.run_and_validate_update_outputs( + frontend_node=frontend_node.copy(), # Use a copy to avoid modifying original + field_name="tool_mode", + field_value=True, + ) + + # Verify tool output is added and regular output is removed + assert len(updated_node["outputs"]) == 1 + assert updated_node["outputs"][0]["name"] == TOOL_OUTPUT_NAME + assert updated_node["outputs"][0]["display_name"] == TOOL_OUTPUT_DISPLAY_NAME + + # Test disabling tool mode - use the original frontend node + updated_node = component.run_and_validate_update_outputs( + frontend_node={"outputs": original_outputs.copy()}, # Use original outputs + field_name="tool_mode", + field_value=False, + ) + + # Verify original outputs are restored + assert len(updated_node["outputs"]) == 1 + # Compare only essential fields instead of the entire dict + assert updated_node["outputs"][0]["name"] == original_outputs[0]["name"] + assert updated_node["outputs"][0]["display_name"] == original_outputs[0]["display_name"] + assert updated_node["outputs"][0]["method"] == original_outputs[0]["method"] + assert "types" in updated_node["outputs"][0] + assert "selected" in updated_node["outputs"][0] + + def test_run_and_validate_update_outputs_invalid_output(self): + """Test run_and_validate_update_outputs with invalid output structure.""" + + class TestComponent(Component): + def build(self) -> None: + pass + + component = TestComponent() + + # Create a frontend node with invalid output structure + frontend_node = {"outputs": [{"invalid_field": "value"}]} + + # Test validation fails for invalid output + with pytest.raises(ValueError, match="Invalid output: 1 validation error for Output"): + component.run_and_validate_update_outputs( + frontend_node=frontend_node, field_name="some_field", field_value="some_value" + ) + + def test_run_and_validate_update_outputs_custom_update(self): + """Test run_and_validate_update_outputs with custom update logic.""" + + class CustomComponent(Component): + def build(self) -> None: + pass + + def get_custom(self) -> str: + """Method that returns a string.""" + return "custom output" + + def 
update_outputs(self, frontend_node, field_name, field_value): # noqa: ARG002 + if field_name == "custom_field": + frontend_node["outputs"].append( + { + "name": "custom_output", + "type": "str", + "display_name": "Custom Output", + "method": "get_custom", + "types": ["Any"], + "selected": "Any", + "value": "__UNDEFINED__", + "cache": True, + "required_inputs": None, + "hidden": None, + } + ) + return frontend_node + + component = CustomComponent() + frontend_node = {"outputs": []} + + # Test custom update logic + updated_node = component.run_and_validate_update_outputs( + frontend_node=frontend_node, field_name="custom_field", field_value="custom_value" + ) + + assert len(updated_node["outputs"]) == 1 + assert updated_node["outputs"][0]["name"] == "custom_output" + assert updated_node["outputs"][0]["display_name"] == "Custom Output" + assert updated_node["outputs"][0]["method"] == "get_custom" + assert "types" in updated_node["outputs"][0] + assert "selected" in updated_node["outputs"][0] + + def test_run_and_validate_update_outputs_with_existing_tool_output(self): + """Test run_and_validate_update_outputs when tool output already exists.""" + + class TestComponent(Component): + def build(self) -> None: + pass + + def to_toolkit(self) -> list: + """Method that returns a list of tools.""" + return [] + + component = TestComponent() + + # Create a frontend node with tool output already present + frontend_node = { + "outputs": [ + { + "name": TOOL_OUTPUT_NAME, # Use constant instead of hardcoded string + "type": "Tool", + "display_name": TOOL_OUTPUT_DISPLAY_NAME, # Use constant + "method": "to_toolkit", + "types": ["Tool"], + "selected": "Tool", + "value": "__UNDEFINED__", + "cache": True, + "required_inputs": None, + "hidden": None, + } + ] + } + + # Test enabling tool mode doesn't duplicate tool output + updated_node = component.run_and_validate_update_outputs( + frontend_node=frontend_node, field_name="tool_mode", field_value=True + ) + + assert len(updated_node["outputs"]) == 1 + assert updated_node["outputs"][0]["name"] == TOOL_OUTPUT_NAME # Use constant + assert updated_node["outputs"][0]["display_name"] == TOOL_OUTPUT_DISPLAY_NAME # Use constant + assert "types" in updated_node["outputs"][0] + assert "selected" in updated_node["outputs"][0] + + def test_run_and_validate_update_outputs_with_multiple_outputs(self): + """Test run_and_validate_update_outputs with multiple outputs.""" + + class TestComponent(Component): + def build(self) -> None: + pass + + def get_output1(self) -> str: + """Method that returns a string.""" + return "output1" + + def get_output2(self) -> str: + """Method that returns a string.""" + return "output2" + + def update_outputs(self, frontend_node, field_name, field_value): # noqa: ARG002 + if field_name == "add_output": + frontend_node["outputs"].extend( + [ + { + "name": "output1", + "type": "str", + "display_name": "Output 1", + "method": "get_output1", + }, + { + "name": "output2", + "type": "str", + "display_name": "Output 2", + "method": "get_output2", + }, + ] + ) + return frontend_node + + component = TestComponent() + frontend_node = {"outputs": []} + + # Test adding multiple outputs + updated_node = component.run_and_validate_update_outputs( + frontend_node=frontend_node, field_name="add_output", field_value=True + ) + + assert len(updated_node["outputs"]) == 2 + assert updated_node["outputs"][0]["name"] == "output1" + assert updated_node["outputs"][1]["name"] == "output2" + for output in updated_node["outputs"]: + assert "types" in output + assert 
"selected" in output + # The component adds only 'Text' type for string outputs + assert set(output["types"]) == {"Text"} + assert output["selected"] == "Text" + + def test_run_and_validate_update_outputs_output_validation(self): + """Test output validation in run_and_validate_update_outputs.""" + + class TestComponent(Component): + def build(self) -> None: + pass + + def get_test(self) -> str: + """Test method.""" + return "test" + + component = TestComponent() + + # Test invalid method name case + invalid_node = { + "outputs": [{"name": "test", "type": "str", "method": "nonexistent_method", "display_name": "Test"}] + } + + with pytest.raises(AttributeError, match="nonexistent_method not found in TestComponent"): + component.run_and_validate_update_outputs(frontend_node=invalid_node, field_name="test", field_value=True) + + # Test missing method case + invalid_node = {"outputs": [{"name": "test", "type": "str", "display_name": "Test"}]} + + with pytest.raises(ValueError, match="Output test does not have a method"): + component.run_and_validate_update_outputs(frontend_node=invalid_node, field_name="test", field_value=True) diff --git a/src/backend/tests/unit/events/__init__.py b/src/backend/tests/unit/events/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/src/backend/tests/unit/events/test_event_manager.py b/src/backend/tests/unit/events/test_event_manager.py new file mode 100644 index 000000000000..4b15518d1768 --- /dev/null +++ b/src/backend/tests/unit/events/test_event_manager.py @@ -0,0 +1,228 @@ +import asyncio +import json +import time +import uuid + +import pytest +from langflow.events.event_manager import EventManager +from langflow.schema.log import LoggableType + + +class TestEventManager: + # Registering an event with a valid name and callback using a mock callback function + def test_register_event_with_valid_name_and_callback_with_mock_callback(self): + def mock_callback(event_type: str, data: LoggableType): + pass + + queue = asyncio.Queue() + manager = EventManager(queue) + manager.register_event("on_test_event", "test_type", mock_callback) + assert "on_test_event" in manager.events + assert manager.events["on_test_event"].func == mock_callback + + # Registering an event with an empty name + + def test_register_event_with_empty_name(self): + queue = asyncio.Queue() + manager = EventManager(queue) + with pytest.raises(ValueError, match="Event name cannot be empty"): + manager.register_event("", "test_type") + + # Registering an event with a valid name and no callback + def test_register_event_with_valid_name_and_no_callback(self): + queue = asyncio.Queue() + manager = EventManager(queue) + manager.register_event("on_test_event", "test_type") + assert "on_test_event" in manager.events + assert manager.events["on_test_event"].func == manager.send_event + + # Sending an event with valid event_type and data using pytest-asyncio plugin + async def test_sending_event_with_valid_type_and_data_asyncio_plugin(self): + async def mock_queue_put_nowait(item): + await queue.put(item) + + queue = asyncio.Queue() + queue.put_nowait = mock_queue_put_nowait + manager = EventManager(queue) + manager.register_event("on_test_event", "test_type", manager.noop) + event_type = "test_type" + data = "test_data" + manager.send_event(event_type=event_type, data=data) + await queue.join() + assert queue.empty() + + # Accessing a non-registered event callback via __getattr__ with the recommended fix + def 
test_accessing_non_registered_event_callback_with_recommended_fix(self): + queue = asyncio.Queue() + manager = EventManager(queue) + result = manager.__getattr__("non_registered_event") + assert result == manager.noop + + # Accessing a registered event callback via __getattr__ + def test_accessing_registered_event_callback(self): + def mock_callback(event_type: str, data: LoggableType): + pass + + queue = asyncio.Queue() + manager = EventManager(queue) + manager.register_event("on_test_event", "test_type", mock_callback) + assert manager.on_test_event.func == mock_callback + + # Handling a large number of events in the queue + def test_handling_large_number_of_events(self): + async def mock_queue_put_nowait(item): + pass + + queue = asyncio.Queue() + queue.put_nowait = mock_queue_put_nowait + manager = EventManager(queue) + + for i in range(1000): + manager.register_event(f"on_test_event_{i}", "test_type", manager.noop) + + assert len(manager.events) == 1000 + + # Testing registration of an event with an invalid name with the recommended fix + def test_register_event_with_invalid_name_fixed(self): + def mock_callback(event_type, data): + pass + + queue = asyncio.Queue() + manager = EventManager(queue) + with pytest.raises(ValueError, match="Event name cannot be empty"): + manager.register_event("", "test_type", mock_callback) + with pytest.raises(ValueError, match="Event name must start with 'on_'"): + manager.register_event("invalid_name", "test_type", mock_callback) + + # Sending an event with complex data and verifying successful event transmission + async def test_sending_event_with_complex_data(self): + queue = asyncio.Queue() + manager = EventManager(queue) + manager.register_event("on_test_event", "test_type", manager.noop) + data = {"key": "value", "nested": [1, 2, 3]} + manager.send_event(event_type="test_type", data=data) + event_id, str_data, event_time = await queue.get() + assert event_id is not None + assert str_data is not None + assert event_time <= time.time() + + # Sending an event with None data + def test_sending_event_with_none_data(self): + queue = asyncio.Queue() + manager = EventManager(queue) + manager.register_event("on_test_event", "test_type") + assert "on_test_event" in manager.events + assert manager.events["on_test_event"].func.__name__ == "send_event" + + # Ensuring thread-safety when accessing the events dictionary + async def test_thread_safety_accessing_events_dictionary(self): + def mock_callback(event_type: str, data: LoggableType): + pass + + async def register_events(manager): + manager.register_event("on_test_event_1", "test_type_1", mock_callback) + manager.register_event("on_test_event_2", "test_type_2", mock_callback) + + async def access_events(manager): + assert "on_test_event_1" in manager.events + assert "on_test_event_2" in manager.events + + queue = asyncio.Queue() + manager = EventManager(queue) + + await asyncio.gather(register_events(manager), access_events(manager)) + + # Checking the performance impact of frequent event registrations + def test_performance_impact_frequent_registrations(self): + async def mock_callback(event_type: str, data: LoggableType): + pass + + queue = asyncio.Queue() + manager = EventManager(queue) + for i in range(1000): + manager.register_event(f"on_test_event_{i}", "test_type", mock_callback) + assert len(manager.events) == 1000 + + # Verifying the uniqueness of event IDs for each event triggered + async def test_event_id_uniqueness_with_await(self): + queue 
= asyncio.Queue() + manager = EventManager(queue) + manager.register_event("on_test_event", "test_type") + manager.on_test_event(data={"data_1": "value_1"}) + manager.on_test_event(data={"data_2": "value_2"}) + try: + event_id_1, _, _ = await queue.get() + event_id_2, _, _ = await queue.get() + except asyncio.TimeoutError: + pytest.fail("Test timed out while waiting for queue items") + + assert event_id_1 != event_id_2 + + # Ensuring the queue receives the correct event data format + async def test_queue_receives_correct_event_data_format(self): + async def mock_queue_put_nowait(data): + pass + + async def mock_queue_get(): + return (uuid.uuid4(), b'{"event": "test_type", "data": "test_data"}\n\n', time.time()) + + queue = asyncio.Queue() + queue.put_nowait = mock_queue_put_nowait + queue.get = mock_queue_get + + manager = EventManager(queue) + manager.register_event("on_test_event", "test_type", manager.noop) + event_data = "test_data" + manager.send_event(event_type="test_type", data=event_data) + + event_id, str_data, _ = await queue.get() + assert isinstance(event_id, uuid.UUID) + assert isinstance(str_data, bytes) + assert json.loads(str_data.decode("utf-8")) == {"event": "test_type", "data": event_data} + + # Registering an event without specifying the event_type argument and providing the event_type argument + def test_register_event_without_event_type_argument_fixed(self): + class MockQueue: + def __init__(self): + self.data = [] + + def put_nowait(self, item): + self.data.append(item) + + queue = MockQueue() + event_manager = EventManager(queue) + event_manager.register_event("on_test_event", "test_event_type", callback=event_manager.noop) + event_manager.send_event(event_type="test_type", data={"key": "value"}) + + assert len(queue.data) == 1 + event_id, str_data, timestamp = queue.data[0] + # event_id follows this pattern: f"{event_type}-{uuid.uuid4()}" + event_type_from_id = event_id.split("-")[0] + assert event_type_from_id == "test_type" + uuid_from_id = event_id.split(event_type_from_id)[1] + assert isinstance(uuid_from_id, str) + # assert that the uuid_from_id is a valid uuid + try: + uuid.UUID(uuid_from_id) + except ValueError: + pytest.fail(f"Invalid UUID: {uuid_from_id}") + assert isinstance(str_data, bytes) + assert isinstance(timestamp, float) + + # Accessing a non-registered event callback via __getattr__ + def test_accessing_non_registered_callback(self): + class MockQueue: + def __init__(self): + pass + + def put_nowait(self, item): + pass + + queue = MockQueue() + event_manager = EventManager(queue) + + # Accessing a non-registered event callback should return the 'noop' function + callback = event_manager.on_non_existing_event + assert callback.__name__ == "noop" diff --git a/src/backend/tests/unit/exceptions/__init__.py b/src/backend/tests/unit/exceptions/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/src/backend/tests/unit/exceptions/test_api.py b/src/backend/tests/unit/exceptions/test_api.py index 542986f59272..9934eb7ce46b 100644 --- a/src/backend/tests/unit/exceptions/test_api.py +++ b/src/backend/tests/unit/exceptions/test_api.py @@ -1,4 +1,5 @@ -from unittest.mock import patch, Mock +from unittest.mock import Mock, patch + from langflow.services.database.models.flow.model import Flow @@ -15,28 +16,32 @@ def test_api_exception(): } # Expected result - with patch( - "langflow.services.database.models.flow.utils.get_outdated_components", return_value=mock_outdated_components + with ( + patch( + 
"langflow.services.database.models.flow.utils.get_outdated_components", + return_value=mock_outdated_components, + ), + patch("langflow.api.utils.get_suggestion_message", return_value=mock_suggestion_message), + patch( + "langflow.services.database.models.flow.utils.get_components_versions", + return_value=mock_component_versions, + ), ): - with patch("langflow.api.utils.get_suggestion_message", return_value=mock_suggestion_message): - with patch( - "langflow.services.database.models.flow.utils.get_components_versions", - return_value=mock_component_versions, - ): - # Create an APIException instance - api_exception = APIException(mock_exception, mock_flow) - - # Expected body - expected_body = ExceptionBody( - message="Test exception", - suggestion="The flow contains 2 outdated components. We recommend updating the following components: component1, component2.", - ) - - # Assert the status code - assert api_exception.status_code == 500 - - # Assert the detail - assert api_exception.detail == expected_body.model_dump_json() + # Create an APIException instance + api_exception = APIException(mock_exception, mock_flow) + + # Expected body + expected_body = ExceptionBody( + message="Test exception", + suggestion="The flow contains 2 outdated components. " + "We recommend updating the following components: component1, component2.", + ) + + # Assert the status code + assert api_exception.status_code == 500 + + # Assert the detail + assert api_exception.detail == expected_body.model_dump_json() def test_api_exception_no_flow(): diff --git a/src/backend/tests/unit/graph/__init__.py b/src/backend/tests/unit/graph/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/src/backend/tests/unit/graph/edge/__init__.py b/src/backend/tests/unit/graph/edge/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/src/backend/tests/unit/graph/edge/test_edge_base.py b/src/backend/tests/unit/graph/edge/test_edge_base.py index 62676c4fbc94..a3e212c07b3d 100644 --- a/src/backend/tests/unit/graph/edge/test_edge_base.py +++ b/src/backend/tests/unit/graph/edge/test_edge_base.py @@ -1,18 +1,12 @@ import pytest +from langflow.components.inputs import ChatInput +from langflow.components.models import OpenAIModelComponent +from langflow.components.outputs import ChatOutput +from langflow.components.prompts import PromptComponent +from langflow.graph import Graph -from langflow.components.inputs.ChatInput import ChatInput -from langflow.components.models.OpenAIModel import OpenAIModelComponent -from langflow.components.outputs.ChatOutput import ChatOutput -from langflow.components.prompts.Prompt import PromptComponent -from langflow.graph.graph.base import Graph - -@pytest.fixture -def client(): - pass - - -def test_edge_raises_error_on_invalid_target_handle(client): +def test_edge_raises_error_on_invalid_target_handle(): template = """Answer the user as if you were a pirate. 
User: {user_input} diff --git a/src/backend/tests/unit/graph/graph/__init__.py b/src/backend/tests/unit/graph/graph/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/src/backend/tests/unit/graph/graph/state/__init__.py b/src/backend/tests/unit/graph/graph/state/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/src/backend/tests/unit/graph/graph/state/test_state_model.py b/src/backend/tests/unit/graph/graph/state/test_state_model.py index c7b78842aa28..b348266de655 100644 --- a/src/backend/tests/unit/graph/graph/state/test_state_model.py +++ b/src/backend/tests/unit/graph/graph/state/test_state_model.py @@ -1,17 +1,11 @@ import pytest -from pydantic import Field - from langflow.components.inputs import ChatInput -from langflow.components.outputs.ChatOutput import ChatOutput -from langflow.graph.graph.base import Graph +from langflow.components.outputs import ChatOutput +from langflow.graph import Graph from langflow.graph.graph.constants import Finish from langflow.graph.state.model import create_state_model from langflow.template.field.base import UNDEFINED - - -@pytest.fixture -def client(): - pass +from pydantic import Field @pytest.fixture @@ -28,27 +22,27 @@ class TestCreateStateModel: # Successfully create a model with valid method return type annotations def test_create_model_with_valid_return_type_annotations(self, chat_input_component): - StateModel = create_state_model(method_one=chat_input_component.message_response) + state_model = create_state_model(method_one=chat_input_component.message_response) - state_instance = StateModel() + state_instance = state_model() assert state_instance.method_one is UNDEFINED chat_input_component.set_output_value("message", "test") assert state_instance.method_one == "test" def test_create_model_and_assign_values_fails(self, chat_input_component): - StateModel = create_state_model(method_one=chat_input_component.message_response) + state_model = create_state_model(method_one=chat_input_component.message_response) - state_instance = StateModel() + state_instance = state_model() state_instance.method_one = "test" assert state_instance.method_one == "test" def test_create_with_multiple_components(self, chat_input_component, chat_output_component): - NewStateModel = create_state_model( + new_state_model = create_state_model( model_name="NewStateModel", first_method=chat_input_component.message_response, second_method=chat_output_component.message_response, ) - state_instance = NewStateModel() + state_instance = new_state_model() assert state_instance.first_method is UNDEFINED assert state_instance.second_method is UNDEFINED state_instance.first_method = "test" @@ -57,9 +51,9 @@ def test_create_with_multiple_components(self, chat_input_component, chat_output assert state_instance.second_method == 123 def test_create_with_pydantic_field(self, chat_input_component): - StateModel = create_state_model(method_one=chat_input_component.message_response, my_attribute=Field(None)) + state_model = create_state_model(method_one=chat_input_component.message_response, my_attribute=Field(None)) - state_instance = StateModel() + state_instance = state_model() state_instance.method_one = "test" state_instance.my_attribute = "test" assert state_instance.method_one == "test" @@ -70,14 +64,14 @@ def test_create_with_pydantic_field(self, chat_input_component): # Creates a model with fields based on provided keyword arguments def test_create_model_with_fields_from_kwargs(self): - StateModel = 
create_state_model(field_one=(str, "default"), field_two=(int, 123)) - state_instance = StateModel() + state_model = create_state_model(field_one=(str, "default"), field_two=(int, 123)) + state_instance = state_model() assert state_instance.field_one == "default" assert state_instance.field_two == 123 # Raises ValueError for invalid field type in tuple-based definitions - def test_raise_valueerror_for_invalid_field_type_in_tuple(self): - with pytest.raises(ValueError, match="Invalid type for field invalid_field"): + def test_raise_typeerror_for_invalid_field_type_in_tuple(self): + with pytest.raises(TypeError, match="Invalid type for field invalid_field"): create_state_model(invalid_field=("not_a_type", "default")) # Raises ValueError for unsupported value types in keyword arguments @@ -87,16 +81,16 @@ def test_raise_valueerror_for_unsupported_value_types(self): # Handles empty keyword arguments gracefully def test_handle_empty_kwargs_gracefully(self): - StateModel = create_state_model() - state_instance = StateModel() + state_model = create_state_model() + state_instance = state_model() assert state_instance is not None # Ensures model name defaults to "State" if not provided def test_default_model_name_to_state(self): - StateModel = create_state_model() - assert StateModel.__name__ == "State" - OtherNameModel = create_state_model(model_name="OtherName") - assert OtherNameModel.__name__ == "OtherName" + state_model = create_state_model() + assert state_model.__name__ == "State" + other_name_model = create_state_model(model_name="OtherName") + assert other_name_model.__name__ == "OtherName" # Validates that callable values are properly type-annotated @@ -112,12 +106,12 @@ def method_two(self) -> int: with pytest.raises(ValueError, match="get_output_by_method"): create_state_model(method_one=mock_component.method_one, method_two=mock_component.method_two) + @pytest.mark.skip(reason="Temporarily disabled: sqlalchemy.exc.OperationalError") def test_graph_functional_start_state_update(self): - chat_input = ChatInput(_id="chat_input") - chat_output = ChatOutput(input_value="test", _id="chat_output") + chat_input = ChatInput(_id="chat_input", session_id="test", input_value="test") + chat_output = ChatOutput(input_value="test", _id="chat_output", session_id="test") chat_output.set(sender_name=chat_input.message_response) - ChatStateModel = create_state_model(model_name="ChatState", message=chat_output.message_response) - chat_state_model = ChatStateModel() + chat_state_model = create_state_model(model_name="ChatState", message=chat_output.message_response)() assert chat_state_model.__class__.__name__ == "ChatState" assert chat_state_model.message is UNDEFINED @@ -127,9 +121,7 @@ def test_graph_functional_start_state_update(self): # and check that the graph is running # correctly ids = ["chat_input", "chat_output"] - results = [] - for result in graph.start(): - results.append(result) + results = list(graph.start()) assert len(results) == 3 assert all(result.vertex.id in ids for result in results if hasattr(result, "vertex")) diff --git a/src/backend/tests/unit/graph/graph/test_base.py b/src/backend/tests/unit/graph/graph/test_base.py index 59908d9dca43..75b9aa9fe6f0 100644 --- a/src/backend/tests/unit/graph/graph/test_base.py +++ b/src/backend/tests/unit/graph/graph/test_base.py @@ -1,66 +1,63 @@ +import asyncio +import logging from collections import deque import pytest - -from langflow.components.inputs.ChatInput import ChatInput -from langflow.components.outputs.ChatOutput import ChatOutput 
-from langflow.components.outputs.TextOutput import TextOutputComponent -from langflow.graph.graph.base import Graph +from langflow.components.inputs import ChatInput +from langflow.components.langchain_utilities import ToolCallingAgentComponent +from langflow.components.outputs import ChatOutput, TextOutputComponent +from langflow.components.tools import YfinanceToolComponent +from langflow.graph import Graph from langflow.graph.graph.constants import Finish -@pytest.fixture -def client(): - pass - - -@pytest.mark.asyncio async def test_graph_not_prepared(): chat_input = ChatInput() chat_output = ChatOutput() - graph = Graph() - graph.add_component("chat_input", chat_input) - graph.add_component("chat_output", chat_output) - with pytest.raises(ValueError): + graph = await asyncio.to_thread(Graph) + graph.add_component(chat_input) + graph.add_component(chat_output) + with pytest.raises(ValueError, match="Graph not prepared"): await graph.astep() -@pytest.mark.asyncio -async def test_graph(): +async def test_graph(caplog: pytest.LogCaptureFixture): chat_input = ChatInput() chat_output = ChatOutput() - graph = Graph() - graph.add_component("chat_input", chat_input) - graph.add_component("chat_output", chat_output) - with pytest.warns(UserWarning, match="Graph has vertices but no edges"): + graph = await asyncio.to_thread(Graph) + graph.add_component(chat_input) + graph.add_component(chat_output) + caplog.clear() + with caplog.at_level(logging.WARNING): graph.prepare() + assert "Graph has vertices but no edges" in caplog.text -@pytest.mark.asyncio async def test_graph_with_edge(): chat_input = ChatInput() chat_output = ChatOutput() - graph = Graph() - graph.add_component("chat_input", chat_input) - graph.add_component("chat_output", chat_output) - graph.add_component_edge("chat_input", (chat_input.outputs[0].name, chat_input.inputs[0].name), "chat_output") + graph = await asyncio.to_thread(Graph) + input_id = graph.add_component(chat_input) + output_id = graph.add_component(chat_output) + graph.add_component_edge(input_id, (chat_input.outputs[0].name, chat_input.inputs[0].name), output_id) graph.prepare() - assert graph._run_queue == deque(["chat_input"]) + # ensure prepare is idempotent + graph.prepare() + assert graph._run_queue == deque([input_id]) await graph.astep() - assert graph._run_queue == deque(["chat_output"]) + assert graph._run_queue == deque([output_id]) - assert graph.vertices[0].id == "chat_input" - assert graph.vertices[1].id == "chat_output" - assert graph.edges[0].source_id == "chat_input" - assert graph.edges[0].target_id == "chat_output" + assert graph.vertices[0].id == input_id + assert graph.vertices[1].id == output_id + assert graph.edges[0].source_id == input_id + assert graph.edges[0].target_id == output_id -@pytest.mark.asyncio async def test_graph_functional(): chat_input = ChatInput(_id="chat_input") chat_output = ChatOutput(input_value="test", _id="chat_output") chat_output.set(sender_name=chat_input.message_response) - graph = Graph(chat_input, chat_output) + graph = await asyncio.to_thread(Graph, chat_input, chat_output) assert graph._run_queue == deque(["chat_input"]) await graph.astep() assert graph._run_queue == deque(["chat_output"]) @@ -71,19 +68,16 @@ async def test_graph_functional(): assert graph.edges[0].target_id == "chat_output" -@pytest.mark.asyncio async def test_graph_functional_async_start(): chat_input = ChatInput(_id="chat_input") chat_output = ChatOutput(input_value="test", _id="chat_output") 
chat_output.set(sender_name=chat_input.message_response) - graph = Graph(chat_input, chat_output) + graph = await asyncio.to_thread(Graph, chat_input, chat_output) # Now iterate through the graph # and check that the graph is running # correctly ids = ["chat_input", "chat_output"] - results = [] - async for result in graph.async_start(): - results.append(result) + results = [result async for result in graph.async_start()] assert len(results) == 3 assert all(result.vertex.id in ids for result in results if hasattr(result, "vertex")) @@ -100,9 +94,7 @@ def test_graph_functional_start(): # and check that the graph is running # correctly ids = ["chat_input", "chat_output"] - results = [] - for result in graph.start(): - results.append(result) + results = list(graph.start()) assert len(results) == 3 assert all(result.vertex.id in ids for result in results if hasattr(result, "vertex")) @@ -121,9 +113,7 @@ def test_graph_functional_start_end(): # and check that the graph is running # correctly ids = ["chat_input", "text_output"] - results = [] - for result in graph.start(): - results.append(result) + results = list(graph.start()) assert len(results) == len(ids) + 1 assert all(result.vertex.id in ids for result in results if hasattr(result, "vertex")) @@ -139,3 +129,10 @@ def test_graph_functional_start_end(): assert len(results) == len(ids) + 1 assert all(result.vertex.id in ids for result in results if hasattr(result, "vertex")) assert results[-1] == Finish() + + +@pytest.mark.skip(reason="Temporarily disabled") +def test_graph_set_with_valid_component(): + tool = YfinanceToolComponent() + tool_calling_agent = ToolCallingAgentComponent() + tool_calling_agent.set(tools=[tool]) diff --git a/src/backend/tests/unit/graph/graph/test_callback_graph.py b/src/backend/tests/unit/graph/graph/test_callback_graph.py new file mode 100644 index 000000000000..d7830c64b89a --- /dev/null +++ b/src/backend/tests/unit/graph/graph/test_callback_graph.py @@ -0,0 +1,52 @@ +import asyncio + +import pytest +from langflow.components.outputs import ChatOutput +from langflow.custom import Component +from langflow.events.event_manager import EventManager +from langflow.graph import Graph +from langflow.inputs import IntInput +from langflow.schema.message import Message +from langflow.template import Output + + +class LogComponent(Component): + display_name = "LogComponent" + inputs = [IntInput(name="times", value=1)] + outputs = [Output(name="call_log", method="call_log_method")] + + def call_log_method(self) -> Message: + for i in range(self.times): + self.log(f"This is log message {i}", name=f"Log {i}") + return Message(text="Log called", sender="test_sender", sender_name="test_sender_name") + + +@pytest.mark.skip(reason="Temporarily disabled") +def test_callback_graph(): + logs: list[tuple[str, dict]] = [] + + def mock_callback(manager, event_type: str, data: dict): # noqa: ARG001 + logs.append((event_type, data)) + + event_manager = EventManager(queue=asyncio.Queue()) + event_manager.register_event("on_log", "log", callback=mock_callback) + + log_component = LogComponent(_id="log_component") + log_component.set(times=3) + chat_output = ChatOutput(_id="chat_output") + chat_output.set( + input_value="test_input_value", sender_name=log_component.call_log_method, session_id="test_session_id" + ) + graph = Graph(start=log_component, end=chat_output) + graph.session_id = "test_session_id" + results = list(graph.start(event_manager=event_manager)) + assert len(results) == 3 + assert len(logs) == 3 + assert all(isinstance(log, 
tuple) for log in logs) + assert all(isinstance(log[1], dict) for log in logs) + assert logs[0][0] == "log" + assert logs[0][1]["name"] == "Log 0" + assert logs[1][0] == "log" + assert logs[1][1]["name"] == "Log 1" + assert logs[2][0] == "log" + assert logs[2][1]["name"] == "Log 2" diff --git a/src/backend/tests/unit/graph/graph/test_cycles.py b/src/backend/tests/unit/graph/graph/test_cycles.py new file mode 100644 index 000000000000..0179e9ba59fe --- /dev/null +++ b/src/backend/tests/unit/graph/graph/test_cycles.py @@ -0,0 +1,326 @@ +import os + +import pytest +from langflow.components.inputs import ChatInput +from langflow.components.inputs.text import TextInputComponent +from langflow.components.logic.conditional_router import ConditionalRouterComponent +from langflow.components.models import OpenAIModelComponent +from langflow.components.outputs import ChatOutput, TextOutputComponent +from langflow.components.prompts import PromptComponent +from langflow.custom import Component +from langflow.graph import Graph +from langflow.graph.graph.utils import find_cycle_vertices +from langflow.io import MessageTextInput, Output +from langflow.schema.message import Message + + +class Concatenate(Component): + display_name = "Concatenate" + description = "Concatenates two strings" + + inputs = [ + MessageTextInput(name="text", display_name="Text", required=True), + ] + outputs = [ + Output(display_name="Text", name="some_text", method="concatenate"), + ] + + def concatenate(self) -> Message: + return Message(text=f"{self.text}{self.text}" or "test") + + +@pytest.mark.skip(reason="Temporarily disabled") +def test_cycle_in_graph(): + chat_input = ChatInput(_id="chat_input") + router = ConditionalRouterComponent(_id="router", default_route="true_result") + chat_input.set(input_value=router.false_response) + concat_component = Concatenate(_id="concatenate") + concat_component.set(text=chat_input.message_response) + router.set( + input_text=chat_input.message_response, + match_text="testtesttesttest", + operator="equals", + message=concat_component.concatenate, + ) + text_output = TextOutputComponent(_id="text_output") + text_output.set(input_value=router.true_response) + chat_output = ChatOutput(_id="chat_output") + chat_output.set(input_value=text_output.text_response) + + graph = Graph(chat_input, chat_output) + assert graph.is_cyclic is True + + # Run queue should contain chat_input and not router + assert "chat_input" in graph._run_queue + assert "router" not in graph._run_queue + results = [] + max_iterations = 20 + snapshots = [graph._snapshot()] + for result in graph.start(max_iterations=max_iterations, config={"output": {"cache": False}}): + snapshots.append(graph._snapshot()) + results.append(result) + results_ids = [result.vertex.id for result in results if hasattr(result, "vertex")] + assert len(results_ids) > len(graph.vertices), snapshots + # Check that chat_output and text_output are the last vertices in the results + assert results_ids == [ + "chat_input", + "concatenate", + "router", + "chat_input", + "concatenate", + "router", + "chat_input", + "concatenate", + "router", + "chat_input", + "concatenate", + "router", + "text_output", + "chat_output", + ], f"Results: {results_ids}" + + +def test_cycle_in_graph_max_iterations(): + chat_input = ChatInput(_id="chat_input") + router = ConditionalRouterComponent(_id="router") + chat_input.set(input_value=router.false_response) + concat_component = Concatenate(_id="concatenate") + concat_component.set(text=chat_input.message_response) + 
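# The router's false branch feeds back into chat_input above, closing the cycle; with max_iterations=2 the run below is expected to abort with "Max iterations reached". +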
router.set( + input_text=chat_input.message_response, + match_text="testtesttesttest", + operator="equals", + message=concat_component.concatenate, + ) + text_output = TextOutputComponent(_id="text_output") + text_output.set(input_value=router.true_response) + chat_output = ChatOutput(_id="chat_output") + chat_output.set(input_value=text_output.text_response) + + graph = Graph(chat_input, chat_output) + assert graph.is_cyclic is True + + # Run queue should contain chat_input and not router + assert "chat_input" in graph._run_queue + assert "router" not in graph._run_queue + + with pytest.raises(ValueError, match="Max iterations reached"): + list(graph.start(max_iterations=2, config={"output": {"cache": False}})) + + +def test_that_outputs_cache_is_set_to_false_in_cycle(): + chat_input = ChatInput(_id="chat_input") + router = ConditionalRouterComponent(_id="router") + chat_input.set(input_value=router.false_response) + concat_component = Concatenate(_id="concatenate") + concat_component.set(text=chat_input.message_response) + router.set( + input_text=chat_input.message_response, + match_text="testtesttesttest", + operator="equals", + message=concat_component.concatenate, + ) + text_output = TextOutputComponent(_id="text_output") + text_output.set(input_value=router.true_response) + chat_output = ChatOutput(_id="chat_output") + chat_output.set(input_value=text_output.text_response) + + graph = Graph(chat_input, chat_output) + cycle_vertices = find_cycle_vertices(graph._get_edges_as_list_of_tuples()) + cycle_outputs_lists = [ + graph.vertex_map[vertex_id].custom_component._outputs_map.values() for vertex_id in cycle_vertices + ] + cycle_outputs = [output for outputs in cycle_outputs_lists for output in outputs] + for output in cycle_outputs: + assert output.cache is False + + non_cycle_outputs_lists = [ + vertex.custom_component.outputs for vertex in graph.vertices if vertex.id not in cycle_vertices + ] + non_cycle_outputs = [output for outputs in non_cycle_outputs_lists for output in outputs] + for output in non_cycle_outputs: + assert output.cache is True + + +@pytest.mark.api_key_required +def test_updated_graph_with_prompts(): + # Chat input initialization + chat_input = ChatInput(_id="chat_input").set(input_value="bacon") + + # First prompt: Guessing game with hints + prompt_component_1 = PromptComponent(_id="prompt_component_1").set( + template="Try to guess a word. I will give you hints if you get it wrong.\n" + "Hint: {hint}\n" + "Last try: {last_try}\n" + "Answer:", + ) + + # First OpenAI LLM component (Processes the guessing prompt) + openai_component_1 = OpenAIModelComponent(_id="openai_1").set( + input_value=prompt_component_1.build_prompt, api_key=os.getenv("OPENAI_API_KEY") + ) + + # Conditional router based on agent response + router = ConditionalRouterComponent(_id="router").set( + input_text=openai_component_1.text_response, + match_text=chat_input.message_response, + operator="contains", + message=openai_component_1.text_response, + ) + + # Second prompt: After the last try, provide a new hint + prompt_component_2 = PromptComponent(_id="prompt_component_2") + prompt_component_2.set( + template="Given the following word and the following last try. 
Give the guesser a new hint.\n" + "Last try: {last_try}\n" + "Word: {word}\n" + "Hint:", + word=chat_input.message_response, + last_try=router.false_response, + ) + + # Second OpenAI component (handles the router's response) + openai_component_2 = OpenAIModelComponent(_id="openai_2") + openai_component_2.set(input_value=prompt_component_2.build_prompt, api_key=os.getenv("OPENAI_API_KEY")) + + prompt_component_1.set(hint=openai_component_2.text_response, last_try=router.false_response) + + # chat output for the final OpenAI response + chat_output_1 = ChatOutput(_id="chat_output_1") + chat_output_1.set(input_value=router.true_response) + + # Build the graph without concatenate + graph = Graph(chat_input, chat_output_1) + + # Assertions for graph cyclicity and correctness + assert graph.is_cyclic is True, "Graph should contain cycles." + + # Run and validate the execution of the graph + results = [] + max_iterations = 20 + snapshots = [graph.get_snapshot()] + + for result in graph.start(max_iterations=max_iterations, config={"output": {"cache": False}}): + snapshots.append(graph.get_snapshot()) + results.append(result) + + assert len(snapshots) > 2, "Graph should have more than one snapshot" + # Extract the vertex IDs for analysis + results_ids = [result.vertex.id for result in results if hasattr(result, "vertex")] + assert "chat_output_1" in results_ids, f"Expected outputs not in results: {results_ids}" + + +@pytest.mark.api_key_required +def test_updated_graph_with_max_iterations(): + # Chat input initialization + chat_input = ChatInput(_id="chat_input").set(input_value="bacon") + + # First prompt: Guessing game with hints + prompt_component_1 = PromptComponent(_id="prompt_component_1").set( + template="Try to guess a word. I will give you hints if you get it wrong.\n" + "Hint: {hint}\n" + "Last try: {last_try}\n" + "Answer:", + ) + + # First OpenAI LLM component (Processes the guessing prompt) + openai_component_1 = OpenAIModelComponent(_id="openai_1").set( + input_value=prompt_component_1.build_prompt, api_key=os.getenv("OPENAI_API_KEY") + ) + + # Conditional router based on agent response + router = ConditionalRouterComponent(_id="router").set( + input_text=openai_component_1.text_response, + match_text=chat_input.message_response, + operator="contains", + message=openai_component_1.text_response, + ) + + # Second prompt: After the last try, provide a new hint + prompt_component_2 = PromptComponent(_id="prompt_component_2") + prompt_component_2.set( + template="Given the following word and the following last try. Give the guesser a new hint.\n" + "Last try: {last_try}\n" + "Word: {word}\n" + "Hint:", + word=chat_input.message_response, + last_try=router.false_response, + ) + + # Second OpenAI component (handles the router's response) + openai_component_2 = OpenAIModelComponent(_id="openai_2") + openai_component_2.set(input_value=prompt_component_2.build_prompt, api_key=os.getenv("OPENAI_API_KEY")) + + prompt_component_1.set(hint=openai_component_2.text_response, last_try=router.false_response) + + # chat output for the final OpenAI response + chat_output_1 = ChatOutput(_id="chat_output_1") + chat_output_1.set(input_value=router.true_response) + + # Build the graph without concatenate + graph = Graph(chat_input, chat_output_1) + + # Assertions for graph cyclicity and correctness + assert graph.is_cyclic is True, "Graph should contain cycles." 
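+ # Output caching is disabled for this run (see the config below) so vertices on the cycle are rebuilt on every iteration; max_iterations bounds the loop.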
+ + # Run and validate the execution of the graph + results = [] + max_iterations = 20 + snapshots = [graph.get_snapshot()] + + for result in graph.start(max_iterations=max_iterations, config={"output": {"cache": False}}): + snapshots.append(graph.get_snapshot()) + results.append(result) + + assert len(snapshots) > 2, "Graph should have more than one snapshot" + # Extract the vertex IDs for analysis + results_ids = [result.vertex.id for result in results if hasattr(result, "vertex")] + assert "chat_output_1" in results_ids, f"Expected outputs not in results: {results_ids}" + + +def test_conditional_router_max_iterations(): + # Text input initialization + text_input = TextInputComponent(_id="text_input") + + # Conditional router setup with a condition that will never match + router = ConditionalRouterComponent(_id="router").set( + input_text=text_input.text_response, + match_text="bacon", + operator="equals", + message="This message should not be routed to true_result", + max_iterations=5, + default_route="true_result", + ) + + # Feed the router's false branch back into the text input, closing the cycle + text_input.set(input_value=router.false_response) + + # Chat output wired to the router's true branch (reached once max_iterations forces the default route) + chat_output_false = ChatOutput(_id="chat_output_false") + chat_output_false.set(input_value=router.true_response) + + # Build the graph + graph = Graph(text_input, chat_output_false) + + # Assertions for graph cyclicity and correctness + assert graph.is_cyclic is True, "Graph should contain cycles." + + # Run and validate the execution of the graph + results = [] + snapshots = [graph.get_snapshot()] + previous_iteration = graph.context.get("router_iteration", 0) + for result in graph.start(max_iterations=20, config={"output": {"cache": False}}): + snapshots.append(graph.get_snapshot()) + results.append(result) + if hasattr(result, "vertex") and result.vertex.id == "router": + current_iteration = graph.context.get("router_iteration", 0) + assert current_iteration == previous_iteration + 1, "Iteration should increment by 1" + previous_iteration = current_iteration + + # Check if the max_iterations logic is working + router_id = router._id.lower() + assert graph.context.get(f"{router_id}_iteration", 0) == 5, "Router should stop after max_iterations" + + # Extract the vertex IDs for analysis + results_ids = [result.vertex.id for result in results if hasattr(result, "vertex")] + assert "chat_output_false" in results_ids, f"Expected outputs not in results: {results_ids}" diff --git a/src/backend/tests/unit/graph/graph/test_graph_state_model.py b/src/backend/tests/unit/graph/graph/test_graph_state_model.py index e7555596bc33..41a4e145d915 100644 --- a/src/backend/tests/unit/graph/graph/test_graph_state_model.py +++ b/src/backend/tests/unit/graph/graph/test_graph_state_model.py @@ -1,19 +1,17 @@ -import pytest -from pydantic import BaseModel +from typing import TYPE_CHECKING -from langflow.components.helpers.Memory import MemoryComponent -from langflow.components.inputs.ChatInput import ChatInput -from langflow.components.models.OpenAIModel import OpenAIModelComponent -from langflow.components.outputs.ChatOutput import ChatOutput -from langflow.components.prompts.Prompt import PromptComponent +import pytest +from langflow.components.helpers.memory import MemoryComponent +from langflow.components.inputs import ChatInput +from langflow.components.models import OpenAIModelComponent +from langflow.components.outputs import ChatOutput +from langflow.components.prompts import PromptComponent from langflow.graph import Graph from 
langflow.graph.graph.constants import Finish from langflow.graph.graph.state_model import create_state_model_from_graph - -@pytest.fixture -def client(): - pass +if TYPE_CHECKING: + from pydantic import BaseModel def test_graph_state_model(): @@ -40,9 +38,9 @@ def test_graph_state_model(): graph = Graph(chat_input, chat_output) - GraphStateModel = create_state_model_from_graph(graph) - assert GraphStateModel.__name__ == "GraphStateModel" - assert list(GraphStateModel.model_computed_fields.keys()) == [ + graph_state_model = create_state_model_from_graph(graph) + assert graph_state_model.__name__ == "GraphStateModel" + assert list(graph_state_model.model_computed_fields.keys()) == [ "chat_input", "chat_output", "openai", @@ -62,12 +60,9 @@ def test_graph_functional_start_graph_state_update(): # Now iterate through the graph # and check that the graph is running # correctly - GraphStateModel = create_state_model_from_graph(graph) - graph_state_model = GraphStateModel() + graph_state_model = create_state_model_from_graph(graph)() ids = ["chat_input", "chat_output"] - results = [] - for result in graph.start(): - results.append(result) + results = list(graph.start()) assert len(results) == 3 assert all(result.vertex.id in ids for result in results if hasattr(result, "vertex")) @@ -89,12 +84,9 @@ def test_graph_state_model_serialization(): # Now iterate through the graph # and check that the graph is running # correctly - GraphStateModel = create_state_model_from_graph(graph) - graph_state_model = GraphStateModel() + graph_state_model = create_state_model_from_graph(graph)() ids = ["chat_input", "chat_output"] - results = [] - for result in graph.start(): - results.append(result) + results = list(graph.start()) assert len(results) == 3 assert all(result.vertex.id in ids for result in results if hasattr(result, "vertex")) @@ -108,6 +100,7 @@ def test_graph_state_model_serialization(): assert serialized_state_model["chat_input"]["message"]["text"] == "Test Sender Name" +@pytest.mark.skip(reason="Not implemented yet") def test_graph_state_model_json_schema(): chat_input = ChatInput(_id="chat_input") chat_input.set(input_value="Test Sender Name") @@ -117,8 +110,7 @@ def test_graph_state_model_json_schema(): graph = Graph(chat_input, chat_output) graph.prepare() - GraphStateModel = create_state_model_from_graph(graph) - graph_state_model: BaseModel = GraphStateModel() + graph_state_model: BaseModel = create_state_model_from_graph(graph)() json_schema = graph_state_model.model_json_schema(mode="serialization") # Test main schema structure diff --git a/src/backend/tests/unit/graph/graph/test_runnable_vertices_manager.py b/src/backend/tests/unit/graph/graph/test_runnable_vertices_manager.py index 3b3e013b9ae1..188d19b915ca 100644 --- a/src/backend/tests/unit/graph/graph/test_runnable_vertices_manager.py +++ b/src/backend/tests/unit/graph/graph/test_runnable_vertices_manager.py @@ -1,14 +1,11 @@ import pickle -from collections import defaultdict +from typing import TYPE_CHECKING import pytest - from langflow.graph.graph.runnable_vertices_manager import RunnableVerticesManager - -@pytest.fixture -def client(): - pass +if TYPE_CHECKING: + from collections import defaultdict @pytest.fixture @@ -28,7 +25,7 @@ def data(): def test_to_dict(data): result = RunnableVerticesManager.from_dict(data).to_dict() - assert all(key in result.keys() for key in data.keys()) + assert all(key in result for key in data) def test_from_dict(data): @@ -69,7 +66,7 @@ def test_pickle(data): manager = 
RunnableVerticesManager.from_dict(data) binary = pickle.dumps(manager) - result = pickle.loads(binary) + result = pickle.loads(binary) # noqa: S301 assert result.run_map == manager.run_map assert result.run_predecessors == manager.run_predecessors @@ -94,7 +91,7 @@ def test_is_vertex_runnable(data): vertex_id = "A" is_active = True - result = manager.is_vertex_runnable(vertex_id, is_active) + result = manager.is_vertex_runnable(vertex_id, is_active=is_active) assert result is False @@ -104,7 +101,7 @@ def test_is_vertex_runnable__wrong_is_active(data): vertex_id = "A" is_active = False - result = manager.is_vertex_runnable(vertex_id, is_active) + result = manager.is_vertex_runnable(vertex_id, is_active=is_active) assert result is False @@ -114,7 +111,7 @@ def test_is_vertex_runnable__wrong_vertices_to_run(data): vertex_id = "D" is_active = True - result = manager.is_vertex_runnable(vertex_id, is_active) + result = manager.is_vertex_runnable(vertex_id, is_active=is_active) assert result is False @@ -124,7 +121,7 @@ def test_is_vertex_runnable__wrong_run_predecessors(data): vertex_id = "C" is_active = True - result = manager.is_vertex_runnable(vertex_id, is_active) + result = manager.is_vertex_runnable(vertex_id, is_active=is_active) assert result is False @@ -163,8 +160,8 @@ def test_build_run_map(data): manager.build_run_map(predecessor_map, vertices_to_run) - assert all(v in manager.run_map.keys() for v in ["Z", "X", "Y"]) - assert "W" not in manager.run_map.keys() + assert all(v in manager.run_map for v in ["Z", "X", "Y"]) + assert "W" not in manager.run_map def test_update_vertex_run_state(data): @@ -172,7 +169,7 @@ def test_update_vertex_run_state(data): vertex_id = "C" is_runnable = True - manager.update_vertex_run_state(vertex_id, is_runnable) + manager.update_vertex_run_state(vertex_id, is_runnable=is_runnable) assert vertex_id in manager.vertices_to_run @@ -182,7 +179,7 @@ def test_update_vertex_run_state__bad_case(data): vertex_id = "C" is_runnable = False - manager.update_vertex_run_state(vertex_id, is_runnable) + manager.update_vertex_run_state(vertex_id, is_runnable=is_runnable) assert vertex_id not in manager.vertices_being_run diff --git a/src/backend/tests/unit/graph/graph/test_utils.py b/src/backend/tests/unit/graph/graph/test_utils.py index 5f211604c7e2..982f73118af2 100644 --- a/src/backend/tests/unit/graph/graph/test_utils.py +++ b/src/backend/tests/unit/graph/graph/test_utils.py @@ -1,13 +1,9 @@ -import pytest +import copy +import pytest from langflow.graph.graph import utils -@pytest.fixture -def client(): - pass - - @pytest.fixture def graph(): return { @@ -45,7 +41,7 @@ def test_get_successors_a(graph): result = utils.get_successors(graph, vertex_id) - assert set(result) == {"A", "B", "D", "E", "F", "H", "G"} + assert set(result) == {"B", "D", "E", "F", "H", "G"} def test_get_successors_z(graph): @@ -53,7 +49,7 @@ def test_get_successors_z(graph): result = utils.get_successors(graph, vertex_id) - assert set(result) == {"Z"} + assert len(result) == 0 def test_sort_up_to_vertex_n_is_start(graph): @@ -123,7 +119,7 @@ def test_sort_up_to_vertex_a(graph): def test_sort_up_to_vertex_invalid_vertex(graph): vertex_id = "7" - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Parent node map is required to find the root of a group node"): utils.sort_up_to_vertex(graph, vertex_id) @@ -303,3 +299,148 @@ def test_duplicate_edges(self): edges = [("A", "B"), ("A", "B"), ("B", "C"), ("C", "A"), ("C", "A")] result = utils.find_all_cycle_edges(entry_point, edges) 
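# With the duplicated edges above, only the single back edge ("C", "A") should be reported.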
assert set(result) == {("C", "A")} + + +class TestFindCycleVertices: + # Detect cycles in a simple directed graph + def test_detect_cycles_simple_graph(self): + edges = [("A", "B"), ("B", "C"), ("C", "A"), ("C", "D"), ("D", "E"), ("E", "F"), ("F", "C"), ("F", "G")] + expected_output = ["C", "A", "B", "D", "E", "F"] + result = utils.find_cycle_vertices(edges) + assert sorted(result) == sorted(expected_output) + + # Handle an empty list of edges + def test_handle_empty_edges(self): + edges = [] + expected_output = [] + result = utils.find_cycle_vertices(edges) + assert result == expected_output + + # Return vertices involved in multiple cycles + def test_return_vertices_involved_in_multiple_cycles(self): + # Define the graph with multiple cycles + edges = [("A", "B"), ("B", "C"), ("C", "A"), ("C", "D"), ("D", "E"), ("E", "F"), ("F", "C"), ("F", "G")] + result = utils.find_cycle_vertices(edges) + assert set(result) == {"C", "A", "B", "D", "E", "F"} + + # Correctly identify and return vertices in a single cycle + def test_correctly_identify_and_return_vertices_in_single_cycle(self): + # Define the graph with a single cycle + edges = [("A", "B"), ("B", "C"), ("C", "A")] + result = utils.find_cycle_vertices(edges) + assert set(result) == {"C", "A", "B"} + + # Handle graphs with no cycles and return an empty list + def test_no_cycles_empty_list(self): + edges = [("A", "B"), ("B", "C"), ("D", "E"), ("E", "F")] + expected_output = [] + result = utils.find_cycle_vertices(edges) + assert result == expected_output + + # Process graphs with disconnected components + def test_process_disconnected_components(self): + edges = [ + ("A", "B"), + ("B", "C"), + ("C", "A"), + ("C", "D"), + ("D", "E"), + ("E", "F"), + ("F", "C"), + ("F", "G"), + ("X", "Y"), + ("Y", "Z"), + ] + expected_output = ["C", "A", "B", "D", "E", "F"] + result = utils.find_cycle_vertices(edges) + assert sorted(result) == sorted(expected_output) + + # Handle graphs with self-loops + def test_handle_self_loops(self): + edges = [ + ("A", "B"), + ("B", "C"), + ("C", "A"), + ("C", "D"), + ("D", "E"), + ("E", "F"), + ("F", "C"), + ("F", "G"), + ("C", "C"), + ] + expected_output = ["C", "A", "B", "D", "E", "F"] + result = utils.find_cycle_vertices(edges) + assert sorted(result) == sorted(expected_output) + + # Handle a graph where all vertices form a single cycle + def test_handle_single_cycle(self): + edges = [("A", "B"), ("B", "C"), ("C", "A")] + expected_output = ["C", "A", "B"] + result = utils.find_cycle_vertices(edges) + assert sorted(result) == sorted(expected_output) + + # Handle a graph where the entry point has no outgoing edges + def test_handle_no_outgoing_edges(self): + edges = [("A", "B"), ("B", "C"), ("C", "D"), ("D", "E"), ("E", "F"), ("F", "G")] + expected_output = [] + result = utils.find_cycle_vertices(edges) + assert sorted(result) == sorted(expected_output) + + # Handle a graph with a single vertex and no edges + def test_single_vertex_no_edges(self): + edges = [] + expected_output = [] + result = utils.find_cycle_vertices(edges) + assert sorted(result) == sorted(expected_output) + + # Verify the function's behavior with non-string vertex IDs + def test_non_string_vertex_ids(self): + edges = [(1, 2), (2, 3), (3, 1), (3, 4), (4, 5), (5, 6), (6, 3), (6, 7)] + expected_output = [1, 2, 3, 4, 5, 6] + result = utils.find_cycle_vertices(edges) + assert sorted(result) == sorted(expected_output) + + # Ensure no modification of the input edges list + def test_no_modification_of_input_edges_list(self): + edges = [("A", "B"), ("B", 
"C"), ("C", "A"), ("C", "D"), ("D", "E"), ("E", "F"), ("F", "C"), ("F", "G")] + original_edges = copy.deepcopy(edges) + utils.find_cycle_vertices(edges) + assert edges == original_edges + + # Handle large graphs efficiently + def test_handle_large_graphs_efficiently(self): + edges = [("A", "B"), ("B", "C"), ("C", "A"), ("C", "D"), ("D", "E"), ("E", "F"), ("F", "C"), ("F", "G")] + expected_output = ["C", "A", "B", "D", "E", "F"] + result = utils.find_cycle_vertices(edges) + assert sorted(result) == sorted(expected_output) + + # Handle graphs with duplicate edges and verify correct cycle vertices are detected + def test_handle_duplicate_edges_fixed_fixed(self): + edges = [ + ("A", "B"), + ("B", "C"), + ("C", "A"), + ("C", "D"), + ("D", "E"), + ("E", "F"), + ("F", "C"), + ("F", "G"), + ("A", "B"), + ] + expected_output = ["A", "B", "C", "D", "E", "F"] + result = utils.find_cycle_vertices(edges) + assert sorted(result) == sorted(expected_output) + + @pytest.mark.parametrize("_", range(5)) + def test_handle_two_inputs_in_cycle(self, _): # noqa: PT019 + edges = [ + ("chat_input", "router"), + ("chat_input", "concatenate"), + ("concatenate", "router"), + ("router", "chat_input"), + ("text_output", "chat_output"), + ("router", "text_output"), + ] + expected_output = ["router", "chat_input", "concatenate"] + result = utils.find_cycle_vertices(edges) + assert sorted(result) == sorted(expected_output) diff --git a/src/backend/tests/unit/graph/test_graph.py b/src/backend/tests/unit/graph/test_graph.py index f6b85aa4e042..af9f009cc5c1 100644 --- a/src/backend/tests/unit/graph/test_graph.py +++ b/src/backend/tests/unit/graph/test_graph.py @@ -1,12 +1,8 @@ import copy import json -import pickle -from typing import Type, Union import pytest - from langflow.graph import Graph -from langflow.graph.edge.base import Edge from langflow.graph.graph.utils import ( find_last_node, process_flow, @@ -18,7 +14,6 @@ ) from langflow.graph.vertex.base import Vertex from langflow.initial_setup.setup import load_starter_projects -from langflow.utils.payload import get_root_vertex # Test cases for the graph module @@ -62,42 +57,11 @@ def sample_nodes(): ] -def get_node_by_type(graph, node_type: Type[Vertex]) -> Union[Vertex, None]: - """Get a node by type""" +def get_node_by_type(graph, node_type: type[Vertex]) -> Vertex | None: + """Get a node by type.""" return next((node for node in graph.vertices if isinstance(node, node_type)), None) -def test_graph_structure(basic_graph): - assert isinstance(basic_graph, Graph) - assert len(basic_graph.vertices) > 0 - assert len(basic_graph.edges) > 0 - for node in basic_graph.vertices: - assert isinstance(node, Vertex) - for edge in basic_graph.edges: - assert isinstance(edge, Edge) - source_vertex = basic_graph.get_vertex(edge.source_id) - target_vertex = basic_graph.get_vertex(edge.target_id) - assert source_vertex in basic_graph.vertices - assert target_vertex in basic_graph.vertices - - -def test_circular_dependencies(basic_graph): - assert isinstance(basic_graph, Graph) - - def check_circular(node, visited): - visited.add(node) - neighbors = basic_graph.get_vertices_with_target(node) - for neighbor in neighbors: - if neighbor in visited: - return True - if check_circular(neighbor, visited.copy()): - return True - return False - - for node in basic_graph.vertices: - assert not check_circular(node, set()) - - def test_invalid_node_types(): graph_data = { "nodes": [ @@ -115,125 +79,11 @@ def test_invalid_node_types(): ], "edges": [], } - with pytest.raises(Exception): - g = 
Graph() + g = Graph() + with pytest.raises(KeyError): g.add_nodes_and_edges(graph_data["nodes"], graph_data["edges"]) -def test_get_vertices_with_target(basic_graph): - """Test getting connected nodes""" - assert isinstance(basic_graph, Graph) - # Get root node - root = get_root_vertex(basic_graph) - assert root is not None - connected_nodes = basic_graph.get_vertices_with_target(root.id) - assert connected_nodes is not None - - -def test_get_node_neighbors_basic(basic_graph): - """Test getting node neighbors""" - - assert isinstance(basic_graph, Graph) - # Get root node - root = get_root_vertex(basic_graph) - assert root is not None - neighbors = basic_graph.get_vertex_neighbors(root) - assert neighbors is not None - assert isinstance(neighbors, dict) - # Root Node is an Agent, it requires an LLMChain and tools - # We need to check if there is a Chain in the one of the neighbors' - # data attribute in the type key - assert any("ConversationBufferMemory" in neighbor.data["type"] for neighbor, val in neighbors.items() if val) - - assert any("OpenAI" in neighbor.data["type"] for neighbor, val in neighbors.items() if val) - - -def test_get_node(basic_graph): - """Test getting a single node""" - node_id = basic_graph.vertices[0].id - node = basic_graph.get_vertex(node_id) - assert isinstance(node, Vertex) - assert node.id == node_id - - -def test_build_nodes(basic_graph): - """Test building nodes""" - - assert len(basic_graph.vertices) == len(basic_graph._vertices) - for node in basic_graph.vertices: - assert isinstance(node, Vertex) - - -def test_build_edges(basic_graph): - """Test building edges""" - assert len(basic_graph.edges) == len(basic_graph._edges) - for edge in basic_graph.edges: - assert isinstance(edge, Edge) - assert isinstance(edge.source_id, str) - assert isinstance(edge.target_id, str) - - -def test_get_root_vertex(client, basic_graph, complex_graph): - """Test getting root node""" - assert isinstance(basic_graph, Graph) - root = get_root_vertex(basic_graph) - assert root is not None - assert isinstance(root, Vertex) - assert root.data["type"] == "TimeTravelGuideChain" - # For complex example, the root node is a ZeroShotAgent too - assert isinstance(complex_graph, Graph) - root = get_root_vertex(complex_graph) - assert root is not None - assert isinstance(root, Vertex) - assert root.data["type"] == "ZeroShotAgent" - - -def test_validate_edges(basic_graph): - """Test validating edges""" - - assert isinstance(basic_graph, Graph) - # all edges should be valid - assert all(edge.valid for edge in basic_graph.edges) - - -def test_matched_type(basic_graph): - """Test matched type attribute in Edge""" - assert isinstance(basic_graph, Graph) - # all edges should be valid - assert all(edge.valid for edge in basic_graph.edges) - # all edges should have a matched_type attribute - assert all(hasattr(edge, "matched_type") for edge in basic_graph.edges) - # The matched_type attribute should be in the source_types attr - assert all(edge.matched_type in edge.source_types for edge in basic_graph.edges) - - -def test_build_params(basic_graph): - """Test building params""" - - assert isinstance(basic_graph, Graph) - # all edges should be valid - assert all(edge.valid for edge in basic_graph.edges) - # all edges should have a matched_type attribute - assert all(hasattr(edge, "matched_type") for edge in basic_graph.edges) - # The matched_type attribute should be in the source_types attr - assert all(edge.matched_type in edge.source_types for edge in basic_graph.edges) - # Get the root node - root = 
get_root_vertex(basic_graph) - # Root node is a TimeTravelGuideChain - # which requires an llm and memory - assert root is not None - assert isinstance(root.params, dict) - assert "llm" in root.params - assert "memory" in root.params - - -# def test_wrapper_node_build(openapi_graph): -# wrapper_node = get_node_by_type(openapi_graph, WrapperVertex) -# assert wrapper_node is not None -# built_object = wrapper_node.build() -# assert built_object is not None - - def test_find_last_node(grouped_chat_json_flow): grouped_chat_data = json.loads(grouped_chat_json_flow).get("data") nodes, edges = grouped_chat_data["nodes"], grouped_chat_data["edges"] @@ -280,10 +130,10 @@ def test_process_flow_one_group(one_grouped_chat_json_flow): node_data = group_node["data"]["node"] assert node_data.get("flow") is not None template_data = node_data["template"] - assert any("openai_api_key" in key for key in template_data.keys()) + assert any("openai_api_key" in key for key in template_data) # Get the openai_api_key dict openai_api_key = next( - (template_data[key] for key in template_data.keys() if "openai_api_key" in key), + (template_data[key] for key in template_data if "openai_api_key" in key), None, ) assert openai_api_key is not None @@ -368,8 +218,7 @@ def test_update_target_handle_proxy(): } } g_nodes = [{"id": "some_id", "data": {"node": {"flow": None}}}] - group_node_id = "group_id" - updated_edge = update_target_handle(new_edge, g_nodes, group_node_id) + updated_edge = update_target_handle(new_edge, g_nodes) assert updated_edge["data"]["targetHandle"] == new_edge["data"]["targetHandle"] @@ -407,13 +256,12 @@ def test_update_source_handle(): assert updated_edge["data"]["sourceHandle"]["id"] == "last_node" -@pytest.mark.asyncio -async def test_pickle_graph(): +def test_serialize_graph(): starter_projects = load_starter_projects() data = starter_projects[0][1]["data"] graph = Graph.from_payload(data) assert isinstance(graph, Graph) - pickled = pickle.dumps(graph) - assert pickled is not None - unpickled = pickle.loads(pickled) - assert unpickled is not None + serialized = graph.dumps() + assert serialized is not None + assert isinstance(serialized, str) + assert len(serialized) > 0 diff --git a/src/backend/tests/unit/helpers/__init__.py b/src/backend/tests/unit/helpers/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/src/backend/tests/unit/helpers/test_base_model_from_schema.py b/src/backend/tests/unit/helpers/test_base_model_from_schema.py new file mode 100644 index 000000000000..d07a4908e0a3 --- /dev/null +++ b/src/backend/tests/unit/helpers/test_base_model_from_schema.py @@ -0,0 +1,159 @@ +# Generated by qodo Gen + +from typing import Any + +import pytest +from langflow.helpers.base_model import build_model_from_schema +from pydantic import BaseModel +from pydantic_core import PydanticUndefined + + +class TestBuildModelFromSchema: + # Successfully creates a Pydantic model from a valid schema + def test_create_model_from_valid_schema(self): + schema = [ + {"name": "field1", "type": "str", "default": "default_value", "description": "A string field"}, + {"name": "field2", "type": "int", "default": 0, "description": "An integer field"}, + {"name": "field3", "type": "bool", "default": False, "description": "A boolean field"}, + ] + model = build_model_from_schema(schema) + instance = model(field1="test", field2=123, field3=True) + assert instance.field1 == "test" + assert instance.field2 == 123 + assert instance.field3 is True + + # Handles empty schema gracefully without 
errors + def test_handle_empty_schema(self): + schema = [] + model = build_model_from_schema(schema) + instance = model() + assert instance is not None + + # Ensure the model created from schema has the expected attributes by checking on an instance + def test_handles_multiple_fields_fixed_with_instance_check(self): + schema = [ + {"name": "field1", "type": "str", "default": "default_value1"}, + {"name": "field2", "type": "int", "default": 42}, + {"name": "field3", "type": "list", "default": [1, 2, 3]}, + {"name": "field4", "type": "dict", "default": {"key": "value"}}, + ] + + model = build_model_from_schema(schema) + model_instance = model(field1="test", field2=123, field3=[1, 2, 3], field4={"key": "value"}) + + assert issubclass(model, BaseModel) + assert hasattr(model_instance, "field1") + assert hasattr(model_instance, "field2") + assert hasattr(model_instance, "field3") + assert hasattr(model_instance, "field4") + + # Field descriptions from the schema are exposed via model_fields + def test_correctly_accesses_descriptions_recommended_fix(self): + schema = [ + {"name": "field1", "type": "str", "default": "default_value1", "description": "Description for field1"}, + {"name": "field2", "type": "int", "default": 42, "description": "Description for field2"}, + {"name": "field3", "type": "list", "default": [1, 2, 3], "description": "Description for field3"}, + {"name": "field4", "type": "dict", "default": {"key": "value"}, "description": "Description for field4"}, + ] + + model = build_model_from_schema(schema) + + assert model.model_fields["field1"].description == "Description for field1" + assert model.model_fields["field2"].description == "Description for field2" + assert model.model_fields["field3"].description == "Description for field3" + assert model.model_fields["field4"].description == "Description for field4" + + # Supports both single and multiple type annotations + def test_supports_single_and_multiple_type_annotations(self): + schema = [ + {"name": "field1", "type": "str", "default": "default_value1", "description": "Description 1"}, + {"name": "field2", "type": "list", "default": [1, 2, 3], "description": "Description 2", "multiple": True}, + {"name": "field3", "type": "int", "default": 100, "description": "Description 3"}, + ] + model_type = build_model_from_schema(schema) + assert issubclass(model_type, BaseModel) + + # Rejects unknown field types by raising ValueError instead of defaulting to Any + def test_manages_unknown_field_types(self): + schema = [ + {"name": "field1", "type": "str", "default": "default_value1"}, + {"name": "field2", "type": "unknown_type", "default": "default_value2"}, + ] + with pytest.raises(ValueError, match="Invalid type: unknown_type"): + build_model_from_schema(schema) + + # Confirms that the function raises a specific exception for invalid input + def test_raises_error_for_invalid_input_different_exception_with_specific_exception(self): + schema = [{"name": "field1", "type": "invalid_type", "default": "default_value"}] + with pytest.raises(ValueError, match="Invalid type: invalid_type"): + build_model_from_schema(schema) + + # Processes schemas with missing optional keys like description or multiple + def test_process_schema_missing_optional_keys_updated(self): + schema = [ + {"name": "field1", "type": "str", "default": "default_value1"}, + {"name": "field2", "type": "int", "default": 0, "description": "Field 2 description"}, + {"name": "field3", "type": "list", "default": [], "multiple": True}, + {"name": "field4", "type": "dict", "default": {}, "description": "Field 4 
description", "multiple": True}, + ] + result_model = build_model_from_schema(schema) + assert result_model.__annotations__["field1"] == str # noqa: E721 + assert result_model.model_fields["field1"].description == "" + assert result_model.__annotations__["field2"] == int # noqa: E721 + assert result_model.model_fields["field2"].description == "Field 2 description" + assert result_model.__annotations__["field3"] == list[list[Any]] + assert result_model.model_fields["field3"].description == "" + assert result_model.__annotations__["field4"] == list[dict[str, Any]] + assert result_model.model_fields["field4"].description == "Field 4 description" + + # Deals with schemas containing fields with None as default values + def test_schema_fields_with_none_default(self): + schema = [ + {"name": "field1", "type": "str", "default": None, "description": "Field 1 description"}, + {"name": "field2", "type": "int", "default": None, "description": "Field 2 description"}, + {"name": "field3", "type": "list", "default": None, "description": "Field 3 description", "multiple": True}, + ] + model = build_model_from_schema(schema) + assert model.model_fields["field1"].default == PydanticUndefined + assert model.model_fields["field2"].default == PydanticUndefined + assert model.model_fields["field3"].default == PydanticUndefined + + # Checks for proper handling of nested list and dict types + def test_nested_list_and_dict_types_handling(self): + schema = [ + {"name": "field1", "type": "list", "default": [], "description": "list field", "multiple": True}, + {"name": "field2", "type": "dict", "default": {}, "description": "Dict field"}, + ] + model_type = build_model_from_schema(schema) + assert issubclass(model_type, BaseModel) + + # Verifies that the function can handle large schemas efficiently + def test_handle_large_schemas_efficiently(self): + schema = [ + {"name": "field1", "type": "str", "default": "default_value1", "description": "Description 1"}, + {"name": "field2", "type": "int", "default": 100, "description": "Description 2"}, + {"name": "field3", "type": "list", "default": [1, 2, 3], "description": "Description 3", "multiple": True}, + {"name": "field4", "type": "dict", "default": {"key": "value"}, "description": "Description 4"}, + ] + model_type = build_model_from_schema(schema) + assert issubclass(model_type, BaseModel) + + # Ensures that the function returns a valid Pydantic model class + def test_returns_valid_model_class(self): + schema = [ + {"name": "field1", "type": "str", "default": "default_value1", "description": "Description for field1"}, + {"name": "field2", "type": "int", "default": 42, "description": "Description for field2", "multiple": True}, + ] + model_class = build_model_from_schema(schema) + assert issubclass(model_class, BaseModel) + + # Validates that the last occurrence of a duplicate field name defines the type in the schema + def test_no_duplicate_field_names_fixed_fixed(self): + schema = [ + {"name": "field1", "type": "str", "default": "default_value1"}, + {"name": "field2", "type": "int", "default": 0}, + {"name": "field1", "type": "float", "default": 0.0}, # Duplicate field name + ] + model = build_model_from_schema(schema) + assert model.__annotations__["field1"] == float # noqa: E721 + assert model.__annotations__["field2"] == int # noqa: E721 diff --git a/src/backend/tests/unit/initial_setup/__init__.py b/src/backend/tests/unit/initial_setup/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git 
a/src/backend/tests/unit/initial_setup/starter_projects/__init__.py b/src/backend/tests/unit/initial_setup/starter_projects/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/src/backend/tests/unit/initial_setup/starter_projects/test_memory_chatbot.py b/src/backend/tests/unit/initial_setup/starter_projects/test_memory_chatbot.py index a5fe1b700314..32f95759abb9 100644 --- a/src/backend/tests/unit/initial_setup/starter_projects/test_memory_chatbot.py +++ b/src/backend/tests/unit/initial_setup/starter_projects/test_memory_chatbot.py @@ -1,15 +1,18 @@ +import operator from collections import deque +from typing import TYPE_CHECKING import pytest - -from langflow.components.helpers.Memory import MemoryComponent -from langflow.components.inputs.ChatInput import ChatInput -from langflow.components.models.OpenAIModel import OpenAIModelComponent -from langflow.components.outputs.ChatOutput import ChatOutput -from langflow.components.prompts.Prompt import PromptComponent +from langflow.components.helpers.memory import MemoryComponent +from langflow.components.inputs import ChatInput +from langflow.components.models import OpenAIModelComponent +from langflow.components.outputs import ChatOutput +from langflow.components.prompts import PromptComponent from langflow.graph import Graph from langflow.graph.graph.constants import Finish -from langflow.graph.graph.schema import GraphDump + +if TYPE_CHECKING: + from langflow.graph.graph.schema import GraphDump @pytest.fixture @@ -30,23 +33,36 @@ def memory_chatbot_graph(): openai_component.set( input_value=prompt_component.build_prompt, max_tokens=100, temperature=0.1, api_key="test_api_key" ) - openai_component.get_output("text_output").value = "Mock response" + openai_component.set_on_output(name="text_output", value="Mock response", cache=True) chat_output = ChatOutput(_id="chat_output") chat_output.set(input_value=openai_component.text_response) graph = Graph(chat_input, chat_output) + assert graph.in_degree_map == {"chat_output": 1, "prompt": 2, "openai": 1, "chat_input": 0, "chat_memory": 0} return graph +@pytest.mark.usefixtures("client") def test_memory_chatbot(memory_chatbot_graph): # Now we run step by step expected_order = deque(["chat_input", "chat_memory", "prompt", "openai", "chat_output"]) + assert memory_chatbot_graph.in_degree_map == { + "chat_output": 1, + "prompt": 2, + "openai": 1, + "chat_input": 0, + "chat_memory": 0, + } + assert memory_chatbot_graph.vertices_layers == [["prompt"], ["openai"], ["chat_output"]] + assert memory_chatbot_graph.first_layer == ["chat_input", "chat_memory"] + for step in expected_order: result = memory_chatbot_graph.step() if isinstance(result, Finish): break - assert step == result.vertex.id + + assert step == result.vertex.id, (memory_chatbot_graph.in_degree_map, memory_chatbot_graph.vertices_layers) def test_memory_chatbot_dump_structure(memory_chatbot_graph: Graph): @@ -88,7 +104,7 @@ def test_memory_chatbot_dump_components_and_edges(memory_chatbot_graph: Graph): edges = data_dict["edges"] # sort the nodes by id - nodes = sorted(nodes, key=lambda x: x["id"]) + nodes = sorted(nodes, key=operator.itemgetter("id")) # Check each node assert nodes[0]["data"]["type"] == "ChatInput" diff --git a/src/backend/tests/unit/initial_setup/starter_projects/test_vector_store_rag.py b/src/backend/tests/unit/initial_setup/starter_projects/test_vector_store_rag.py index 076b0cb3b194..967e5eb927e5 100644 --- a/src/backend/tests/unit/initial_setup/starter_projects/test_vector_store_rag.py +++ 
b/src/backend/tests/unit/initial_setup/starter_projects/test_vector_store_rag.py @@ -1,27 +1,20 @@ import copy -from collections import Counter, defaultdict +import operator from textwrap import dedent import pytest - -from langflow.components.data.File import FileComponent -from langflow.components.embeddings.OpenAIEmbeddings import OpenAIEmbeddingsComponent -from langflow.components.helpers.ParseData import ParseDataComponent -from langflow.components.helpers.SplitText import SplitTextComponent -from langflow.components.inputs.ChatInput import ChatInput -from langflow.components.models.OpenAIModel import OpenAIModelComponent -from langflow.components.outputs.ChatOutput import ChatOutput -from langflow.components.prompts.Prompt import PromptComponent -from langflow.components.vectorstores.AstraDB import AstraVectorStoreComponent -from langflow.graph.graph.base import Graph +from langflow.components.data import FileComponent +from langflow.components.embeddings import OpenAIEmbeddingsComponent +from langflow.components.inputs import ChatInput +from langflow.components.models import OpenAIModelComponent +from langflow.components.outputs import ChatOutput +from langflow.components.processing import ParseDataComponent +from langflow.components.processing.split_text import SplitTextComponent +from langflow.components.prompts import PromptComponent +from langflow.components.vectorstores import AstraVectorStoreComponent +from langflow.graph import Graph from langflow.graph.graph.constants import Finish -from langflow.graph.graph.schema import VertexBuildResult -from langflow.schema.data import Data - - -@pytest.fixture -def client(): - pass +from langflow.schema import Data @pytest.fixture @@ -29,7 +22,7 @@ def ingestion_graph(): # Ingestion Graph file_component = FileComponent(_id="file-123") file_component.set(path="test.txt") - file_component.set_on_output("data", value=Data(text="This is a test file.")) + file_component.set_on_output(name="data", value=Data(text="This is a test file."), cache=True) text_splitter = SplitTextComponent(_id="text-splitter-123") text_splitter.set(data_inputs=file_component.load_file) openai_embeddings = OpenAIEmbeddingsComponent(_id="openai-embeddings-123") @@ -41,13 +34,12 @@ def ingestion_graph(): embedding=openai_embeddings.build_embeddings, ingest_data=text_splitter.split_text, api_endpoint="https://astra.example.com", - token="token", + token="token", # noqa: S106 ) - vector_store.set_on_output("vector_store", value="mock_vector_store") - vector_store.set_on_output("base_retriever", value="mock_retriever") - vector_store.set_on_output("search_results", value=[Data(text="This is a test file.")]) - ingestion_graph = Graph(file_component, vector_store) - return ingestion_graph + vector_store.set_on_output(name="base_retriever", value="mock_retriever", cache=True) + vector_store.set_on_output(name="search_results", value=[Data(text="This is a test file.")], cache=True) + + return Graph(file_component, vector_store) @pytest.fixture @@ -60,19 +52,19 @@ def rag_graph(): rag_vector_store.set( search_input=chat_input.message_response, api_endpoint="https://astra.example.com", - token="token", + token="token", # noqa: S106 embedding=openai_embeddings.build_embeddings, ) # Mock search_documents rag_vector_store.set_on_output( - "search_results", + name="search_results", value=[ Data(data={"text": "Hello, world!"}), Data(data={"text": "Goodbye, world!"}), ], + cache=True, ) - rag_vector_store.set_on_output("base_retriever", value="mock_retriever") - 
rag_vector_store.set_on_output("vector_store", value="mock_vector_store") + rag_vector_store.set_on_output(name="base_retriever", value="mock_retriever", cache=True) parse_data = ParseDataComponent(_id="parse-data-123") parse_data.set(data=rag_vector_store.search_documents) prompt_component = PromptComponent(_id="prompt-123") @@ -88,17 +80,16 @@ def rag_graph(): openai_component = OpenAIModelComponent(_id="openai-123") openai_component.set(api_key="sk-123", openai_api_base="https://api.openai.com/v1") - openai_component.set_on_output("text_output", value="Hello, world!") + openai_component.set_on_output(name="text_output", value="Hello, world!", cache=True) openai_component.set(input_value=prompt_component.build_prompt) chat_output = ChatOutput(_id="chatoutput-123") chat_output.set(input_value=openai_component.text_response) - graph = Graph(start=chat_input, end=chat_output) - return graph + return Graph(start=chat_input, end=chat_output) -def test_vector_store_rag(ingestion_graph: Graph, rag_graph: Graph): +def test_vector_store_rag(ingestion_graph, rag_graph): assert ingestion_graph is not None ingestion_ids = [ "file-123", @@ -116,18 +107,10 @@ def test_vector_store_rag(ingestion_graph: Graph, rag_graph: Graph): "rag-vector-store-123", "openai-embeddings-124", ] - for ids, graph, len_results in zip([ingestion_ids, rag_ids], [ingestion_graph, rag_graph], [5, 8]): - results: list[VertexBuildResult] = [] - ids_count = Counter(ids) - results_id_count: dict[str, int] = defaultdict(int) - for result in graph.start(config={"output": {"cache": True}}): - results.append(result) - if hasattr(result, "vertex"): - results_id_count[result.vertex.id] += 1 - - assert ( - len(results) == len_results - ), f"Counts: {ids_count} != {results_id_count}, Diff: {set(ids_count.keys()) - set(results_id_count.keys())}" + for ids, graph, len_results in [(ingestion_ids, ingestion_graph, 5), (rag_ids, rag_graph, 8)]: + results = list(graph.start()) + + assert len(results) == len_results vids = [result.vertex.id for result in results if hasattr(result, "vertex")] assert all(vid in ids for vid in vids), f"Diff: {set(vids) - set(ids)}" assert results[-1] == Finish() @@ -141,10 +124,11 @@ def test_vector_store_rag_dump_components_and_edges(ingestion_graph, rag_graph): ingestion_data = ingestion_graph_dump["data"] ingestion_nodes = ingestion_data["nodes"] + assert len(ingestion_nodes) == 4 ingestion_edges = ingestion_data["edges"] # Sort nodes by id to check components - ingestion_nodes = sorted(ingestion_nodes, key=lambda x: x["id"]) + ingestion_nodes = sorted(ingestion_nodes, key=operator.itemgetter("id")) # Check components in the ingestion graph assert ingestion_nodes[0]["data"]["type"] == "File" @@ -182,7 +166,7 @@ def test_vector_store_rag_dump_components_and_edges(ingestion_graph, rag_graph): rag_edges = rag_data["edges"] # Sort nodes by id to check components - rag_nodes = sorted(rag_nodes, key=lambda x: x["id"]) + rag_nodes = sorted(rag_nodes, key=operator.itemgetter("id")) # Check components in the RAG graph assert rag_nodes[0]["data"]["type"] == "ChatInput" @@ -229,12 +213,14 @@ def test_vector_store_rag_add(ingestion_graph: Graph, rag_graph: Graph): rag_graph_copy = copy.deepcopy(rag_graph) ingestion_graph_copy += rag_graph_copy - assert ( - len(ingestion_graph_copy.vertices) == len(ingestion_graph.vertices) + len(rag_graph.vertices) - ), f"Vertices mismatch: {len(ingestion_graph_copy.vertices)} != {len(ingestion_graph.vertices)} + {len(rag_graph.vertices)}" - assert len(ingestion_graph_copy.edges) == 
len(ingestion_graph.edges) + len( - rag_graph.edges - ), f"Edges mismatch: {len(ingestion_graph_copy.edges)} != {len(ingestion_graph.edges)} + {len(rag_graph.edges)}" + assert len(ingestion_graph_copy.vertices) == len(ingestion_graph.vertices) + len(rag_graph.vertices), ( + f"Vertices mismatch: {len(ingestion_graph_copy.vertices)} " + f"!= {len(ingestion_graph.vertices)} + {len(rag_graph.vertices)}" + ) + assert len(ingestion_graph_copy.edges) == len(ingestion_graph.edges) + len(rag_graph.edges), ( + f"Edges mismatch: {len(ingestion_graph_copy.edges)} " + f"!= {len(ingestion_graph.edges)} + {len(rag_graph.edges)}" + ) combined_graph_dump = ingestion_graph_copy.dump( name="Combined Graph", description="Graph for data ingestion and RAG", endpoint_name="combined" @@ -245,7 +231,7 @@ def test_vector_store_rag_add(ingestion_graph: Graph, rag_graph: Graph): combined_edges = combined_data["edges"] # Sort nodes by id to check components - combined_nodes = sorted(combined_nodes, key=lambda x: x["id"]) + combined_nodes = sorted(combined_nodes, key=operator.itemgetter("id")) # Expected components in the combined graph (both ingestion and RAG nodes) expected_nodes = sorted( @@ -262,10 +248,10 @@ def test_vector_store_rag_add(ingestion_graph: Graph, rag_graph: Graph): {"id": "prompt-123", "type": "Prompt"}, {"id": "rag-vector-store-123", "type": "AstraDB"}, ], - key=lambda x: x["id"], + key=operator.itemgetter("id"), ) - for expected_node, combined_node in zip(expected_nodes, combined_nodes): + for expected_node, combined_node in zip(expected_nodes, combined_nodes, strict=True): assert combined_node["data"]["type"] == expected_node["type"] assert combined_node["id"] == expected_node["id"] diff --git a/src/backend/tests/unit/inputs/__init__.py b/src/backend/tests/unit/inputs/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/src/backend/tests/unit/inputs/test_inputs.py b/src/backend/tests/unit/inputs/test_inputs.py index b409e82cacae..8d4386bcb738 100644 --- a/src/backend/tests/unit/inputs/test_inputs.py +++ b/src/backend/tests/unit/inputs/test_inputs.py @@ -1,8 +1,7 @@ import pytest -from pydantic import ValidationError - from langflow.inputs.inputs import ( BoolInput, + CodeInput, DataInput, DictInput, DropdownInput, @@ -20,14 +19,10 @@ SecretStrInput, StrInput, TableInput, - instantiate_input, ) +from langflow.inputs.utils import instantiate_input from langflow.schema.message import Message - - -@pytest.fixture -def client(): - pass +from pydantic import ValidationError def test_table_input_valid(): @@ -75,7 +70,7 @@ def test_instantiate_input_valid(): def test_instantiate_input_invalid(): - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Invalid input type: InvalidInput"): instantiate_input("InvalidInput", {"name": "invalid_input", "value": "This is a string"}) @@ -99,6 +94,11 @@ def test_prompt_input_valid(): assert prompt_input.value == "Enter your name" +def test_code_input_valid(): + code_input = CodeInput(name="valid_code", value="def hello():\n print('Hello, World!')") + assert code_input.value == "def hello():\n print('Hello, World!')" + + def test_multiline_input_valid(): multiline_input = MultilineInput(name="valid_multiline", value="This is a\nmultiline input") assert multiline_input.value == "This is a\nmultiline input" @@ -214,12 +214,15 @@ def test_instantiate_input_comprehensive(): "FloatInput": {"name": "float_input", "value": 10.5}, "BoolInput": {"name": "bool_input", "value": True}, "DictInput": {"name": "dict_input", "value": 
{"key": "value"}}, - "MultiselectInput": {"name": "multiselect_input", "value": ["option1", "option2"]}, + "MultiselectInput": { + "name": "multiselect_input", + "value": ["option1", "option2"], + }, } for input_type, data in valid_data.items(): input_instance = instantiate_input(input_type, data) assert isinstance(input_instance, InputTypesMap[input_type]) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Invalid input type: InvalidInput"): instantiate_input("InvalidInput", {"name": "invalid_input", "value": "Invalid"}) diff --git a/src/backend/tests/unit/io/__init__.py b/src/backend/tests/unit/io/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/src/backend/tests/unit/io/test_io_schema.py b/src/backend/tests/unit/io/test_io_schema.py index 63da9be0b079..0c7497a1b94f 100644 --- a/src/backend/tests/unit/io/test_io_schema.py +++ b/src/backend/tests/unit/io/test_io_schema.py @@ -1,14 +1,10 @@ -from typing import List, Literal +from typing import TYPE_CHECKING, Literal import pytest -from pydantic.fields import FieldInfo +from langflow.components.inputs import ChatInput -from langflow.components.inputs.ChatInput import ChatInput - - -@pytest.fixture -def client(): - pass +if TYPE_CHECKING: + from pydantic.fields import FieldInfo def test_create_input_schema(): @@ -96,7 +92,7 @@ def test_is_list_attribute_processing(self): input_instance = StrInput(name="test_field", is_list=True) schema = create_input_schema([input_instance]) field_info: FieldInfo = schema.model_fields["test_field"] - assert field_info.annotation == List[str] + assert field_info.annotation == list[str] # Input with options attribute is processed correctly def test_options_attribute_processing(self): @@ -116,7 +112,7 @@ def test_non_standard_field_types_handling(self): input_instance = FileInput(name="file_field") schema = create_input_schema([input_instance]) field_info = schema.model_fields["file_field"] - assert field_info.annotation == str + assert field_info.annotation is str # Inputs with mixed required and optional fields are processed correctly def test_mixed_required_optional_fields_processing(self): @@ -184,7 +180,7 @@ def test_is_list_handling(self): input_instance = StrInput(name="test_field", is_list=True) schema = create_input_schema([input_instance]) field_info = schema.model_fields["test_field"] - assert field_info.annotation == List[str] + assert field_info.annotation == list[str] # Converting FieldTypes to corresponding Python types def test_field_types_conversion(self): @@ -194,7 +190,7 @@ def test_field_types_conversion(self): input_instance = IntInput(name="int_field") schema = create_input_schema([input_instance]) field_info = schema.model_fields["int_field"] - assert field_info.annotation == int + assert field_info.annotation is int # Use 'is' for type comparison # Setting default values for non-required fields def test_default_values_for_non_required_fields(self): @@ -218,18 +214,6 @@ def test_missing_attributes_handling(self): assert field_info.description == "" # Handling invalid field types - def test_invalid_field_types_handling(self): - from langflow.inputs.inputs import StrInput - from langflow.io.schema import create_input_schema - - class InvalidFieldType: - pass - - input_instance = StrInput(name="test_field") - input_instance.field_type = InvalidFieldType() - - with pytest.raises(KeyError): - create_input_schema([input_instance]) # Handling input types with None as default value def test_none_default_value_handling(self): diff --git 
a/src/backend/tests/unit/io/test_table_schema.py b/src/backend/tests/unit/io/test_table_schema.py new file mode 100644 index 000000000000..423731943098 --- /dev/null +++ b/src/backend/tests/unit/io/test_table_schema.py @@ -0,0 +1,54 @@ +# Generated by qodo Gen + +import pytest +from langflow.schema.table import Column, FormatterType + + +class TestColumn: + # Creating a Column instance without display_name sets it to the name + def test_create_column_without_display_name(self): + column = Column(name="test_column") + assert column.display_name == "test_column" + + # Creating a Column instance with valid formatter values + def test_create_column_with_valid_formatter(self): + column = Column(display_name="Test Column", name="test_column", formatter="date") + assert column.formatter == FormatterType.date + + # Formatter is set based on provided formatter value + def test_formatter_set_based_on_value(self): + column = Column(display_name="Test Column", name="test_column", formatter="int") + assert column.formatter == FormatterType.number + + # Default values for sortable and filterable are set to True + def test_default_sortable_filterable(self): + column = Column(display_name="Test Column", name="test_column") + assert column.sortable is True + assert column.filterable is True + + # Ensure formatter field is correctly set when provided a FormatterType + def test_formatter_explicitly_set_to_enum(self): + column = Column(display_name="Date Column", name="date_column", formatter=FormatterType.date) + assert column.formatter == FormatterType.date + + # Invalid formatter raises ValueError + def test_invalid_formatter_raises_value_error(self): + with pytest.raises(ValueError, match="'invalid' is not a valid FormatterType"): + Column(display_name="Invalid Column", name="invalid_column", formatter="invalid") + + # Formatter is None when not provided + def test_formatter_none_when_not_provided(self): + column = Column(display_name="Test Column", name="test_column") + assert column.formatter is None + + # Description and default can be set + def test_description_and_default(self): + column = Column( + display_name="Test Column", name="test_column", description="A test column", default="default_value" + ) + assert column.description == "A test column" + assert column.default == "default_value" + + def test_create_with_type_instead_of_formatter(self): + column = Column(display_name="Test Column", name="test_column", type="date") + assert column.formatter == FormatterType.date diff --git a/src/backend/tests/unit/schema/__init__.py b/src/backend/tests/unit/schema/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/src/backend/tests/unit/schema/test_content_block.py b/src/backend/tests/unit/schema/test_content_block.py new file mode 100644 index 000000000000..05a1ee0cf4f8 --- /dev/null +++ b/src/backend/tests/unit/schema/test_content_block.py @@ -0,0 +1,87 @@ +import pytest +from langflow.schema.content_block import ContentBlock +from langflow.schema.content_types import CodeContent, ErrorContent, JSONContent, MediaContent, TextContent, ToolContent + + +class TestContentBlock: + def test_initialize_with_valid_title_and_contents(self): + """Test initializing ContentBlock with valid title and contents.""" + valid_title = "Sample Title" + valid_contents = [TextContent(type="text", text="Sample text")] + content_block = ContentBlock(title=valid_title, contents=valid_contents) + + assert content_block.title == valid_title + assert len(content_block.contents) == 1 + assert 
isinstance(content_block.contents[0], TextContent) + assert content_block.contents[0].text == "Sample text" + assert content_block.allow_markdown is True + assert content_block.media_url is None + + def test_initialize_with_empty_contents(self): + """Test initializing ContentBlock with empty contents list.""" + valid_title = "Sample Title" + empty_contents = [] + content_block = ContentBlock(title=valid_title, contents=empty_contents) + + assert content_block.title == valid_title + assert content_block.contents == empty_contents + assert content_block.allow_markdown is True + assert content_block.media_url is None + + def test_validate_different_content_types(self): + """Test ContentBlock with different content types.""" + contents = [ + TextContent(type="text", text="Sample text"), + CodeContent(type="code", code="print('hello')", language="python"), + ErrorContent(type="error", error="Sample error"), + JSONContent(type="json", data={"key": "value"}), + MediaContent(type="media", urls=["http://example.com/image.jpg"]), + ToolContent(type="tool_use", output="Sample thought", name="test_tool", tool_input={"input": "test"}), + ] + + content_block = ContentBlock(title="Test", contents=contents) + assert len(content_block.contents) == 6 + assert isinstance(content_block.contents[0], TextContent) + assert isinstance(content_block.contents[1], CodeContent) + assert isinstance(content_block.contents[2], ErrorContent) + assert isinstance(content_block.contents[3], JSONContent) + assert isinstance(content_block.contents[4], MediaContent) + assert isinstance(content_block.contents[5], ToolContent) + + def test_invalid_contents_type(self): + """Test that providing contents as dict raises TypeError.""" + with pytest.raises(TypeError, match="Contents must be a list of ContentTypes"): + ContentBlock(title="Test", contents={"invalid": "content"}) + + def test_single_content_conversion(self): + """Test that single content item is converted to list.""" + single_content = TextContent(type="text", text="Single item") + content_block = ContentBlock(title="Test", contents=single_content) + assert isinstance(content_block.contents, list) + assert len(content_block.contents) == 1 + + def test_serialize_contents(self): + """Test serialization of contents to dict format.""" + contents = [ + TextContent(type="text", text="Sample text"), + CodeContent(type="code", code="print('hello')", language="python"), + ] + block = ContentBlock(title="Test Block", contents=contents) + serialized = block.serialize_contents(block.contents) + + assert isinstance(serialized, list) + assert len(serialized) == 2 + assert serialized[0]["type"] == "text" + assert serialized[1]["type"] == "code" + assert serialized[1]["language"] == "python" + + def test_media_url_handling(self): + """Test handling of media_url field.""" + media_urls = ["http://example.com/1.jpg", "http://example.com/2.jpg"] + block = ContentBlock(title="Test", contents=[TextContent(type="text", text="Sample")], media_url=media_urls) + assert block.media_url == media_urls + + def test_allow_markdown_override(self): + """Test overriding allow_markdown default value.""" + block = ContentBlock(title="Test", contents=[], allow_markdown=False) + assert block.allow_markdown is False diff --git a/src/backend/tests/unit/schema/test_content_types.py b/src/backend/tests/unit/schema/test_content_types.py new file mode 100644 index 000000000000..d69a734a438d --- /dev/null +++ b/src/backend/tests/unit/schema/test_content_types.py @@ -0,0 +1,164 @@ +from langflow.schema.content_types 
import ( + BaseContent, + CodeContent, + ErrorContent, + JSONContent, + MediaContent, + TextContent, + ToolContent, +) + + +class TestBaseContent: + def test_base_content_serialization(self): + """Test BaseContent serialization methods.""" + content = BaseContent(type="test") + + # Test to_dict method + dict_content = content.to_dict() + assert isinstance(dict_content, dict) + assert dict_content["type"] == "test" + + # Test from_dict method + reconstructed = BaseContent.from_dict(dict_content) + assert isinstance(reconstructed, BaseContent) + assert reconstructed.type == "test" + + def test_base_content_with_header(self): + """Test BaseContent with header information.""" + header = {"title": "Test Title", "icon": "test-icon"} + content = BaseContent(type="test", header=header) + assert content.header == header + assert content.header["title"] == "Test Title" + assert content.header["icon"] == "test-icon" + + def test_base_content_with_duration(self): + """Test BaseContent with duration field.""" + content = BaseContent(type="test", duration=1000) + assert content.duration == 1000 + + +class TestErrorContent: + def test_error_content_creation(self): + """Test ErrorContent creation and fields.""" + error = ErrorContent( + component="test_component", + field="test_field", + reason="test failed", + solution="fix it", + traceback="traceback info", + ) + assert error.type == "error" + assert error.component == "test_component" + assert error.field == "test_field" + assert error.reason == "test failed" + assert error.solution == "fix it" + assert error.traceback == "traceback info" + + def test_error_content_optional_fields(self): + """Test ErrorContent with minimal fields.""" + error = ErrorContent() + assert error.type == "error" + assert error.component is None + assert error.field is None + + +class TestTextContent: + def test_text_content_creation(self): + """Test TextContent creation and fields.""" + text = TextContent(text="Hello, world!") + assert text.type == "text" + assert text.text == "Hello, world!" 
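+    # `duration` in the next test is the field shared via BaseContent (covered
+    # directly in TestBaseContent.test_base_content_with_duration above), so this
+    # checks the inherited attribute rather than one TextContent declares itself.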
+ + def test_text_content_with_duration(self): + """Test TextContent with duration.""" + text = TextContent(text="Hello", duration=500) + assert text.duration == 500 + + +class TestMediaContent: + def test_media_content_creation(self): + """Test MediaContent creation and fields.""" + urls = ["http://example.com/1.jpg", "http://example.com/2.jpg"] + media = MediaContent(urls=urls, caption="Test images") + assert media.type == "media" + assert media.urls == urls + assert media.caption == "Test images" + + def test_media_content_without_caption(self): + """Test MediaContent without caption.""" + media = MediaContent(urls=["http://example.com/1.jpg"]) + assert media.caption is None + + +class TestJSONContent: + def test_json_content_creation(self): + """Test JSONContent creation and fields.""" + data = {"key": "value", "nested": {"inner": "data"}} + json_content = JSONContent(data=data) + assert json_content.type == "json" + assert json_content.data == data + + def test_json_content_complex_data(self): + """Test JSONContent with complex data structures.""" + data = {"string": "text", "number": 42, "list": [1, 2, 3], "nested": {"a": 1, "b": 2}} + json_content = JSONContent(data=data) + assert json_content.data == data + + +class TestCodeContent: + def test_code_content_creation(self): + """Test CodeContent creation and fields.""" + code = CodeContent(code="print('hello')", language="python", title="Test Script") + assert code.type == "code" + assert code.code == "print('hello')" + assert code.language == "python" + assert code.title == "Test Script" + + def test_code_content_without_title(self): + """Test CodeContent without title.""" + code = CodeContent(code="console.log('hello')", language="javascript") + assert code.title is None + + +class TestToolContent: + def test_tool_content_creation(self): + """Test ToolContent creation and fields.""" + tool = ToolContent(name="test_tool", tool_input={"param": "value"}, output="result", duration=100) + assert tool.type == "tool_use" + assert tool.name == "test_tool" + assert tool.tool_input == {"param": "value"} + assert tool.output == "result" + assert tool.duration == 100 + + def test_tool_content_with_error(self): + """Test ToolContent with error field.""" + tool = ToolContent(name="test_tool", tool_input={}, error="Something went wrong") + assert tool.error == "Something went wrong" + assert tool.output is None + + def test_tool_content_minimal(self): + """Test ToolContent with minimal fields.""" + tool = ToolContent() + assert tool.type == "tool_use" + assert tool.tool_input == {} + assert tool.name is None + assert tool.output is None + assert tool.error is None + + +def test_content_type_discrimination(): + """Test that different content types are properly discriminated.""" + contents = [ + TextContent(text="Hello"), + CodeContent(code="print('hi')", language="python"), + ErrorContent(reason="test error"), + JSONContent(data={"test": "data"}), + MediaContent(urls=["http://example.com/image.jpg"]), + ToolContent(name="test_tool"), + ] + + assert all( + content.type == expected + for content, expected in zip(contents, ["text", "code", "error", "json", "media", "tool_use"], strict=False) + ) diff --git a/src/backend/tests/unit/schema/test_schema_data.py b/src/backend/tests/unit/schema/test_schema_data.py new file mode 100644 index 000000000000..6eff0ac68f53 --- /dev/null +++ b/src/backend/tests/unit/schema/test_schema_data.py @@ -0,0 +1,92 @@ +import pytest +from langchain_core.messages import AIMessage, HumanMessage +from langflow.schema.data 
import Data +from langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_USER + + +@pytest.fixture +def sample_image(tmp_path): + """Create a sample image file for testing.""" + image_path = tmp_path / "test_image.png" + # Create a small black 1x1 pixel PNG file + import base64 + + image_content = base64.b64decode( + "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAACklEQVR4nGMAAQAABQABDQottAAAAABJRU5ErkJggg==" + ) + image_path.write_bytes(image_content) + return image_path + + +class TestDataSchema: + def test_data_to_message_with_text_only(self): + """Test conversion of Data to Message with text only.""" + data = Data(data={"text": "Hello, world!", "sender": MESSAGE_SENDER_USER}) + message = data.to_lc_message() + assert isinstance(message, HumanMessage) + assert message.content == [{"type": "text", "text": "Hello, world!"}] + + def test_data_to_message_with_image(self, sample_image): + """Test conversion of Data to Message with text and image.""" + data = Data(data={"text": "Check out this image", "sender": MESSAGE_SENDER_USER, "files": [str(sample_image)]}) + message = data.to_lc_message() + + assert isinstance(message, HumanMessage) + assert isinstance(message.content, list) + assert len(message.content) == 2 + + # Check text content + assert message.content[0] == {"type": "text", "text": "Check out this image"} + + # Check image content + assert message.content[1]["type"] == "image_url" + assert "url" in message.content[1]["image_url"] + assert message.content[1]["image_url"]["url"].startswith("data:image/png;base64,") + + def test_data_to_message_with_multiple_images(self, sample_image, tmp_path): + """Test conversion of Data to Message with multiple images.""" + # Create a second image + second_image = tmp_path / "second_image.png" + second_image.write_bytes(sample_image.read_bytes()) + + data = Data( + data={ + "text": "Multiple images", + "sender": MESSAGE_SENDER_USER, + "files": [str(sample_image), str(second_image)], + } + ) + message = data.to_lc_message() + + assert isinstance(message, HumanMessage) + assert isinstance(message.content, list) + assert len(message.content) == 3 # text + 2 images + + # Check text content + assert message.content[0]["type"] == "text" + + # Check both images + assert message.content[1]["type"] == "image_url" + assert message.content[2]["type"] == "image_url" + assert all(content["image_url"]["url"].startswith("data:image/png;base64,") for content in message.content[1:]) + + def test_data_to_message_ai_response(self): + """Test conversion of Data to AI Message.""" + data = Data(data={"text": "AI response", "sender": MESSAGE_SENDER_AI}) + message = data.to_lc_message() + assert isinstance(message, AIMessage) + assert message.content == "AI response" + + def test_data_to_message_missing_required_keys(self): + """Test conversion fails with missing required keys.""" + data = Data(data={"incomplete": "data"}) + with pytest.raises(ValueError, match="Missing required keys"): + data.to_lc_message() + + def test_data_to_message_invalid_image_path(self, tmp_path): + """Test handling of invalid image path.""" + non_existent_image = tmp_path / "non_existent.png" + data = Data(data={"text": "Invalid image", "sender": MESSAGE_SENDER_USER, "files": [str(non_existent_image)]}) + + with pytest.raises(FileNotFoundError): + data.to_lc_message() diff --git a/src/backend/tests/unit/schema/test_schema_message.py b/src/backend/tests/unit/schema/test_schema_message.py index 846ca4d61b9f..f20eededb637 100644 --- 
a/src/backend/tests/unit/schema/test_schema_message.py +++ b/src/backend/tests/unit/schema/test_schema_message.py @@ -1,30 +1,179 @@ +import shutil +from pathlib import Path + import pytest +from langchain_core.messages import AIMessage, HumanMessage from langchain_core.prompts.chat import ChatPromptTemplate - from langflow.schema.message import Message +from langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_USER +from platformdirs import user_cache_dir @pytest.fixture -def client(): - pass +def langflow_cache_dir(tmp_path): + """Create a temporary langflow cache directory.""" + cache_dir = tmp_path / "langflow" + cache_dir.mkdir(parents=True) + return cache_dir -@pytest.mark.asyncio -async def test_message_async_prompt_serialization(): - template = "Hello, {name}!" - message = await Message.from_template_and_variables(template, name="Langflow") - assert message.text == "Hello, Langflow!" +@pytest.fixture +def sample_image(langflow_cache_dir): + """Create a sample image file for testing.""" + # Create the test_flow directory in the cache + flow_dir = langflow_cache_dir / "test_flow" + flow_dir.mkdir(parents=True, exist_ok=True) - prompt = message.load_lc_prompt() - assert isinstance(prompt, ChatPromptTemplate) - assert prompt.messages[0].content == "Hello, Langflow!" + # Create the image in the flow directory + image_path = flow_dir / "test_image.png" + # Create a small black 1x1 pixel PNG file + import base64 + + image_content = base64.b64decode( + "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAACklEQVR4nGMAAQAABQABDQottAAAAABJRU5ErkJggg==" + ) + image_path.write_bytes(image_content) + + # Use platformdirs to get the cache directory + real_cache_dir = Path(user_cache_dir("langflow")) + real_cache_dir.mkdir(parents=True, exist_ok=True) + real_flow_dir = real_cache_dir / "test_flow" + real_flow_dir.mkdir(parents=True, exist_ok=True) + + # Copy the image to the real cache location + real_image_path = real_flow_dir / "test_image.png" + shutil.copy2(str(image_path), str(real_image_path)) + + return image_path def test_message_prompt_serialization(): template = "Hello, {name}!" - message = Message.sync_from_template_and_variables(template, name="Langflow") + message = Message.from_template(template, name="Langflow") assert message.text == "Hello, Langflow!" prompt = message.load_lc_prompt() assert isinstance(prompt, ChatPromptTemplate) assert prompt.messages[0].content == "Hello, Langflow!" + + +def test_message_from_human_text(): + """Test creating a message from human text.""" + text = "Hello, AI!" + message = Message(text=text, sender=MESSAGE_SENDER_USER) + lc_message = message.to_lc_message() + + assert isinstance(lc_message, HumanMessage) + assert lc_message.content == text + + +def test_message_from_ai_text(): + """Test creating a message from AI text.""" + text = "Hello, Human!" 
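+    # The sender constant drives the conversion below: MESSAGE_SENDER_AI yields an
+    # AIMessage, just as MESSAGE_SENDER_USER yields a HumanMessage in the test above.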
+ message = Message(text=text, sender=MESSAGE_SENDER_AI) + lc_message = message.to_lc_message() + + assert isinstance(lc_message, AIMessage) + assert lc_message.content == text + + +def test_message_with_single_image(sample_image): + """Test creating a message with text and an image.""" + text = "Check out this image" + # Format the file path as expected: "flow_id/filename" + file_path = f"test_flow/{sample_image.name}" + message = Message(text=text, sender=MESSAGE_SENDER_USER, files=[file_path]) + lc_message = message.to_lc_message() + + assert isinstance(lc_message, HumanMessage) + assert isinstance(lc_message.content, list) + assert len(lc_message.content) == 2 + + # Check text content + assert lc_message.content[0] == {"type": "text", "text": text} + + # Check image content + assert lc_message.content[1]["type"] == "image_url" + assert "url" in lc_message.content[1]["image_url"] + assert lc_message.content[1]["image_url"]["url"].startswith("data:image/png;base64,") + + +def test_message_with_multiple_images(sample_image, langflow_cache_dir): + """Test creating a message with multiple images.""" + # Create a second image in the cache directory + flow_dir = langflow_cache_dir / "test_flow" + second_image = flow_dir / "second_image.png" + shutil.copy2(str(sample_image), str(second_image)) + + # Use platformdirs for the real cache location + real_cache_dir = Path(user_cache_dir("langflow")) / "test_flow" + real_cache_dir.mkdir(parents=True, exist_ok=True) + real_second_image = real_cache_dir / "second_image.png" + shutil.copy2(str(sample_image), str(real_second_image)) + + text = "Multiple images" + message = Message( + text=text, + sender=MESSAGE_SENDER_USER, + files=[f"test_flow/{sample_image.name}", f"test_flow/{second_image.name}"], + ) + lc_message = message.to_lc_message() + + assert isinstance(lc_message, HumanMessage) + assert isinstance(lc_message.content, list) + assert len(lc_message.content) == 3 # text + 2 images + + # Check text content + assert lc_message.content[0] == {"type": "text", "text": text} + + # Check both images + assert all( + content["type"] == "image_url" and content["image_url"]["url"].startswith("data:image/png;base64,") + for content in lc_message.content[1:] + ) + + +def test_message_with_invalid_image_path(): + """Test handling of invalid image path.""" + file_path = "test_flow/non_existent.png" + message = Message(text="Invalid image", sender=MESSAGE_SENDER_USER, files=[file_path]) + + with pytest.raises(FileNotFoundError): + message.to_lc_message() + + +def test_message_without_sender(): + """Test message creation without sender specification.""" + # Create message without sender + message = Message(text="Test message") + # Verify the message was created but has no sender + assert message.text == "Test message" + assert message.sender is None + + +def test_message_serialization(): + """Test message serialization to dict.""" + message = Message(text="Test message", sender=MESSAGE_SENDER_USER) + serialized = message.model_dump() + + assert serialized["text"] == "Test message" + assert serialized["sender"] == MESSAGE_SENDER_USER + + +def test_message_to_lc_without_sender(): + """Test converting a message without sender to langchain message.""" + message = Message(text="Test message") + # When no sender is specified, it defaults to HumanMessage + lc_message = message.to_lc_message() + assert isinstance(lc_message, HumanMessage) + assert lc_message.content == "Test message" + + +# Clean up the cache directory after all tests +@pytest.fixture(autouse=True) 
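+# autouse=True wraps every test in this module, so the image copies that
+# sample_image (and the multi-image test) place under user_cache_dir("langflow")
+# are removed even when a test fails.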
+def cleanup(): + yield + # Clean up the real cache directory after tests + cache_dir = Path(user_cache_dir("langflow")) + if cache_dir.exists(): + shutil.rmtree(str(cache_dir)) diff --git a/src/backend/tests/unit/services/__init__.py b/src/backend/tests/unit/services/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/src/backend/tests/unit/services/variable/__init__.py b/src/backend/tests/unit/services/variable/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/src/backend/tests/unit/services/variable/test_service.py b/src/backend/tests/unit/services/variable/test_service.py index 62e4914ab5bb..081c7147f29b 100644 --- a/src/backend/tests/unit/services/variable/test_service.py +++ b/src/backend/tests/unit/services/variable/test_service.py @@ -1,16 +1,14 @@ -from langflow.services.database.models.variable.model import VariableUpdate -import pytest +from datetime import datetime from unittest.mock import patch from uuid import uuid4 -from datetime import datetime -from sqlmodel import SQLModel, Session, create_engine -from langflow.services.deps import get_settings_service -from langflow.services.variable.service import GENERIC_TYPE, CREDENTIAL_TYPE, DatabaseVariableService - -@pytest.fixture -def client(): - pass +import pytest +from langflow.services.database.models.variable.model import VariableUpdate +from langflow.services.deps import get_settings_service +from langflow.services.settings.constants import VARIABLES_TO_GET_FROM_ENVIRONMENT +from langflow.services.variable.constants import CREDENTIAL_TYPE, GENERIC_TYPE +from langflow.services.variable.service import DatabaseVariableService +from sqlmodel import Session, SQLModel, create_engine @pytest.fixture @@ -27,26 +25,32 @@ def session(): yield session -@pytest.mark.skip(reason="Temporarily disabled") -def test_initialize_user_variables__donkey(service, session): +def test_initialize_user_variables__create_and_update(service, session): user_id = uuid4() - name = "OPENAI_API_KEY" - value = "donkey" - service.initialize_user_variables(user_id, session=session) - result = service.create_variable(user_id, "OPENAI_API_KEY", "donkey", session=session) - new_service = DatabaseVariableService(get_settings_service()) - new_service.initialize_user_variables(user_id, session=session) + field = "" + good_vars = {k: f"value{i}" for i, k in enumerate(VARIABLES_TO_GET_FROM_ENVIRONMENT)} + bad_vars = {"VAR1": "value1", "VAR2": "value2", "VAR3": "value3"} + env_vars = {**good_vars, **bad_vars} + + service.create_variable(user_id, "OPENAI_API_KEY", "outdate", session=session) + env_vars["OPENAI_API_KEY"] = "updated_value" - result = new_service.get_variable(user_id, name, "", session=session) + with patch.dict("os.environ", env_vars, clear=True): + service.initialize_user_variables(user_id=user_id, session=session) - assert result != value + variables = service.list_variables(user_id, session=session) + for name in variables: + value = service.get_variable(user_id, name, field, session=session) + assert value == env_vars[name] + + assert all(i in variables for i in good_vars) + assert all(i not in variables for i in bad_vars) def test_initialize_user_variables__not_found_variable(service, session): with patch("langflow.services.variable.service.DatabaseVariableService.create_variable") as m: m.side_effect = Exception() service.initialize_user_variables(uuid4(), session=session) - assert True @@ -68,19 +72,16 @@ def test_get_variable(service, session): assert result == value -def 
test_get_variable__ValueError(service, session): +def test_get_variable__valueerror(service, session): user_id = uuid4() name = "name" field = "" - with pytest.raises(ValueError) as exc: + with pytest.raises(ValueError, match=f"{name} variable not found."): service.get_variable(user_id, name, field, session) - assert name in str(exc.value) - assert "variable not found" in str(exc.value) - -def test_get_variable__TypeError(service, session): +def test_get_variable__typeerror(service, session): user_id = uuid4() name = "name" value = "value" @@ -138,25 +139,23 @@ def test_update_variable(service, session): assert isinstance(result.updated_at, datetime) -def test_update_variable__ValueError(service, session): +def test_update_variable__valueerror(service, session): user_id = uuid4() name = "name" value = "value" - with pytest.raises(ValueError) as exc: + with pytest.raises(ValueError, match=f"{name} variable not found."): service.update_variable(user_id, name, value, session=session) - assert name in str(exc.value) - assert "variable not found" in str(exc.value) - def test_update_variable_fields(service, session): user_id = uuid4() + new_name = new_value = "donkey" variable = service.create_variable(user_id, "old_name", "old_value", session=session) saved = variable.model_dump() variable = VariableUpdate(**saved) - variable.name = "new_name" - variable.value = "new_value" + variable.name = new_name + variable.value = new_value variable.default_fields = ["new_field"] result = service.update_variable_fields( @@ -166,6 +165,8 @@ def test_update_variable_fields(service, session): session=session, ) + assert result.name == new_name + assert result.value != new_value assert saved.get("id") == result.id assert saved.get("user_id") == result.user_id assert saved.get("name") != result.name @@ -185,26 +186,21 @@ def test_delete_variable(service, session): service.create_variable(user_id, name, value, session=session) recovered = service.get_variable(user_id, name, field, session=session) service.delete_variable(user_id, name, session=session) - with pytest.raises(ValueError) as exc: + with pytest.raises(ValueError, match=f"{name} variable not found."): service.get_variable(user_id, name, field, session) assert recovered == value - assert name in str(exc.value) - assert "variable not found" in str(exc.value) -def test_delete_variable__ValueError(service, session): +def test_delete_variable__valueerror(service, session): user_id = uuid4() name = "name" - with pytest.raises(ValueError) as exc: + with pytest.raises(ValueError, match=f"{name} variable not found."): service.delete_variable(user_id, name, session=session) - assert name in str(exc.value) - assert "variable not found" in str(exc.value) - -def test_delete_varaible_by_id(service, session): +def test_delete_variable_by_id(service, session): user_id = uuid4() name = "name" value = "value" @@ -213,24 +209,19 @@ def test_delete_varaible_by_id(service, session): saved = service.create_variable(user_id, name, value, session=session) recovered = service.get_variable(user_id, name, field, session=session) service.delete_variable_by_id(user_id, saved.id, session=session) - with pytest.raises(ValueError) as exc: + with pytest.raises(ValueError, match=f"{name} variable not found."): service.get_variable(user_id, name, field, session) assert recovered == value - assert name in str(exc.value) - assert "variable not found" in str(exc.value) -def test_delete_variable_by_id__ValueError(service, session): +def test_delete_variable_by_id__valueerror(service, 
session): user_id = uuid4() variable_id = uuid4() - with pytest.raises(ValueError) as exc: + with pytest.raises(ValueError, match=f"{variable_id} variable not found."): service.delete_variable_by_id(user_id, variable_id, session=session) - assert str(variable_id) in str(exc.value) - assert "variable not found" in str(exc.value) - def test_create_variable(service, session): user_id = uuid4() diff --git a/src/backend/tests/unit/test_api_key.py b/src/backend/tests/unit/test_api_key.py index 92d649cae32b..d3358f873486 100644 --- a/src/backend/tests/unit/test_api_key.py +++ b/src/backend/tests/unit/test_api_key.py @@ -1,44 +1,48 @@ import pytest +from httpx import AsyncClient from langflow.services.database.models.api_key import ApiKeyCreate @pytest.fixture -def api_key(client, logged_in_headers, active_user): +async def api_key( + client, + logged_in_headers, + active_user, # noqa: ARG001 +): api_key = ApiKeyCreate(name="test-api-key") - response = client.post("api/v1/api_key", data=api_key.model_dump_json(), headers=logged_in_headers) + response = await client.post("api/v1/api_key/", data=api_key.model_dump_json(), headers=logged_in_headers) assert response.status_code == 200, response.text return response.json() -def test_get_api_keys(client, logged_in_headers, api_key): - response = client.get("api/v1/api_key", headers=logged_in_headers) +@pytest.mark.usefixtures("api_key") +async def test_get_api_keys(client: AsyncClient, logged_in_headers): + response = await client.get("api/v1/api_key/", headers=logged_in_headers) assert response.status_code == 200, response.text data = response.json() assert "total_count" in data assert "user_id" in data assert "api_keys" in data assert any("test-api-key" in api_key["name"] for api_key in data["api_keys"]) - # assert all api keys in data["api_keys"] are masked assert all("**" in api_key["api_key"] for api_key in data["api_keys"]) -def test_create_api_key(client, logged_in_headers): +async def test_create_api_key(client: AsyncClient, logged_in_headers): api_key_name = "test-api-key" - response = client.post("api/v1/api_key", json={"name": api_key_name}, headers=logged_in_headers) + response = await client.post("api/v1/api_key/", json={"name": api_key_name}, headers=logged_in_headers) assert response.status_code == 200 data = response.json() - assert "name" in data and data["name"] == api_key_name + assert "name" in data + assert data["name"] == api_key_name assert "api_key" in data - # When creating the API key is returned which is - # the only time the API key is unmasked assert "**" not in data["api_key"] -def test_delete_api_key(client, logged_in_headers, active_user, api_key): - # Assuming a function to create a test API key, returning the key ID +@pytest.mark.usefixtures("active_user") +async def test_delete_api_key(client, logged_in_headers, api_key): api_key_id = api_key["id"] - response = client.delete(f"api/v1/api_key/{api_key_id}", headers=logged_in_headers) + response = await client.delete(f"api/v1/api_key/{api_key_id}", headers=logged_in_headers) assert response.status_code == 200 data = response.json() assert data["detail"] == "API Key deleted" diff --git a/src/backend/tests/unit/test_chat_endpoint.py b/src/backend/tests/unit/test_chat_endpoint.py index 81d0016ec23d..c892da46301c 100644 --- a/src/backend/tests/unit/test_chat_endpoint.py +++ b/src/backend/tests/unit/test_chat_endpoint.py @@ -1,46 +1,51 @@ import json from uuid import UUID -from orjson import orjson +import pytest from langflow.memory import get_messages from 
langflow.services.database.models.flow import FlowCreate, FlowUpdate +from orjson import orjson -def test_build_flow(client, json_memory_chatbot_no_llm, logged_in_headers): - flow_id = _create_flow(client, json_memory_chatbot_no_llm, logged_in_headers) +@pytest.mark.benchmark +async def test_build_flow(client, json_memory_chatbot_no_llm, logged_in_headers): + flow_id = await _create_flow(client, json_memory_chatbot_no_llm, logged_in_headers) - with client.stream("POST", f"api/v1/build/{flow_id}/flow", json={}, headers=logged_in_headers) as r: - consume_and_assert_stream(r) + async with client.stream("POST", f"api/v1/build/{flow_id}/flow", json={}, headers=logged_in_headers) as r: + await consume_and_assert_stream(r) check_messages(flow_id) -def test_build_flow_from_request_data(client, json_memory_chatbot_no_llm, logged_in_headers): - flow_id = _create_flow(client, json_memory_chatbot_no_llm, logged_in_headers) - flow_data = client.get("api/v1/flows/" + str(flow_id), headers=logged_in_headers).json() +@pytest.mark.benchmark +async def test_build_flow_from_request_data(client, json_memory_chatbot_no_llm, logged_in_headers): + flow_id = await _create_flow(client, json_memory_chatbot_no_llm, logged_in_headers) + response = await client.get("api/v1/flows/" + str(flow_id), headers=logged_in_headers) + flow_data = response.json() - with client.stream( + async with client.stream( "POST", f"api/v1/build/{flow_id}/flow", json={"data": flow_data["data"]}, headers=logged_in_headers ) as r: - consume_and_assert_stream(r) + await consume_and_assert_stream(r) check_messages(flow_id) -def test_build_flow_with_frozen_path(client, json_memory_chatbot_no_llm, logged_in_headers): - flow_id = _create_flow(client, json_memory_chatbot_no_llm, logged_in_headers) +async def test_build_flow_with_frozen_path(client, json_memory_chatbot_no_llm, logged_in_headers): + flow_id = await _create_flow(client, json_memory_chatbot_no_llm, logged_in_headers) - flow_data = client.get("api/v1/flows/" + str(flow_id), headers=logged_in_headers).json() + response = await client.get("api/v1/flows/" + str(flow_id), headers=logged_in_headers) + flow_data = response.json() flow_data["data"]["nodes"][0]["data"]["node"]["frozen"] = True - response = client.patch( - "api/v1/flows/" + str(flow_id), + response = await client.patch( + f"api/v1/flows/{flow_id}", json=FlowUpdate(name="Flow", description="description", data=flow_data["data"]).model_dump(), headers=logged_in_headers, ) response.raise_for_status() - with client.stream("POST", f"api/v1/build/{flow_id}/flow", json={}, headers=logged_in_headers) as r: - consume_and_assert_stream(r) + async with client.stream("POST", f"api/v1/build/{flow_id}/flow", json={}, headers=logged_in_headers) as r: + await consume_and_assert_stream(r) check_messages(flow_id) @@ -57,9 +62,9 @@ def check_messages(flow_id): assert messages[1].sender_name == "AI" -def consume_and_assert_stream(r): +async def consume_and_assert_stream(r): count = 0 - for line in r.iter_lines(): + async for line in r.aiter_lines(): # httpx split by \n, but ndjson sends two \n for each line if not line: continue @@ -68,7 +73,7 @@ def consume_and_assert_stream(r): assert parsed["event"] == "vertices_sorted" ids = parsed["data"]["ids"] ids.sort() - assert ids == ["ChatInput-CIGht", "Memory-amN4Z"] + assert ids == ["ChatInput-CIGht"] to_run = parsed["data"]["to_run"] to_run.sort() @@ -79,15 +84,15 @@ def consume_and_assert_stream(r): elif count == 5: assert parsed["event"] == "end" else: - raise ValueError(f"Unexpected line: {line}") 
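+                # Building the message first keeps the f-string out of the raise
+                # statement itself; presumably this satisfies the flake8-errmsg (EM)
+                # rules in the ruff setup this PR appears to adopt.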
+ msg = f"Unexpected line: {line}" + raise ValueError(msg) count += 1 -def _create_flow(client, json_memory_chatbot_no_llm, logged_in_headers): +async def _create_flow(client, json_memory_chatbot_no_llm, logged_in_headers): vector_store = orjson.loads(json_memory_chatbot_no_llm) data = vector_store["data"] vector_store = FlowCreate(name="Flow", description="description", data=data, endpoint_name="f") - response = client.post("api/v1/flows/", json=vector_store.model_dump(), headers=logged_in_headers) + response = await client.post("api/v1/flows/", json=vector_store.model_dump(), headers=logged_in_headers) response.raise_for_status() - flow_id = response.json()["id"] - return flow_id + return response.json()["id"] diff --git a/src/backend/tests/unit/test_cli.py b/src/backend/tests/unit/test_cli.py index 75614054f2fc..ee40633a181e 100644 --- a/src/backend/tests/unit/test_cli.py +++ b/src/backend/tests/unit/test_cli.py @@ -1,6 +1,3 @@ -from pathlib import Path -from tempfile import tempdir - import pytest from langflow.__main__ import app from langflow.services import deps @@ -14,12 +11,9 @@ def default_settings(): ] -def test_components_path(runner, client, default_settings): - # Create a foldr in the tmp directory - - temp_dir = Path(tempdir) +def test_components_path(runner, default_settings, tmp_path): # create a "components" folder - temp_dir = temp_dir / "components" + temp_dir = tmp_path / "components" temp_dir.mkdir(exist_ok=True) result = runner.invoke( @@ -31,7 +25,7 @@ def test_components_path(runner, client, default_settings): assert str(temp_dir) in settings_service.settings.components_path -def test_superuser(runner, client, session): +def test_superuser(runner): result = runner.invoke(app, ["superuser"], input="admin\nadmin\n") assert result.exit_code == 0, result.stdout assert "Superuser created successfully." in result.stdout diff --git a/src/backend/tests/unit/test_custom_component.py b/src/backend/tests/unit/test_custom_component.py index 5d91cc9dc14b..92ce75d95e56 100644 --- a/src/backend/tests/unit/test_custom_component.py +++ b/src/backend/tests/unit/test_custom_component.py @@ -1,27 +1,20 @@ import ast import types -from uuid import uuid4 +from pathlib import Path +from textwrap import dedent import pytest from langchain_core.documents import Document - from langflow.custom import Component, CustomComponent from langflow.custom.code_parser.code_parser import CodeParser, CodeSyntaxError from langflow.custom.custom_component.base_component import BaseComponent, ComponentCodeNullError from langflow.custom.utils import build_custom_component_template -from langflow.services.database.models.flow import FlowCreate - - -@pytest.fixture -def client(): - pass @pytest.fixture def code_component_with_multiple_outputs(): - with open("src/backend/tests/data/component_multiple_outputs.py", "r") as f: - code = f.read() - return Component(_code=code) + code = Path("src/backend/tests/data/component_multiple_outputs.py").read_text(encoding="utf-8") + return Component(_code=code) code_default = """ @@ -45,27 +38,20 @@ def build(self, url: str, llm: BaseLanguageModel) -> Document: def test_code_parser_init(): - """ - Test the initialization of the CodeParser class. - """ + """Test the initialization of the CodeParser class.""" parser = CodeParser(code_default) assert parser.code == code_default def test_code_parser_get_tree(): - """ - Test the __get_tree method of the CodeParser class. 
- """ + """Test the __get_tree method of the CodeParser class.""" parser = CodeParser(code_default) tree = parser.get_tree() assert isinstance(tree, ast.AST) def test_code_parser_syntax_error(): - """ - Test the __get_tree method raises the - CodeSyntaxError when given incorrect syntax. - """ + """Test the __get_tree method raises the CodeSyntaxError when given incorrect syntax.""" code_syntax_error = "zzz import os" parser = CodeParser(code_syntax_error) @@ -74,37 +60,28 @@ def test_code_parser_syntax_error(): def test_component_init(): - """ - Test the initialization of the Component class. - """ + """Test the initialization of the Component class.""" component = BaseComponent(_code=code_default, _function_entrypoint_name="build") assert component._code == code_default assert component._function_entrypoint_name == "build" def test_component_get_code_tree(): - """ - Test the get_code_tree method of the Component class. - """ + """Test the get_code_tree method of the Component class.""" component = BaseComponent(_code=code_default, _function_entrypoint_name="build") tree = component.get_code_tree(component._code) assert "imports" in tree def test_component_code_null_error(): - """ - Test the get_function method raises the - ComponentCodeNullError when the code is empty. - """ + """Test the get_function method raises the ComponentCodeNullError when the code is empty.""" component = BaseComponent(_code="", _function_entrypoint_name="") with pytest.raises(ComponentCodeNullError): component.get_function() def test_custom_component_init(): - """ - Test the initialization of the CustomComponent class. - """ + """Test the initialization of the CustomComponent class.""" function_entrypoint_name = "build" custom_component = CustomComponent(_code=code_default, _function_entrypoint_name=function_entrypoint_name) @@ -113,28 +90,21 @@ def test_custom_component_init(): def test_custom_component_build_template_config(): - """ - Test the build_template_config property of the CustomComponent class. - """ + """Test the build_template_config property of the CustomComponent class.""" custom_component = CustomComponent(_code=code_default, _function_entrypoint_name="build") config = custom_component.build_template_config() assert isinstance(config, dict) def test_custom_component_get_function(): - """ - Test the get_function property of the CustomComponent class. - """ + """Test the get_function property of the CustomComponent class.""" custom_component = CustomComponent(_code="def build(): pass", _function_entrypoint_name="build") my_function = custom_component.get_function() assert isinstance(my_function, types.FunctionType) def test_code_parser_parse_imports_import(): - """ - Test the parse_imports method of the CodeParser - class with an import statement. - """ + """Test the parse_imports method of the CodeParser class with an import statement.""" parser = CodeParser(code_default) tree = parser.get_tree() for node in ast.walk(tree): @@ -144,10 +114,7 @@ class with an import statement. def test_code_parser_parse_imports_importfrom(): - """ - Test the parse_imports method of the CodeParser - class with an import from statement. - """ + """Test the parse_imports method of the CodeParser class with an import from statement.""" parser = CodeParser("from os import path") tree = parser.get_tree() for node in ast.walk(tree): @@ -157,9 +124,7 @@ class with an import from statement. def test_code_parser_parse_functions(): - """ - Test the parse_functions method of the CodeParser class. 
- """ + """Test the parse_functions method of the CodeParser class.""" parser = CodeParser("def test(): pass") tree = parser.get_tree() for node in ast.walk(tree): @@ -170,9 +135,7 @@ def test_code_parser_parse_functions(): def test_code_parser_parse_classes(): - """ - Test the parse_classes method of the CodeParser class. - """ + """Test the parse_classes method of the CodeParser class.""" parser = CodeParser("from langflow.custom import Component\n\nclass Test(Component): pass") tree = parser.get_tree() for node in ast.walk(tree): @@ -183,21 +146,17 @@ def test_code_parser_parse_classes(): def test_code_parser_parse_classes_raises(): - """ - Test the parse_classes method of the CodeParser class. - """ + """Test the parse_classes method of the CodeParser class.""" parser = CodeParser("class Test: pass") tree = parser.get_tree() - with pytest.raises(TypeError): - for node in ast.walk(tree): - if isinstance(node, ast.ClassDef): + for node in ast.walk(tree): + if isinstance(node, ast.ClassDef): + with pytest.raises(TypeError): parser.parse_classes(node) def test_code_parser_parse_global_vars(): - """ - Test the parse_global_vars method of the CodeParser class. - """ + """Test the parse_global_vars method of the CodeParser class.""" parser = CodeParser("x = 1") tree = parser.get_tree() for node in ast.walk(tree): @@ -208,20 +167,14 @@ def test_code_parser_parse_global_vars(): def test_component_get_function_valid(): - """ - Test the get_function method of the Component - class with valid code and function_entrypoint_name. - """ + """Test the get_function method of the Component class with valid code and function_entrypoint_name.""" component = BaseComponent(_code="def build(): pass", _function_entrypoint_name="build") my_function = component.get_function() assert callable(my_function) def test_custom_component_get_function_entrypoint_args(): - """ - Test the get_function_entrypoint_args - property of the CustomComponent class. - """ + """Test the get_function_entrypoint_args property of the CustomComponent class.""" custom_component = CustomComponent(_code=code_default, _function_entrypoint_name="build") args = custom_component.get_function_entrypoint_args assert len(args) == 3 @@ -231,39 +184,28 @@ def test_custom_component_get_function_entrypoint_args(): def test_custom_component_get_function_entrypoint_return_type(): - """ - Test the get_function_entrypoint_return_type - property of the CustomComponent class. - """ - + """Test the get_function_entrypoint_return_type property of the CustomComponent class.""" custom_component = CustomComponent(_code=code_default, _function_entrypoint_name="build") - return_type = custom_component.get_function_entrypoint_return_type + return_type = custom_component._get_function_entrypoint_return_type assert return_type == [Document] def test_custom_component_get_main_class_name(): - """ - Test the get_main_class_name property of the CustomComponent class. - """ + """Test the get_main_class_name property of the CustomComponent class.""" custom_component = CustomComponent(_code=code_default, _function_entrypoint_name="build") class_name = custom_component.get_main_class_name assert class_name == "YourComponent" def test_custom_component_get_function_valid(): - """ - Test the get_function property of the CustomComponent - class with valid code and function_entrypoint_name. 
- """ + """Test the get_function property of the CustomComponent class with valid code and function_entrypoint_name.""" custom_component = CustomComponent(_code="def build(): pass", _function_entrypoint_name="build") my_function = custom_component.get_function assert callable(my_function) def test_code_parser_parse_arg_no_annotation(): - """ - Test the parse_arg method of the CodeParser class without an annotation. - """ + """Test the parse_arg method of the CodeParser class without an annotation.""" parser = CodeParser("") arg = ast.arg(arg="x", annotation=None) result = parser.parse_arg(arg, None) @@ -272,9 +214,7 @@ def test_code_parser_parse_arg_no_annotation(): def test_code_parser_parse_arg_with_annotation(): - """ - Test the parse_arg method of the CodeParser class with an annotation. - """ + """Test the parse_arg method of the CodeParser class with an annotation.""" parser = CodeParser("") arg = ast.arg(arg="x", annotation=ast.Name(id="int", ctx=ast.Load())) result = parser.parse_arg(arg, None) @@ -283,10 +223,7 @@ def test_code_parser_parse_arg_with_annotation(): def test_code_parser_parse_callable_details_no_args(): - """ - Test the parse_callable_details method of the - CodeParser class with a function with no arguments. - """ + """Test the parse_callable_details method of the CodeParser class with a function with no arguments.""" parser = CodeParser("") node = ast.FunctionDef( name="test", @@ -301,9 +238,7 @@ def test_code_parser_parse_callable_details_no_args(): def test_code_parser_parse_assign(): - """ - Test the parse_assign method of the CodeParser class. - """ + """Test the parse_assign method of the CodeParser class.""" parser = CodeParser("") stmt = ast.Assign(targets=[ast.Name(id="x", ctx=ast.Store())], value=ast.Num(n=1)) result = parser.parse_assign(stmt) @@ -312,9 +247,7 @@ def test_code_parser_parse_assign(): def test_code_parser_parse_ann_assign(): - """ - Test the parse_ann_assign method of the CodeParser class. - """ + """Test the parse_ann_assign method of the CodeParser class.""" parser = CodeParser("") stmt = ast.AnnAssign( target=ast.Name(id="x", ctx=ast.Store()), @@ -329,10 +262,7 @@ def test_code_parser_parse_ann_assign(): def test_code_parser_parse_function_def_not_init(): - """ - Test the parse_function_def method of the - CodeParser class with a function that is not __init__. - """ + """Test the parse_function_def method of the CodeParser class with a function that is not __init__.""" parser = CodeParser("") stmt = ast.FunctionDef( name="test", @@ -347,10 +277,7 @@ def test_code_parser_parse_function_def_not_init(): def test_code_parser_parse_function_def_init(): - """ - Test the parse_function_def method of the - CodeParser class with an __init__ function. - """ + """Test the parse_function_def method of the CodeParser class with an __init__ function.""" parser = CodeParser("") stmt = ast.FunctionDef( name="__init__", @@ -365,41 +292,30 @@ def test_code_parser_parse_function_def_init(): def test_component_get_code_tree_syntax_error(): - """ - Test the get_code_tree method of the Component class - raises the CodeSyntaxError when given incorrect syntax. 
- """ + """Test the get_code_tree method of the Component class raises the CodeSyntaxError when given incorrect syntax.""" component = BaseComponent(_code="import os as", _function_entrypoint_name="build") with pytest.raises(CodeSyntaxError): component.get_code_tree(component._code) def test_custom_component_class_template_validation_no_code(): - """ - Test the _class_template_validation method of the CustomComponent class - raises the HTTPException when the code is None. - """ + """Test CustomComponent._class_template_validation raises the HTTPException when the code is None.""" custom_component = CustomComponent(_code=None, _function_entrypoint_name="build") with pytest.raises(TypeError): custom_component.get_function() def test_custom_component_get_code_tree_syntax_error(): - """ - Test the get_code_tree method of the CustomComponent class - raises the CodeSyntaxError when given incorrect syntax. - """ + """Test CustomComponent.get_code_tree raises the CodeSyntaxError when given incorrect syntax.""" custom_component = CustomComponent(_code="import os as", _function_entrypoint_name="build") with pytest.raises(CodeSyntaxError): custom_component.get_code_tree(custom_component._code) def test_custom_component_get_function_entrypoint_args_no_args(): - """ - Test the get_function_entrypoint_args property of - the CustomComponent class with a build method with no arguments. - """ + """Test CustomComponent.get_function_entrypoint_args with a build method with no arguments.""" my_code = """ +from langflow.custom import CustomComponent class MyMainClass(CustomComponent): def build(): pass""" @@ -410,25 +326,20 @@ def build(): def test_custom_component_get_function_entrypoint_return_type_no_return_type(): - """ - Test the get_function_entrypoint_return_type property of the - CustomComponent class with a build method with no return type. - """ + """Test CustomComponent.get_function_entrypoint_return_type with a build method with no return type.""" my_code = """ +from langflow.custom import CustomComponent class MyClass(CustomComponent): def build(): pass""" custom_component = CustomComponent(_code=my_code, _function_entrypoint_name="build") - return_type = custom_component.get_function_entrypoint_return_type + return_type = custom_component._get_function_entrypoint_return_type assert return_type == [] def test_custom_component_get_main_class_name_no_main_class(): - """ - Test the get_main_class_name property of the - CustomComponent class when there is no main class. - """ + """Test the get_main_class_name property of the CustomComponent class when there is no main class.""" my_code = """ def build(): pass""" @@ -439,10 +350,7 @@ def build(): def test_custom_component_build_not_implemented(): - """ - Test the build method of the CustomComponent - class raises the NotImplementedError. 
- """ + """Test the build method of the CustomComponent class raises the NotImplementedError.""" custom_component = CustomComponent(_code="def build(): pass", _function_entrypoint_name="build") with pytest.raises(NotImplementedError): custom_component.build() @@ -452,13 +360,12 @@ def test_build_config_no_code(): component = CustomComponent(_code=None) assert component.get_function_entrypoint_args == [] - assert component.get_function_entrypoint_return_type == [] + assert component._get_function_entrypoint_return_type == [] @pytest.fixture -def component(client, active_user): +def component(): return CustomComponent( - user_id=active_user.id, field_config={ "fields": { "llm": {"type": "str"}, @@ -469,41 +376,6 @@ def component(client, active_user): ) -@pytest.fixture(scope="session") -def test_flow(db): - flow_data = { - "nodes": [{"id": "1"}, {"id": "2"}], - "edges": [{"source": "1", "target": "2"}], - } - - # Create flow - flow = FlowCreate(id=uuid4(), name="Test Flow", description="Fixture flow", data=flow_data) - - # Add to database - db.add(flow) - db.commit() - - yield flow - - # Clean up - db.delete(flow) - db.commit() - - -@pytest.fixture(scope="session") -def db(app): - # Setup database for tests - yield app.db - - # Teardown - app.db.drop_all() - - -def test_list_flows_return_type(component): - flows = component.list_flows() - assert isinstance(flows, list) - - def test_build_config_return_type(component): config = component.build_config() assert isinstance(config, dict) @@ -535,6 +407,28 @@ def test_build_config_field_value_keys(component): assert all("type" in value for value in field_values) -def test_custom_component_multiple_outputs(code_component_with_multiple_outputs, active_user): - frontnd_node_dict, _ = build_custom_component_template(code_component_with_multiple_outputs, active_user.id) +def test_custom_component_multiple_outputs(code_component_with_multiple_outputs): + frontnd_node_dict, _ = build_custom_component_template(code_component_with_multiple_outputs) assert frontnd_node_dict["outputs"][0]["types"] == ["Text"] + + +def test_custom_component_subclass_from_lctoolcomponent(): + # Import LCToolComponent and create a subclass + code = dedent(""" + from langflow.base.langchain_utilities.model import LCToolComponent + from langchain_core.tools import Tool + class MyComponent(LCToolComponent): + name: str = "MyComponent" + description: str = "MyComponent" + + def build_tool(self) -> Tool: + return Tool(name="MyTool", description="MyTool") + + def run_model(self)-> Data: + return Data(data="Hello World") + """) + component = Component(_code=code) + frontend_node, _ = build_custom_component_template(component) + assert "outputs" in frontend_node + assert frontend_node["outputs"][0]["types"] != [] + assert frontend_node["outputs"][1]["types"] != [] diff --git a/src/backend/tests/unit/test_custom_component_with_client.py b/src/backend/tests/unit/test_custom_component_with_client.py index ab8e7859e80f..736c4913fab1 100644 --- a/src/backend/tests/unit/test_custom_component_with_client.py +++ b/src/backend/tests/unit/test_custom_component_with_client.py @@ -1,11 +1,13 @@ import pytest - from langflow.custom.custom_component.custom_component import CustomComponent from langflow.field_typing.constants import Data @pytest.fixture -def component(client, active_user): +def component( + client, # noqa: ARG001 + active_user, +): return CustomComponent( user_id=active_user.id, field_config={ @@ -23,3 +25,8 @@ def test_list_flows_flow_objects(component): are_flows = 
[isinstance(flow, Data) for flow in flows] flow_types = [type(flow) for flow in flows] assert all(are_flows), f"Expected all flows to be Data objects, got {flow_types}" + + +def test_list_flows_return_type(component): + flows = component.list_flows() + assert isinstance(flows, list) diff --git a/src/backend/tests/unit/test_data_class.py b/src/backend/tests/unit/test_data_class.py index 6e7374f8b938..83ee12bd02ab 100644 --- a/src/backend/tests/unit/test_data_class.py +++ b/src/backend/tests/unit/test_data_class.py @@ -1,14 +1,8 @@ import pytest from langchain_core.documents import Document - from langflow.schema import Data -@pytest.fixture -def client(): - pass - - def test_data_initialization(): record = Data(text_key="msg", data={"msg": "Hello, World!", "extra": "value"}) assert record.msg == "Hello, World!" diff --git a/src/backend/tests/unit/test_data_components.py b/src/backend/tests/unit/test_data_components.py index f6938c5d92f2..6dc0db9f061f 100644 --- a/src/backend/tests/unit/test_data_components.py +++ b/src/backend/tests/unit/test_data_components.py @@ -1,28 +1,20 @@ -import os import tempfile from pathlib import Path -from unittest.mock import Mock, patch +from unittest.mock import ANY, Mock, patch import httpx import pytest import respx from httpx import Response - from langflow.components import data -@pytest.fixture -def client(): - pass - - @pytest.fixture def api_request(): # This fixture provides an instance of APIRequest for each test case return data.APIRequestComponent() -@pytest.mark.asyncio @respx.mock async def test_successful_get_request(api_request): # Mocking a successful GET request @@ -60,7 +52,6 @@ def test_parse_curl(api_request): assert new_build_config["body"]["value"] == {"key": "value"} -@pytest.mark.asyncio @respx.mock async def test_failed_request(api_request): # Mocking a failed GET request @@ -75,7 +66,6 @@ async def test_failed_request(api_request): assert result.data["status_code"] == 404 -@pytest.mark.asyncio @respx.mock async def test_timeout(api_request): # Mocking a timeout @@ -91,7 +81,6 @@ async def test_timeout(api_request): assert result.data["error"] == "Request timed out" -@pytest.mark.asyncio @respx.mock async def test_build_with_multiple_urls(api_request): # This test depends on having a working internet connection and accessible URLs @@ -120,15 +109,15 @@ async def test_build_with_multiple_urls(api_request): assert len(results) == len(urls) -@patch("langflow.components.data.Directory.parallel_load_data") -@patch("langflow.components.data.Directory.retrieve_file_paths") +@patch("langflow.components.data.directory.parallel_load_data") +@patch("langflow.components.data.directory.retrieve_file_paths") @patch("langflow.components.data.DirectoryComponent.resolve_path") def test_directory_component_build_with_multithreading( mock_resolve_path, mock_retrieve_file_paths, mock_parallel_load_data ): # Arrange directory_component = data.DirectoryComponent() - path = os.path.dirname(os.path.abspath(__file__)) + path = Path(__file__).resolve().parent depth = 1 max_concurrency = 2 load_hidden = False @@ -136,16 +125,15 @@ def test_directory_component_build_with_multithreading( silent_errors = False use_multithreading = True - mock_resolve_path.return_value = path - mock_retrieve_file_paths.return_value = [ - os.path.join(path, file) for file in os.listdir(path) if file.endswith(".py") - ] + mock_resolve_path.return_value = str(path) + + mock_retrieve_file_paths.return_value = [str(p) for p in path.iterdir() if p.suffix == ".py"] 
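+    # ANY comes from unittest.mock (imported at the top of this file); the updated
+    # assertion below passes types=ANY so the test does not pin whatever default
+    # file-type list the component forwards to retrieve_file_paths.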
mock_parallel_load_data.return_value = [Mock()] # Act directory_component.set_attributes( { - "path": path, + "path": str(path), "depth": depth, "max_concurrency": max_concurrency, "load_hidden": load_hidden, @@ -157,10 +145,12 @@ def test_directory_component_build_with_multithreading( directory_component.load_directory() # Assert - mock_resolve_path.assert_called_once_with(path) - mock_retrieve_file_paths.assert_called_once_with(path, load_hidden, recursive, depth) + mock_resolve_path.assert_called_once_with(str(path)) + mock_retrieve_file_paths.assert_called_once_with( + str(path), load_hidden=load_hidden, recursive=recursive, depth=depth, types=ANY + ) mock_parallel_load_data.assert_called_once_with( - mock_retrieve_file_paths.return_value, silent_errors, max_concurrency + mock_retrieve_file_paths.return_value, silent_errors=silent_errors, max_concurrency=max_concurrency ) @@ -168,18 +158,16 @@ def test_directory_without_mocks(): directory_component = data.DirectoryComponent() with tempfile.TemporaryDirectory() as temp_dir: - with open(temp_dir + "/test.txt", "w") as f: - f.write("test") + (Path(temp_dir) / "test.txt").write_text("test", encoding="utf-8") # also add a json file - with open(temp_dir + "/test.json", "w") as f: - f.write('{"test": "test"}') + (Path(temp_dir) / "test.json").write_text('{"test": "test"}', encoding="utf-8") directory_component.set_attributes({"path": str(temp_dir), "use_multithreading": False}) results = directory_component.load_directory() assert len(results) == 2 values = ["test", '{"test":"test"}'] assert all(result.text in values for result in results), [ - (len(result.text), len(val)) for result, val in zip(results, values) + (len(result.text), len(val)) for result, val in zip(results, values, strict=True) ] # in ../docs/docs/components there are many mdx files diff --git a/src/backend/tests/unit/test_database.py b/src/backend/tests/unit/test_database.py index 44e347f1e118..77b1959043c7 100644 --- a/src/backend/tests/unit/test_database.py +++ b/src/backend/tests/unit/test_database.py @@ -1,26 +1,19 @@ +import asyncio import json -from collections import namedtuple +from typing import NamedTuple from uuid import UUID, uuid4 import orjson import pytest -from fastapi.testclient import TestClient -from sqlmodel import Session - +from httpx import AsyncClient from langflow.api.v1.schemas import FlowListCreate, ResultDataResponse from langflow.graph.utils import log_transaction, log_vertex_build -from langflow.initial_setup.setup import load_flows_from_directory, load_starter_projects +from langflow.initial_setup.setup import load_starter_projects from langflow.services.database.models.base import orjson_dumps from langflow.services.database.models.flow import Flow, FlowCreate, FlowUpdate -from langflow.services.database.models.transactions.crud import get_transactions_by_flow_id -from langflow.services.database.utils import migrate_transactions_from_monitor_service_to_database, session_getter -from langflow.services.deps import get_db_service, get_monitor_service, session_scope -from langflow.services.monitor.schema import TransactionModel -from langflow.services.monitor.utils import ( - add_row_to_table, - drop_and_create_table_if_schema_mismatch, - new_duckdb_locked_connection, -) +from langflow.services.database.models.folder.model import FolderCreate +from langflow.services.database.utils import session_getter +from langflow.services.deps import get_db_service @pytest.fixture(scope="module") @@ -37,63 +30,210 @@ def json_style(): ) -def 
test_create_flow(client: TestClient, json_flow: str, active_user, logged_in_headers): +@pytest.mark.usefixtures("active_user") +async def test_create_flow(client: AsyncClient, json_flow: str, logged_in_headers): flow = orjson.loads(json_flow) data = flow["data"] flow = FlowCreate(name=str(uuid4()), description="description", data=data) - response = client.post("api/v1/flows/", json=flow.model_dump(), headers=logged_in_headers) + response = await client.post("api/v1/flows/", json=flow.model_dump(), headers=logged_in_headers) assert response.status_code == 201 assert response.json()["name"] == flow.name assert response.json()["data"] == flow.data # flow is optional so we can create a flow without a flow flow = FlowCreate(name=str(uuid4())) - response = client.post("api/v1/flows/", json=flow.model_dump(exclude_unset=True), headers=logged_in_headers) + response = await client.post("api/v1/flows/", json=flow.model_dump(exclude_unset=True), headers=logged_in_headers) assert response.status_code == 201 assert response.json()["name"] == flow.name assert response.json()["data"] == flow.data -def test_read_flows(client: TestClient, json_flow: str, active_user, logged_in_headers): +@pytest.mark.usefixtures("active_user") +async def test_read_flows(client: AsyncClient, json_flow: str, logged_in_headers): flow_data = orjson.loads(json_flow) data = flow_data["data"] flow = FlowCreate(name=str(uuid4()), description="description", data=data) - response = client.post("api/v1/flows/", json=flow.model_dump(), headers=logged_in_headers) + response = await client.post("api/v1/flows/", json=flow.model_dump(), headers=logged_in_headers) assert response.status_code == 201 assert response.json()["name"] == flow.name assert response.json()["data"] == flow.data flow = FlowCreate(name=str(uuid4()), description="description", data=data) - response = client.post("api/v1/flows/", json=flow.model_dump(), headers=logged_in_headers) + response = await client.post("api/v1/flows/", json=flow.model_dump(), headers=logged_in_headers) assert response.status_code == 201 assert response.json()["name"] == flow.name assert response.json()["data"] == flow.data - response = client.get("api/v1/flows/", headers=logged_in_headers) + response = await client.get("api/v1/flows/", headers=logged_in_headers) assert response.status_code == 200 assert len(response.json()) > 0 -def test_read_flow(client: TestClient, json_flow: str, active_user, logged_in_headers): +async def test_read_flows_pagination_with_params(client: AsyncClient, logged_in_headers): + response = await client.get( + "api/v1/flows/", headers=logged_in_headers, params={"page": 3, "size": 10, "get_all": False} + ) + assert response.status_code == 200 + assert response.json()["page"] == 3 + assert response.json()["size"] == 10 + assert response.json()["pages"] == 0 + assert response.json()["total"] == 0 + assert len(response.json()["items"]) == 0 + + +async def test_read_flows_pagination_with_flows(client: AsyncClient, logged_in_headers): + number_of_flows = 30 + flows = [FlowCreate(name=f"Flow {i}", description="description", data={}) for i in range(number_of_flows)] + flow_ids = [] + for flow in flows: + response = await client.post("api/v1/flows/", json=flow.model_dump(), headers=logged_in_headers) + assert response.status_code == 201 + flow_ids.append(response.json()["id"]) + + response = await client.get( + "api/v1/flows/", headers=logged_in_headers, params={"page": 3, "size": 10, "get_all": False} + ) + assert response.status_code == 200 + assert response.json()["page"] == 
3 + assert response.json()["size"] == 10 + assert response.json()["pages"] == 3 + assert response.json()["total"] == number_of_flows + assert len(response.json()["items"]) == 10 + + response = await client.get( + "api/v1/flows/", headers=logged_in_headers, params={"page": 4, "size": 10, "get_all": False} + ) + assert response.status_code == 200 + assert response.json()["page"] == 4 + assert response.json()["size"] == 10 + assert response.json()["pages"] == 3 + assert response.json()["total"] == number_of_flows + assert len(response.json()["items"]) == 0 + + +async def test_read_flows_custom_page_size(client: AsyncClient, logged_in_headers): + number_of_flows = 30 + flows = [FlowCreate(name=f"Flow {i}", description="description", data={}) for i in range(number_of_flows)] + for flow in flows: + response = await client.post("api/v1/flows/", json=flow.model_dump(), headers=logged_in_headers) + assert response.status_code == 201 + + response = await client.get( + "api/v1/flows/", headers=logged_in_headers, params={"page": 1, "size": 15, "get_all": False} + ) + assert response.status_code == 200 + assert response.json()["page"] == 1 + assert response.json()["size"] == 15 + assert response.json()["pages"] == 2 + assert response.json()["total"] == number_of_flows + assert len(response.json()["items"]) == 15 + + +async def test_read_flows_invalid_page(client: AsyncClient, logged_in_headers): + number_of_flows = 30 + flows = [FlowCreate(name=f"Flow {i}", description="description", data={}) for i in range(number_of_flows)] + flow_ids = [] + for flow in flows: + response = await client.post("api/v1/flows/", json=flow.model_dump(), headers=logged_in_headers) + assert response.status_code == 201 + flow_ids.append(response.json()["id"]) + + response = await client.get( + "api/v1/flows/", headers=logged_in_headers, params={"page": 0, "size": 10, "get_all": False} + ) + assert response.status_code == 422 # Assuming 422 is the status code for invalid input + + +async def test_read_flows_invalid_size(client: AsyncClient, logged_in_headers): + number_of_flows = 30 + flows = [FlowCreate(name=f"Flow {i}", description="description", data={}) for i in range(number_of_flows)] + flow_ids = [] + for flow in flows: + response = await client.post("api/v1/flows/", json=flow.model_dump(), headers=logged_in_headers) + assert response.status_code == 201 + flow_ids.append(response.json()["id"]) + + response = await client.get( + "api/v1/flows/", headers=logged_in_headers, params={"page": 1, "size": 0, "get_all": False} + ) + assert response.status_code == 422 # Assuming 422 is the status code for invalid input + + +async def test_read_flows_no_pagination_params(client: AsyncClient, logged_in_headers): + number_of_flows = 30 + flows = [FlowCreate(name=f"Flow {i}", description="description", data={}) for i in range(number_of_flows)] + for flow in flows: + response = await client.post("api/v1/flows/", json=flow.model_dump(), headers=logged_in_headers) + assert response.status_code == 201 + + response = await client.get("api/v1/flows/", headers=logged_in_headers, params={"get_all": False}) + assert response.status_code == 200 + # Assert default pagination values, adjust these according to your API's default behavior + assert response.json()["page"] == 1 + assert response.json()["size"] == 50 + assert response.json()["pages"] == 1 + assert response.json()["total"] == number_of_flows + assert len(response.json()["items"]) == number_of_flows + + +async def test_read_flows_components_only_paginated(client: AsyncClient, 
logged_in_headers): + number_of_flows = 10 + flows = [ + FlowCreate(name=f"Flow {i}", description="description", data={}, is_component=True) + for i in range(number_of_flows) + ] + for flow in flows: + response = await client.post("api/v1/flows/", json=flow.model_dump(), headers=logged_in_headers) + assert response.status_code == 201 + response = await client.get( + "api/v1/flows/", headers=logged_in_headers, params={"components_only": True, "get_all": False} + ) + assert response.status_code == 200 + response_json = response.json() + assert response_json["total"] == 10 + assert response_json["pages"] == 1 + assert response_json["page"] == 1 + assert response_json["size"] == 50 + assert all(flow["is_component"] is True for flow in response_json["items"]) + + +async def test_read_flows_components_only(client: AsyncClient, logged_in_headers): + number_of_flows = 10 + flows = [ + FlowCreate(name=f"Flow {i}", description="description", data={}, is_component=True) + for i in range(number_of_flows) + ] + for flow in flows: + response = await client.post("api/v1/flows/", json=flow.model_dump(), headers=logged_in_headers) + assert response.status_code == 201 + response = await client.get("api/v1/flows/", headers=logged_in_headers, params={"components_only": True}) + assert response.status_code == 200 + response_json = response.json() + assert all(flow["is_component"] is True for flow in response_json) + + +async def test_read_flow(client: AsyncClient, json_flow: str, logged_in_headers): flow = orjson.loads(json_flow) data = flow["data"] - flow = FlowCreate(name="Test Flow", description="description", data=data) - response = client.post("api/v1/flows/", json=flow.model_dump(), headers=logged_in_headers) + unique_name = str(uuid4()) + flow = FlowCreate(name=unique_name, description="description", data=data) + response = await client.post("api/v1/flows/", json=flow.model_dump(), headers=logged_in_headers) flow_id = response.json()["id"] # flow_id should be a UUID but is a string # turn it into a UUID flow_id = UUID(flow_id) - response = client.get(f"api/v1/flows/{flow_id}", headers=logged_in_headers) + response = await client.get(f"api/v1/flows/{flow_id}", headers=logged_in_headers) assert response.status_code == 200 assert response.json()["name"] == flow.name assert response.json()["data"] == flow.data -def test_update_flow(client: TestClient, json_flow: str, active_user, logged_in_headers): +@pytest.mark.usefixtures("active_user") +async def test_update_flow(client: AsyncClient, json_flow: str, logged_in_headers): flow = orjson.loads(json_flow) data = flow["data"] flow = FlowCreate(name="Test Flow", description="description", data=data) - response = client.post("api/v1/flows/", json=flow.model_dump(), headers=logged_in_headers) + response = await client.post("api/v1/flows/", json=flow.model_dump(), headers=logged_in_headers) flow_id = response.json()["id"] updated_flow = FlowUpdate( @@ -101,7 +241,7 @@ def test_update_flow(client: TestClient, json_flow: str, active_user, logged_in_ description="updated description", data=data, ) - response = client.patch(f"api/v1/flows/{flow_id}", json=updated_flow.model_dump(), headers=logged_in_headers) + response = await client.patch(f"api/v1/flows/{flow_id}", json=updated_flow.model_dump(), headers=logged_in_headers) assert response.status_code == 200 assert response.json()["name"] == updated_flow.name @@ -109,50 +249,50 @@ def test_update_flow(client: TestClient, json_flow: str, active_user, logged_in_ # assert response.json()["data"] == updated_flow.data -def 
test_delete_flow(client: TestClient, json_flow: str, active_user, logged_in_headers): +@pytest.mark.usefixtures("active_user") +async def test_delete_flow(client: AsyncClient, json_flow: str, logged_in_headers): flow = orjson.loads(json_flow) data = flow["data"] flow = FlowCreate(name="Test Flow", description="description", data=data) - response = client.post("api/v1/flows/", json=flow.model_dump(), headers=logged_in_headers) + response = await client.post("api/v1/flows/", json=flow.model_dump(), headers=logged_in_headers) flow_id = response.json()["id"] - response = client.delete(f"api/v1/flows/{flow_id}", headers=logged_in_headers) + response = await client.delete(f"api/v1/flows/{flow_id}", headers=logged_in_headers) assert response.status_code == 200 assert response.json()["message"] == "Flow deleted successfully" -def test_delete_flows(client: TestClient, json_flow: str, active_user, logged_in_headers): +@pytest.mark.usefixtures("active_user") +async def test_delete_flows(client: AsyncClient, logged_in_headers): # Create ten flows number_of_flows = 10 flows = [FlowCreate(name=f"Flow {i}", description="description", data={}) for i in range(number_of_flows)] flow_ids = [] for flow in flows: - response = client.post("api/v1/flows/", json=flow.model_dump(), headers=logged_in_headers) + response = await client.post("api/v1/flows/", json=flow.model_dump(), headers=logged_in_headers) assert response.status_code == 201 flow_ids.append(response.json()["id"]) - response = client.request("DELETE", "api/v1/flows/", headers=logged_in_headers, json=flow_ids) + response = await client.request("DELETE", "api/v1/flows/", headers=logged_in_headers, json=flow_ids) assert response.status_code == 200, response.content assert response.json().get("deleted") == number_of_flows -@pytest.mark.asyncio -async def test_delete_flows_with_transaction_and_build( - client: TestClient, json_flow: str, active_user, logged_in_headers -): +@pytest.mark.usefixtures("active_user") +async def test_delete_flows_with_transaction_and_build(client: AsyncClient, logged_in_headers): # Create ten flows number_of_flows = 10 flows = [FlowCreate(name=f"Flow {i}", description="description", data={}) for i in range(number_of_flows)] flow_ids = [] for flow in flows: - response = client.post("api/v1/flows/", json=flow.model_dump(), headers=logged_in_headers) + response = await client.post("api/v1/flows/", json=flow.model_dump(), headers=logged_in_headers) assert response.status_code == 201 flow_ids.append(response.json()["id"]) - # Create a transaction for each flow + class VertexTuple(NamedTuple): + id: str + # Create a transaction for each flow for flow_id in flow_ids: - VertexTuple = namedtuple("VertexTuple", ["id"]) - await log_transaction( str(flow_id), source=VertexTuple(id="vid"), target=VertexTuple(id="tid"), status="success" ) @@ -176,62 +316,184 @@ async def test_delete_flows_with_transaction_and_build( artifacts=build.get("artifacts"), ) - response = client.request("DELETE", "api/v1/flows/", headers=logged_in_headers, json=flow_ids) + response = await client.request("DELETE", "api/v1/flows/", headers=logged_in_headers, json=flow_ids) assert response.status_code == 200, response.content assert response.json().get("deleted") == number_of_flows for flow_id in flow_ids: - response = client.request( + response = await client.request( "GET", "api/v1/monitor/transactions", params={"flow_id": flow_id}, headers=logged_in_headers ) assert response.status_code == 200 assert response.json() == [] for flow_id in flow_ids: - response = 
client.request( + response = await client.request( "GET", "api/v1/monitor/builds", params={"flow_id": flow_id}, headers=logged_in_headers ) assert response.status_code == 200 assert response.json() == {"vertex_builds": {}} -def test_create_flows(client: TestClient, session: Session, json_flow: str, logged_in_headers): +@pytest.mark.usefixtures("active_user") +async def test_delete_folder_with_flows_with_transaction_and_build(client: AsyncClient, logged_in_headers): + # Create a new folder + folder_name = f"Test Folder {uuid4()}" + folder = FolderCreate(name=folder_name, description="Test folder description", components_list=[], flows_list=[]) + + response = await client.post("api/v1/folders/", json=folder.model_dump(), headers=logged_in_headers) + assert response.status_code == 201, f"Expected status code 201, but got {response.status_code}" + + created_folder = response.json() + folder_id = created_folder["id"] + + # Create ten flows + number_of_flows = 10 + flows = [FlowCreate(name=f"Flow {i}", description="description", data={}) for i in range(number_of_flows)] + flow_ids = [] + for flow in flows: + flow.folder_id = folder_id + response = await client.post("api/v1/flows/", json=flow.model_dump(), headers=logged_in_headers) + assert response.status_code == 201 + flow_ids.append(response.json()["id"]) + + class VertexTuple(NamedTuple): + id: str + + # Create a transaction for each flow + for flow_id in flow_ids: + await log_transaction( + str(flow_id), source=VertexTuple(id="vid"), target=VertexTuple(id="tid"), status="success" + ) + + # Create a build for each flow + for flow_id in flow_ids: + build = { + "valid": True, + "params": {}, + "data": ResultDataResponse(), + "artifacts": {}, + "vertex_id": "vid", + "flow_id": flow_id, + } + log_vertex_build( + flow_id=build["flow_id"], + vertex_id=build["vertex_id"], + valid=build["valid"], + params=build["params"], + data=build["data"], + artifacts=build.get("artifacts"), + ) + + response = await client.request("DELETE", f"api/v1/folders/{folder_id}", headers=logged_in_headers) + assert response.status_code == 204 + + for flow_id in flow_ids: + response = await client.request( + "GET", "api/v1/monitor/transactions", params={"flow_id": flow_id}, headers=logged_in_headers + ) + assert response.status_code == 200 + assert response.json() == [] + + for flow_id in flow_ids: + response = await client.request( + "GET", "api/v1/monitor/builds", params={"flow_id": flow_id}, headers=logged_in_headers + ) + assert response.status_code == 200 + assert response.json() == {"vertex_builds": {}} + + +async def test_get_flows_from_folder_pagination(client: AsyncClient, logged_in_headers): + # Create a new folder + folder_name = f"Test Folder {uuid4()}" + folder = FolderCreate(name=folder_name, description="Test folder description", components_list=[], flows_list=[]) + + response = await client.post("api/v1/folders/", json=folder.model_dump(), headers=logged_in_headers) + assert response.status_code == 201, f"Expected status code 201, but got {response.status_code}" + + created_folder = response.json() + folder_id = created_folder["id"] + + response = await client.get( + f"api/v1/folders/{folder_id}", headers=logged_in_headers, params={"page": 1, "size": 50} + ) + assert response.status_code == 200 + assert response.json()["folder"]["name"] == folder_name + assert response.json()["folder"]["description"] == "Test folder description" + assert response.json()["flows"]["page"] == 1 + assert response.json()["flows"]["size"] == 50 + assert 
response.json()["flows"]["pages"] == 0 + assert response.json()["flows"]["total"] == 0 + assert len(response.json()["flows"]["items"]) == 0 + + +async def test_get_flows_from_folder_pagination_with_params(client: AsyncClient, logged_in_headers): + # Create a new folder + folder_name = f"Test Folder {uuid4()}" + folder = FolderCreate(name=folder_name, description="Test folder description", components_list=[], flows_list=[]) + + response = await client.post("api/v1/folders/", json=folder.model_dump(), headers=logged_in_headers) + assert response.status_code == 201, f"Expected status code 201, but got {response.status_code}" + + created_folder = response.json() + folder_id = created_folder["id"] + + response = await client.get( + f"api/v1/folders/{folder_id}", headers=logged_in_headers, params={"page": 3, "size": 10} + ) + assert response.status_code == 200 + assert response.json()["folder"]["name"] == folder_name + assert response.json()["folder"]["description"] == "Test folder description" + assert response.json()["flows"]["page"] == 3 + assert response.json()["flows"]["size"] == 10 + assert response.json()["flows"]["pages"] == 0 + assert response.json()["flows"]["total"] == 0 + assert len(response.json()["flows"]["items"]) == 0 + + +@pytest.mark.usefixtures("session") +async def test_create_flows(client: AsyncClient, json_flow: str, logged_in_headers): flow = orjson.loads(json_flow) data = flow["data"] # Create test data + flow_unique_name = str(uuid4()) + flow_2_unique_name = str(uuid4()) flow_list = FlowListCreate( flows=[ - FlowCreate(name="Flow 1", description="description", data=data), - FlowCreate(name="Flow 2", description="description", data=data), + FlowCreate(name=flow_unique_name, description="description", data=data), + FlowCreate(name=flow_2_unique_name, description="description", data=data), ] ) # Make request to endpoint - response = client.post("api/v1/flows/batch/", json=flow_list.dict(), headers=logged_in_headers) + response = await client.post("api/v1/flows/batch/", json=flow_list.dict(), headers=logged_in_headers) # Check response status code assert response.status_code == 201 # Check response data response_data = response.json() assert len(response_data) == 2 - assert "Flow 1" in response_data[0]["name"] + assert flow_unique_name in response_data[0]["name"] assert response_data[0]["description"] == "description" assert response_data[0]["data"] == data - assert response_data[1]["name"] == "Flow 2" + assert response_data[1]["name"] == flow_2_unique_name assert response_data[1]["description"] == "description" assert response_data[1]["data"] == data -def test_upload_file(client: TestClient, session: Session, json_flow: str, logged_in_headers): +@pytest.mark.usefixtures("session") +async def test_upload_file(client: AsyncClient, json_flow: str, logged_in_headers): flow = orjson.loads(json_flow) data = flow["data"] # Create test data + flow_unique_name = str(uuid4()) + flow_2_unique_name = str(uuid4()) flow_list = FlowListCreate( flows=[ - FlowCreate(name="Flow 1", description="description", data=data), - FlowCreate(name="Flow 2", description="description", data=data), + FlowCreate(name=flow_unique_name, description="description", data=data), + FlowCreate(name=flow_2_unique_name, description="description", data=data), ] ) file_contents = orjson_dumps(flow_list.dict()) - response = client.post( + response = await client.post( "api/v1/flows/upload/", files={"file": ("examples.json", file_contents, "application/json")}, headers=logged_in_headers, @@ -241,17 +503,17 @@ def 
test_upload_file(client: TestClient, session: Session, json_flow: str, logge # Check response data response_data = response.json() assert len(response_data) == 2 - assert "Flow 1" in response_data[0]["name"] + assert flow_unique_name in response_data[0]["name"] assert response_data[0]["description"] == "description" assert response_data[0]["data"] == data - assert response_data[1]["name"] == "Flow 2" + assert response_data[1]["name"] == flow_2_unique_name assert response_data[1]["description"] == "description" assert response_data[1]["data"] == data -def test_download_file( - client: TestClient, - session: Session, +@pytest.mark.usefixtures("session") +async def test_download_file( + client: AsyncClient, json_flow, active_user, logged_in_headers, @@ -259,25 +521,27 @@ def test_download_file( flow = orjson.loads(json_flow) data = flow["data"] # Create test data + flow_unique_name = str(uuid4()) + flow_2_unique_name = str(uuid4()) flow_list = FlowListCreate( flows=[ - FlowCreate(name="Flow 1", description="description", data=data), - FlowCreate(name="Flow 2", description="description", data=data), + FlowCreate(name=flow_unique_name, description="description", data=data), + FlowCreate(name=flow_2_unique_name, description="description", data=data), ] ) db_manager = get_db_service() - with session_getter(db_manager) as session: + with session_getter(db_manager) as _session: saved_flows = [] for flow in flow_list.flows: flow.user_id = active_user.id db_flow = Flow.model_validate(flow, from_attributes=True) - session.add(db_flow) + _session.add(db_flow) saved_flows.append(db_flow) - session.commit() + _session.commit() # Make request to endpoint inside the session context flow_ids = [str(db_flow.id) for db_flow in saved_flows] # Convert UUIDs to strings flow_ids_json = json.dumps(flow_ids) - response = client.post( + response = await client.post( "api/v1/flows/download/", data=flow_ids_json, headers={**logged_in_headers, "Content-Type": "application/json"}, @@ -290,31 +554,35 @@ def test_download_file( assert "attachment; filename=" in response.headers["Content-Disposition"] -def test_create_flow_with_invalid_data(client: TestClient, active_user, logged_in_headers): +@pytest.mark.usefixtures("active_user") +async def test_create_flow_with_invalid_data(client: AsyncClient, logged_in_headers): flow = {"name": "a" * 256, "data": "Invalid flow data"} - response = client.post("api/v1/flows/", json=flow, headers=logged_in_headers) + response = await client.post("api/v1/flows/", json=flow, headers=logged_in_headers) assert response.status_code == 422 -def test_get_nonexistent_flow(client: TestClient, active_user, logged_in_headers): +@pytest.mark.usefixtures("active_user") +async def test_get_nonexistent_flow(client: AsyncClient, logged_in_headers): uuid = uuid4() - response = client.get(f"api/v1/flows/{uuid}", headers=logged_in_headers) + response = await client.get(f"api/v1/flows/{uuid}", headers=logged_in_headers) assert response.status_code == 404 -def test_update_flow_idempotency(client: TestClient, json_flow: str, active_user, logged_in_headers): +@pytest.mark.usefixtures("active_user") +async def test_update_flow_idempotency(client: AsyncClient, json_flow: str, logged_in_headers): flow_data = orjson.loads(json_flow) data = flow_data["data"] flow_data = FlowCreate(name="Test Flow", description="description", data=data) - response = client.post("api/v1/flows/", json=flow_data.dict(), headers=logged_in_headers) + response = await client.post("api/v1/flows/", json=flow_data.dict(), 
headers=logged_in_headers) flow_id = response.json()["id"] updated_flow = FlowCreate(name="Updated Flow", description="description", data=data) - response1 = client.put(f"api/v1/flows/{flow_id}", json=updated_flow.model_dump(), headers=logged_in_headers) - response2 = client.put(f"api/v1/flows/{flow_id}", json=updated_flow.model_dump(), headers=logged_in_headers) + response1 = await client.put(f"api/v1/flows/{flow_id}", json=updated_flow.model_dump(), headers=logged_in_headers) + response2 = await client.put(f"api/v1/flows/{flow_id}", json=updated_flow.model_dump(), headers=logged_in_headers) assert response1.json() == response2.json() -def test_update_nonexistent_flow(client: TestClient, json_flow: str, active_user, logged_in_headers): +@pytest.mark.usefixtures("active_user") +async def test_update_nonexistent_flow(client: AsyncClient, json_flow: str, logged_in_headers): flow_data = orjson.loads(json_flow) data = flow_data["data"] uuid = uuid4() @@ -323,92 +591,183 @@ def test_update_nonexistent_flow(client: TestClient, json_flow: str, active_user description="description", data=data, ) - response = client.patch(f"api/v1/flows/{uuid}", json=updated_flow.model_dump(), headers=logged_in_headers) + response = await client.patch(f"api/v1/flows/{uuid}", json=updated_flow.model_dump(), headers=logged_in_headers) assert response.status_code == 404, response.text -def test_delete_nonexistent_flow(client: TestClient, active_user, logged_in_headers): +@pytest.mark.usefixtures("active_user") +async def test_delete_nonexistent_flow(client: AsyncClient, logged_in_headers): uuid = uuid4() - response = client.delete(f"api/v1/flows/{uuid}", headers=logged_in_headers) + response = await client.delete(f"api/v1/flows/{uuid}", headers=logged_in_headers) assert response.status_code == 404 -def test_read_only_starter_projects(client: TestClient, active_user, logged_in_headers): - response = client.get("api/v1/flows/", headers=logged_in_headers) - starter_projects = load_starter_projects() +@pytest.mark.usefixtures("active_user") +async def test_read_only_starter_projects(client: AsyncClient, logged_in_headers): + response = await client.get("api/v1/flows/basic_examples/", headers=logged_in_headers) + starter_projects = await asyncio.to_thread(load_starter_projects) assert response.status_code == 200 assert len(response.json()) == len(starter_projects) -@pytest.mark.load_flows -def test_load_flows(client: TestClient, load_flows_dir): - response = client.get("api/v1/flows/c54f9130-f2fa-4a3e-b22a-3856d946351b") +def test_sqlite_pragmas(): + db_service = get_db_service() + + with db_service.with_session() as session: + from sqlalchemy import text + + assert session.exec(text("PRAGMA journal_mode;")).scalar() == "wal" + assert session.exec(text("PRAGMA synchronous;")).scalar() == 1 + + +@pytest.mark.usefixtures("active_user") +async def test_read_folder(client: AsyncClient, logged_in_headers): + # Create a new folder + folder_name = f"Test Folder {uuid4()}" + folder = FolderCreate(name=folder_name, description="Test folder description") + response = await client.post("api/v1/folders/", json=folder.model_dump(), headers=logged_in_headers) + assert response.status_code == 201 + created_folder = response.json() + folder_id = created_folder["id"] + + # Read the folder + response = await client.get(f"api/v1/folders/{folder_id}", headers=logged_in_headers) assert response.status_code == 200 - assert response.json()["name"] == "BasicExample" - # re-run to ensure updates work well - load_flows_from_directory() - response = 
client.get("api/v1/flows/c54f9130-f2fa-4a3e-b22a-3856d946351b") + folder_data = response.json() + assert folder_data["name"] == folder_name + assert folder_data["description"] == "Test folder description" + assert "flows" in folder_data + assert isinstance(folder_data["flows"], list) + + +@pytest.mark.usefixtures("active_user") +async def test_read_folder_with_pagination(client: AsyncClient, logged_in_headers): + # Create a new folder + folder_name = f"Test Folder {uuid4()}" + folder = FolderCreate(name=folder_name, description="Test folder description") + response = await client.post("api/v1/folders/", json=folder.model_dump(), headers=logged_in_headers) + assert response.status_code == 201 + created_folder = response.json() + folder_id = created_folder["id"] + + # Read the folder with pagination + response = await client.get( + f"api/v1/folders/{folder_id}", headers=logged_in_headers, params={"page": 1, "size": 10} + ) assert response.status_code == 200 - assert response.json()["name"] == "BasicExample" - - -@pytest.mark.load_flows -def test_migrate_transactions(client: TestClient): - monitor_service = get_monitor_service() - drop_and_create_table_if_schema_mismatch(str(monitor_service.db_path), "transactions", TransactionModel) - flow_id = "c54f9130-f2fa-4a3e-b22a-3856d946351b" - data = { - "vertex_id": "vid", - "target_id": "tid", - "inputs": {"input_value": True}, - "outputs": {"output_value": True}, - "timestamp": "2021-10-10T10:10:10", - "status": "success", - "error": None, - "flow_id": flow_id, - } - with new_duckdb_locked_connection(str(monitor_service.db_path), read_only=False) as conn: - add_row_to_table(conn, "transactions", TransactionModel, data) - assert 1 == len(monitor_service.get_transactions()) - - with session_scope() as session: - migrate_transactions_from_monitor_service_to_database(session) - new_trans = get_transactions_by_flow_id(session, UUID(flow_id)) - assert 1 == len(new_trans) - t = new_trans[0] - assert t.error is None - assert t.inputs == data["inputs"] - assert t.outputs == data["outputs"] - assert t.status == data["status"] - assert str(t.timestamp) == "2021-10-10 10:10:10" - assert t.vertex_id == data["vertex_id"] - assert t.target_id == data["target_id"] - assert t.flow_id == UUID(flow_id) - - assert 0 == len(monitor_service.get_transactions()) - - client.request("DELETE", f"api/v1/flows/{flow_id}") - with session_scope() as session: - new_trans = get_transactions_by_flow_id(session, UUID(flow_id)) - assert 0 == len(new_trans) - - -@pytest.mark.load_flows -def test_migrate_transactions_no_duckdb(client: TestClient): - flow_id = "c54f9130-f2fa-4a3e-b22a-3856d946351b" - get_monitor_service() - - with session_scope() as session: - migrate_transactions_from_monitor_service_to_database(session) - new_trans = get_transactions_by_flow_id(session, UUID(flow_id)) - assert 0 == len(new_trans) + folder_data = response.json() + assert isinstance(folder_data, dict) + assert "folder" in folder_data + assert "flows" in folder_data + assert folder_data["folder"]["name"] == folder_name + assert folder_data["folder"]["description"] == "Test folder description" + assert folder_data["flows"]["page"] == 1 + assert folder_data["flows"]["size"] == 10 + assert isinstance(folder_data["flows"]["items"], list) + + +@pytest.mark.usefixtures("active_user") +async def test_read_folder_with_flows(client: AsyncClient, json_flow: str, logged_in_headers): + # Create a new folder + folder_name = f"Test Folder {uuid4()}" + flow_name = f"Test Flow {uuid4()}" + folder = 
FolderCreate(name=folder_name, description="Test folder description") + response = await client.post("api/v1/folders/", json=folder.model_dump(), headers=logged_in_headers) + assert response.status_code == 201 + created_folder = response.json() + folder_id = created_folder["id"] + # Create a flow in the folder + flow_data = orjson.loads(json_flow) + data = flow_data["data"] + flow = FlowCreate(name=flow_name, description="description", data=data) + flow.folder_id = folder_id + response = await client.post("api/v1/flows/", json=flow.model_dump(), headers=logged_in_headers) + assert response.status_code == 201 -def test_sqlite_pragmas(): - db_service = get_db_service() + # Read the folder with flows + response = await client.get(f"api/v1/folders/{folder_id}", headers=logged_in_headers) + assert response.status_code == 200 + folder_data = response.json() + assert folder_data["name"] == folder_name + assert folder_data["description"] == "Test folder description" + assert len(folder_data["flows"]) == 1 + assert folder_data["flows"][0]["name"] == flow_name - with db_service as session: - from sqlalchemy import text - assert "wal" == session.execute(text("PRAGMA journal_mode;")).fetchone()[0] - assert 1 == session.execute(text("PRAGMA synchronous;")).fetchone()[0] +@pytest.mark.usefixtures("active_user") +async def test_read_nonexistent_folder(client: AsyncClient, logged_in_headers): + nonexistent_id = str(uuid4()) + response = await client.get(f"api/v1/folders/{nonexistent_id}", headers=logged_in_headers) + assert response.status_code == 404 + assert response.json()["detail"] == "Folder not found" + + +@pytest.mark.usefixtures("active_user") +async def test_read_folder_with_search(client: AsyncClient, json_flow: str, logged_in_headers): + # Create a new folder + folder_name = f"Test Folder {uuid4()}" + folder = FolderCreate(name=folder_name, description="Test folder description") + response = await client.post("api/v1/folders/", json=folder.model_dump(), headers=logged_in_headers) + assert response.status_code == 201 + created_folder = response.json() + folder_id = created_folder["id"] + + # Create two flows in the folder + flow_data = orjson.loads(json_flow) + flow_name_1 = f"Test Flow 1 {uuid4()}" + flow_name_2 = f"Another Flow {uuid4()}" + + flow1 = FlowCreate( + name=flow_name_1, description="Test flow description", data=flow_data["data"], folder_id=folder_id + ) + flow2 = FlowCreate( + name=flow_name_2, description="Another flow description", data=flow_data["data"], folder_id=folder_id + ) + flow1.folder_id = folder_id + flow2.folder_id = folder_id + await client.post("api/v1/flows/", json=flow1.model_dump(), headers=logged_in_headers) + await client.post("api/v1/flows/", json=flow2.model_dump(), headers=logged_in_headers) + + # Read the folder with search + response = await client.get( + f"api/v1/folders/{folder_id}", headers=logged_in_headers, params={"search": "Test", "page": 1, "size": 10} + ) + assert response.status_code == 200 + folder_data = response.json() + assert len(folder_data["flows"]["items"]) == 1 + assert folder_data["flows"]["items"][0]["name"] == flow_name_1 + + +@pytest.mark.usefixtures("active_user") +async def test_read_folder_with_component_filter(client: AsyncClient, json_flow: str, logged_in_headers): + # Create a new folder + folder_name = f"Test Folder {uuid4()}" + folder = FolderCreate(name=folder_name, description="Test folder description") + response = await client.post("api/v1/folders/", json=folder.model_dump(), headers=logged_in_headers) + assert 
response.status_code == 201 + created_folder = response.json() + folder_id = created_folder["id"] + + # Create a component flow in the folder + flow_data = orjson.loads(json_flow) + component_flow_name = f"Component Flow {uuid4()}" + component_flow = FlowCreate( + name=component_flow_name, + description="Component flow description", + data=flow_data["data"], + folder_id=folder_id, + is_component=True, + ) + component_flow.folder_id = folder_id + await client.post("api/v1/flows/", json=component_flow.model_dump(), headers=logged_in_headers) + + # Read the folder with component filter + response = await client.get( + f"api/v1/folders/{folder_id}", headers=logged_in_headers, params={"is_component": True, "page": 1, "size": 10} + ) + assert response.status_code == 200 + folder_data = response.json() + assert len(folder_data["flows"]["items"]) == 1 + assert folder_data["flows"]["items"][0]["name"] == component_flow_name + assert folder_data["flows"]["items"][0]["is_component"] == True # noqa: E712 diff --git a/src/backend/tests/unit/test_endpoints.py b/src/backend/tests/unit/test_endpoints.py new file mode 100644 index 000000000000..eb18636162bd --- /dev/null +++ b/src/backend/tests/unit/test_endpoints.py @@ -0,0 +1,531 @@ +import asyncio +from uuid import UUID, uuid4 + +import pytest +from fastapi import status +from httpx import AsyncClient +from langflow.custom.directory_reader.directory_reader import DirectoryReader +from langflow.services.deps import get_settings_service + + +async def run_post(client, flow_id, headers, post_data): + response = await client.post( + f"api/v1/process/{flow_id}", + headers=headers, + json=post_data, + ) + assert response.status_code == 200, response.json() + return response.json() + + +# Helper function to poll task status +async def poll_task_status(client, headers, href, max_attempts=20, sleep_time=1): + for _ in range(max_attempts): + task_status_response = await client.get( + href, + headers=headers, + ) + if task_status_response.status_code == 200 and task_status_response.json()["status"] == "SUCCESS": + return task_status_response.json() + await asyncio.sleep(sleep_time) + return None # Return None if task did not complete in time + + +PROMPT_REQUEST = { + "name": "string", + "template": "string", + "frontend_node": { + "template": {}, + "description": "string", + "base_classes": ["string"], + "name": "", + "display_name": "", + "documentation": "", + "custom_fields": {}, + "output_types": [], + "field_formatters": { + "formatters": {"openai_api_key": {}}, + "base_formatters": { + "kwargs": {}, + "optional": {}, + "list": {}, + "dict": {}, + "union": {}, + "multiline": {}, + "show": {}, + "password": {}, + "default": {}, + "headers": {}, + "dict_code_file": {}, + "model_fields": { + "MODEL_DICT": { + "OpenAI": [ + "text-davinci-003", + "text-davinci-002", + "text-curie-001", + "text-babbage-001", + "text-ada-001", + ], + "ChatOpenAI": [ + "gpt-4-turbo-preview", + "gpt-4-0125-preview", + "gpt-4-1106-preview", + "gpt-4-vision-preview", + "gpt-3.5-turbo-0125", + "gpt-3.5-turbo-1106", + ], + "Anthropic": [ + "claude-v1", + "claude-v1-100k", + "claude-instant-v1", + "claude-instant-v1-100k", + "claude-v1.3", + "claude-v1.3-100k", + "claude-v1.2", + "claude-v1.0", + "claude-instant-v1.1", + "claude-instant-v1.1-100k", + "claude-instant-v1.0", + ], + "ChatAnthropic": [ + "claude-v1", + "claude-v1-100k", + "claude-instant-v1", + "claude-instant-v1-100k", + "claude-v1.3", + "claude-v1.3-100k", + "claude-v1.2", + "claude-v1.0", + "claude-instant-v1.1", + 
"claude-instant-v1.1-100k", + "claude-instant-v1.0", + ], + } + }, + }, + }, + }, +} + + +@pytest.mark.benchmark +async def test_get_all(client: AsyncClient, logged_in_headers): + response = await client.get("api/v1/all", headers=logged_in_headers) + assert response.status_code == 200 + settings = get_settings_service().settings + dir_reader = DirectoryReader(settings.components_path[0]) + files = dir_reader.get_files() + # json_response is a dict of dicts + all_names = [component_name for _, components in response.json().items() for component_name in components] + json_response = response.json() + # We need to test the custom nodes + assert len(all_names) <= len( + files + ) # Less or equal because we might have some files that don't have the dependencies installed + assert "ChatInput" in json_response["inputs"] + assert "Prompt" in json_response["prompts"] + assert "ChatOutput" in json_response["outputs"] + + +async def test_post_validate_code(client: AsyncClient): + # Test case with a valid import and function + code1 = """ +import math + +def square(x): + return x ** 2 +""" + response1 = await client.post("api/v1/validate/code", json={"code": code1}) + assert response1.status_code == 200 + assert response1.json() == {"imports": {"errors": []}, "function": {"errors": []}} + + # Test case with an invalid import and valid function + code2 = """ +import non_existent_module + +def square(x): + return x ** 2 +""" + response2 = await client.post("api/v1/validate/code", json={"code": code2}) + assert response2.status_code == 200 + assert response2.json() == { + "imports": {"errors": ["No module named 'non_existent_module'"]}, + "function": {"errors": []}, + } + + # Test case with a valid import and invalid function syntax + code3 = """ +import math + +def square(x) + return x ** 2 +""" + response3 = await client.post("api/v1/validate/code", json={"code": code3}) + assert response3.status_code == 200 + assert response3.json() == { + "imports": {"errors": []}, + "function": {"errors": ["expected ':' (, line 4)"]}, + } + + # Test case with invalid JSON payload + response4 = await client.post("api/v1/validate/code", json={"invalid_key": code1}) + assert response4.status_code == 422 + + # Test case with an empty code string + response5 = await client.post("api/v1/validate/code", json={"code": ""}) + assert response5.status_code == 200 + assert response5.json() == {"imports": {"errors": []}, "function": {"errors": []}} + + # Test case with a syntax error in the code + code6 = """ +import math + +def square(x) + return x ** 2 +""" + response6 = await client.post("api/v1/validate/code", json={"code": code6}) + assert response6.status_code == 200 + assert response6.json() == { + "imports": {"errors": []}, + "function": {"errors": ["expected ':' (, line 4)"]}, + } + + +VALID_PROMPT = """ +I want you to act as a naming consultant for new companies. + +Here are some examples of good company names: + +- search engine, Google +- social media, Facebook +- video sharing, YouTube + +The name should be short, catchy and easy to remember. + +What is a good name for a company that makes {product}? +""" + +INVALID_PROMPT = "This is an invalid prompt without any input variable." 
+ + +async def test_valid_prompt(client: AsyncClient): + PROMPT_REQUEST["template"] = VALID_PROMPT + response = await client.post("api/v1/validate/prompt", json=PROMPT_REQUEST) + assert response.status_code == 200 + assert response.json()["input_variables"] == ["product"] + + +async def test_invalid_prompt(client: AsyncClient): + PROMPT_REQUEST["template"] = INVALID_PROMPT + response = await client.post( + "api/v1/validate/prompt", + json=PROMPT_REQUEST, + ) + assert response.status_code == 200 + assert response.json()["input_variables"] == [] + + +@pytest.mark.parametrize( + ("prompt", "expected_input_variables"), + [ + ("{color} is my favorite color.", ["color"]), + ("The weather is {weather} today.", ["weather"]), + ("This prompt has no variables.", []), + ("{a}, {b}, and {c} are variables.", ["a", "b", "c"]), + ], +) +async def test_various_prompts(client, prompt, expected_input_variables): + PROMPT_REQUEST["template"] = prompt + response = await client.post("api/v1/validate/prompt", json=PROMPT_REQUEST) + assert response.status_code == 200 + assert response.json()["input_variables"] == expected_input_variables + + +async def test_get_vertices_flow_not_found(client, logged_in_headers): + uuid = uuid4() + response = await client.post(f"/api/v1/build/{uuid}/vertices", headers=logged_in_headers) + assert response.status_code == 500 + + +async def test_get_vertices(client, added_flow_webhook_test, logged_in_headers): + flow_id = added_flow_webhook_test["id"] + response = await client.post(f"/api/v1/build/{flow_id}/vertices", headers=logged_in_headers) + assert response.status_code == 200 + assert "ids" in response.json() + # The response should contain the list in this order + # ['ConversationBufferMemory-Lu2Nb', 'PromptTemplate-5Q0W8', 'ChatOpenAI-vy7fV', 'LLMChain-UjBh1'] + # The important part is before the - (ConversationBufferMemory, PromptTemplate, ChatOpenAI, LLMChain) + ids = [_id.split("-")[0] for _id in response.json()["ids"]] + + assert set(ids) == {"ChatInput"} + + +async def test_build_vertex_invalid_flow_id(client, logged_in_headers): + uuid = uuid4() + response = await client.post(f"/api/v1/build/{uuid}/vertices/vertex_id", headers=logged_in_headers) + assert response.status_code == 500 + + +async def test_build_vertex_invalid_vertex_id(client, added_flow_webhook_test, logged_in_headers): + flow_id = added_flow_webhook_test["id"] + response = await client.post(f"/api/v1/build/{flow_id}/vertices/invalid_vertex_id", headers=logged_in_headers) + assert response.status_code == 500 + + +async def test_successful_run_no_payload(client, simple_api_test, created_api_key): + headers = {"x-api-key": created_api_key.api_key} + flow_id = simple_api_test["id"] + response = await client.post(f"/api/v1/run/{flow_id}", headers=headers) + assert response.status_code == status.HTTP_200_OK, response.text + # Add more assertions here to validate the response content + json_response = response.json() + assert "session_id" in json_response + assert "outputs" in json_response + outer_outputs = json_response["outputs"] + assert len(outer_outputs) == 1 + outputs_dict = outer_outputs[0] + assert len(outputs_dict) == 2 + assert "inputs" in outputs_dict + assert "outputs" in outputs_dict + assert outputs_dict.get("inputs") == {"input_value": ""} + assert isinstance(outputs_dict.get("outputs"), list) + assert len(outputs_dict.get("outputs")) == 1 + ids = [output.get("component_id") for output in outputs_dict.get("outputs")] + assert all("ChatOutput" in _id for _id in ids) + display_names = 
[output.get("component_display_name") for output in outputs_dict.get("outputs")] + assert all(name in display_names for name in ["Chat Output"]) + output_results_has_results = all("results" in output.get("results") for output in outputs_dict.get("outputs")) + inner_results = [output.get("results") for output in outputs_dict.get("outputs")] + + assert all(result is not None for result in inner_results), (outputs_dict, output_results_has_results) + + +async def test_successful_run_with_output_type_text(client, simple_api_test, created_api_key): + headers = {"x-api-key": created_api_key.api_key} + flow_id = simple_api_test["id"] + payload = { + "output_type": "text", + } + response = await client.post(f"/api/v1/run/{flow_id}", headers=headers, json=payload) + assert response.status_code == status.HTTP_200_OK, response.text + # Add more assertions here to validate the response content + json_response = response.json() + assert "session_id" in json_response + assert "outputs" in json_response + outer_outputs = json_response["outputs"] + assert len(outer_outputs) == 1 + outputs_dict = outer_outputs[0] + assert len(outputs_dict) == 2 + assert "inputs" in outputs_dict + assert "outputs" in outputs_dict + assert outputs_dict.get("inputs") == {"input_value": ""} + assert isinstance(outputs_dict.get("outputs"), list) + assert len(outputs_dict.get("outputs")) == 1 + ids = [output.get("component_id") for output in outputs_dict.get("outputs")] + assert all("ChatOutput" in _id for _id in ids), ids + display_names = [output.get("component_display_name") for output in outputs_dict.get("outputs")] + assert all(name in display_names for name in ["Chat Output"]), display_names + inner_results = [output.get("results") for output in outputs_dict.get("outputs")] + expected_keys = ["message"] + assert all(key in result for result in inner_results for key in expected_keys), outputs_dict + + +@pytest.mark.benchmark +async def test_successful_run_with_output_type_any(client, simple_api_test, created_api_key): + # This one should have both the ChatOutput and TextOutput components + headers = {"x-api-key": created_api_key.api_key} + flow_id = simple_api_test["id"] + payload = { + "output_type": "any", + } + response = await client.post(f"/api/v1/run/{flow_id}", headers=headers, json=payload) + assert response.status_code == status.HTTP_200_OK, response.text + # Add more assertions here to validate the response content + json_response = response.json() + assert "session_id" in json_response + assert "outputs" in json_response + outer_outputs = json_response["outputs"] + assert len(outer_outputs) == 1 + outputs_dict = outer_outputs[0] + assert len(outputs_dict) == 2 + assert "inputs" in outputs_dict + assert "outputs" in outputs_dict + assert outputs_dict.get("inputs") == {"input_value": ""} + assert isinstance(outputs_dict.get("outputs"), list) + assert len(outputs_dict.get("outputs")) == 1 + ids = [output.get("component_id") for output in outputs_dict.get("outputs")] + assert all("ChatOutput" in _id or "TextOutput" in _id for _id in ids), ids + display_names = [output.get("component_display_name") for output in outputs_dict.get("outputs")] + assert all(name in display_names for name in ["Chat Output"]), display_names + inner_results = [output.get("results") for output in outputs_dict.get("outputs")] + expected_keys = ["message"] + assert all(key in result for result in inner_results for key in expected_keys), outputs_dict + + +@pytest.mark.benchmark +async def test_successful_run_with_output_type_debug(client, 
simple_api_test, created_api_key): + # This one should return outputs for all components + # Let's just check the amount of outputs(there should be 7) + headers = {"x-api-key": created_api_key.api_key} + flow_id = simple_api_test["id"] + payload = { + "output_type": "debug", + } + response = await client.post(f"/api/v1/run/{flow_id}", headers=headers, json=payload) + assert response.status_code == status.HTTP_200_OK, response.text + # Add more assertions here to validate the response content + json_response = response.json() + assert "session_id" in json_response + assert "outputs" in json_response + outer_outputs = json_response["outputs"] + assert len(outer_outputs) == 1 + outputs_dict = outer_outputs[0] + assert len(outputs_dict) == 2 + assert "inputs" in outputs_dict + assert "outputs" in outputs_dict + assert outputs_dict.get("inputs") == {"input_value": ""} + assert isinstance(outputs_dict.get("outputs"), list) + assert len(outputs_dict.get("outputs")) == 3 + + +@pytest.mark.benchmark +async def test_successful_run_with_input_type_text(client, simple_api_test, created_api_key): + headers = {"x-api-key": created_api_key.api_key} + flow_id = simple_api_test["id"] + payload = { + "input_type": "text", + "output_type": "debug", + "input_value": "value1", + } + response = await client.post(f"/api/v1/run/{flow_id}", headers=headers, json=payload) + assert response.status_code == status.HTTP_200_OK, response.text + # Add more assertions here to validate the response content + json_response = response.json() + assert "session_id" in json_response + assert "outputs" in json_response + outer_outputs = json_response["outputs"] + assert len(outer_outputs) == 1 + outputs_dict = outer_outputs[0] + assert len(outputs_dict) == 2 + assert "inputs" in outputs_dict + assert "outputs" in outputs_dict + assert outputs_dict.get("inputs") == {"input_value": "value1"} + assert isinstance(outputs_dict.get("outputs"), list) + assert len(outputs_dict.get("outputs")) == 3 + # Now we get all components that contain TextInput in the component_id + text_input_outputs = [output for output in outputs_dict.get("outputs") if "TextInput" in output.get("component_id")] + assert len(text_input_outputs) == 1 + # Now we check if the input_value is correct + # We get text key twice because the output is now a Message + assert all( + output.get("results").get("text").get("text") == "value1" for output in text_input_outputs + ), text_input_outputs + + +@pytest.mark.api_key_required +@pytest.mark.benchmark +async def test_successful_run_with_input_type_chat(client: AsyncClient, simple_api_test, created_api_key): + headers = {"x-api-key": created_api_key.api_key} + flow_id = simple_api_test["id"] + payload = { + "input_type": "chat", + "output_type": "debug", + "input_value": "value1", + } + response = await client.post(f"/api/v1/run/{flow_id}", headers=headers, json=payload) + assert response.status_code == status.HTTP_200_OK, response.text + # Add more assertions here to validate the response content + json_response = response.json() + assert "session_id" in json_response + assert "outputs" in json_response + outer_outputs = json_response["outputs"] + assert len(outer_outputs) == 1 + outputs_dict = outer_outputs[0] + assert len(outputs_dict) == 2 + assert "inputs" in outputs_dict + assert "outputs" in outputs_dict + assert outputs_dict.get("inputs") == {"input_value": "value1"} + assert isinstance(outputs_dict.get("outputs"), list) + assert len(outputs_dict.get("outputs")) == 3 + # Now we get all components that contain 
TextInput in the component_id + chat_input_outputs = [output for output in outputs_dict.get("outputs") if "ChatInput" in output.get("component_id")] + assert len(chat_input_outputs) == 1 + # Now we check if the input_value is correct + assert all( + output.get("results").get("message").get("text") == "value1" for output in chat_input_outputs + ), chat_input_outputs + + +@pytest.mark.benchmark +async def test_invalid_run_with_input_type_chat(client, simple_api_test, created_api_key): + headers = {"x-api-key": created_api_key.api_key} + flow_id = simple_api_test["id"] + payload = { + "input_type": "chat", + "output_type": "debug", + "input_value": "value1", + "tweaks": {"Chat Input": {"input_value": "value2"}}, + } + response = await client.post(f"/api/v1/run/{flow_id}", headers=headers, json=payload) + assert response.status_code == status.HTTP_400_BAD_REQUEST, response.text + assert "If you pass an input_value to the chat input, you cannot pass a tweak with the same name." in response.text + + +@pytest.mark.benchmark +async def test_successful_run_with_input_type_any(client, simple_api_test, created_api_key): + headers = {"x-api-key": created_api_key.api_key} + flow_id = simple_api_test["id"] + payload = { + "input_type": "any", + "output_type": "debug", + "input_value": "value1", + } + response = await client.post(f"/api/v1/run/{flow_id}", headers=headers, json=payload) + assert response.status_code == status.HTTP_200_OK, response.text + # Add more assertions here to validate the response content + json_response = response.json() + assert "session_id" in json_response + assert "outputs" in json_response + outer_outputs = json_response["outputs"] + assert len(outer_outputs) == 1 + outputs_dict = outer_outputs[0] + assert len(outputs_dict) == 2 + assert "inputs" in outputs_dict + assert "outputs" in outputs_dict + assert outputs_dict.get("inputs") == {"input_value": "value1"} + assert isinstance(outputs_dict.get("outputs"), list) + assert len(outputs_dict.get("outputs")) == 3 + # Now we get all components that contain TextInput or ChatInput in the component_id + any_input_outputs = [ + output + for output in outputs_dict.get("outputs") + if "TextInput" in output.get("component_id") or "ChatInput" in output.get("component_id") + ] + assert len(any_input_outputs) == 2 + # Now we check if the input_value is correct + all_result_dicts = [output.get("results") for output in any_input_outputs] + all_message_or_text_dicts = [ + result_dict.get("message", result_dict.get("text")) for result_dict in all_result_dicts + ] + assert all( + message_or_text_dict.get("text") == "value1" for message_or_text_dict in all_message_or_text_dicts + ), any_input_outputs + + +async def test_invalid_flow_id(client, created_api_key): + headers = {"x-api-key": created_api_key.api_key} + flow_id = "invalid-flow-id" + response = await client.post(f"/api/v1/run/{flow_id}", headers=headers) + assert response.status_code == status.HTTP_404_NOT_FOUND, response.text + headers = {"x-api-key": created_api_key.api_key} + flow_id = UUID(int=0) + response = await client.post(f"/api/v1/run/{flow_id}", headers=headers) + assert response.status_code == status.HTTP_404_NOT_FOUND, response.text + # Check if the error detail is as expected + + +@pytest.mark.benchmark +async def test_starter_projects(client, created_api_key): + headers = {"x-api-key": created_api_key.api_key} + response = await client.get("api/v1/starter-projects/", headers=headers) + assert response.status_code == status.HTTP_200_OK, response.text diff --git 
a/src/backend/tests/unit/test_experimental_components.py b/src/backend/tests/unit/test_experimental_components.py index 7495cf47cb9d..e4f6593f2fbb 100644 --- a/src/backend/tests/unit/test_experimental_components.py +++ b/src/backend/tests/unit/test_experimental_components.py @@ -1,10 +1,4 @@ from langflow.components import prototypes -import pytest - - -@pytest.fixture -def client(): - pass def test_python_function_component(): @@ -14,8 +8,13 @@ def test_python_function_component(): # Act # function must be a string representation function = "def function():\n return 'Hello, World!'" + python_function_component.function_code = function # result is the callable function - result = python_function_component.build(function) + result = python_function_component.get_function_callable() + result_message = python_function_component.execute_function_message() + result_data = python_function_component.execute_function_data() # Assert assert result() == "Hello, World!" + assert result_message.text == "Hello, World!" + assert result_data[0].text == "Hello, World!" diff --git a/src/backend/tests/unit/test_files.py b/src/backend/tests/unit/test_files.py index 4c4d9c73e4fc..464b5b3504f4 100644 --- a/src/backend/tests/unit/test_files.py +++ b/src/backend/tests/unit/test_files.py @@ -1,10 +1,17 @@ +import asyncio import re +import shutil +import tempfile +from contextlib import suppress +from pathlib import Path from unittest.mock import MagicMock import pytest - +from asgi_lifespan import LifespanManager +from httpx import ASGITransport, AsyncClient from langflow.services.deps import get_storage_service from langflow.services.storage.service import StorageService +from sqlmodel import Session @pytest.fixture @@ -19,12 +26,55 @@ def mock_storage_service(): return service -def test_upload_file(client, mock_storage_service, created_api_key, flow): +@pytest.fixture(name="files_client") +async def files_client_fixture( + session: Session, # noqa: ARG001 + monkeypatch, + request, + load_flows_dir, + mock_storage_service, +): + # Set the database url to a test database + if "noclient" in request.keywords: + yield + else: + + def init_app(): + db_dir = tempfile.mkdtemp() + db_path = Path(db_dir) / "test.db" + monkeypatch.setenv("LANGFLOW_DATABASE_URL", f"sqlite:///{db_path}") + monkeypatch.setenv("LANGFLOW_AUTO_LOGIN", "false") + if "load_flows" in request.keywords: + shutil.copyfile( + pytest.BASIC_EXAMPLE_PATH, Path(load_flows_dir) / "c54f9130-f2fa-4a3e-b22a-3856d946351b.json" + ) + monkeypatch.setenv("LANGFLOW_LOAD_FLOWS_PATH", load_flows_dir) + monkeypatch.setenv("LANGFLOW_AUTO_LOGIN", "true") + + from langflow.main import create_app + + app = create_app() + return app, db_path + + app, db_path = await asyncio.to_thread(init_app) + + app.dependency_overrides[get_storage_service] = lambda: mock_storage_service + async with ( + LifespanManager(app, startup_timeout=None, shutdown_timeout=None) as manager, + AsyncClient(transport=ASGITransport(app=manager.app), base_url="http://testserver/") as client, + ): + yield client + # app.dependency_overrides.clear() + monkeypatch.undo() + # clear the temp db + with suppress(FileNotFoundError): + db_path.unlink() + + +async def test_upload_file(files_client, created_api_key, flow): headers = {"x-api-key": created_api_key.api_key} - # Replace the actual storage service with the mock - client.app.dependency_overrides[get_storage_service] = lambda: mock_storage_service - response = client.post( + response = await files_client.post( f"api/v1/files/upload/{flow.id}", 
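+        # files_client overrides get_storage_service with the mock above,
+        # so this upload never touches real storage.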
files={"file": ("test.txt", b"test content")}, headers=headers, @@ -39,41 +89,36 @@ def test_upload_file(client, mock_storage_service, created_api_key, flow): assert file_path_pattern.match(response_json["file_path"]) -def test_download_file(client, mock_storage_service, created_api_key, flow): +async def test_download_file(files_client, created_api_key, flow): headers = {"x-api-key": created_api_key.api_key} - client.app.dependency_overrides[get_storage_service] = lambda: mock_storage_service - - response = client.get(f"api/v1/files/download/{flow.id}/test.txt", headers=headers) + response = await files_client.get(f"api/v1/files/download/{flow.id}/test.txt", headers=headers) assert response.status_code == 200 assert response.content == b"file content" -def test_list_files(client, mock_storage_service, created_api_key, flow): +async def test_list_files(files_client, created_api_key, flow): headers = {"x-api-key": created_api_key.api_key} - client.app.dependency_overrides[get_storage_service] = lambda: mock_storage_service - - response = client.get(f"api/v1/files/list/{flow.id}", headers=headers) + response = await files_client.get(f"api/v1/files/list/{flow.id}", headers=headers) assert response.status_code == 200 assert response.json() == {"files": ["file1.txt", "file2.jpg"]} -def test_delete_file(client, mock_storage_service, created_api_key, flow): +async def test_delete_file(files_client, created_api_key, flow): headers = {"x-api-key": created_api_key.api_key} - client.app.dependency_overrides[get_storage_service] = lambda: mock_storage_service - response = client.delete(f"api/v1/files/delete/{flow.id}/test.txt", headers=headers) + response = await files_client.delete(f"api/v1/files/delete/{flow.id}/test.txt", headers=headers) assert response.status_code == 200 assert response.json() == {"message": "File test.txt deleted successfully"} -def test_file_operations(client, created_api_key, flow): +async def test_file_operations(client, created_api_key, flow): headers = {"x-api-key": created_api_key.api_key} flow_id = flow.id file_name = "test.txt" file_content = b"Hello, world!" 
# Step 1: Upload the file - response = client.post( + response = await client.post( f"api/v1/files/upload/{flow_id}", files={"file": (file_name, file_content)}, headers=headers, @@ -91,21 +136,21 @@ def test_file_operations(client, created_api_key, flow): full_file_name = response_json["file_path"].split("/")[-1] # Step 2: List files in the folder - response = client.get(f"api/v1/files/list/{flow_id}", headers=headers) + response = await client.get(f"api/v1/files/list/{flow_id}", headers=headers) assert response.status_code == 200 assert full_file_name in response.json()["files"] # Step 3: Download the file and verify its content - response = client.get(f"api/v1/files/download/{flow_id}/{full_file_name}", headers=headers) + response = await client.get(f"api/v1/files/download/{flow_id}/{full_file_name}", headers=headers) assert response.status_code == 200 assert response.content == file_content assert response.headers["content-type"] == "application/octet-stream" # Step 4: Delete the file - response = client.delete(f"api/v1/files/delete/{flow_id}/{full_file_name}", headers=headers) + response = await client.delete(f"api/v1/files/delete/{flow_id}/{full_file_name}", headers=headers) assert response.status_code == 200 assert response.json() == {"message": f"File {full_file_name} deleted successfully"} # Verify that the file is indeed deleted - response = client.get(f"api/v1/files/list/{flow_id}", headers=headers) + response = await client.get(f"api/v1/files/list/{flow_id}", headers=headers) assert full_file_name not in response.json()["files"] diff --git a/src/backend/tests/unit/test_frontend_nodes.py b/src/backend/tests/unit/test_frontend_nodes.py index ca8c37af9561..0a4123f10072 100644 --- a/src/backend/tests/unit/test_frontend_nodes.py +++ b/src/backend/tests/unit/test_frontend_nodes.py @@ -1,15 +1,9 @@ import pytest - from langflow.template.field.base import Input from langflow.template.frontend_node.base import FrontendNode from langflow.template.template.base import Template -@pytest.fixture -def client(): - pass - - @pytest.fixture def sample_template_field() -> Input: return Input(name="test_field", field_type="str") @@ -40,11 +34,11 @@ def test_template_field_defaults(sample_template_field: Input): assert sample_template_field.value is None assert sample_template_field.file_types == [] assert sample_template_field.file_path == "" - assert sample_template_field.password is False assert sample_template_field.name == "test_field" + assert sample_template_field.password is None -def test_template_to_dict(sample_template: Template, sample_template_field: Input): +def test_template_to_dict(sample_template: Template): template_dict = sample_template.to_dict() assert template_dict["_type"] == "test_template" assert len(template_dict) == 2 # _type and test_field diff --git a/src/backend/tests/unit/test_helper_components.py b/src/backend/tests/unit/test_helper_components.py index fc343fbaabd0..6469e6026252 100644 --- a/src/backend/tests/unit/test_helper_components.py +++ b/src/backend/tests/unit/test_helper_components.py @@ -1,13 +1,9 @@ -from langflow.components import helpers +from pathlib import Path + +from langflow.components import helpers, processing from langflow.custom.utils import build_custom_component_template from langflow.schema import Data -import pytest - - -@pytest.fixture -def client(): - pass - +from langflow.schema.message import Message # def test_update_data_component(): # # Arrange @@ -39,7 +35,7 @@ def client(): def test_uuid_generator_component(): # Arrange 
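+    # Hand the component its own source so build_custom_component_template
+    # below has code to parse.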
uuid_generator_component = helpers.IDGeneratorComponent() - uuid_generator_component._code = open(helpers.IDGenerator.__file__, "r").read() + uuid_generator_component._code = Path(helpers.id_generator.__file__).read_text(encoding="utf-8") frontend_node, _ = build_custom_component_template(uuid_generator_component) @@ -47,18 +43,17 @@ def test_uuid_generator_component(): build_config = frontend_node.get("template") field_name = "unique_id" build_config = uuid_generator_component.update_build_config(build_config, None, field_name) - unique_id = build_config["unique_id"]["value"] - result = uuid_generator_component.build(unique_id) + result = uuid_generator_component.generate_id() # Assert # UUID should be a string of length 36 - assert isinstance(result, str) - assert len(result) == 36 + assert isinstance(result, Message) + assert len(result.text) == 36 def test_data_as_text_component(): # Arrange - data_as_text_component = helpers.ParseDataComponent() + data_as_text_component = processing.ParseDataComponent() # Act # Replace with your actual test data diff --git a/src/backend/tests/unit/test_initial_setup.py b/src/backend/tests/unit/test_initial_setup.py index 395f3443b240..348102551de2 100644 --- a/src/backend/tests/unit/test_initial_setup.py +++ b/src/backend/tests/unit/test_initial_setup.py @@ -1,10 +1,11 @@ +import asyncio from datetime import datetime from pathlib import Path import pytest -from sqlmodel import select - -from langflow.custom.directory_reader.utils import build_custom_component_list_from_path +from langflow.custom.directory_reader.utils import ( + abuild_custom_component_list_from_path, +) from langflow.initial_setup.setup import ( STARTER_FOLDER_NAME, get_project_data, @@ -14,6 +15,7 @@ from langflow.interface.types import aget_all_types_dict from langflow.services.database.models.folder.model import Folder from langflow.services.deps import session_scope +from sqlmodel import select def test_load_starter_projects(): @@ -34,7 +36,11 @@ def test_get_project_data(): project_data, project_icon, project_icon_bg_color, + project_gradient, + project_tags, ) = get_project_data(project) + assert isinstance(project_gradient, str) or project_gradient is None + assert isinstance(project_tags, list) assert isinstance(project_name, str) assert isinstance(project_description, str) assert isinstance(project_is_component, bool) @@ -44,23 +50,23 @@ def test_get_project_data(): assert isinstance(project_icon_bg_color, str) or project_icon_bg_color is None -@pytest.mark.asyncio +@pytest.mark.usefixtures("client") async def test_create_or_update_starter_projects(): with session_scope() as session: # Get the number of projects returned by load_starter_projects - num_projects = len(load_starter_projects()) + num_projects = len(await asyncio.to_thread(load_starter_projects)) # Get the number of projects in the database folder = session.exec(select(Folder).where(Folder.name == STARTER_FOLDER_NAME)).first() assert folder is not None num_db_projects = len(folder.flows) - # Check that the number of projects in the database is the same as the number of projects returned by load_starter_projects + # Check that the number of projects in the database is the same as the number of projects returned by + # load_starter_projects assert num_db_projects == num_projects # Some starter projects require integration -# @pytest.mark.asyncio # async def test_starter_projects_can_run_successfully(client): # with session_scope() as session: # # Run the function to create or update projects @@ -72,7 +78,8 @@ async 
def test_create_or_update_starter_projects(): # # Get the number of projects in the database # num_db_projects = session.exec(select(func.count(Flow.id)).where(Flow.folder == STARTER_FOLDER_NAME)).one() -# # Check that the number of projects in the database is the same as the number of projects returned by load_starter_projects +# # Check that the number of projects in the database is the same as the number of projects returned by +# # load_starter_projects # assert num_db_projects == num_projects # # Get all the starter projects @@ -95,39 +102,39 @@ async def test_create_or_update_starter_projects(): # delete_messages(session_id="test") -def find_componeny_by_name(components, name): - for category, children in components.items(): +def find_component_by_name(components, name): + for children in components.values(): if name in children: return children[name] - raise ValueError(f"Component {name} not found in components") + msg = f"Component {name} not found in components" + raise ValueError(msg) def set_value(component, input_name, value): component["template"][input_name]["value"] = value -def component_to_node(id, type, component): - return {"id": type + id, "data": {"node": component, "type": type, "id": id}} +def component_to_node(node_id, node_type, component): + return {"id": node_type + node_id, "data": {"node": component, "type": node_type, "id": node_id}} -def add_edge(input, output, from_output, to_input): +def add_edge(source, target, from_output, to_input): return { - "source": input, - "target": output, + "source": source, + "target": target, "data": { - "sourceHandle": {"dataType": "ChatInput", "id": input, "name": from_output, "output_types": ["Message"]}, - "targetHandle": {"fieldName": to_input, "id": output, "inputTypes": ["Message"], "type": "str"}, + "sourceHandle": {"dataType": "ChatInput", "id": source, "name": from_output, "output_types": ["Message"]}, + "targetHandle": {"fieldName": to_input, "id": target, "inputTypes": ["Message"], "type": "str"}, }, } -@pytest.mark.asyncio async def test_refresh_starter_projects(): data_path = str(Path(__file__).parent.parent.parent.absolute() / "base" / "langflow" / "components") - components = build_custom_component_list_from_path(data_path) + components = await abuild_custom_component_list_from_path(data_path) - chat_input = find_componeny_by_name(components, "ChatInput") - chat_output = find_componeny_by_name(components, "ChatOutput") + chat_input = find_component_by_name(components, "ChatInput") + chat_output = find_component_by_name(components, "ChatOutput") chat_output["template"]["code"]["value"] = "changed !" 
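+    # Dropping a field as well makes the refreshed template differ
+    # structurally, not just in its code value.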
del chat_output["template"]["should_store_message"] graph_data = { diff --git a/src/backend/tests/unit/test_kubernetes_secrets.py b/src/backend/tests/unit/test_kubernetes_secrets.py index 9de359535f16..9da44cdef82a 100644 --- a/src/backend/tests/unit/test_kubernetes_secrets.py +++ b/src/backend/tests/unit/test_kubernetes_secrets.py @@ -1,20 +1,20 @@ -import pytest -from unittest.mock import MagicMock -from kubernetes.client import V1ObjectMeta, V1Secret from base64 import b64encode +from unittest.mock import MagicMock from uuid import UUID +import pytest +from kubernetes.client import V1ObjectMeta, V1Secret from langflow.services.variable.kubernetes_secrets import KubernetesSecretManager, encode_user_id @pytest.fixture -def mock_kube_config(mocker): +def _mock_kube_config(mocker): mocker.patch("kubernetes.config.load_kube_config") mocker.patch("kubernetes.config.load_incluster_config") @pytest.fixture -def secret_manager(mock_kube_config): +def secret_manager(_mock_kube_config): return KubernetesSecretManager(namespace="test-namespace") @@ -33,13 +33,13 @@ def test_create_secret(secret_manager, mocker): kind="Secret", metadata=V1ObjectMeta(name="test-secret"), type="Opaque", - data={"key": b64encode("value".encode()).decode()}, + data={"key": b64encode(b"value").decode()}, ), ) def test_get_secret(secret_manager, mocker): - mock_secret = V1Secret(data={"key": b64encode("value".encode()).decode()}) + mock_secret = V1Secret(data={"key": b64encode(b"value").decode()}) mocker.patch.object(secret_manager.core_api, "read_namespaced_secret", return_value=mock_secret) secret_data = secret_manager.get_secret(name="test-secret") diff --git a/src/backend/tests/unit/test_loading.py b/src/backend/tests/unit/test_loading.py index d73bb58f7c07..090811f63b81 100644 --- a/src/backend/tests/unit/test_loading.py +++ b/src/backend/tests/unit/test_loading.py @@ -1,34 +1,29 @@ -import pytest +import asyncio from langflow.graph import Graph from langflow.initial_setup.setup import load_starter_projects from langflow.load import load_flow_from_json +# TODO: UPDATE BASIC EXAMPLE +# def test_load_flow_from_json(): +# """Test loading a flow from a json file""" +# loaded = load_flow_from_json(pytest.BASIC_EXAMPLE_PATH) +# assert loaded is not None +# assert isinstance(loaded, Graph) -@pytest.fixture -def client(): - pass - -def test_load_flow_from_json(): - """Test loading a flow from a json file""" - loaded = load_flow_from_json(pytest.BASIC_EXAMPLE_PATH) - assert loaded is not None - assert isinstance(loaded, Graph) - - -def test_load_flow_from_json_with_tweaks(): - """Test loading a flow from a json file and applying tweaks""" - tweaks = {"dndnode_82": {"model_name": "gpt-3.5-turbo-16k-0613"}} - loaded = load_flow_from_json(pytest.BASIC_EXAMPLE_PATH, tweaks=tweaks) - assert loaded is not None - assert isinstance(loaded, Graph) +# def test_load_flow_from_json_with_tweaks(): +# """Test loading a flow from a json file and applying tweaks""" +# tweaks = {"dndnode_82": {"model_name": "gpt-3.5-turbo-16k-0613"}} +# loaded = load_flow_from_json(pytest.BASIC_EXAMPLE_PATH, tweaks=tweaks) +# assert loaded is not None +# assert isinstance(loaded, Graph) -def test_load_flow_from_json_object(): - """Test loading a flow from a json file and applying tweaks""" - _, projects = zip(*load_starter_projects()) - project = projects[0] - loaded = load_flow_from_json(project) +async def test_load_flow_from_json_object(): + """Test loading a flow from a starter project JSON object.""" + result = await
asyncio.to_thread(load_starter_projects) + project = result[0][1] + loaded = await asyncio.to_thread(load_flow_from_json, project) assert loaded is not None assert isinstance(loaded, Graph) diff --git a/src/backend/tests/unit/test_logger.py b/src/backend/tests/unit/test_logger.py index 91fa66a75fc9..ab49ff948f07 100644 --- a/src/backend/tests/unit/test_logger.py +++ b/src/backend/tests/unit/test_logger.py @@ -1,13 +1,9 @@ -import pytest -import os import json +import os from unittest.mock import patch -from langflow.logging.logger import SizedLogBuffer - -@pytest.fixture -def client(): - pass +import pytest +from langflow.logging.logger import SizedLogBuffer @pytest.fixture @@ -32,8 +28,8 @@ def test_write(sized_log_buffer): sized_log_buffer.max = 1 # Set max size to 1 for testing sized_log_buffer.write(message) assert len(sized_log_buffer.buffer) == 1 - assert 1625097600124 == sized_log_buffer.buffer[0][0] - assert "Test log" == sized_log_buffer.buffer[0][1] + assert sized_log_buffer.buffer[0][0] == 1625097600124 + assert sized_log_buffer.buffer[0][1] == "Test log" def test_write_overflow(sized_log_buffer): @@ -43,8 +39,8 @@ def test_write_overflow(sized_log_buffer): sized_log_buffer.write(message) assert len(sized_log_buffer.buffer) == 2 - assert 1625097601000 == sized_log_buffer.buffer[0][0] - assert 1625097602000 == sized_log_buffer.buffer[1][0] + assert sized_log_buffer.buffer[0][0] == 1625097601000 + assert sized_log_buffer.buffer[1][0] == 1625097602000 def test_len(sized_log_buffer): diff --git a/src/backend/tests/unit/test_login.py b/src/backend/tests/unit/test_login.py index f4bc05bd59e6..16864f3ca487 100644 --- a/src/backend/tests/unit/test_login.py +++ b/src/backend/tests/unit/test_login.py @@ -15,7 +15,7 @@ def test_user(): ) -def test_login_successful(client, test_user): +async def test_login_successful(client, test_user): # Adding the test user to the database try: with session_scope() as session: @@ -24,22 +24,22 @@ def test_login_successful(client, test_user): except IntegrityError: pass - response = client.post("api/v1/login", data={"username": "testuser", "password": "testpassword"}) + response = await client.post("api/v1/login", data={"username": "testuser", "password": "testpassword"}) assert response.status_code == 200 assert "access_token" in response.json() -def test_login_unsuccessful_wrong_username(client): - response = client.post("api/v1/login", data={"username": "wrongusername", "password": "testpassword"}) +async def test_login_unsuccessful_wrong_username(client): + response = await client.post("api/v1/login", data={"username": "wrongusername", "password": "testpassword"}) assert response.status_code == 401 assert response.json()["detail"] == "Incorrect username or password" -def test_login_unsuccessful_wrong_password(client, test_user, session): +async def test_login_unsuccessful_wrong_password(client, test_user, session): # Adding the test user to the database session.add(test_user) session.commit() - response = client.post("api/v1/login", data={"username": "testuser", "password": "wrongpassword"}) + response = await client.post("api/v1/login", data={"username": "testuser", "password": "wrongpassword"}) assert response.status_code == 401 assert response.json()["detail"] == "Incorrect username or password" diff --git a/src/backend/tests/unit/test_messages.py b/src/backend/tests/unit/test_messages.py index 5ae53bb3440b..579016391fd9 100644 --- a/src/backend/tests/unit/test_messages.py +++ b/src/backend/tests/unit/test_messages.py @@ -1,7 +1,19 @@ -import pytest 
+from datetime import datetime, timezone +from uuid import UUID, uuid4 -from langflow.memory import add_messages, add_messagetables, delete_messages, get_messages, store_message +import pytest +from langflow.memory import ( + add_messages, + add_messagetables, + delete_messages, + get_messages, + store_message, + update_messages, +) +from langflow.schema.content_block import ContentBlock +from langflow.schema.content_types import TextContent, ToolContent from langflow.schema.message import Message +from langflow.schema.properties import Properties, Source # Assuming you have these imports available from langflow.services.database.models.message import MessageCreate, MessageRead @@ -10,32 +22,29 @@ from langflow.services.tracing.utils import convert_to_langchain_type -@pytest.fixture() +@pytest.fixture def created_message(): with session_scope() as session: message = MessageCreate(text="Test message", sender="User", sender_name="User", session_id="session_id") messagetable = MessageTable.model_validate(message, from_attributes=True) messagetables = add_messagetables([messagetable], session) - message_read = MessageRead.model_validate(messagetables[0], from_attributes=True) - return message_read + return MessageRead.model_validate(messagetables[0], from_attributes=True) -@pytest.fixture() -def created_messages(session): - with session_scope() as session: +@pytest.fixture +def created_messages(session): # noqa: ARG001 + with session_scope() as _session: messages = [ MessageCreate(text="Test message 1", sender="User", sender_name="User", session_id="session_id2"), MessageCreate(text="Test message 2", sender="User", sender_name="User", session_id="session_id2"), MessageCreate(text="Test message 3", sender="User", sender_name="User", session_id="session_id2"), ] messagetables = [MessageTable.model_validate(message, from_attributes=True) for message in messages] - messagetables = add_messagetables(messagetables, session) - messages_read = [ - MessageRead.model_validate(messagetable, from_attributes=True) for messagetable in messagetables - ] - return messages_read + messagetables = add_messagetables(messagetables, _session) + return [MessageRead.model_validate(messagetable, from_attributes=True) for messagetable in messagetables] +@pytest.mark.usefixtures("client") def test_get_messages(): add_messages( [ @@ -49,6 +58,7 @@ def test_get_messages(): assert messages[1].text == "Test message 2" +@pytest.mark.usefixtures("client") def test_add_messages(): message = Message(text="New Test message", sender="User", sender_name="User", session_id="new_session_id") messages = add_messages(message) @@ -56,6 +66,7 @@ def test_add_messages(): assert messages[0].text == "New Test message" +@pytest.mark.usefixtures("client") def test_add_messagetables(session): messages = [MessageTable(text="New Test message", sender="User", sender_name="User", session_id="new_session_id")] added_messages = add_messagetables(messages, session) @@ -63,6 +74,7 @@ def test_add_messagetables(session): assert added_messages[0].text == "New Test message" +@pytest.mark.usefixtures("client") def test_delete_messages(session): session_id = "session_id2" delete_messages(session_id) @@ -70,6 +82,7 @@ def test_delete_messages(session): assert len(messages) == 0 +@pytest.mark.usefixtures("client") def test_store_message(): message = Message(text="Stored message", sender="User", sender_name="User", session_id="stored_session_id") stored_messages = store_message(message) @@ -82,10 +95,10 @@ def test_convert_to_langchain(method_name): def 
convert(value): if method_name == "message": return value.to_lc_message() - elif method_name == "convert_to_langchain_type": + if method_name == "convert_to_langchain_type": return convert_to_langchain_type(value) - else: - raise ValueError(f"Invalid method: {method_name}") + msg = f"Invalid method: {method_name}" + raise ValueError(msg) lc_message = convert(Message(text="Test message 1", sender="User", sender_name="User", session_id="session_id2")) assert lc_message.content == "Test message 1" @@ -100,3 +113,188 @@ def convert(value): assert lc_message.content == "" assert lc_message.type == "ai" assert len(list(iterator)) == 2 + + +@pytest.mark.usefixtures("client") +def test_update_single_message(created_message): + # Modify the message + created_message.text = "Updated message" + updated = update_messages(created_message) + + assert len(updated) == 1 + assert updated[0].text == "Updated message" + assert updated[0].id == created_message.id + + +@pytest.mark.usefixtures("client") +def test_update_multiple_messages(created_messages): + # Modify the messages + for i, message in enumerate(created_messages): + message.text = f"Updated message {i}" + + updated = update_messages(created_messages) + + assert len(updated) == len(created_messages) + for i, message in enumerate(updated): + assert message.text == f"Updated message {i}" + assert message.id == created_messages[i].id + + +@pytest.mark.usefixtures("client") +def test_update_nonexistent_message(): + # Create a message with a non-existent UUID + message = MessageRead( + id=uuid4(), # Generate a random UUID that won't exist in the database + text="Test message", + sender="User", + sender_name="User", + session_id="session_id", + flow_id=uuid4(), + ) + + updated = update_messages(message) + assert len(updated) == 0 + + +@pytest.mark.usefixtures("client") +def test_update_mixed_messages(created_messages): + # Create a mix of existing and non-existing messages + nonexistent_message = MessageRead( + id=uuid4(), # Generate a random UUID that won't exist in the database + text="Test message", + sender="User", + sender_name="User", + session_id="session_id", + flow_id=uuid4(), + ) + + messages_to_update = created_messages[:1] + [nonexistent_message] + created_messages[0].text = "Updated existing message" + + updated = update_messages(messages_to_update) + + assert len(updated) == 1 + assert updated[0].text == "Updated existing message" + assert updated[0].id == created_messages[0].id + assert isinstance(updated[0].id, UUID) # Verify ID is UUID type + + +@pytest.mark.usefixtures("client") +def test_update_message_with_timestamp(created_message): + # Set a specific timestamp + new_timestamp = datetime(2024, 1, 1, 12, 0, 0, tzinfo=timezone.utc) + created_message.timestamp = new_timestamp + created_message.text = "Updated message with timestamp" + + updated = update_messages(created_message) + + assert len(updated) == 1 + assert updated[0].text == "Updated message with timestamp" + + # Compare timestamps without timezone info since DB doesn't preserve it + assert updated[0].timestamp.replace(tzinfo=None) == new_timestamp.replace(tzinfo=None) + assert updated[0].id == created_message.id + + +@pytest.mark.usefixtures("client") +def test_update_multiple_messages_with_timestamps(created_messages): + # Modify messages with different timestamps + for i, message in enumerate(created_messages): + message.text = f"Updated message {i}" + message.timestamp = datetime(2024, 1, 1, i, 0, 0, tzinfo=timezone.utc) + + updated = update_messages(created_messages) + + 
assert len(updated) == len(created_messages) + for i, message in enumerate(updated): + assert message.text == f"Updated message {i}" + # Compare timestamps without timezone info + expected_timestamp = datetime(2024, 1, 1, i, 0, 0, tzinfo=timezone.utc) + assert message.timestamp.replace(tzinfo=None) == expected_timestamp.replace(tzinfo=None) + assert message.id == created_messages[i].id + + +@pytest.mark.usefixtures("client") +def test_update_message_with_content_blocks(created_message): + # Create a content block using proper models + text_content = TextContent( + type="text", text="Test content", duration=5, header={"title": "Test Header", "icon": "TestIcon"} + ) + + tool_content = ToolContent(type="tool_use", name="test_tool", tool_input={"param": "value"}, duration=10) + + content_block = ContentBlock(title="Test Block", contents=[text_content, tool_content], allow_markdown=True) + + created_message.content_blocks = [content_block] + created_message.text = "Message with content blocks" + + updated = update_messages(created_message) + + assert len(updated) == 1 + assert updated[0].text == "Message with content blocks" + assert len(updated[0].content_blocks) == 1 + + # Verify the content block structure + updated_block = updated[0].content_blocks[0] + assert updated_block.title == "Test Block" + assert len(updated_block.contents) == 2 + + # Verify text content + text_content = updated_block.contents[0] + assert text_content.type == "text" + assert text_content.text == "Test content" + assert text_content.duration == 5 + assert text_content.header["title"] == "Test Header" + + # Verify tool content + tool_content = updated_block.contents[1] + assert tool_content.type == "tool_use" + assert tool_content.name == "test_tool" + assert tool_content.tool_input == {"param": "value"} + assert tool_content.duration == 10 + + +@pytest.mark.usefixtures("client") +def test_update_message_with_nested_properties(created_message): + # Create a text content with nested properties + text_content = TextContent( + type="text", text="Test content", header={"title": "Test Header", "icon": "TestIcon"}, duration=15 + ) + + content_block = ContentBlock( + title="Test Properties", + contents=[text_content], + allow_markdown=True, + media_url=["http://example.com/image.jpg"], + ) + + # Set properties according to the Properties model structure + created_message.properties = Properties( + text_color="blue", + background_color="white", + edited=False, + source=Source(id="test_id", display_name="Test Source", source="test"), + icon="TestIcon", + allow_markdown=True, + state="complete", + targets=[], + ) + created_message.text = "Message with nested properties" + created_message.content_blocks = [content_block] + + updated = update_messages(created_message) + + assert len(updated) == 1 + assert updated[0].text == "Message with nested properties" + + # Verify the properties were properly serialized and stored + assert updated[0].properties.text_color == "blue" + assert updated[0].properties.background_color == "white" + assert updated[0].properties.edited is False + assert updated[0].properties.source.id == "test_id" + assert updated[0].properties.source.display_name == "Test Source" + assert updated[0].properties.source.source == "test" + assert updated[0].properties.icon == "TestIcon" + assert updated[0].properties.allow_markdown is True + assert updated[0].properties.state == "complete" + assert updated[0].properties.targets == [] diff --git a/src/backend/tests/unit/test_messages_endpoints.py 
b/src/backend/tests/unit/test_messages_endpoints.py new file mode 100644 index 000000000000..a5bddee9c2a4 --- /dev/null +++ b/src/backend/tests/unit/test_messages_endpoints.py @@ -0,0 +1,117 @@ +from uuid import UUID + +import pytest +from httpx import AsyncClient +from langflow.memory import add_messagetables + +# Assuming you have these imports available +from langflow.services.database.models.message import MessageCreate, MessageRead, MessageUpdate +from langflow.services.database.models.message.model import MessageTable +from langflow.services.deps import session_scope + + +@pytest.fixture +def created_message(): + with session_scope() as session: + message = MessageCreate(text="Test message", sender="User", sender_name="User", session_id="session_id") + messagetable = MessageTable.model_validate(message, from_attributes=True) + messagetables = add_messagetables([messagetable], session) + return MessageRead.model_validate(messagetables[0], from_attributes=True) + + +@pytest.fixture +def created_messages(session): # noqa: ARG001 + with session_scope() as _session: + messages = [ + MessageCreate(text="Test message 1", sender="User", sender_name="User", session_id="session_id2"), + MessageCreate(text="Test message 2", sender="User", sender_name="User", session_id="session_id2"), + MessageCreate(text="Test message 3", sender="User", sender_name="User", session_id="session_id2"), + ] + messagetables = [MessageTable.model_validate(message, from_attributes=True) for message in messages] + return add_messagetables(messagetables, _session) + + +@pytest.mark.api_key_required +async def test_delete_messages(client: AsyncClient, created_messages, logged_in_headers): + response = await client.request( + "DELETE", "api/v1/monitor/messages", json=[str(msg.id) for msg in created_messages], headers=logged_in_headers + ) + assert response.status_code == 204, response.text + assert response.reason_phrase == "No Content" + + +@pytest.mark.api_key_required +async def test_update_message(client: AsyncClient, logged_in_headers, created_message): + message_id = created_message.id + message_update = MessageUpdate(text="Updated content") + response = await client.put( + f"api/v1/monitor/messages/{message_id}", json=message_update.model_dump(), headers=logged_in_headers + ) + assert response.status_code == 200, response.text + updated_message = MessageRead(**response.json()) + assert updated_message.text == "Updated content" + + +@pytest.mark.api_key_required +async def test_update_message_not_found(client: AsyncClient, logged_in_headers): + non_existent_id = UUID("00000000-0000-0000-0000-000000000000") + message_update = MessageUpdate(text="Updated content") + response = await client.put( + f"api/v1/monitor/messages/{non_existent_id}", json=message_update.model_dump(), headers=logged_in_headers + ) + assert response.status_code == 404, response.text + assert response.json()["detail"] == "Message not found" + + +@pytest.mark.api_key_required +async def test_delete_messages_session(client: AsyncClient, created_messages, logged_in_headers): + session_id = "session_id2" + response = await client.delete(f"api/v1/monitor/messages/session/{session_id}", headers=logged_in_headers) + assert response.status_code == 204 + assert response.reason_phrase == "No Content" + + assert len(created_messages) == 3 + response = await client.get("api/v1/monitor/messages", headers=logged_in_headers) + assert response.status_code == 200 + assert len(response.json()) == 0 + + +# Successfully update session ID for all messages with the 
old session ID +@pytest.mark.usefixtures("session") +async def test_successfully_update_session_id(client, logged_in_headers, created_messages): + old_session_id = "session_id2" + new_session_id = "new_session_id" + + response = await client.patch( + f"api/v1/monitor/messages/session/{old_session_id}", + params={"new_session_id": new_session_id}, + headers=logged_in_headers, + ) + + assert response.status_code == 200, response.text + updated_messages = response.json() + assert len(updated_messages) == len(created_messages) + for message in updated_messages: + assert message["session_id"] == new_session_id + + response = await client.get( + "api/v1/monitor/messages", headers=logged_in_headers, params={"session_id": new_session_id} + ) + assert response.status_code == 200 + assert len(response.json()) == len(created_messages) + for message in response.json(): + assert message["session_id"] == new_session_id + + +# No messages found with the given session ID +@pytest.mark.usefixtures("session") +async def test_no_messages_found_with_given_session_id(client, logged_in_headers): + old_session_id = "non_existent_session_id" + new_session_id = "new_session_id" + + response = await client.patch( + f"/messages/session/{old_session_id}", params={"new_session_id": new_session_id}, headers=logged_in_headers + ) + + assert response.status_code == 404, response.text + assert response.json()["detail"] == "Not Found" diff --git a/src/backend/tests/unit/test_process.py b/src/backend/tests/unit/test_process.py index 5d0f80d90264..37930d9c2f60 100644 --- a/src/backend/tests/unit/test_process.py +++ b/src/backend/tests/unit/test_process.py @@ -1,4 +1,5 @@ -import pytest +import asyncio + from langflow.processing.process import process_tweaks from langflow.services.deps import get_session_service @@ -262,10 +263,9 @@ def test_tweak_not_in_template(): assert result == graph_data -@pytest.mark.asyncio -async def test_load_langchain_object_with_cached_session(client, basic_graph_data): +async def test_load_langchain_object_with_cached_session(basic_graph_data): # Provide a non-existent session_id - session_service = get_session_service() + session_service = await asyncio.to_thread(get_session_service) session_id1 = "non-existent-session-id" graph1, artifacts1 = await session_service.load_session(session_id1, basic_graph_data) # Use the new session_id to get the langchain_object again @@ -275,29 +275,36 @@ async def test_load_langchain_object_with_cached_session(client, basic_graph_dat assert artifacts1 == artifacts2 -@pytest.mark.asyncio -async def test_load_langchain_object_with_no_cached_session(client, basic_graph_data): - # Provide a non-existent session_id - session_service = get_session_service() - session_id1 = "non-existent-session-id" - session_id = session_service.build_key(session_id1, basic_graph_data) - graph1, artifacts1 = await session_service.load_session(session_id, data_graph=basic_graph_data, flow_id="flow_id") - # Clear the cache - await session_service.clear_session(session_id) - # Use the new session_id to get the graph again - graph2, artifacts2 = await session_service.load_session(session_id, data_graph=basic_graph_data, flow_id="flow_id") - - # Since the cache was cleared, objects should be different - assert id(graph1) != id(graph2) - +# TODO: Update basic graph data +# async def test_load_langchain_object_with_no_cached_session(client, basic_graph_data): +# # Provide a non-existent session_id +# session_service = get_session_service() +# session_id1 = "non-existent-session-id" +# 
session_id = session_service.build_key(session_id1, basic_graph_data) +# graph1, artifacts1 = await session_service.load_session( +# session_id, data_graph=basic_graph_data, flow_id="flow_id" +# ) +# # Clear the cache +# await session_service.clear_session(session_id) +# # Use the new session_id to get the graph again +# graph2, artifacts2 = await session_service.load_session( +# session_id, data_graph=basic_graph_data, flow_id="flow_id" +# ) +# +# # Since the cache was cleared, objects should be different +# assert id(graph1) != id(graph2) -@pytest.mark.asyncio -async def test_load_langchain_object_without_session_id(client, basic_graph_data): - # Provide a non-existent session_id - session_service = get_session_service() - session_id1 = None - graph1, artifacts1 = await session_service.load_session(session_id1, data_graph=basic_graph_data, flow_id="flow_id") - # Use the new session_id to get the langchain_object again - graph2, artifacts2 = await session_service.load_session(session_id1, data_graph=basic_graph_data, flow_id="flow_id") - assert graph1 == graph2 +# async def test_load_langchain_object_without_session_id(client, basic_graph_data): +# # Provide a non-existent session_id +# session_service = get_session_service() +# session_id1 = None +# graph1, artifacts1 = await session_service.load_session( +# session_id1, data_graph=basic_graph_data, flow_id="flow_id" +# ) +# # Use the new session_id to get the langchain_object again +# graph2, artifacts2 = await session_service.load_session( +# session_id1, data_graph=basic_graph_data, flow_id="flow_id" +# ) +# +# assert graph1 == graph2 diff --git a/src/backend/tests/unit/test_schema.py b/src/backend/tests/unit/test_schema.py new file mode 100644 index 000000000000..cce9d8bcd3df --- /dev/null +++ b/src/backend/tests/unit/test_schema.py @@ -0,0 +1,178 @@ +from collections.abc import Sequence as SequenceABC +from types import NoneType +from typing import Union + +import pytest +from langflow.schema.data import Data +from langflow.template import Input, Output +from langflow.template.field.base import UNDEFINED +from langflow.type_extraction.type_extraction import post_process_type +from pydantic import ValidationError + + +class TestInput: + def test_field_type_str(self): + input_obj = Input(field_type="str") + assert input_obj.field_type == "str" + + def test_field_type_type(self): + input_obj = Input(field_type=int) + assert input_obj.field_type == "int" + + def test_invalid_field_type(self): + with pytest.raises(ValidationError): + Input(field_type=123) + + def test_serialize_field_type(self): + input_obj = Input(field_type="str") + assert input_obj.serialize_field_type("str", None) == "str" + + def test_validate_type_string(self): + input_obj = Input(field_type="str") + assert input_obj.field_type == "str" + + def test_validate_type_class(self): + input_obj = Input(field_type=int) + assert input_obj.field_type == "int" + + def test_post_process_type_function(self): + # Basic types + assert set(post_process_type(int)) == {int} + assert set(post_process_type(float)) == {float} + + # List and Sequence types + assert set(post_process_type(list[int])) == {int} + assert set(post_process_type(SequenceABC[float])) == {float} + + # Union types + assert set(post_process_type(Union[int, str])) == {int, str} # noqa: UP007 + assert set(post_process_type(Union[int, SequenceABC[str]])) == {int, str} # noqa: UP007 + assert set(post_process_type(Union[int, SequenceABC[int]])) == {int} # noqa: UP007 + + # Nested Union with lists + assert 
set(post_process_type(Union[list[int], list[str]])) == {int, str} # noqa: UP007 + assert set(post_process_type(Union[int, list[str], list[float]])) == {int, str, float} # noqa: UP007 + + # Custom data types + assert set(post_process_type(Data)) == {Data} + assert set(post_process_type(list[Data])) == {Data} + + # Union with custom types + assert set(post_process_type(Union[Data, str])) == {Data, str} # noqa: UP007 + assert set(post_process_type(Union[Data, int, list[str]])) == {Data, int, str} # noqa: UP007 + + # Empty lists and edge cases + assert set(post_process_type(list)) == {list} + assert set(post_process_type(Union[int, None])) == {int, NoneType} # noqa: UP007 + assert set(post_process_type(Union[None, list[None]])) == {None, NoneType} # noqa: UP007 + + # Handling complex nested structures + assert set(post_process_type(Union[SequenceABC[int | str], list[float]])) == {int, str, float} # noqa: UP007 + assert set(post_process_type(Union[int | list[str] | list[float], str])) == {int, str, float} # noqa: UP007 + + # Non-generic types should return as is + assert set(post_process_type(dict)) == {dict} + assert set(post_process_type(tuple)) == {tuple} + + # Union with custom types + assert set(post_process_type(Union[Data, str])) == {Data, str} # noqa: UP007 + assert set(post_process_type(Data | str)) == {Data, str} + assert set(post_process_type(Data | int | list[str])) == {Data, int, str} + + # More complex combinations with Data + assert set(post_process_type(Data | list[float])) == {Data, float} + assert set(post_process_type(Data | Union[int, str])) == {Data, int, str} # noqa: UP007 + assert set(post_process_type(Data | list[int] | None)) == {Data, int, type(None)} + assert set(post_process_type(Data | Union[float, None])) == {Data, float, type(None)} # noqa: UP007 + + # Multiple Data types combined + assert set(post_process_type(Union[Data, str | float])) == {Data, str, float} # noqa: UP007 + assert set(post_process_type(Union[Data | float | str, int])) == {Data, int, float, str} # noqa: UP007 + + # Testing with nested unions and lists + assert set(post_process_type(Union[list[Data], list[int | str]])) == {Data, int, str} # noqa: UP007 + assert set(post_process_type(Data | list[float | str])) == {Data, float, str} + + def test_input_to_dict(self): + input_obj = Input(field_type="str") + assert input_obj.to_dict() == { + "type": "str", + "required": False, + "placeholder": "", + "list": False, + "show": True, + "multiline": False, + "fileTypes": [], + "file_path": "", + "advanced": False, + "title_case": False, + "dynamic": False, + "info": "", + "input_types": ["Text"], + "load_from_db": False, + } + + +class TestOutput: + def test_output_default(self): + output_obj = Output(name="test_output") + assert output_obj.name == "test_output" + assert output_obj.value == UNDEFINED + assert output_obj.cache is True + + def test_output_add_types(self): + output_obj = Output(name="test_output") + output_obj.add_types(["str", "int"]) + assert output_obj.types == ["str", "int"] + + def test_output_set_selected(self): + output_obj = Output(name="test_output", types=["str", "int"]) + output_obj.set_selected() + assert output_obj.selected == "str" + + def test_output_to_dict(self): + output_obj = Output(name="test_output") + assert output_obj.to_dict() == { + "types": [], + "name": "test_output", + "display_name": "test_output", + "cache": True, + "value": "__UNDEFINED__", + } + + def test_output_validate_display_name(self): + output_obj = Output(name="test_output") + assert 
output_obj.display_name == "test_output" + + def test_output_validate_model(self): + output_obj = Output(name="test_output", value="__UNDEFINED__") + assert output_obj.validate_model() == output_obj + + +class TestPostProcessType: + def test_int_type(self): + assert post_process_type(int) == [int] + + def test_list_int_type(self): + assert post_process_type(list[int]) == [int] + + def test_union_type(self): + assert set(post_process_type(Union[int, str])) == {int, str} # noqa: UP007 + + def test_custom_type(self): + class CustomType: + pass + + assert post_process_type(CustomType) == [CustomType] + + def test_list_custom_type(self): + class CustomType: + pass + + assert post_process_type(list[CustomType]) == [CustomType] + + def test_union_custom_type(self): + class CustomType: + pass + + assert set(post_process_type(Union[CustomType, int])) == {CustomType, int} # noqa: UP007 diff --git a/src/backend/tests/unit/test_setup_superuser.py b/src/backend/tests/unit/test_setup_superuser.py index c2172429b92c..b8fb1cbd1309 100644 --- a/src/backend/tests/unit/test_setup_superuser.py +++ b/src/backend/tests/unit/test_setup_superuser.py @@ -112,11 +112,11 @@ def test_teardown_superuser_default_superuser(mock_get_session, mock_get_setting @patch("langflow.services.deps.get_settings_service") @patch("langflow.services.deps.get_session") def test_teardown_superuser_no_default_superuser(mock_get_session, mock_get_settings_service): - ADMIN_USER_NAME = "admin_user" + admin_user_name = "admin_user" mock_settings_service = MagicMock() mock_settings_service.auth_settings.AUTO_LOGIN = False - mock_settings_service.auth_settings.SUPERUSER = ADMIN_USER_NAME - mock_settings_service.auth_settings.SUPERUSER_PASSWORD = "password" + mock_settings_service.auth_settings.SUPERUSER = admin_user_name + mock_settings_service.auth_settings.SUPERUSER_PASSWORD = "password" # noqa: S105 mock_get_settings_service.return_value = mock_settings_service mock_session = MagicMock() diff --git a/src/backend/tests/unit/test_telemetry.py b/src/backend/tests/unit/test_telemetry.py index abef3bfd619c..618493c64664 100644 --- a/src/backend/tests/unit/test_telemetry.py +++ b/src/backend/tests/unit/test_telemetry.py @@ -1,8 +1,8 @@ -import pytest import threading -from langflow.services.telemetry.opentelemetry import OpenTelemetry from concurrent.futures import ThreadPoolExecutor, as_completed +import pytest +from langflow.services.telemetry.opentelemetry import OpenTelemetry fixed_labels = {"flow_id": "this_flow_id", "service": "this", "user": "that"} @@ -24,17 +24,17 @@ def test_gauge(opentelemetry_instance): def test_gauge_with_counter_method(opentelemetry_instance): - with pytest.raises(ValueError, match="Metric 'file_uploads' is not a counter"): + with pytest.raises(TypeError, match="Metric 'file_uploads' is not a counter"): opentelemetry_instance.increment_counter(metric_name="file_uploads", value=1, labels=fixed_labels) def test_gauge_with_historgram_method(opentelemetry_instance): - with pytest.raises(ValueError, match="Metric 'file_uploads' is not a histogram"): + with pytest.raises(TypeError, match="Metric 'file_uploads' is not a histogram"): opentelemetry_instance.observe_histogram("file_uploads", 1, fixed_labels) def test_gauge_with_up_down_counter_method(opentelemetry_instance): - with pytest.raises(ValueError, match="Metric 'file_uploads' is not an up down counter"): + with pytest.raises(TypeError, match="Metric 'file_uploads' is not an up down counter"): opentelemetry_instance.up_down_counter("file_uploads", 1, 
labels=fixed_labels) @@ -72,9 +72,9 @@ def test_missing_labels(opentelemetry_instance): with pytest.raises(ValueError, match="Labels must be provided for the metric"): opentelemetry_instance.up_down_counter("num_files_uploaded", 1, None) with pytest.raises(ValueError, match="Labels must be provided for the metric"): - opentelemetry_instance.update_gauge(metric_name="num_files_uploaded", value=1.0, labels=dict()) + opentelemetry_instance.update_gauge(metric_name="num_files_uploaded", value=1.0, labels={}) with pytest.raises(ValueError, match="Labels must be provided for the metric"): - opentelemetry_instance.observe_histogram("num_files_uploaded", 1, dict()) + opentelemetry_instance.observe_histogram("num_files_uploaded", 1, {}) def test_multithreaded_singleton(): diff --git a/src/backend/tests/unit/test_template.py b/src/backend/tests/unit/test_template.py index c1282b578e7f..6b2127178017 100644 --- a/src/backend/tests/unit/test_template.py +++ b/src/backend/tests/unit/test_template.py @@ -1,25 +1,19 @@ import importlib -from typing import Dict, List, Optional import pytest from langflow.utils.util import build_template_from_function, get_base_classes, get_default_factory from pydantic import BaseModel -@pytest.fixture -def client(): - pass - - # Dummy classes for testing purposes class Parent(BaseModel): - """Parent Class""" + """Parent Class.""" parent_field: str class Child(Parent): - """Child Class""" + """Child Class.""" child_field: int @@ -27,14 +21,14 @@ class Child(Parent): class ExampleClass1(BaseModel): """Example class 1.""" - def __init__(self, data: Optional[List[int]] = None): + def __init__(self, data: list[int] | None = None): self.data = data or [1, 2, 3] class ExampleClass2(BaseModel): """Example class 2.""" - def __init__(self, data: Optional[Dict[str, int]] = None): + def __init__(self, data: dict[str, int] | None = None): self.data = data or {"a": 1, "b": 2, "c": 3} @@ -91,7 +85,7 @@ def dummy_function(): return "default_value" # Add dummy_function to your_module - setattr(importlib.import_module(module_name), "dummy_function", dummy_function) + importlib.import_module(module_name).dummy_function = dummy_function default_value = get_default_factory(module_name, function_repr) diff --git a/src/backend/tests/unit/test_user.py b/src/backend/tests/unit/test_user.py new file mode 100644 index 000000000000..9184a6567afb --- /dev/null +++ b/src/backend/tests/unit/test_user.py @@ -0,0 +1,255 @@ +from datetime import datetime, timezone + +import pytest +from httpx import AsyncClient +from langflow.services.auth.utils import create_super_user, get_password_hash +from langflow.services.database.models.user import UserUpdate +from langflow.services.database.models.user.model import User +from langflow.services.database.utils import session_getter +from langflow.services.deps import get_db_service, get_settings_service +from sqlmodel import select + + +@pytest.fixture +def super_user(client): # noqa: ARG001 + settings_manager = get_settings_service() + auth_settings = settings_manager.auth_settings + with session_getter(get_db_service()) as session: + return create_super_user( + db=session, + username=auth_settings.SUPERUSER, + password=auth_settings.SUPERUSER_PASSWORD, + ) + + +@pytest.fixture +async def super_user_headers( + client: AsyncClient, + super_user, # noqa: ARG001 +): + settings_service = get_settings_service() + auth_settings = settings_service.auth_settings + login_data = { + "username": auth_settings.SUPERUSER, + "password": auth_settings.SUPERUSER_PASSWORD, + } + 
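+    # Exchange the configured superuser credentials for a bearer token
+    # that the admin-only tests below reuse.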
response = await client.post("api/v1/login", data=login_data) + assert response.status_code == 200 + tokens = response.json() + a_token = tokens["access_token"] + return {"Authorization": f"Bearer {a_token}"} + + +@pytest.fixture +def deactivated_user(client): # noqa: ARG001 + with session_getter(get_db_service()) as session: + user = User( + username="deactivateduser", + password=get_password_hash("testpassword"), + is_active=False, + is_superuser=False, + last_login_at=datetime.now(tz=timezone.utc), + ) + session.add(user) + session.commit() + session.refresh(user) + return user + + +async def test_user_waiting_for_approval(client): + username = "waitingforapproval" + password = "testpassword" # noqa: S105 + + # Debug: Check if the user already exists + with session_getter(get_db_service()) as session: + existing_user = session.exec(select(User).where(User.username == username)).first() + if existing_user: + pytest.fail( + f"User {username} already exists before the test. Database URL: {get_db_service().database_url}" + ) + + # Create a user that is not active and has never logged in + with session_getter(get_db_service()) as session: + user = User( + username=username, + password=get_password_hash(password), + is_active=False, + last_login_at=None, + ) + session.add(user) + session.commit() + + login_data = {"username": "waitingforapproval", "password": "testpassword"} + response = await client.post("api/v1/login", data=login_data) + assert response.status_code == 400 + assert response.json()["detail"] == "Waiting for approval" + + # Debug: Check if the user still exists after the test + with session_getter(get_db_service()) as session: + existing_user = session.exec(select(User).where(User.username == username)).first() + if not existing_user: + pytest.fail(f"User {username} does not exist after the test. This is unexpected.") + + +@pytest.mark.api_key_required +async def test_deactivated_user_cannot_login(client: AsyncClient, deactivated_user): + login_data = {"username": deactivated_user.username, "password": "testpassword"} + response = await client.post("api/v1/login", data=login_data) + assert response.status_code == 401, response.json() + assert response.json()["detail"] == "Inactive user", response.text + + +@pytest.mark.usefixtures("deactivated_user") +async def test_deactivated_user_cannot_access(client: AsyncClient, logged_in_headers): + # logged_in_headers belong to a regular (non-superuser) account, so listing users is forbidden + response = await client.get("api/v1/users/", headers=logged_in_headers) + assert response.status_code == 403, response.status_code + assert response.json()["detail"] == "The user doesn't have enough privileges", response.text + + +@pytest.mark.api_key_required +async def test_data_consistency_after_update(client: AsyncClient, active_user, logged_in_headers, super_user_headers): + user_id = active_user.id + update_data = UserUpdate(is_active=False) + + response = await client.patch(f"/api/v1/users/{user_id}", json=update_data.model_dump(), headers=super_user_headers) + assert response.status_code == 200, response.json() + + # Fetch the updated user from the database + response = await client.get("api/v1/users/whoami", headers=logged_in_headers) + assert response.status_code == 401, response.json() + assert response.json()["detail"] == "User not found or is inactive."
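+    # Deactivation invalidates the user's existing session: whoami no longer
+    # resolves the user, which is exactly the consistency checked here.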
+ + +@pytest.mark.api_key_required +async def test_data_consistency_after_delete(client: AsyncClient, test_user, super_user_headers): + user_id = test_user.get("id") + response = await client.delete(f"/api/v1/users/{user_id}", headers=super_user_headers) + assert response.status_code == 200, response.json() + + # Attempt to fetch the deleted user from the database + response = await client.get("api/v1/users/", headers=super_user_headers) + assert response.status_code == 200 + assert all(user["id"] != user_id for user in response.json()["users"]) + + +@pytest.mark.api_key_required +async def test_inactive_user(client: AsyncClient): + # Create a user that is not active and has a last_login_at value + with session_getter(get_db_service()) as session: + user = User( + username="inactiveuser", + password=get_password_hash("testpassword"), + is_active=False, + last_login_at=datetime(2023, 1, 1, 0, 0, 0, tzinfo=timezone.utc), + ) + session.add(user) + session.commit() + + login_data = {"username": "inactiveuser", "password": "testpassword"} + response = await client.post("api/v1/login", data=login_data) + assert response.status_code == 401 + assert response.json()["detail"] == "Inactive user" + + +@pytest.mark.api_key_required +def test_add_user(test_user): + assert test_user["username"] == "testuser" + + +@pytest.mark.api_key_required +async def test_read_all_users(client: AsyncClient, super_user_headers): + response = await client.get("api/v1/users/", headers=super_user_headers) + assert response.status_code == 200, response.json() + assert isinstance(response.json()["users"], list) + + +@pytest.mark.api_key_required +async def test_normal_user_cant_read_all_users(client: AsyncClient, logged_in_headers): + response = await client.get("api/v1/users/", headers=logged_in_headers) + assert response.status_code == 403, response.json() + assert response.json() == {"detail": "The user doesn't have enough privileges"} + + +@pytest.mark.api_key_required +async def test_patch_user(client: AsyncClient, active_user, logged_in_headers): + user_id = active_user.id + update_data = UserUpdate( + username="newname", + ) + + response = await client.patch(f"/api/v1/users/{user_id}", json=update_data.model_dump(), headers=logged_in_headers) + assert response.status_code == 200, response.json() + update_data = UserUpdate( + profile_image="new_image", + ) + + response = await client.patch(f"/api/v1/users/{user_id}", json=update_data.model_dump(), headers=logged_in_headers) + assert response.status_code == 200, response.json() + + +@pytest.mark.api_key_required +async def test_patch_reset_password(client: AsyncClient, active_user, logged_in_headers): + user_id = active_user.id + update_data = UserUpdate( + password="newpassword", # noqa: S106 + ) + + response = await client.patch( + f"/api/v1/users/{user_id}/reset-password", + json=update_data.model_dump(), + headers=logged_in_headers, + ) + assert response.status_code == 200, response.json() + # Now we need to test if the new password works + login_data = {"username": active_user.username, "password": "newpassword"} + response = await client.post("api/v1/login", data=login_data) + assert response.status_code == 200 + + +@pytest.mark.api_key_required +@pytest.mark.usefixtures("active_user") +async def test_patch_user_wrong_id(client: AsyncClient, logged_in_headers): + user_id = "wrong_id" + update_data = UserUpdate( + username="newname", + ) + + response = await client.patch(f"/api/v1/users/{user_id}", json=update_data.model_dump(), headers=logged_in_headers) + 
assert response.status_code == 422, response.json() + json_response = response.json() + detail = json_response["detail"] + error = detail[0] + assert error["loc"] == ["path", "user_id"] + assert error["type"] == "uuid_parsing" + + +@pytest.mark.api_key_required +async def test_delete_user(client: AsyncClient, test_user, super_user_headers): + user_id = test_user["id"] + response = await client.delete(f"/api/v1/users/{user_id}", headers=super_user_headers) + assert response.status_code == 200 + assert response.json() == {"detail": "User deleted"} + + +@pytest.mark.api_key_required +@pytest.mark.usefixtures("test_user") +async def test_delete_user_wrong_id(client: AsyncClient, super_user_headers): + user_id = "wrong_id" + response = await client.delete(f"/api/v1/users/{user_id}", headers=super_user_headers) + assert response.status_code == 422 + json_response = response.json() + detail = json_response["detail"] + error = detail[0] + assert error["loc"] == ["path", "user_id"] + assert error["type"] == "uuid_parsing" + + +@pytest.mark.api_key_required +async def test_normal_user_cant_delete_user(client: AsyncClient, test_user, logged_in_headers): + user_id = test_user["id"] + response = await client.delete(f"/api/v1/users/{user_id}", headers=logged_in_headers) + assert response.status_code == 403 + assert response.json() == {"detail": "The user doesn't have enough privileges"} diff --git a/src/backend/tests/unit/test_validate_code.py b/src/backend/tests/unit/test_validate_code.py index 6d4e5c4215e7..4470b3efebfc 100644 --- a/src/backend/tests/unit/test_validate_code.py +++ b/src/backend/tests/unit/test_validate_code.py @@ -2,15 +2,15 @@ from unittest import mock import pytest +from langflow.utils.validate import ( + create_class, + create_function, + execute_function, + extract_function_name, + validate_code, +) from requests.exceptions import MissingSchema -from langflow.utils.validate import create_function, execute_function, extract_function_name, validate_code - - -@pytest.fixture -def client(): - pass - def test_create_function(): code = """ @@ -104,6 +104,69 @@ def test_execute_function_missing_schema(): def my_function(x): return requests.get(x).text """ - with mock.patch("requests.get", side_effect=MissingSchema): - with pytest.raises(MissingSchema): - execute_function(code, "my_function", "invalid_url") + with mock.patch("requests.get", side_effect=MissingSchema), pytest.raises(MissingSchema): + execute_function(code, "my_function", "invalid_url") + + +def test_create_class(): + code = """ +from langflow.custom import CustomComponent + +class ExternalClass: + def __init__(self, value): + self.value = value + +class MyComponent(CustomComponent): + def build(self): + return ExternalClass("test") +""" + class_name = "MyComponent" + created_class = create_class(code, class_name) + instance = created_class() + result = instance.build() + assert result.value == "test" + + +def test_create_class_with_multiple_external_classes(): + code = """ +from langflow.custom import CustomComponent + +class ExternalClass1: + def __init__(self, value): + self.value = value + +class ExternalClass2: + def __init__(self, value): + self.value = value + +class MyComponent(CustomComponent): + def build(self): + return ExternalClass1("test1"), ExternalClass2("test2") +""" + class_name = "MyComponent" + created_class = create_class(code, class_name) + instance = created_class() + result1, result2 = instance.build() + assert result1.value == "test1" + assert result2.value == "test2" + + +def 
test_create_class_with_external_variables_and_functions(): + code = """ +from langflow.custom import CustomComponent + +external_variable = "external_value" + +def external_function(): + return "external_function_value" + +class MyComponent(CustomComponent): + def build(self): + return external_variable, external_function() +""" + class_name = "MyComponent" + created_class = create_class(code, class_name) + instance = created_class() + result_variable, result_function = instance.build() + assert result_variable == "external_value" + assert result_function == "external_function_value" diff --git a/src/backend/tests/unit/test_version.py b/src/backend/tests/unit/test_version.py index 36ac7c5a480e..d068b52987cd 100644 --- a/src/backend/tests/unit/test_version.py +++ b/src/backend/tests/unit/test_version.py @@ -9,9 +9,9 @@ def test_version(): def test_compute_main(): - assert "1.0.10" == _compute_non_prerelease_version("1.0.10.post0") - assert "1.0.10" == _compute_non_prerelease_version("1.0.10.a1") - assert "1.0.10" == _compute_non_prerelease_version("1.0.10.b112") - assert "1.0.10" == _compute_non_prerelease_version("1.0.10.rc0") - assert "1.0.10" == _compute_non_prerelease_version("1.0.10.dev9") - assert "1.0.10" == _compute_non_prerelease_version("1.0.10") + assert _compute_non_prerelease_version("1.0.10.post0") == "1.0.10" + assert _compute_non_prerelease_version("1.0.10.a1") == "1.0.10" + assert _compute_non_prerelease_version("1.0.10.b112") == "1.0.10" + assert _compute_non_prerelease_version("1.0.10.rc0") == "1.0.10" + assert _compute_non_prerelease_version("1.0.10.dev9") == "1.0.10" + assert _compute_non_prerelease_version("1.0.10") == "1.0.10" diff --git a/src/backend/tests/unit/test_webhook.py b/src/backend/tests/unit/test_webhook.py new file mode 100644 index 000000000000..2c38c8fa7aa8 --- /dev/null +++ b/src/backend/tests/unit/test_webhook.py @@ -0,0 +1,59 @@ +import tempfile +from pathlib import Path + +import pytest + + +@pytest.fixture(autouse=True) +def _check_openai_api_key_in_environment_variables(): + pass + + +async def test_webhook_endpoint(client, added_webhook_test): + # The test is as follows: + # 1. When run, the flow reads a "path" from the payload and saves a file at that path. + # We will create a temporary file path, send it to the webhook endpoint, and check that the file was created. + # 2. We will then delete the file, send an invalid payload to the webhook endpoint, and check that no file is created.
+ endpoint_name = added_webhook_test["endpoint_name"] + endpoint = f"api/v1/webhook/{endpoint_name}" + # Create a temporary file path + with tempfile.TemporaryDirectory() as tmp: + file_path = Path(tmp) / "test_file.txt" + + payload = {"path": str(file_path)} + + response = await client.post(endpoint, json=payload) + assert response.status_code == 202 + assert file_path.exists() + + # Delete the file before sending the invalid payload + file_path.unlink() + assert not file_path.exists() + + # Send an invalid payload + payload = {"invalid_key": "invalid_value"} + response = await client.post(endpoint, json=payload) + assert response.status_code == 202 + assert not file_path.exists() + + +async def test_webhook_flow_on_run_endpoint(client, added_webhook_test, created_api_key): + endpoint_name = added_webhook_test["endpoint_name"] + endpoint = f"api/v1/run/{endpoint_name}?stream=false" + # Just test that a minimal payload on the run endpoint returns 200 + payload = { + "output_type": "any", + } + response = await client.post(endpoint, headers={"x-api-key": created_api_key.api_key}, json=payload) + assert response.status_code == 200, response.json() + + +async def test_webhook_with_random_payload(client, added_webhook_test): + endpoint_name = added_webhook_test["endpoint_name"] + endpoint = f"api/v1/webhook/{endpoint_name}" + # Just test that "Random Payload" returns 202 + response = await client.post( + endpoint, + json="Random Payload", + ) + assert response.status_code == 202 diff --git a/src/backend/tests/unit/utils/__init__.py b/src/backend/tests/unit/utils/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/src/backend/tests/unit/utils/test_connection_string_parser.py b/src/backend/tests/unit/utils/test_connection_string_parser.py index 1ab82279e5f2..7492b4162c62 100644 --- a/src/backend/tests/unit/utils/test_connection_string_parser.py +++ b/src/backend/tests/unit/utils/test_connection_string_parser.py @@ -2,13 +2,8 @@ from langflow.utils.connection_string_parser import transform_connection_string -@pytest.fixture -def client(): - pass - - @pytest.mark.parametrize( - "connection_string, expected", + ("connection_string", "expected"), [ ("protocol:user:password@host", "protocol:user:password@host"), ("protocol:user@host", "protocol:user@host"), diff --git a/src/backend/tests/unit/utils/test_format_directory_path.py b/src/backend/tests/unit/utils/test_format_directory_path.py new file mode 100644 index 000000000000..16ff40080b88 --- /dev/null +++ b/src/backend/tests/unit/utils/test_format_directory_path.py @@ -0,0 +1,46 @@ +import pytest +from langflow.base.data.utils import format_directory_path + + +@pytest.mark.parametrize( + ("input_path", "expected"), + [ + # Test case 1: Standard path with no newlines (no change expected) + ("/home/user/documents/file.txt", "/home/user/documents/file.txt"), + # Test case 2: Path with newline character (replace \n with \\n) + ("/home/user/docu\nments/file.txt", "/home/user/docu\\nments/file.txt"), + # Test case 3: Path with multiple newline characters + ("/home/user/\ndocu\nments/file.txt", "/home/user/\\ndocu\\nments/file.txt"), + # Test case 4: Path with only newline characters + ("\n\n\n", "\\n\\n\\n"), + # Test case 5: Empty path (as per the original function, this remains an empty string) + ("", ""), + # Test case 6: Path with mixed newlines and other special characters + ("/home/user/my-\ndocs/special_file!.pdf", "/home/user/my-\\ndocs/special_file!.pdf"), + # Test case 7: Windows-style path with newline + ("C:\\Users\\\nDocuments\\file.txt", "C:\\Users\\\\nDocuments\\file.txt"), # No
conversion of backslashes + # Test case 8: Path with trailing newline + ("/home/user/documents/\n", "/home/user/documents/\\n"), + # Test case 9: Path with leading newline + ("\n/home/user/documents/", "\\n/home/user/documents/"), + # Test case 10: Path with multiple consecutive newlines + ("/home/user/docu\n\nments/file.txt", "/home/user/docu\\n\\nments/file.txt"), + # Test case 11: Windows-style path (backslashes remain unchanged) + ("C:\\Users\\Documents\\file.txt", "C:\\Users\\Documents\\file.txt"), + # Test case 12: Windows path with trailing backslash + ("C:\\Users\\Documents\\", "C:\\Users\\Documents\\"), + # Test case 13: Mixed separators (leave as is) + ("C:/Users\\Documents/file.txt", "C:/Users\\Documents/file.txt"), + # Test case 14: Network path (UNC) (leave backslashes as is) + ("\\\\server\\share\\file.txt", "\\\\server\\share\\file.txt"), + ], +) +def test_format_directory_path(input_path, expected): + result = format_directory_path(input_path) + assert result == expected + + +# Additional test for type checking +def test_format_directory_path_type(): + result = format_directory_path("/home/user/file.txt") + assert isinstance(result, str) diff --git a/src/backend/tests/unit/utils/test_image_utils.py b/src/backend/tests/unit/utils/test_image_utils.py new file mode 100644 index 000000000000..f1f38c40bb65 --- /dev/null +++ b/src/backend/tests/unit/utils/test_image_utils.py @@ -0,0 +1,66 @@ +import base64 + +import pytest +from langflow.utils.image import convert_image_to_base64, create_data_url + + +@pytest.fixture +def sample_image(tmp_path): + """Create a sample image file for testing.""" + image_path = tmp_path / "test_image.png" + # Create a small black 1x1 pixel PNG file + image_content = base64.b64decode( + "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAACklEQVR4nGMAAQAABQABDQottAAAAABJRU5ErkJggg==" + ) + image_path.write_bytes(image_content) + return image_path + + +class TestImageUtils: + def test_convert_image_to_base64_success(self, sample_image): + """Test successful conversion of image to base64.""" + base64_str = convert_image_to_base64(sample_image) + assert isinstance(base64_str, str) + # Verify it's valid base64 + assert base64.b64decode(base64_str) + + def test_convert_image_to_base64_empty_path(self): + """Test conversion with empty path.""" + with pytest.raises(ValueError, match="Image path cannot be empty"): + convert_image_to_base64("") + + def test_convert_image_to_base64_nonexistent_file(self): + """Test conversion with non-existent file.""" + with pytest.raises(FileNotFoundError, match="Image file not found"): + convert_image_to_base64("nonexistent.png") + + def test_convert_image_to_base64_directory(self, tmp_path): + """Test conversion with directory path instead of file.""" + with pytest.raises(ValueError, match="Path is not a file"): + convert_image_to_base64(tmp_path) + + def test_create_data_url_success(self, sample_image): + """Test successful creation of data URL.""" + data_url = create_data_url(sample_image) + assert data_url.startswith("data:image/png;base64,") + # Verify the base64 part is valid + base64_part = data_url.split(",")[1] + assert base64.b64decode(base64_part) + + def test_create_data_url_with_custom_mime(self, sample_image): + """Test creation of data URL with custom MIME type.""" + custom_mime = "image/custom" + data_url = create_data_url(sample_image, mime_type=custom_mime) + assert data_url.startswith(f"data:{custom_mime};base64,") + + def test_create_data_url_invalid_file(self): + """Test creation of data URL with invalid 
file.""" + with pytest.raises(FileNotFoundError): + create_data_url("nonexistent.jpg") + + def test_create_data_url_unrecognized_extension(self, tmp_path): + """Test creation of data URL with unrecognized file extension.""" + invalid_file = tmp_path / "test.unknown" + invalid_file.touch() + with pytest.raises(ValueError, match="Could not determine MIME type"): + create_data_url(invalid_file) diff --git a/src/backend/tests/unit/utils/test_rewrite_file_path.py b/src/backend/tests/unit/utils/test_rewrite_file_path.py new file mode 100644 index 000000000000..bb30280e2b8a --- /dev/null +++ b/src/backend/tests/unit/utils/test_rewrite_file_path.py @@ -0,0 +1,38 @@ +import pytest +from langflow.base.data.utils import format_directory_path + + +@pytest.mark.parametrize( + ("input_path", "expected"), + [ + # Test case 1: Standard path with no newlines + ("/home/user/documents/file.txt", "/home/user/documents/file.txt"), + # Test case 2: Path with newline character + ("/home/user/docu\nments/file.txt", "/home/user/docu\\nments/file.txt"), + # Test case 3: Path with multiple newline characters + ("/home/user/\ndocu\nments/file.txt", "/home/user/\\ndocu\\nments/file.txt"), + # Test case 4: Path with only newline characters + ("\n\n\n", "\\n\\n\\n"), + # Test case 5: Empty path + ("", ""), + # Test case 6: Path with mixed newlines and other special characters + ("/home/user/my-\ndocs/special_file!.pdf", "/home/user/my-\\ndocs/special_file!.pdf"), + # Test case 7: Windows-style path with newline + ("C:\\Users\\\nDocuments\\file.txt", "C:\\Users\\\\nDocuments\\file.txt"), + # Test case 8: Path with trailing newline + ("/home/user/documents/\n", "/home/user/documents/\\n"), + # Test case 9: Path with leading newline + ("\n/home/user/documents/", "\\n/home/user/documents/"), + # Test case 10: Path with multiple consecutive newlines + ("/home/user/docu\n\nments/file.txt", "/home/user/docu\\n\\nments/file.txt"), + ], +) +def test_format_directory_path(input_path, expected): + result = format_directory_path(input_path) + assert result == expected + + +# Additional test for type checking +def test_format_directory_path_type(): + result = format_directory_path("/home/user/file.txt") + assert isinstance(result, str) diff --git a/src/backend/tests/unit/utils/test_truncate_long_strings.py b/src/backend/tests/unit/utils/test_truncate_long_strings.py new file mode 100644 index 000000000000..aa7ce3f958fd --- /dev/null +++ b/src/backend/tests/unit/utils/test_truncate_long_strings.py @@ -0,0 +1,56 @@ +import math + +import pytest +from langflow.utils.util_strings import truncate_long_strings + + +@pytest.mark.parametrize( + ("input_data", "max_length", "expected"), + [ + # Test case 1: String shorter than max_length + ("short string", 20, "short string"), + # Test case 2: String exactly at max_length + ("exact", 5, "exact"), + # Test case 3: String longer than max_length + ("long string", 7, "long st..."), + # Test case 4: Empty string + ("", 5, ""), + # Test case 5: Single character string + ("a", 1, "a"), + # Test case 6: Unicode string + ("こんにちは", 3, "こんに..."), + # Test case 7: Integer input + (12345, 3, 12345), + # Test case 8: Float input + (math.pi, 4, math.pi), + # Test case 9: Boolean input + (True, 2, True), + # Test case 10: None input + (None, 5, None), + # Test case 11: Very long string + ("a" * 1000, 10, "a" * 10 + "..."), + ], +) +def test_truncate_long_strings_non_dict_list(input_data, max_length, expected): + result = truncate_long_strings(input_data, max_length) + assert result == expected + + +# Test 
for max_length of 0 +def test_truncate_long_strings_zero_max_length(): + assert truncate_long_strings("any string", 0) == "..." + + +# Test for negative max_length +def test_truncate_long_strings_negative_max_length(): + assert truncate_long_strings("any string", -1) == "any string" + + +# Test for None max_length (should use default MAX_TEXT_LENGTH) +def test_truncate_long_strings_none_max_length(): + from langflow.utils.constants import MAX_TEXT_LENGTH + + long_string = "a" * (MAX_TEXT_LENGTH + 10) + result = truncate_long_strings(long_string, None) + assert len(result) == MAX_TEXT_LENGTH + 3 # +3 for "..." + assert result == "a" * MAX_TEXT_LENGTH + "..." diff --git a/src/backend/tests/unit/utils/test_truncate_long_strings_on_objects.py b/src/backend/tests/unit/utils/test_truncate_long_strings_on_objects.py new file mode 100644 index 000000000000..eafc3f10b140 --- /dev/null +++ b/src/backend/tests/unit/utils/test_truncate_long_strings_on_objects.py @@ -0,0 +1,98 @@ +import pytest +from langflow.utils.constants import MAX_TEXT_LENGTH +from langflow.utils.util_strings import truncate_long_strings + + +@pytest.mark.parametrize( + ("input_data", "max_length", "expected"), + [ + # Test case 1: Simple string truncation + ({"key": "a" * 100}, 10, {"key": "a" * 10 + "..."}), + # Test case 2: Nested dictionary + ({"outer": {"inner": "b" * 100}}, 5, {"outer": {"inner": "b" * 5 + "..."}}), + # Test case 3: List of strings + (["short", "a" * 100, "also short"], 7, ["short", "a" * 7 + "...", "also sh" + "..."]), + # Test case 4: Mixed nested structure + ( + {"key1": ["a" * 100, {"nested": "b" * 100}], "key2": "c" * 100}, + 8, + {"key1": ["a" * 8 + "...", {"nested": "b" * 8 + "..."}], "key2": "c" * 8 + "..."}, + ), + # Test case 5: Empty structures + ({}, 10, {}), + ([], 10, []), + # Test case 6: Strings at exact max_length + ({"exact": "a" * 10}, 10, {"exact": "a" * 10}), + # Test case 7: Non-string values + ({"num": 12345, "bool": True, "none": None}, 5, {"num": 12345, "bool": True, "none": None}), + # Test case 8: Unicode characters + ({"unicode": "こんにちは世界"}, 3, {"unicode": "こんに..."}), + # Test case 9: Very large structure + ( + {"key" + str(i): "value" * i for i in range(1000)}, + 10, + {"key" + str(i): ("value" * i)[:10] + "..." if len("value" * i) > 10 else "value" * i for i in range(1000)}, + ), + ], +) +def test_truncate_long_strings(input_data, max_length, expected): + result = truncate_long_strings(input_data, max_length) + assert result == expected + + +def test_truncate_long_strings_default_max_length(): + long_string = "a" * (MAX_TEXT_LENGTH + 1) + input_data = {"key": long_string} + result = truncate_long_strings(input_data) + assert len(result["key"]) == MAX_TEXT_LENGTH + 3 # +3 for the "..." 
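# Editor's illustration, not part of the patch: the parametrized cases above and the object
# tests below jointly pin down truncate_long_strings: recurse through dicts and lists in
# place, clip only strings longer than max_length and append "...", leave other types
# untouched, ignore negative limits, and fall back to MAX_TEXT_LENGTH when no limit is
# given. A minimal sketch of those semantics; the sketch name and default value are
# assumptions, and the real implementation in langflow/utils/util_strings.py may differ.
def truncate_long_strings_sketch(data, max_length=None, default_max=99999):
    """Recursively truncate long strings, mirroring the behavior the tests assert."""
    if max_length is None:
        # The real code reads langflow.utils.constants.MAX_TEXT_LENGTH here
        max_length = default_max
    if max_length < 0:
        # Negative limits disable truncation, per the tests
        return data
    if isinstance(data, dict):
        # Containers are mutated in place, per the identity test below
        for key, value in data.items():
            data[key] = truncate_long_strings_sketch(value, max_length, default_max)
    elif isinstance(data, list):
        for i, item in enumerate(data):
            data[i] = truncate_long_strings_sketch(item, max_length, default_max)
    elif isinstance(data, str) and len(data) > max_length:
        return data[:max_length] + "..."
    return data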
+ + +def test_truncate_long_strings_no_modification(): + input_data = {"short": "short string", "nested": {"also_short": "another short string"}} + result = truncate_long_strings(input_data, 100) + assert result == input_data + + +# Test for type preservation +def test_truncate_long_strings_type_preservation(): + input_data = {"str": "a" * 100, "list": ["b" * 100], "dict": {"nested": "c" * 100}} + result = truncate_long_strings(input_data, 10) + assert isinstance(result, dict) + assert isinstance(result["str"], str) + assert isinstance(result["list"], list) + assert isinstance(result["dict"], dict) + + +# Test for in-place modification +def test_truncate_long_strings_in_place_modification(): + input_data = {"key": "a" * 100} + result = truncate_long_strings(input_data, 10) + assert result is input_data # Check if the same object is returned + + +# Test for invalid input +def test_truncate_long_strings_invalid_input(): + input_string = "not a dict or list" + result = truncate_long_strings(input_string, 10) + assert result == "not a dict..." # The function should truncate the string + + +# Updated test for negative max_length +def test_truncate_long_strings_negative_max_length(): + input_data = {"key": "value"} + result = truncate_long_strings(input_data, -1) + assert result == input_data # Assuming the function ignores negative max_length + + +# Additional test for zero max_length +def test_truncate_long_strings_zero_max_length(): + input_data = {"key": "value"} + result = truncate_long_strings(input_data, 0) + assert result == {"key": "..."} # Assuming the function truncates to just "..." + + +# Test for very small positive max_length +def test_truncate_long_strings_small_max_length(): + input_data = {"key": "value"} + result = truncate_long_strings(input_data, 1) + assert result == {"key": "v..."} # Assuming the function keeps at least one character diff --git a/src/frontend/.dspy_cache/cache.db b/src/frontend/.dspy_cache/cache.db new file mode 100644 index 000000000000..76b1b2425720 Binary files /dev/null and b/src/frontend/.dspy_cache/cache.db differ diff --git a/src/frontend/favicon-new.ico b/src/frontend/favicon-new.ico new file mode 100644 index 000000000000..0eee3936800b Binary files /dev/null and b/src/frontend/favicon-new.ico differ diff --git a/src/frontend/favicon.ico b/src/frontend/favicon.ico deleted file mode 100644 index 136c4c835821..000000000000 Binary files a/src/frontend/favicon.ico and /dev/null differ diff --git a/src/frontend/feature-config.json b/src/frontend/feature-config.json deleted file mode 100644 index d5a3a37b82e2..000000000000 --- a/src/frontend/feature-config.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "ENABLE_DARK_MODE": true, - "ENABLE_API": true, - "ENABLE_LANGFLOW_STORE": true, - "ENABLE_PROFILE_ICONS": true, - "ENABLE_SOCIAL_LINKS": true, - "ENABLE_BRANDING": true -} \ No newline at end of file diff --git a/src/frontend/index.html b/src/frontend/index.html index 7837ad39f37a..494043b3c371 100644 --- a/src/frontend/index.html +++ b/src/frontend/index.html @@ -1,10 +1,17 @@ + - + + + + documentation.", mode: "html", image: "https://cdn-icons-png.flaticon.com/512/5968/5968350.png", language: "html", - code: codes[5], - }, - ]; - - if (includeWebhookCurl) { - tabs.splice(1, 0, { - name: "Webhook cURL", - mode: "bash", - image: "https://curl.se/logo/curl-symbol-transparent.png", - language: "sh", - code: codes[1], + code: codes.widgetCode, }); } - if (includeTweaks) { tabs.push({ name: "Tweaks", mode: "python", image: 
"https://cdn-icons-png.flaticon.com/512/5968/5968350.png", language: "py", - code: codes[6], + code: codes.tweaksCode, }); } diff --git a/src/frontend/src/modals/baseModal/helpers/switch-case-size.ts b/src/frontend/src/modals/baseModal/helpers/switch-case-size.ts index e2505a495ccc..23196ce8f00c 100644 --- a/src/frontend/src/modals/baseModal/helpers/switch-case-size.ts +++ b/src/frontend/src/modals/baseModal/helpers/switch-case-size.ts @@ -38,6 +38,11 @@ export const switchCaseModalSize = (size: string) => { minWidth = "min-w-[85vw]"; height = "h-[80vh]"; break; + case "templates": + minWidth = "w-[97vw] max-w-[1200px]"; + height = + "min-h-[700px] lg:min-h-0 h-[90vh] md:h-[80vh] lg:h-[50vw] lg:max-h-[620px]"; + break; case "three-cards": minWidth = "min-w-[1066px]"; height = "max-h-[94vh]"; @@ -61,6 +66,12 @@ export const switchCaseModalSize = (size: string) => { minWidth = "min-w-[80vw]"; height = ""; break; + + case "x-large": + minWidth = "min-w-[95vw]"; + height = "h-[95vh]"; + break; + default: minWidth = "min-w-[80vw]"; height = "h-[90vh]"; diff --git a/src/frontend/src/modals/baseModal/index.tsx b/src/frontend/src/modals/baseModal/index.tsx index 947b70d4d451..ffeebaca8ce7 100644 --- a/src/frontend/src/modals/baseModal/index.tsx +++ b/src/frontend/src/modals/baseModal/index.tsx @@ -22,7 +22,11 @@ import { modalHeaderType } from "../../types/components"; import { cn } from "../../utils/utils"; import { switchCaseModalSize } from "./helpers/switch-case-size"; -type ContentProps = { children: ReactNode; overflowHidden?: boolean }; +type ContentProps = { + children: ReactNode; + overflowHidden?: boolean; + className?: string; +}; type HeaderProps = { children: ReactNode; description: string }; type FooterProps = { children: ReactNode }; type TriggerProps = { @@ -32,12 +36,17 @@ type TriggerProps = { className?: string; }; -const Content: React.FC = ({ children, overflowHidden }) => { +const Content: React.FC = ({ + children, + overflowHidden, + className, +}) => { return (
{children} @@ -68,10 +77,10 @@ const Header: React.FC<{ }> = ({ children, description }: modalHeaderType): JSX.Element => { return ( - + {children} - + {description} @@ -89,9 +98,16 @@ const Footer: React.FC<{ onClick?: () => void; }; close?: boolean; -}> = ({ children, submit, close }) => { + centered?: boolean; +}> = ({ children, submit, close, centered }) => { return ( -
+
{submit ? (
{children ??
} @@ -125,12 +141,14 @@ const Footer: React.FC<{ ); }; interface BaseModalProps { - children: [ - React.ReactElement, - React.ReactElement, - React.ReactElement?, - React.ReactElement?, - ]; + children: + | [ + React.ReactElement, + React.ReactElement?, + React.ReactElement?, + React.ReactElement?, + ] + | React.ReactElement; open?: boolean; setOpen?: (open: boolean) => void; size?: @@ -143,13 +161,15 @@ interface BaseModalProps { | "three-cards" | "large-thin" | "large-h-full" + | "templates" | "small-h-full" | "medium-h-full" | "md-thin" | "sm-thin" | "smaller-h-full" - | "medium-log"; - + | "medium-log" + | "x-large"; + className?: string; disable?: boolean; onChangeOpenModal?: (open?: boolean) => void; type?: "modal" | "dialog"; @@ -157,6 +177,7 @@ interface BaseModalProps { onEscapeKeyDown?: (e: KeyboardEvent) => void; } function BaseModal({ + className, open, setOpen, children, @@ -189,7 +210,7 @@ function BaseModal({ const modalContent = ( <> - {headerChild} + {headerChild && headerChild} {ContentChild} {ContentFooter && ContentFooter} @@ -199,6 +220,7 @@ function BaseModal({ minWidth, height, "flex flex-col duration-300 overflow-hidden", + className, ); //UPDATE COLORS AND STYLE CLASSSES @@ -213,6 +235,7 @@ function BaseModal({ {triggerChild} event.preventDefault()} onEscapeKeyDown={onEscapeKeyDown} className={contentClasses} > diff --git a/src/frontend/src/modals/codeAreaModal/index.tsx b/src/frontend/src/modals/codeAreaModal/index.tsx index 9a6943576c10..ce80932d515d 100644 --- a/src/frontend/src/modals/codeAreaModal/index.tsx +++ b/src/frontend/src/modals/codeAreaModal/index.tsx @@ -52,11 +52,11 @@ export default function CodeAreaModal({ const setErrorData = useAlertStore((state) => state.setErrorData); const [openConfirmation, setOpenConfirmation] = useState(false); const codeRef = useRef(null); + const { mutate, isPending } = usePostValidateCode(); const [error, setError] = useState<{ detail: CodeErrorDataTypeAPI; } | null>(null); - const { mutate: validateCode } = usePostValidateCode(); const { mutate: validateComponentCode } = usePostValidateComponentCode(); useEffect(() => { @@ -68,7 +68,7 @@ export default function CodeAreaModal({ }, []); function processNonDynamicField() { - validateCode( + mutate( { code }, { onSuccess: (apiReturn) => { @@ -184,6 +184,7 @@ export default function CodeAreaModal({ }} open={open} setOpen={setOpen} + size="x-large" > {children} @@ -220,7 +221,7 @@ export default function CodeAreaModal({ onChange={(value) => { setCode(value); }} - className="h-full w-full rounded-lg border-[1px] border-gray-300 custom-scroll dark:border-gray-600" + className="h-full min-w-full rounded-lg border-[1px] border-gray-300 custom-scroll dark:border-gray-600" />
{ + setOpenConfirmation(false); + }} onEscapeKeyDown={(e) => { e.stopPropagation(); setOpenConfirmation(false); diff --git a/src/frontend/src/modals/confirmationModal/index.tsx b/src/frontend/src/modals/confirmationModal/index.tsx index e22f1d1625b9..76f7be2bda99 100644 --- a/src/frontend/src/modals/confirmationModal/index.tsx +++ b/src/frontend/src/modals/confirmationModal/index.tsx @@ -1,4 +1,5 @@ import GenericIconComponent from "@/components/genericIconComponent"; +import { DialogClose } from "@radix-ui/react-dialog"; import React, { useEffect, useState } from "react"; import ShadTooltip from "../../components/shadTooltipComponent"; import { Button } from "../../components/ui/button"; @@ -66,6 +67,16 @@ function ConfirmationModal({ (child) => (child as React.ReactElement).type === Content, ); + const shouldShowConfirm = confirmationText && onConfirm; + const shouldShowCancel = cancelText; + const shouldShowFooter = shouldShowConfirm || shouldShowCancel; + + const handleCancel = () => { + setFlag(true); + setModalOpen(false); + onCancel?.(); + }; + return ( {triggerChild} @@ -89,34 +100,38 @@ function ConfirmationModal({ {ContentChild} - - - {cancelText && onCancel && ( - - )} - + {shouldShowFooter ? ( + + {shouldShowConfirm && ( + + )} + {shouldShowCancel && ( + + + + )} + + ) : ( + <> + )} ); } diff --git a/src/frontend/src/modals/dictAreaModal/index.tsx b/src/frontend/src/modals/dictAreaModal/index.tsx index cfb040c923de..41a2e2a70263 100644 --- a/src/frontend/src/modals/dictAreaModal/index.tsx +++ b/src/frontend/src/modals/dictAreaModal/index.tsx @@ -27,58 +27,68 @@ export default function DictAreaModal({ }): JSX.Element { const [open, setOpen] = useState(false); const isDark = useDarkStore((state) => state.dark); - const [myValue, setMyValue] = useState(value); + const [componentValue, setComponentValue] = useState(value); useEffect(() => { - setMyValue(value); + setComponentValue(value); }, [value, open]); + const handleSubmit = () => { + if (onChange) { + onChange(componentValue); + setOpen(false); + } + }; + + const handleJsonChange = (edit) => { + setComponentValue(edit.src); + }; + + const customizeCopy = (copy) => { + navigator.clipboard.writeText(JSON.stringify(copy)); + }; + + const renderHeader = () => ( + + + {onChange ? "Edit Dictionary" : "View Dictionary"} + + + ); + + const renderContent = () => ( + +
+ +
+
+ ); + return ( { - onChange(myValue); - setOpen(false); - } - : undefined - } + onSubmit={onChange ? handleSubmit : undefined} > - {children} - - - {onChange ? "Edit Dictionary" : "View Dictionary"} - - - -
- { - setMyValue(edit.src); - }} - src={cloneDeep(myValue)} - customizeCopy={(copy) => { - navigator.clipboard.writeText(JSON.stringify(copy)); - }} - /> -
-
+ + {children} + + {renderHeader()} + {renderContent()}
); diff --git a/src/frontend/src/modals/editNodeModal/index.tsx b/src/frontend/src/modals/editNodeModal/index.tsx index 4a6ab0675066..17dcaaf0ba63 100644 --- a/src/frontend/src/modals/editNodeModal/index.tsx +++ b/src/frontend/src/modals/editNodeModal/index.tsx @@ -31,7 +31,7 @@ const EditNodeModal = ({ }, [data.node]); return ( - + <> diff --git a/src/frontend/src/modals/exportModal/index.tsx b/src/frontend/src/modals/exportModal/index.tsx index 9689170faa69..c4cb29613e9e 100644 --- a/src/frontend/src/modals/exportModal/index.tsx +++ b/src/frontend/src/modals/exportModal/index.tsx @@ -1,3 +1,4 @@ +import { track } from "@/customization/utils/analytics"; import useFlowStore from "@/stores/flowStore"; import { ReactNode, forwardRef, useEffect, useState } from "react"; import EditFlowSettings from "../../components/editFlowSettingsComponent"; @@ -66,6 +67,7 @@ const ExportModal = forwardRef( description, ); setOpen(false); + track("Flow Exported", { flowId: currentFlow!.id }); }} > {props.children} diff --git a/src/frontend/src/modals/flowLogsModal/index.tsx b/src/frontend/src/modals/flowLogsModal/index.tsx index 9094d5ea8c2e..31debbf71fa2 100644 --- a/src/frontend/src/modals/flowLogsModal/index.tsx +++ b/src/frontend/src/modals/flowLogsModal/index.tsx @@ -31,7 +31,7 @@ export default function FlowLogsModal({ }, [data, open, isLoading]); return ( - +
diff --git a/src/frontend/src/modals/flowSettingsModal/index.tsx b/src/frontend/src/modals/flowSettingsModal/index.tsx index fb0435fc0620..15e2672d22c0 100644 --- a/src/frontend/src/modals/flowSettingsModal/index.tsx +++ b/src/frontend/src/modals/flowSettingsModal/index.tsx @@ -65,8 +65,7 @@ export default function FlowSettingsModal({ if (flows) { const tempNameList: string[] = []; flows.forEach((flow: FlowType) => { - if ((flow.is_component ?? false) === false) - tempNameList.push(flow.name); + tempNameList.push(flow.name); }); setNameList(tempNameList.filter((name) => name !== currentFlow!.name)); } diff --git a/src/frontend/src/modals/newFlowModal/components/NewFlowCardComponent/index.tsx b/src/frontend/src/modals/newFlowModal/components/NewFlowCardComponent/index.tsx index ed7885f48bc2..9a9d74d971a1 100644 --- a/src/frontend/src/modals/newFlowModal/components/NewFlowCardComponent/index.tsx +++ b/src/frontend/src/modals/newFlowModal/components/NewFlowCardComponent/index.tsx @@ -1,5 +1,7 @@ +import { useCustomNavigate } from "@/customization/hooks/use-custom-navigate"; +import { track } from "@/customization/utils/analytics"; import useAddFlow from "@/hooks/flows/use-add-flow"; -import { useNavigate, useParams } from "react-router-dom"; +import { useParams } from "react-router-dom"; import { Card, CardContent, @@ -9,16 +11,19 @@ import { export default function NewFlowCardComponent() { const addFlow = useAddFlow(); - const navigate = useNavigate(); + const navigate = useCustomNavigate(); const { folderId } = useParams(); + const handleClick = () => { + addFlow({ new_blank: true }).then((id) => { + navigate(`/flow/${id}${folderId ? `/folder/${folderId}` : ""}`); + }); + track("New Flow Created", { template: "Blank Flow" }); + }; + return ( { - addFlow().then((id) => { - navigate(`/flow/${id}${folderId ? 
`/folder/${folderId}` : ""}`); - }); - }} + onClick={handleClick} className="h-64 w-80 cursor-pointer bg-background pt-4" data-testid="blank-flow" > diff --git a/src/frontend/src/modals/newFlowModal/components/hooks/use-redirect-flow-card-click.tsx b/src/frontend/src/modals/newFlowModal/components/hooks/use-redirect-flow-card-click.tsx new file mode 100644 index 000000000000..d0ba71b4c2f9 --- /dev/null +++ b/src/frontend/src/modals/newFlowModal/components/hooks/use-redirect-flow-card-click.tsx @@ -0,0 +1,24 @@ +import { useNavigate } from "react-router-dom"; +import { track } from "../../../../customization/utils/analytics"; +import useAddFlow from "../../../../hooks/flows/use-add-flow"; +import useFlowsManagerStore from "../../../../stores/flowsManagerStore"; +import { FlowType } from "../../../../types/flow"; +import { updateIds } from "../../../../utils/reactflowUtils"; + +export function useFlowCardClick() { + const navigate = useNavigate(); + const addFlow = useAddFlow(); + + const handleFlowCardClick = async (flow: FlowType, folderIdUrl: string) => { + try { + updateIds(flow.data!); + const id = await addFlow({ flow }); + navigate(`/flow/${id}/folder/${folderIdUrl}`); + track("New Flow Created", { template: `${flow.name} Template` }); + } catch (error) { + console.error("Error handling flow card click:", error); + } + }; + + return handleFlowCardClick; +} diff --git a/src/frontend/src/modals/newFlowModal/components/undrawCards/index.tsx b/src/frontend/src/modals/newFlowModal/components/undrawCards/index.tsx index d0a5cd785458..afecfcc5deb2 100644 --- a/src/frontend/src/modals/newFlowModal/components/undrawCards/index.tsx +++ b/src/frontend/src/modals/newFlowModal/components/undrawCards/index.tsx @@ -1,5 +1,5 @@ /// -import { useNavigate, useParams } from "react-router-dom"; +import { useParams } from "react-router-dom"; import BlogPost from "../../../../assets/undraw_blog_post_re_fy5x.svg?react"; import ChatBot from "../../../../assets/undraw_chat_bot_re_e2gj.svg?react"; import PromptChaining from "../../../../assets/undraw_cloud_docs_re_xjht.svg?react"; @@ -10,7 +10,6 @@ import APIRequest from "../../../../assets/undraw_real_time_analytics_re_yliv.sv import BasicPrompt from "../../../../assets/undraw_short_bio_re_fmx0.svg?react"; import TransferFiles from "../../../../assets/undraw_transfer_files_re_a2a9.svg?react"; -import useAddFlow from "@/hooks/flows/use-add-flow"; import { Card, CardContent, @@ -20,17 +19,17 @@ import { import { useFolderStore } from "../../../../stores/foldersStore"; import { UndrawCardComponentProps } from "../../../../types/components"; import { updateIds } from "../../../../utils/reactflowUtils"; +import { useFlowCardClick } from "../hooks/use-redirect-flow-card-click"; export default function UndrawCardComponent({ flow, }: UndrawCardComponentProps): JSX.Element { - const addFlow = useAddFlow(); - const navigate = useNavigate(); const { folderId } = useParams(); const myCollectionId = useFolderStore((state) => state.myCollectionId); - const folderIdUrl = folderId ?? 
myCollectionId; + const handleFlowCardClick = useFlowCardClick(); + function selectImage() { switch (flow.name) { case "Blog Writer": @@ -93,7 +92,7 @@ export default function UndrawCardComponent({ preserveAspectRatio="xMidYMid meet" /> ); - case "Sequential Tasks Agent": + case "Simple Agent": return ( ); - case "Hierarchical Tasks Agent": + case "Travel Planning Agents": return ( ); - case "Complex Agent": + case "Dynamic Agent": return ( { - updateIds(flow.data!); - addFlow({ flow }).then((id) => { - navigate(`/flow/${id}/folder/${folderIdUrl}`); - }); - }} + onClick={() => handleFlowCardClick(flow, folderIdUrl!)} className="h-64 w-80 cursor-pointer bg-background pt-4" > diff --git a/src/frontend/src/modals/newFlowModal/index.tsx b/src/frontend/src/modals/newFlowModal/index.tsx deleted file mode 100644 index 444795a45f40..000000000000 --- a/src/frontend/src/modals/newFlowModal/index.tsx +++ /dev/null @@ -1,86 +0,0 @@ -import useFlowsManagerStore from "../../stores/flowsManagerStore"; -import { newFlowModalPropsType } from "../../types/components"; -import BaseModal from "../baseModal"; -import NewFlowCardComponent from "./components/NewFlowCardComponent"; -import UndrawCardComponent from "./components/undrawCards"; - -export default function NewFlowModal({ - open, - setOpen, -}: newFlowModalPropsType): JSX.Element { - const examples = useFlowsManagerStore((state) => state.examples); - - return ( - - - - Get Started - - - -
-
- - - {examples.find( - (e) => e.name == "Basic Prompting (Hello, World)", - ) && ( - e.name == "Basic Prompting (Hello, World)", - )! - } - /> - )} - {examples.find((e) => e.name == "Memory Chatbot") && ( - e.name == "Memory Chatbot")!} - /> - )} - {examples.find((e) => e.name == "Document QA") && ( - e.name == "Document QA")!} - /> - )} - {examples.find((e) => e.name == "Blog Writer") && ( - e.name == "Blog Writer")!} - /> - )} - {examples.find((e) => e.name == "Vector Store RAG") && ( - e.name == "Vector Store RAG")!} - /> - )} - {examples.find((e) => e.name == "Sequential Tasks Agent") && ( - e.name == "Sequential Tasks Agent")!} - /> - )} - {examples.find((e) => e.name == "Hierarchical Tasks Agent") && ( - e.name == "Hierarchical Tasks Agent")! - } - /> - )} - {examples.find((e) => e.name == "Complex Agent") && ( - e.name == "Complex Agent")!} - /> - )} -
-
-
-
- ); -} diff --git a/src/frontend/src/modals/promptModal/index.tsx b/src/frontend/src/modals/promptModal/index.tsx index 1736a2ef51f0..c27b530edb7e 100644 --- a/src/frontend/src/modals/promptModal/index.tsx +++ b/src/frontend/src/modals/promptModal/index.tsx @@ -1,5 +1,5 @@ import { usePostValidatePrompt } from "@/controllers/API/queries/nodes/use-post-validate-prompt"; -import { useEffect, useRef, useState } from "react"; +import React, { useEffect, useRef, useState } from "react"; import IconComponent from "../../components/genericIconComponent"; import SanitizedHTMLWrapper from "../../components/sanitizedHTMLWrapper"; import ShadTooltip from "../../components/shadTooltipComponent"; @@ -47,6 +47,10 @@ export default function PromptModal({ const divRef = useRef(null); const divRefPrompt = useRef(null); const { mutate: postValidatePrompt } = usePostValidatePrompt(); + const [clickPosition, setClickPosition] = useState({ x: 0, y: 0 }); + const [scrollPosition, setScrollPosition] = useState(0); + const previewRef = useRef(null); + const textareaRef = useRef(null); function checkVariables(valueToCheck: string): void { const regex = /\{([^{}]+)\}/g; @@ -131,8 +135,8 @@ export default function PromptModal({ field_name = Array.isArray( apiReturn?.frontend_node?.custom_fields?.[""], ) - ? apiReturn?.frontend_node?.custom_fields?.[""][0] ?? "" - : apiReturn?.frontend_node?.custom_fields?.[""] ?? ""; + ? (apiReturn?.frontend_node?.custom_fields?.[""][0] ?? "") + : (apiReturn?.frontend_node?.custom_fields?.[""] ?? ""); } if (apiReturn) { let inputVariables = apiReturn.input_variables ?? []; @@ -172,11 +176,43 @@ export default function PromptModal({ ); } + const handlePreviewClick = (e: React.MouseEvent) => { + if (!isEdit && !readonly) { + const clickX = e.clientX; + const clickY = e.clientY; + setClickPosition({ x: clickX, y: clickY }); + setScrollPosition(e.currentTarget.scrollTop); + setIsEdit(true); + } + }; + + useEffect(() => { + if (isEdit && textareaRef.current) { + textareaRef.current.focus(); + textareaRef.current.scrollTop = scrollPosition; + + const textArea = textareaRef.current; + const { x, y } = clickPosition; + + // Use caretPositionFromPoint to get the closest text position. Does not work on Safari. + if ("caretPositionFromPoint" in document) { + let range = (document as any).caretPositionFromPoint(x, y)?.offset ?? 0; + if (range) { + const position = range; + textArea.setSelectionRange(position, position); + } + } + } else if (!isEdit && previewRef.current) { + previewRef.current.scrollTop = scrollPosition; + } + }, [isEdit, clickPosition, scrollPosition]); + return ( {}} open={modalOpen} setOpen={setModalOpen} + size="x-large" > {children} @@ -201,10 +237,11 @@ export default function PromptModal({