diff --git a/.github/release.yml b/.github/release.yml
new file mode 100644
index 000000000000..fd78aad923cd
--- /dev/null
+++ b/.github/release.yml
@@ -0,0 +1,35 @@
+changelog:
+ categories:
+ - title: 🚨 Breaking Changes
+ description: Changes that break existing functionality
+ labels:
+ - breaking
+ - title: ✨ New Features
+ description: New features and enhancements
+ labels:
+ - enhancement
+ - title: 🐛 Bug Fixes
+ description: Bug fixes and patches
+ labels:
+ - fix
+ - bug
+ - title: 📝 Documentation Updates
+ description: Changes to documentation
+ labels:
+ - documentation
+ - title: 🛠 Maintenance Tasks
+ description: Maintenance tasks and housekeeping
+ labels:
+ - chore
+ - refactor
+ - style
+ - performance
+ - build
+ - title: ✅ Tests
+ description: Changes to tests
+ labels:
+ - test
+ - title: Others
+ description: Other changes
+ labels:
+ - "*"
diff --git a/.github/workflows/auto-update.yml b/.github/workflows/auto-update.yml
index eea2f09efc43..1d46fa1758fd 100644
--- a/.github/workflows/auto-update.yml
+++ b/.github/workflows/auto-update.yml
@@ -10,4 +10,4 @@ jobs:
name: Auto-update
runs-on: ubuntu-latest
steps:
- - uses: tibdex/auto-update@v2
\ No newline at end of file
+ - uses: tibdex/auto-update@v2
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index ca9fd48b1dab..f00a30efe10a 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -8,90 +8,99 @@ on:
description: "(Optional) Branch to checkout"
required: false
type: string
+ openai_api_key:
+ description: "OpenAI API Key"
+ required: false
+ type: string
+ store_api_key:
+ description: "Store API Key"
+ required: false
+ type: string
pull_request:
+
+
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
- path-filter:
- name: Filter Paths
- runs-on: ubuntu-latest
- outputs:
- python: ${{ steps.filter.outputs.python }}
- frontend: ${{ steps.filter.outputs.frontend }}
- docs: ${{ steps.filter.outputs.docs }}
- tests: ${{ steps.filter.outputs.tests }}
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
- with:
- ref: ${{ inputs.branch || github.ref }}
- - name: Filter Paths
- id: filter
- uses: dorny/paths-filter@v3
- with:
- filters: |
- python:
- - "src/backend/**"
- - "src/backend/**.py"
- - "pyproject.toml"
- - "poetry.lock"
- - "**/python_test.yml"
- tests:
- - "tests/**"
- - "src/frontend/tests/**"
- frontend:
- - "src/frontend/**"
- - "**/typescript_test.yml"
- docs:
- - "docs/**"
-
- test-backend:
- needs: path-filter
- name: Run Backend Tests
- if: ${{ needs.path-filter.outputs.python == 'true' || needs.path-filter.outputs.tests == 'true' }}
- uses: ./.github/workflows/python_test.yml
-
-
-
- test-frontend:
- needs: path-filter
- name: Run Frontend Tests
- if: ${{ needs.path-filter.outputs.python == 'true' || needs.path-filter.outputs.frontend == 'true' || needs.path-filter.outputs.tests == 'true' }}
- uses: ./.github/workflows/typescript_test.yml
+ path-filter:
+ name: Filter Paths
+ runs-on: ubuntu-latest
+ outputs:
+ python: ${{ steps.filter.outputs.python }}
+ frontend: ${{ steps.filter.outputs.frontend }}
+ docs: ${{ steps.filter.outputs.docs }}
+ tests: ${{ steps.filter.outputs.tests }}
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ inputs.branch || github.ref }}
+ - name: Filter Paths
+ id: filter
+ uses: dorny/paths-filter@v3
+ with:
+ filters: |
+ python:
+ - "src/backend/**"
+ - "src/backend/**.py"
+ - "pyproject.toml"
+ - "poetry.lock"
+ - "**/python_test.yml"
+ tests:
+ - "tests/**"
+ - "src/frontend/tests/**"
+ frontend:
+ - "src/frontend/**"
+ - "**/typescript_test.yml"
+ docs:
+ - "docs/**"
+ test-backend:
+ needs: path-filter
+ name: Run Backend Tests
+ if: ${{ needs.path-filter.outputs.python == 'true' || needs.path-filter.outputs.tests == 'true' }}
+ uses: ./.github/workflows/python_test.yml
- lint-backend:
- needs: path-filter
- if: ${{ needs.path-filter.outputs.python == 'true' || needs.path-filter.outputs.tests == 'true' }}
- name: Lint Backend
- uses: ./.github/workflows/lint-py.yml
- # Run only if there are python files changed
+ test-frontend:
+ needs: path-filter
+ name: Run Frontend Tests
+ if: ${{ needs.path-filter.outputs.python == 'true' || needs.path-filter.outputs.frontend == 'true' || needs.path-filter.outputs.tests == 'true' }}
+ uses: ./.github/workflows/typescript_test.yml
+ secrets:
+ OPENAI_API_KEY: "${{ secrets.OPENAI_API_KEY }}"
+ STORE_API_KEY: "${{ secrets.STORE_API_KEY }}"
- test-docs-build:
- needs: path-filter
- if: ${{ needs.path-filter.outputs.docs == 'true' }}
- name: Test Docs Build
- uses: ./.github/workflows/docs_test.yml
+ lint-backend:
+ needs: path-filter
+ if: ${{ needs.path-filter.outputs.python == 'true' || needs.path-filter.outputs.tests == 'true' }}
+ name: Lint Backend
+ uses: ./.github/workflows/lint-py.yml
+ # Run only if there are python files changed
+ test-docs-build:
+ needs: path-filter
+ if: ${{ needs.path-filter.outputs.docs == 'true' }}
+ name: Test Docs Build
+ uses: ./.github/workflows/docs_test.yml
- # https://github.com/langchain-ai/langchain/blob/master/.github/workflows/check_diffs.yml
- ci_success:
- name: "CI Success"
- needs: [test-backend, test-frontend, lint-backend, test-docs-build]
- if: |
- always()
- runs-on: ubuntu-latest
- env:
- JOBS_JSON: ${{ toJSON(needs) }}
- RESULTS_JSON: ${{ toJSON(needs.*.result) }}
- EXIT_CODE: ${{!contains(needs.*.result, 'failure') && !contains(needs.*.result, 'cancelled') && '0' || '1'}}
- steps:
- - name: "CI Success"
- run: |
- echo $JOBS_JSON
- echo $RESULTS_JSON
- echo "Exiting with $EXIT_CODE"
- exit $EXIT_CODE
\ No newline at end of file
+ # https://github.com/langchain-ai/langchain/blob/master/.github/workflows/check_diffs.yml
+ ci_success:
+ name: "CI Success"
+ needs: [test-backend, test-frontend, lint-backend, test-docs-build]
+ if: |
+ always()
+ runs-on: ubuntu-latest
+ env:
+ JOBS_JSON: ${{ toJSON(needs) }}
+ RESULTS_JSON: ${{ toJSON(needs.*.result) }}
+ EXIT_CODE: ${{!contains(needs.*.result, 'failure') && !contains(needs.*.result, 'cancelled') && '0' || '1'}}
+ steps:
+ - name: "CI Success"
+ run: |
+ echo $JOBS_JSON
+ echo $RESULTS_JSON
+ echo "Exiting with $EXIT_CODE"
+ exit $EXIT_CODE
diff --git a/.github/workflows/conventional-labels.yml b/.github/workflows/conventional-labels.yml
new file mode 100644
index 000000000000..0f89f9723476
--- /dev/null
+++ b/.github/workflows/conventional-labels.yml
@@ -0,0 +1,31 @@
+# Warning, do not check out untrusted code with
+# the pull_request_target event.
+name: Label PRs with Conventional Commits
+on:
+ pull_request_target:
+  types: [opened, edited, reopened]
+
+jobs:
+ validate-pr:
+ name: Validate PR
+ runs-on: ubuntu-latest
+ outputs:
+ validate_output: ${{ steps.validate.outputs }}
+ steps:
+ - name: Validate the pull request
+ id: validate
+ uses: Namchee/conventional-pr@v0.15.4
+ with:
+ access_token: ${{ secrets.GITHUB_TOKEN }}
+ verbose: true
+ issue: false
+
+ label:
+ needs: validate-pr
+ name: Label PR
+ runs-on: ubuntu-latest
+ if: ${{ github.event.pull_request.user.type != 'Bot' && needs.validate-pr.outputs.validate_output == 'true' }}
+ steps:
+ - uses: bcoe/conventional-release-labels@v1
+ with:
+ type_labels: '{"feat": "enhancement","fix": "bug","docs": "documentation","style": "style","refactor": "refactor","perf": "performance","test": "test","chore": "chore","build": "build"}'
diff --git a/.github/workflows/docs_test.yml b/.github/workflows/docs_test.yml
index 2792017b46cf..e0adfda800e9 100644
--- a/.github/workflows/docs_test.yml
+++ b/.github/workflows/docs_test.yml
@@ -12,7 +12,6 @@ on:
env:
NODE_VERSION: "21"
-
jobs:
test-docs-build:
name: Test Docs Build
diff --git a/.github/workflows/js_autofix.yml b/.github/workflows/js_autofix.yml
index 205bc2c01538..0a90190a0be9 100644
--- a/.github/workflows/js_autofix.yml
+++ b/.github/workflows/js_autofix.yml
@@ -2,7 +2,6 @@ name: autofix.ci
on:
pull_request:
- types: [opened, synchronize, reopened]
paths:
- "src/frontend/**"
@@ -43,4 +42,4 @@ jobs:
cd src/frontend
npm run format
- - uses: autofix-ci/action@dd55f44df8f7cdb7a6bf74c78677eb8acd40cd0a
\ No newline at end of file
+ - uses: autofix-ci/action@dd55f44df8f7cdb7a6bf74c78677eb8acd40cd0a
diff --git a/.github/workflows/lint-js.yml b/.github/workflows/lint-js.yml
index d7f79738dacd..892347312f3d 100644
--- a/.github/workflows/lint-js.yml
+++ b/.github/workflows/lint-js.yml
@@ -51,5 +51,3 @@ jobs:
run: |
cd src/frontend
npm run check-format
-
-
diff --git a/.github/workflows/py_autofix.yml b/.github/workflows/py_autofix.yml
index 0ce1eecd475e..28a84fb88e12 100644
--- a/.github/workflows/py_autofix.yml
+++ b/.github/workflows/py_autofix.yml
@@ -1,7 +1,6 @@
name: autofix.ci
on:
pull_request:
- types: [opened, synchronize, reopened, auto_merge_enabled]
paths:
- "poetry.lock"
- "pyproject.toml"
@@ -12,18 +11,12 @@ env:
jobs:
lint:
- name: Run Mypy
+ name: Run Ruff Check and Format
runs-on: ubuntu-latest
- strategy:
- matrix:
- python-version:
- - "3.12"
- - "3.11"
- - "3.10"
steps:
- uses: actions/checkout@v4
- uses: install-pinned/ruff@b52a71f70b28264686d57d1efef1ba845b9cec6c
- run: ruff check --fix-only .
- run: ruff format .
- - uses: autofix-ci/action@dd55f44df8f7cdb7a6bf74c78677eb8acd40cd0a
\ No newline at end of file
+ - uses: autofix-ci/action@dd55f44df8f7cdb7a6bf74c78677eb8acd40cd0a
diff --git a/.github/workflows/style-check-py.yml b/.github/workflows/style-check-py.yml
index a7886b95006d..dfab075ce67e 100644
--- a/.github/workflows/style-check-py.yml
+++ b/.github/workflows/style-check-py.yml
@@ -33,6 +33,3 @@ jobs:
run: echo "::add-matcher::.github/workflows/matchers/ruff.json"
- name: Run Ruff Check
run: poetry run ruff check --output-format=github .
-
-
-
diff --git a/.github/workflows/typescript_test.yml b/.github/workflows/typescript_test.yml
index b7a48bb02f7c..b7beb96d763d 100644
--- a/.github/workflows/typescript_test.yml
+++ b/.github/workflows/typescript_test.yml
@@ -2,6 +2,11 @@ name: Run Frontend Tests
on:
workflow_call:
+ secrets:
+ OPENAI_API_KEY:
+ required: true
+ STORE_API_KEY:
+ required: true
workflow_dispatch:
inputs:
branch:
@@ -9,7 +14,6 @@ on:
required: false
type: string
-
env:
POETRY_VERSION: "1.8.3"
NODE_VERSION: "21"
@@ -25,11 +29,11 @@ jobs:
strategy:
fail-fast: false
matrix:
- shardIndex: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
- shardTotal: [10]
+ shardIndex: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14]
+ shardTotal: [14]
env:
- OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
- STORE_API_KEY: ${{ secrets.STORE_API_KEY }}
+ OPENAI_API_KEY: ${{ inputs.openai_api_key || secrets.OPENAI_API_KEY }}
+ STORE_API_KEY: ${{ inputs.store_api_key || secrets.STORE_API_KEY }}
steps:
- name: Checkout code
uses: actions/checkout@v4
diff --git a/Makefile b/Makefile
index 0b03489ab593..64a57b4398a8 100644
--- a/Makefile
+++ b/Makefile
@@ -1,6 +1,12 @@
.PHONY: all init format lint build build_frontend install_frontend run_frontend run_backend dev help tests coverage
all: help
+VERSION=$(shell grep "^version" pyproject.toml | sed 's/.*\"\(.*\)\"$$/\1/')
+DOCKERFILE=docker/build_and_push.Dockerfile
+DOCKERFILE_BACKEND=docker/build_and_push_backend.Dockerfile
+DOCKERFILE_FRONTEND=docker/frontend/build_and_push_frontend.Dockerfile
+DOCKER_COMPOSE=docker_example/docker-compose.yml
+
log_level ?= debug
host ?= 0.0.0.0
port ?= 7860
@@ -270,6 +276,55 @@ else
endif
+docker_build: dockerfile_build clear_dockerimage ## build DockerFile
+
+
+docker_build_backend: dockerfile_build_be clear_dockerimage ## build Backend DockerFile
+
+
+docker_build_frontend: dockerfile_build_fe clear_dockerimage ## build Frontend Dockerfile
+
+
+dockerfile_build:
+ @echo 'BUILDING DOCKER IMAGE: ${DOCKERFILE}'
+ @docker build --rm \
+ -f ${DOCKERFILE} \
+ -t langflow:${VERSION} .
+
+
+dockerfile_build_be: dockerfile_build
+ @echo 'BUILDING DOCKER IMAGE BACKEND: ${DOCKERFILE_BACKEND}'
+ @docker build --rm \
+ --build-arg LANGFLOW_IMAGE=langflow:${VERSION} \
+ -f ${DOCKERFILE_BACKEND} \
+ -t langflow_backend:${VERSION} .
+
+
+dockerfile_build_fe: dockerfile_build
+ @echo 'BUILDING DOCKER IMAGE FRONTEND: ${DOCKERFILE_FRONTEND}'
+ @docker build --rm \
+ --build-arg LANGFLOW_IMAGE=langflow:${VERSION} \
+ -f ${DOCKERFILE_FRONTEND} \
+ -t langflow_frontend:${VERSION} .
+
+
+clear_dockerimage:
+ @echo 'Clearing the docker build'
+ @if docker images -f "dangling=true" -q | grep -q '.*'; then \
+ docker rmi $$(docker images -f "dangling=true" -q); \
+ fi
+
+
+docker_compose_up: docker_build docker_compose_down
+ @echo 'Running docker compose up'
+ docker compose -f $(DOCKER_COMPOSE) up --remove-orphans
+
+
+docker_compose_down:
+ @echo 'Running docker compose down'
+ docker compose -f $(DOCKER_COMPOSE) down || true
+
+
lock_base:
cd src/backend/base && poetry lock
diff --git a/README.md b/README.md
index c21ab76350bb..ebeaac40d250 100644
--- a/README.md
+++ b/README.md
@@ -92,9 +92,11 @@ results = run_flow_from_json("path/to/flow.json", input_value="Hello, World!")
# Deploy
## DataStax Langflow
+
DataStax Langflow is a hosted version of Langflow integrated with [AstraDB](https://www.datastax.com/products/datastax-astra). Be up and running in minutes with no installation or setup required. [Sign up for free](https://langflow.datastax.com).
## Deploy Langflow on Hugging Face Spaces
+
You can also preview Langflow in [HuggingFace Spaces](https://huggingface.co/spaces/Langflow/Langflow-Preview). [Clone the space using this link](https://huggingface.co/spaces/Langflow/Langflow-Preview?duplicate=true) to create your own Langflow workspace in minutes.
## Deploy Langflow on Google Cloud Platform
@@ -121,7 +123,6 @@ Use this template to deploy Langflow 1.0 on Railway:
Follow our step-by-step guide to deploy [Langflow on Kubernetes](https://github.com/langflow-ai/langflow/blob/dev/docs/docs/deployment/kubernetes.md).
-
# 🖥️ Command Line Interface (CLI)
Langflow provides a command-line interface (CLI) for easy management and configuration.
diff --git a/docker/build_and_push.Dockerfile b/docker/build_and_push.Dockerfile
index 448a802e6797..aeb948b5f5c1 100644
--- a/docker/build_and_push.Dockerfile
+++ b/docker/build_and_push.Dockerfile
@@ -96,4 +96,4 @@ WORKDIR /app
ENV LANGFLOW_HOST=0.0.0.0
ENV LANGFLOW_PORT=7860
-ENTRYPOINT ["python", "-m", "langflow", "run"]
\ No newline at end of file
+CMD ["python", "-m", "langflow", "run"]
diff --git a/docker/build_and_push_backend.Dockerfile b/docker/build_and_push_backend.Dockerfile
index 2efbefb103bc..66cf65212548 100644
--- a/docker/build_and_push_backend.Dockerfile
+++ b/docker/build_and_push_backend.Dockerfile
@@ -5,4 +5,5 @@ ARG LANGFLOW_IMAGE
FROM $LANGFLOW_IMAGE
RUN rm -rf /app/.venv/langflow/frontend
-CMD ["--backend-only"]
+
+CMD ["python", "-m", "langflow", "run", "--host", "0.0.0.0", "--port", "7860", "--backend-only"]
diff --git a/docker/build_and_push_base.Dockerfile b/docker/build_and_push_base.Dockerfile
index d539c1ae8b6c..a996ec4a63af 100644
--- a/docker/build_and_push_base.Dockerfile
+++ b/docker/build_and_push_base.Dockerfile
@@ -98,4 +98,4 @@ RUN python -m pip install /app/src/backend/base/dist/*.tar.gz --user
ENV LANGFLOW_HOST=0.0.0.0
ENV LANGFLOW_PORT=7860
-ENTRYPOINT ["python", "-m", "langflow", "run"]
+CMD ["python", "-m", "langflow", "run"]
diff --git a/docker_example/docker-compose.yml b/docker_example/docker-compose.yml
index 61c02e65ead2..7e7e3406486d 100644
--- a/docker_example/docker-compose.yml
+++ b/docker_example/docker-compose.yml
@@ -10,7 +10,6 @@ services:
environment:
- LANGFLOW_DATABASE_URL=postgresql://langflow:langflow@postgres:5432/langflow
# This variable defines where the logs, file storage, monitor data and secret keys are stored.
- - LANGFLOW_CONFIG_DIR=/app/langflow
volumes:
- langflow-data:/app/langflow
diff --git a/src/backend/base/langflow/api/v1/chat.py b/src/backend/base/langflow/api/v1/chat.py
index e193425684c3..ecd85748ed9d 100644
--- a/src/backend/base/langflow/api/v1/chat.py
+++ b/src/backend/base/langflow/api/v1/chat.py
@@ -111,6 +111,7 @@ async def retrieve_vertices_order(
# which duplicates the results
for vertex_id in first_layer:
graph.remove_from_predecessors(vertex_id)
+ graph.remove_vertex_from_runnables(vertex_id)
# Now vertices is a list of lists
# We need to get the id of each vertex
@@ -160,7 +161,7 @@ async def build_vertex(
Args:
flow_id (str): The ID of the flow.
vertex_id (str): The ID of the vertex to build.
- background_tasks (BackgroundTasks): The background tasks object for logging.
+ background_tasks (BackgroundTasks): The background tasks dependency.
inputs (Optional[InputValueRequest], optional): The input values for the vertex. Defaults to None.
chat_service (ChatService, optional): The chat service dependency. Defaults to Depends(get_chat_service).
current_user (Any, optional): The current user dependency. Defaults to Depends(get_current_active_user).
@@ -211,8 +212,6 @@ async def build_vertex(
)
top_level_vertices = graph.run_manager.get_top_level_vertices(graph, next_runnable_vertices)
- result_data_response = ResultDataResponse(**result_dict.model_dump())
-
result_data_response = ResultDataResponse.model_validate(result_dict, from_attributes=True)
except Exception as exc:
if isinstance(exc, ComponentBuildException):
@@ -265,7 +264,7 @@ async def build_vertex(
if graph.stop_vertex and graph.stop_vertex in next_runnable_vertices:
next_runnable_vertices = [graph.stop_vertex]
- if not next_runnable_vertices:
+ if not graph.run_manager.vertices_to_run and not next_runnable_vertices:
background_tasks.add_task(graph.end_all_traces)
build_response = VertexBuildResponse(
diff --git a/src/backend/base/langflow/components/data/APIRequest.py b/src/backend/base/langflow/components/data/APIRequest.py
index a7908ae21726..8a1656a587f6 100644
--- a/src/backend/base/langflow/components/data/APIRequest.py
+++ b/src/backend/base/langflow/components/data/APIRequest.py
@@ -112,10 +112,18 @@ async def make_request(
if method not in ["GET", "POST", "PATCH", "PUT", "DELETE"]:
raise ValueError(f"Unsupported method: {method}")
+ if isinstance(body, str) and body:
+ try:
+ body = json.loads(body)
+ except Exception as e:
+ logger.error(f"Error decoding JSON data: {e}")
+ body = None
+ raise ValueError(f"Error decoding JSON data: {e}")
+
data = body if body else None
- payload = json.dumps(data) if data else None
+
try:
- response = await client.request(method, url, headers=headers, content=payload, timeout=timeout)
+ response = await client.request(method, url, headers=headers, json=data, timeout=timeout)
try:
result = response.json()
except Exception:
diff --git a/src/backend/base/langflow/graph/graph/base.py b/src/backend/base/langflow/graph/graph/base.py
index a0f84292e45b..cd761bbffad4 100644
--- a/src/backend/base/langflow/graph/graph/base.py
+++ b/src/backend/base/langflow/graph/graph/base.py
@@ -1236,9 +1236,11 @@ def get_successors(vertex, recursive=True):
stack.append(successor.id)
else:
excluded.add(successor.id)
- elif current_id not in stop_predecessors:
+ elif current_id not in stop_predecessors and is_start:
# If the current vertex is not the target vertex, we should add all its successors
# to the stack if they are not in visited
+
+ # If we are starting from the beginning, we should add all successors
for successor in current_vertex.successors:
if successor.id not in visited:
stack.append(successor.id)
@@ -1469,6 +1471,9 @@ def find_runnable_predecessors_for_successors(self, vertex_id: str) -> List[str]
def remove_from_predecessors(self, vertex_id: str):
self.run_manager.remove_from_predecessors(vertex_id)
+ def remove_vertex_from_runnables(self, vertex_id: str):
+ self.run_manager.remove_vertex_from_runnables(vertex_id)
+
def build_in_degree(self, edges: List[ContractEdge]) -> Dict[str, int]:
in_degree: Dict[str, int] = defaultdict(int)
for edge in edges:
diff --git a/src/backend/base/langflow/graph/vertex/base.py b/src/backend/base/langflow/graph/vertex/base.py
index 9c657de8d06d..a3ad19dc225d 100644
--- a/src/backend/base/langflow/graph/vertex/base.py
+++ b/src/backend/base/langflow/graph/vertex/base.py
@@ -280,7 +280,12 @@ def _build_params(self):
# we don't know the key of the dict but we need to set the value
# to the vertex that is the source of the edge
param_dict = template_dict[param_key]["value"]
- params[param_key] = {key: self.graph.get_vertex(edge.source_id) for key in param_dict.keys()}
+ if param_dict:
+ params[param_key] = {
+ key: self.graph.get_vertex(edge.source_id) for key in param_dict.keys()
+ }
+ else:
+ params[param_key] = self.graph.get_vertex(edge.source_id)
else:
params[param_key] = self.graph.get_vertex(edge.source_id)
diff --git a/src/backend/base/langflow/inputs/inputs.py b/src/backend/base/langflow/inputs/inputs.py
index b88c5db7a27e..4578660dc60e 100644
--- a/src/backend/base/langflow/inputs/inputs.py
+++ b/src/backend/base/langflow/inputs/inputs.py
@@ -253,7 +253,7 @@ class NestedDictInput(BaseInputMixin, ListableInputMixin, MetadataTraceMixin, In
"""
field_type: Optional[SerializableFieldTypes] = FieldTypes.NESTED_DICT
- value: Optional[dict] = {}
+ value: Optional[dict | Data] = {}
class DictInput(BaseInputMixin, ListableInputMixin, InputTraceMixin):
diff --git a/src/backend/base/langflow/schema/artifact.py b/src/backend/base/langflow/schema/artifact.py
index 97d449f7b5e2..dce70f790092 100644
--- a/src/backend/base/langflow/schema/artifact.py
+++ b/src/backend/base/langflow/schema/artifact.py
@@ -49,7 +49,7 @@ def get_artifact_type(value, build_result=None) -> str:
def post_process_raw(raw, artifact_type: str):
if artifact_type == ArtifactType.STREAM.value:
raw = ""
- elif artifact_type == ArtifactType.UNKNOWN.value:
+ elif artifact_type == ArtifactType.UNKNOWN.value and raw is not None:
raw = "Built Successfully ✨"
return raw
diff --git a/src/backend/base/langflow/services/telemetry/service.py b/src/backend/base/langflow/services/telemetry/service.py
index 76e82835c38e..9bed7f5536bf 100644
--- a/src/backend/base/langflow/services/telemetry/service.py
+++ b/src/backend/base/langflow/services/telemetry/service.py
@@ -1,4 +1,5 @@
import asyncio
+import contextlib
import os
import platform
from datetime import datetime, timezone
@@ -30,7 +31,7 @@ def __init__(self, settings_service: "SettingsService"):
self.settings_service = settings_service
self.base_url = settings_service.settings.telemetry_base_url
self.telemetry_queue: asyncio.Queue = asyncio.Queue()
- self.client = httpx.AsyncClient(timeout=None)
+ self.client = httpx.AsyncClient(timeout=10.0) # Set a reasonable timeout
self.running = False
self.package = get_version_info()["package"]
@@ -63,8 +64,12 @@ async def send_telemetry_data(self, payload: BaseModel, path: str | None = None)
logger.error(f"Failed to send telemetry data: {response.status_code} {response.text}")
else:
logger.debug("Telemetry data sent successfully.")
+ except httpx.HTTPStatusError as e:
+ logger.error(f"HTTP error occurred: {e}")
+ except httpx.RequestError as e:
+ logger.error(f"Request error occurred: {e}")
except Exception as e:
- logger.error(f"Failed to send telemetry data due to: {e}")
+ logger.error(f"Unexpected error occurred: {e}")
async def log_package_run(self, payload: RunPayload):
await self.telemetry_queue.put((self.send_telemetry_data, payload, "run"))
@@ -119,8 +124,10 @@ async def stop(self):
try:
self.running = False
await self.flush()
- self.worker_task.cancel()
if self.worker_task:
- await self.worker_task
+ self.worker_task.cancel()
+ with contextlib.suppress(asyncio.CancelledError):
+ await self.worker_task
+ await self.client.aclose()
except Exception as e:
logger.error(f"Error stopping tracing service: {e}")
diff --git a/src/backend/base/langflow/services/tracing/service.py b/src/backend/base/langflow/services/tracing/service.py
index 749a4b910423..d3307e8b31eb 100644
--- a/src/backend/base/langflow/services/tracing/service.py
+++ b/src/backend/base/langflow/services/tracing/service.py
@@ -7,7 +7,6 @@
from typing import TYPE_CHECKING, Any, Dict, Optional
from uuid import UUID
-from langchain.callbacks.tracers.langchain import wait_for_all_tracers
from loguru import logger
from langflow.schema.data import Data
@@ -292,5 +291,4 @@ def end(
self._run_tree.add_metadata(metadata)
self._run_tree.end(outputs=outputs, error=error)
self._run_tree.post()
- wait_for_all_tracers()
self._run_link = self._run_tree.get_url()
diff --git a/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/index.tsx b/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/index.tsx
index 278614fd68a3..3ac87f76c0d0 100644
--- a/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/index.tsx
+++ b/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/index.tsx
@@ -1,6 +1,6 @@
import { cloneDeep } from "lodash";
import { LinkIcon, SparklesIcon } from "lucide-react";
-import { useEffect, useMemo, useState } from "react";
+import { Fragment, useEffect, useMemo, useState } from "react";
import IconComponent from "../../../../components/genericIconComponent";
import ShadTooltip from "../../../../components/shadTooltipComponent";
import { Input } from "../../../../components/ui/input";
@@ -296,69 +296,65 @@ export default function ExtraSidebar(): JSX.Element {
.filter((x) => PRIORITY_SIDEBAR_ORDER.includes(x))
.map((SBSectionName: keyof APIObjectType, index) =>
Object.keys(dataFilter[SBSectionName]).length > 0 ? (
- <>
-