
Commit e73754b

Update database file path handling and server shutdown message (#1686)
* Update database file path handling in base.py and version.py
* Update server.py to handle the SIGINT signal in LangflowUvicornWorker
* Add a shutdown message when shutting down Langflow in main.py
* Update the datetime type for the created_at and updated_at fields in the apikey and variable tables
* Update package versions in the pyproject.toml and poetry.lock files
* Fix an import error in base.py
* Refactor database file path handling in base.py
* Update the unit test command in python_test.yml
1 parent: e38ab09

File tree

12 files changed: +490 -330 lines


.github/workflows/python_test.yml

+1-1
@@ -41,4 +41,4 @@ jobs:
           poetry install
       - name: Run unit tests
         run: |
-          make tests
+          make tests args="-n auto"
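
The Makefile's tests target presumably passes args straight through to pytest, so this change runs the unit tests in parallel via the pytest-xdist plugin's -n auto option (one worker per CPU core). A hedged sketch of the equivalent programmatic invocation, assuming pytest and pytest-xdist are installed and using an illustrative tests/ path rather than the repo's actual layout:

import sys

import pytest

# "-n auto" comes from the pytest-xdist plugin and spawns one worker per CPU core.
# "tests/" is a placeholder path; the CI workflow goes through the repo's Makefile instead.
sys.exit(pytest.main(["-n", "auto", "tests/"]))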

poetry.lock

+135-133
Some generated files are not rendered by default.

pyproject.toml

+1-1
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "langflow"
-version = "1.0.0a17"
+version = "1.0.0a18"
 description = "A Python package with a built-in web application"
 authors = ["Logspace <[email protected]>"]
 maintainers = [

src/backend/base/langflow/__main__.py

+24-17
@@ -9,12 +9,6 @@
 import httpx
 import typer
 from dotenv import load_dotenv
-from langflow.main import setup_app
-from langflow.services.database.utils import session_getter
-from langflow.services.deps import get_db_service
-from langflow.services.utils import initialize_services
-from langflow.utils.logger import configure, logger
-from langflow.utils.util import update_settings
 from multiprocess import Process, cpu_count  # type: ignore
 from packaging import version as pkg_version
 from rich import box
@@ -23,6 +17,13 @@
 from rich.panel import Panel
 from rich.table import Table

+from langflow.main import setup_app
+from langflow.services.database.utils import session_getter
+from langflow.services.deps import get_db_service
+from langflow.services.utils import initialize_services
+from langflow.utils.logger import configure, logger
+from langflow.utils.util import update_settings
+
 console = Console()

 app = typer.Typer(no_args_is_help=True)
@@ -151,17 +152,21 @@ def run(
     # Define an env variable to know if we are just testing the server
     if "pytest" in sys.modules:
         return
-
-    if platform.system() in ["Windows"]:
-        # Run using uvicorn on MacOS and Windows
-        # Windows doesn't support gunicorn
-        # MacOS requires an env variable to be set to use gunicorn
-        run_on_windows(host, port, log_level, options, app)
-    else:
-        # Run using gunicorn on Linux
-        run_on_mac_or_linux(host, port, log_level, options, app)
-    if open_browser:
-        click.launch(f"http://{host}:{port}")
+    try:
+        if platform.system() in ["Windows"]:
+            # Run using uvicorn on MacOS and Windows
+            # Windows doesn't support gunicorn
+            # MacOS requires an env variable to be set to use gunicorn
+            process = run_on_windows(host, port, log_level, options, app)
+        else:
+            # Run using gunicorn on Linux
+            process = run_on_mac_or_linux(host, port, log_level, options, app)
+        if open_browser:
+            click.launch(f"http://{host}:{port}")
+        if process:
+            process.join()
+    except KeyboardInterrupt:
+        pass


 def wait_for_server_ready(host, port):
@@ -182,6 +187,7 @@ def run_on_mac_or_linux(host, port, log_level, options, app):
     wait_for_server_ready(host, port)

     print_banner(host, port)
+    return webapp_process


 def run_on_windows(host, port, log_level, options, app):
@@ -190,6 +196,7 @@ def run_on_windows(host, port, log_level, options, app):
     """
     print_banner(host, port)
     run_langflow(host, port, log_level, options, app)
+    return None


 def is_port_in_use(port, host="localhost"):
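
The run() change above hands the server process back to the caller so the CLI can block on it with join() and exit quietly on Ctrl-C. A minimal sketch of that pattern, using the standard-library multiprocessing module and a placeholder serve() loop in place of Langflow's gunicorn/uvicorn entry point (Langflow itself uses the multiprocess package):

import time
from multiprocessing import Process  # Langflow uses the multiprocess fork; the stdlib works for this sketch


def serve():
    # Placeholder for the real server loop (gunicorn/uvicorn in Langflow).
    try:
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        pass  # the child also receives Ctrl-C; exit without a traceback


def run():
    process = Process(target=serve)
    process.start()
    try:
        # Keep the CLI attached to the server instead of returning immediately.
        process.join()
    except KeyboardInterrupt:
        # Ctrl-C interrupts join(); swallow it so the CLI exits without a stack trace.
        pass


if __name__ == "__main__":
    run()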
New Alembic migration "Change datetime type" (revision 79e675cb6752)

+99

@@ -0,0 +1,99 @@
+"""Change datetime type
+
+Revision ID: 79e675cb6752
+Revises: e3bc869fa272
+Create Date: 2024-04-11 19:23:10.697335
+
+"""
+from calendar import c
+from typing import Sequence, Union
+
+import sqlalchemy as sa
+from alembic import op
+from sqlalchemy.dialects import postgresql
+from sqlalchemy.engine.reflection import Inspector
+
+# revision identifiers, used by Alembic.
+revision: str = "79e675cb6752"
+down_revision: Union[str, None] = "e3bc869fa272"
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    conn = op.get_bind()
+    inspector = Inspector.from_engine(conn)  # type: ignore
+    table_names = inspector.get_table_names()
+    # ### commands auto generated by Alembic - please adjust! ###
+    if "apikey" in table_names:
+        columns = inspector.get_columns("apikey")
+        created_at_column = next((column for column in columns if column["name"] == "created_at"), None)
+        if created_at_column is not None and created_at_column["type"] == postgresql.TIMESTAMP():
+            with op.batch_alter_table("apikey", schema=None) as batch_op:
+                batch_op.alter_column(
+                    "created_at",
+                    existing_type=postgresql.TIMESTAMP(),
+                    type_=sa.DateTime(timezone=True),
+                    existing_nullable=False,
+                )
+    if "variable" in table_names:
+        columns = inspector.get_columns("variable")
+        created_at_column = next((column for column in columns if column["name"] == "created_at"), None)
+        updated_at_column = next((column for column in columns if column["name"] == "updated_at"), None)
+        with op.batch_alter_table("variable", schema=None) as batch_op:
+            if created_at_column is not None and created_at_column["type"] == postgresql.TIMESTAMP():
+                batch_op.alter_column(
+                    "created_at",
+                    existing_type=postgresql.TIMESTAMP(),
+                    type_=sa.DateTime(timezone=True),
+                    existing_nullable=True,
+                )
+            if updated_at_column is not None and updated_at_column["type"] == postgresql.TIMESTAMP():
+                batch_op.alter_column(
+                    "updated_at",
+                    existing_type=postgresql.TIMESTAMP(),
+                    type_=sa.DateTime(timezone=True),
+                    existing_nullable=True,
+                )
+
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    conn = op.get_bind()
+    inspector = Inspector.from_engine(conn)  # type: ignore
+    table_names = inspector.get_table_names()
+    # ### commands auto generated by Alembic - please adjust! ###
+    if "variable" in table_names:
+        columns = inspector.get_columns("variable")
+        created_at_column = next((column for column in columns if column["name"] == "created_at"), None)
+        updated_at_column = next((column for column in columns if column["name"] == "updated_at"), None)
+        with op.batch_alter_table("variable", schema=None) as batch_op:
+            if updated_at_column is not None and updated_at_column["type"] == sa.DateTime(timezone=True):
+                batch_op.alter_column(
+                    "updated_at",
+                    existing_type=sa.DateTime(timezone=True),
+                    type_=postgresql.TIMESTAMP(),
+                    existing_nullable=True,
+                )
+            if created_at_column is not None and created_at_column["type"] == sa.DateTime(timezone=True):
+                batch_op.alter_column(
+                    "created_at",
+                    existing_type=sa.DateTime(timezone=True),
+                    type_=postgresql.TIMESTAMP(),
+                    existing_nullable=True,
+                )
+
+    if "apikey" in table_names:
+        columns = inspector.get_columns("apikey")
+        created_at_column = next((column for column in columns if column["name"] == "created_at"), None)
+        if created_at_column is not None and created_at_column["type"] == sa.DateTime(timezone=True):
+            with op.batch_alter_table("apikey", schema=None) as batch_op:
+                batch_op.alter_column(
+                    "created_at",
+                    existing_type=sa.DateTime(timezone=True),
+                    type_=postgresql.TIMESTAMP(),
+                    existing_nullable=False,
+                )
+
+    # ### end Alembic commands ###
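
The migration only alters a column when its table exists and the column still has the old type, so it is safe to run against databases in either state. As an illustration only (not code from this commit), that guard-then-alter step could be factored into a helper; alter_to_tz_aware is a hypothetical name, and sqlalchemy.inspect is used in place of Inspector.from_engine:

import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql


def alter_to_tz_aware(table_name: str, column_name: str, nullable: bool) -> None:
    """Convert a naive TIMESTAMP column to DateTime(timezone=True), if it exists and needs it."""
    conn = op.get_bind()
    inspector = sa.inspect(conn)
    if table_name not in inspector.get_table_names():
        return
    columns = {col["name"]: col for col in inspector.get_columns(table_name)}
    column = columns.get(column_name)
    if column is None or getattr(column["type"], "timezone", False):
        return  # column missing or already timezone-aware: nothing to do
    with op.batch_alter_table(table_name, schema=None) as batch_op:
        batch_op.alter_column(
            column_name,
            existing_type=postgresql.TIMESTAMP(),
            type_=sa.DateTime(timezone=True),
            existing_nullable=nullable,
        )

With such a helper, upgrade() would reduce to calls like alter_to_tz_aware("apikey", "created_at", nullable=False).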

src/backend/base/langflow/main.py

+3
@@ -10,6 +10,7 @@
 from fastapi.responses import FileResponse
 from fastapi.staticfiles import StaticFiles
 from loguru import logger
+from rich import print as rprint

 from langflow.api import router
 from langflow.initial_setup.setup import create_or_update_starter_projects
@@ -28,6 +29,8 @@ async def lifespan(app: FastAPI):
         LangfuseInstance.update()
         create_or_update_starter_projects()
         yield
+        # Shutdown message
+        rprint("[bold red]Shutting down Langflow...[/bold red]")
         teardown_services()

     return lifespan
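
In a FastAPI lifespan context manager, everything before the yield runs at startup and everything after it runs at shutdown, which is why the message is printed right before teardown_services(). A minimal, self-contained sketch of the pattern with generic messages (not Langflow's actual lifespan):

from contextlib import asynccontextmanager

from fastapi import FastAPI
from rich import print as rprint


@asynccontextmanager
async def lifespan(app: FastAPI):
    rprint("[bold green]Starting up...[/bold green]")  # runs once, before requests are served
    yield
    rprint("[bold red]Shutting down...[/bold red]")  # runs once, when the server stops


app = FastAPI(lifespan=lifespan)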

src/backend/base/langflow/server.py

+17
@@ -1,4 +1,6 @@
+import asyncio
 import logging
+import signal

 from gunicorn import glogging  # type: ignore
 from gunicorn.app.base import BaseApplication  # type: ignore
@@ -10,6 +12,21 @@
 class LangflowUvicornWorker(UvicornWorker):
     CONFIG_KWARGS = {"loop": "asyncio"}

+    def _install_sigint_handler(self) -> None:
+        """Install a SIGQUIT handler on workers.
+
+        - https://github.com/encode/uvicorn/issues/1116
+        - https://github.com/benoitc/gunicorn/issues/2604
+        """
+
+        loop = asyncio.get_running_loop()
+        loop.add_signal_handler(signal.SIGINT, self.handle_exit, signal.SIGINT, None)
+
+    async def _serve(self) -> None:
+        # We do this to not log the "Worker (pid:XXXXX) was sent SIGINT"
+        self._install_sigint_handler()
+        await super()._serve()
+

 class Logger(glogging.Logger):
     """Implements and overrides the gunicorn logging interface.

src/backend/base/langflow/services/settings/base.py

+36-7
@@ -151,21 +151,50 @@ def set_database_url(cls, value, values):
         # if there is a database in that location
         if not values["CONFIG_DIR"]:
             raise ValueError("CONFIG_DIR not set, please set it or provide a DATABASE_URL")
-
-        new_path = f"{values['CONFIG_DIR']}/langflow.db"
-        if Path("./langflow.db").exists():
+        from langflow.version import is_pre_release  # type: ignore
+
+        pre_db_file_name = "langflow-pre.db"
+        db_file_name = "langflow.db"
+        new_pre_path = f"{values['CONFIG_DIR']}/{pre_db_file_name}"
+        new_path = f"{values['CONFIG_DIR']}/{db_file_name}"
+        final_path = None
+        if is_pre_release:
+            if Path(new_pre_path).exists():
+                final_path = new_pre_path
+            elif Path(new_path).exists():
+                # We need to copy the current db to the new location
+                logger.debug("Copying existing database to new location")
+                copy2(new_path, new_pre_path)
+                logger.debug(f"Copied existing database to {new_pre_path}")
+            elif Path(f"./{db_file_name}").exists():
+                logger.debug("Copying existing database to new location")
+                copy2(f"./{db_file_name}", new_pre_path)
+                logger.debug(f"Copied existing database to {new_pre_path}")
+            else:
+                logger.debug(f"Database already exists at {new_pre_path}, using it")
+                final_path = new_pre_path
+        else:
             if Path(new_path).exists():
                 logger.debug(f"Database already exists at {new_path}, using it")
-            else:
+                final_path = new_path
+            elif Path("./{db_file_name}").exists():
                 try:
                     logger.debug("Copying existing database to new location")
-                    copy2("./langflow.db", new_path)
+                    copy2("./{db_file_name}", new_path)
                     logger.debug(f"Copied existing database to {new_path}")
                 except Exception:
                     logger.error("Failed to copy database, using default path")
-                    new_path = "./langflow.db"
+                    new_path = "./{db_file_name}"
+            else:
+                final_path = new_path
+
+        if final_path is None:
+            if is_pre_release:
+                final_path = new_pre_path
+            else:
+                final_path = new_path

-        value = f"sqlite:///{new_path}"
+        value = f"sqlite:///{final_path}"

         return value
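
Taken together, the new branches route pre-release builds to a separate langflow-pre.db inside CONFIG_DIR, copy over any existing database they can find, and leave stable builds on langflow.db. A simplified, standalone sketch of that resolution order (resolve_db_path is a hypothetical helper, not the actual validator, and it ignores the copy-failure fallback):

from pathlib import Path
from shutil import copy2


def resolve_db_path(config_dir: str, is_pre_release: bool) -> Path:
    """Pick the SQLite file for this install, migrating a legacy ./langflow.db if present."""
    db_name = "langflow-pre.db" if is_pre_release else "langflow.db"
    target = Path(config_dir) / db_name
    if target.exists():
        return target  # already in place: use it as-is
    legacy = Path("./langflow.db")
    if legacy.exists():
        copy2(legacy, target)  # bring the old working-directory database into CONFIG_DIR
    return target


# e.g. value = f"sqlite:///{resolve_db_path('/home/user/.langflow', is_pre_release=True)}"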
