6 changes: 6 additions & 0 deletions package-lock.json

Some generated files are not rendered by default, so the package-lock.json diff is omitted here.

2 changes: 1 addition & 1 deletion python/Dockerfile.agents
@@ -1,5 +1,5 @@
 # Agents Service - Lightweight Pydantic AI agents
-FROM python:3.12-slim
+FROM python:3.13-slim
 
 WORKDIR /app
 
2 changes: 1 addition & 1 deletion python/Dockerfile.mcp
@@ -1,5 +1,5 @@
 # MCP Service - Lightweight HTTP-based microservice
-FROM python:3.12-slim
+FROM python:3.13-slim
 
 WORKDIR /app
 
4 changes: 2 additions & 2 deletions python/Dockerfile.server
@@ -1,5 +1,5 @@
 # Server Service - Web crawling and document processing microservice
-FROM python:3.12 AS builder
+FROM python:3.13-slim AS builder
 
 WORKDIR /build
 
@@ -18,7 +18,7 @@
 RUN uv venv /venv && \
     uv pip install --group server --group server-reranking
 
 # Runtime stage
-FROM python:3.12-slim
+FROM python:3.13-slim
 
 WORKDIR /app
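Note: all three service images now build on python:3.13-slim, and the server's builder stage additionally drops from the full python:3.12 image to slim. A quick sanity check after rebuilding (illustrative, not part of the diff) confirms the interpreter actually moved:

# Run inside any rebuilt container; asserts the base-image bump took effect.
import sys

assert sys.version_info[:2] == (3, 13), f"expected Python 3.13, got {sys.version}"
print("runtime OK:", sys.version)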
2 changes: 1 addition & 1 deletion python/src/server/services/credential_service.py
@@ -444,7 +444,7 @@ async def _get_provider_api_key(self, provider: str) -> str | None:
         key_mapping = {
             "openai": "OPENAI_API_KEY",
             "google": "GOOGLE_API_KEY",
-            "ollama": None,  # No API key needed
+            "ollama": "OPENAI_API_KEY",  # Reuse the OPENAI key for Ollama
         }
 
         key_name = key_mapping.get(provider)
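The effect of the mapping change, as a minimal sketch (resolve_provider_key and get_credential are illustrative names, not from this PR): "ollama" no longer short-circuits to None but resolves the same stored key as OpenAI, so one stored credential serves both providers.

# Illustrative only: how the changed mapping resolves a provider's key.
async def resolve_provider_key(provider, get_credential):
    key_mapping = {
        "openai": "OPENAI_API_KEY",
        "google": "GOOGLE_API_KEY",
        "ollama": "OPENAI_API_KEY",  # after this change: same stored key as OpenAI
    }
    key_name = key_mapping.get(provider)
    if key_name is None:
        return None  # provider unknown, or no key configured
    return await get_credential(key_name)  # "ollama" now reaches this lookup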
4 changes: 2 additions & 2 deletions python/src/server/services/llm_provider_service.py
@@ -101,9 +101,9 @@ async def get_llm_client(provider: str | None = None, use_embedding_provider: bo
         logger.info("OpenAI client created successfully")
 
     elif provider_name == "ollama":
-        # Ollama requires an API key in the client but doesn't actually use it
+        # Ollama can use a dummy API key if none provided
         client = openai.AsyncOpenAI(
-            api_key="ollama",  # Required but unused by Ollama
+            api_key=api_key,  # Use provided key or dummy value
             base_url=base_url or "http://localhost:11434/v1",
         )
         logger.info(f"Ollama client created successfully with base URL: {base_url}")
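Downstream, the Ollama branch now forwards whatever key the credential layer resolved instead of a hard-coded placeholder. A minimal sketch of the resulting construction, assuming the openai 1.x SDK (the "ollama" fallback literal is an assumption; Ollama ignores the key's value):

import openai

def make_ollama_client(api_key: str | None, base_url: str | None) -> openai.AsyncOpenAI:
    # Forward the resolved key; fall back to a dummy value that Ollama accepts.
    return openai.AsyncOpenAI(
        api_key=api_key or "ollama",
        base_url=base_url or "http://localhost:11434/v1",
    )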