diff --git a/package-lock.json b/package-lock.json new file mode 100644 index 0000000000..bbcf0fa410 --- /dev/null +++ b/package-lock.json @@ -0,0 +1,6 @@ +{ + "name": "Archon", + "lockfileVersion": 3, + "requires": true, + "packages": {} +} diff --git a/python/Dockerfile.agents b/python/Dockerfile.agents index 2013bc3c60..af62d2925d 100644 --- a/python/Dockerfile.agents +++ b/python/Dockerfile.agents @@ -1,5 +1,5 @@ # Agents Service - Lightweight Pydantic AI agents -FROM python:3.12-slim +FROM python:3.13-slim WORKDIR /app diff --git a/python/Dockerfile.mcp b/python/Dockerfile.mcp index 045da7120d..310ec9c7a5 100644 --- a/python/Dockerfile.mcp +++ b/python/Dockerfile.mcp @@ -1,5 +1,5 @@ # MCP Service - Lightweight HTTP-based microservice -FROM python:3.12-slim +FROM python:3.13-slim WORKDIR /app diff --git a/python/Dockerfile.server b/python/Dockerfile.server index 6e0ea5585f..7e88432c40 100644 --- a/python/Dockerfile.server +++ b/python/Dockerfile.server @@ -1,5 +1,5 @@ # Server Service - Web crawling and document processing microservice -FROM python:3.12 AS builder +FROM python:3.13-slim AS builder WORKDIR /build @@ -18,7 +18,7 @@ RUN uv venv /venv && \ uv pip install --group server --group server-reranking # Runtime stage -FROM python:3.12-slim +FROM python:3.13-slim WORKDIR /app diff --git a/python/src/server/services/credential_service.py b/python/src/server/services/credential_service.py index 017c3b2af1..a890a84848 100644 --- a/python/src/server/services/credential_service.py +++ b/python/src/server/services/credential_service.py @@ -444,7 +444,7 @@ async def _get_provider_api_key(self, provider: str) -> str | None: key_mapping = { "openai": "OPENAI_API_KEY", "google": "GOOGLE_API_KEY", - "ollama": None, # No API key needed + "ollama": "OPENAI_API_KEY", # Reuse the OPENAI key for Ollama } key_name = key_mapping.get(provider) diff --git a/python/src/server/services/llm_provider_service.py b/python/src/server/services/llm_provider_service.py index 
d7c834f9f2..b5a08f96f0 100644 --- a/python/src/server/services/llm_provider_service.py +++ b/python/src/server/services/llm_provider_service.py @@ -101,9 +101,9 @@ async def get_llm_client(provider: str | None = None, use_embedding_provider: bo logger.info("OpenAI client created successfully") elif provider_name == "ollama": - # Ollama requires an API key in the client but doesn't actually use it + # Ollama can use a dummy API key if none provided client = openai.AsyncOpenAI( - api_key="ollama", # Required but unused by Ollama + api_key=api_key or "ollama", # Use provided key or dummy value base_url=base_url or "http://localhost:11434/v1", ) logger.info(f"Ollama client created successfully with base URL: {base_url}")