diff --git a/.env.example b/.env.example
index c6f7dbb6ebf..8b11217b5d4 100644
--- a/.env.example
+++ b/.env.example
@@ -27,16 +27,9 @@ FILE_SIZE_LIMIT=5242880
 
 # GPT settings
 OPENAI_API_BASE="https://api.openai.com/v1" # deprecated
+OPENAI_API_KEY="sk-" # deprecated
 GPT_ENGINE="gpt-3.5-turbo" # deprecated
 
-# AI Assistant Settings
-LLM_PROVIDER=openai # Can be "openai", "anthropic", or "google"
-LLM_MODEL=gpt-4o-mini # The specific model you want to use
-
-OPENAI_API_KEY=your-openai-api-key
-ANTHROPIC_API_KEY=your-anthropic-api-key
-GEMINI_API_KEY=your-gemini-api-key
-
 # Settings related to Docker
 DOCKERIZED=1 # deprecated
 
diff --git a/apiserver/plane/app/views/external/base.py b/apiserver/plane/app/views/external/base.py
index 33d3c6afeef..ae5c47f1455 100644
--- a/apiserver/plane/app/views/external/base.py
+++ b/apiserver/plane/app/views/external/base.py
@@ -24,7 +24,6 @@ class LLMProvider:
     """Base class for LLM provider configurations"""
     name: str = ""
     models: List[str] = []
-    api_key_env: str = ""
     default_model: str = ""
 
     @classmethod
@@ -38,7 +37,6 @@ def get_config(cls) -> Dict[str, str | List[str]]:
 class OpenAIProvider(LLMProvider):
     name = "OpenAI"
     models = ["gpt-3.5-turbo", "gpt-4o-mini", "gpt-4o", "o1-mini", "o1-preview"]
-    api_key_env = "OPENAI_API_KEY"
     default_model = "gpt-4o-mini"
 
 class AnthropicProvider(LLMProvider):
@@ -53,13 +51,11 @@ class AnthropicProvider(LLMProvider):
         "claude-instant-1.2",
         "claude-instant-1"
     ]
-    api_key_env = "ANTHROPIC_API_KEY"
     default_model = "claude-3-sonnet-20240229"
 
 class GeminiProvider(LLMProvider):
     name = "Gemini"
     models = ["gemini-pro", "gemini-1.5-pro-latest", "gemini-pro-vision"]
-    api_key_env = "GEMINI_API_KEY"
     default_model = "gemini-pro"
 
 SUPPORTED_PROVIDERS = {
@@ -73,14 +69,18 @@ def get_llm_config() -> Tuple[str | None, str | None, str | None]:
     Helper to get LLM configuration values, returns:
     - api_key, model, provider
     """
-    provider_key, model = get_configuration_value([
+    api_key, provider_key, model = get_configuration_value([
+        {
+            "key": "LLM_API_KEY",
+            "default": os.environ.get("LLM_API_KEY", None),
+        },
         {
             "key": "LLM_PROVIDER",
             "default": os.environ.get("LLM_PROVIDER", "openai"),
         },
         {
             "key": "LLM_MODEL",
-            "default": None,
+            "default": os.environ.get("LLM_MODEL", None),
         },
     ])
 
@@ -89,27 +89,22 @@ def get_llm_config() -> Tuple[str | None, str | None, str | None]:
         log_exception(ValueError(f"Unsupported provider: {provider_key}"))
         return None, None, None
 
-    api_key, _ = get_configuration_value([
-        {
-            "key": provider.api_key_env,
-            "default": os.environ.get(provider.api_key_env, None),
-        }
-    ])
-
     if not api_key:
         log_exception(ValueError(f"Missing API key for provider: {provider.name}"))
         return None, None, None
 
-    # If no model specified, use provider's default
+    # If no model specified, use provider's default
     if not model:
         model = provider.default_model
 
-    # Validate model is supported by provider
+    # Validate model is supported by provider
     if model not in provider.models:
         log_exception(ValueError(
             f"Model {model} not supported by {provider.name}. "
             f"Supported models: {', '.join(provider.models)}"
         ))
+        return None, None, None
+
     return api_key, model, provider_key
 
 
diff --git a/apiserver/plane/license/management/commands/configure_instance.py b/apiserver/plane/license/management/commands/configure_instance.py
index 6476049cfab..8458df5df6d 100644
--- a/apiserver/plane/license/management/commands/configure_instance.py
+++ b/apiserver/plane/license/management/commands/configure_instance.py
@@ -132,20 +132,8 @@ def handle(self, *args, **options):
                 "is_encrypted": False,
             },
             {
-                "key": "OPENAI_API_KEY",
-                "value": os.environ.get("OPENAI_API_KEY"),
-                "category": "AI",
-                "is_encrypted": True,
-            },
-            {
-                "key": "ANTHROPIC_API_KEY",
-                "value": os.environ.get("ANTHROPIC_API_KEY"),
-                "category": "AI",
-                "is_encrypted": True,
-            },
-            {
-                "key": "GEMINI_API_KEY",
-                "value": os.environ.get("GEMINI_API_KEY"),
+                "key": "LLM_API_KEY",
+                "value": os.environ.get("LLM_API_KEY"),
                 "category": "AI",
                 "is_encrypted": True,
             },
@@ -161,8 +149,9 @@ def handle(self, *args, **options):
                 "category": "AI",
                 "is_encrypted": False,
             },
+            # Deprecated, use LLM_MODEL
             {
-                "key": "GPT_ENGINE",
+                "key": "GPT_ENGINE",
                 "value": os.environ.get("GPT_ENGINE", "gpt-3.5-turbo"),
                 "category": "SMTP",
                 "is_encrypted": False,