strategy pattern for llm config
akash-plane committed Dec 23, 2024
1 parent 411e16a commit 2d597b8
Showing 3 changed files with 15 additions and 38 deletions.
9 changes: 1 addition & 8 deletions .env.example
@@ -27,16 +27,9 @@ FILE_SIZE_LIMIT=5242880

# GPT settings
OPENAI_API_BASE="https://api.openai.com/v1" # deprecated
OPENAI_API_KEY="sk-" # deprecated
GPT_ENGINE="gpt-3.5-turbo" # deprecated

# AI Assistant Settings
LLM_PROVIDER=openai # Can be "openai", "anthropic", or "google"
LLM_MODEL=gpt-4o-mini # The specific model you want to use

OPENAI_API_KEY=your-openai-api-key
ANTHROPIC_API_KEY=your-anthropic-api-key
GEMINI_API_KEY=your-gemini-api-key

# Settings related to Docker
DOCKERIZED=1 # deprecated

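Read together with the other two files in this commit, the per-provider keys above (OPENAI_API_KEY, ANTHROPIC_API_KEY, GEMINI_API_KEY) are consolidated into a single LLM_API_KEY next to LLM_PROVIDER and LLM_MODEL. A minimal sketch of reading the consolidated settings at application startup, assuming only the variable names that appear in this commit (the fail-fast check itself is illustrative, not part of the change):

import os

# Consolidated AI settings; variable names come from this commit, the check is illustrative.
LLM_PROVIDER = os.environ.get("LLM_PROVIDER", "openai")  # "openai", "anthropic", or "google"
LLM_MODEL = os.environ.get("LLM_MODEL")                  # provider default is applied downstream
LLM_API_KEY = os.environ.get("LLM_API_KEY")              # replaces the per-provider *_API_KEY variables

if not LLM_API_KEY:
    raise RuntimeError(f"LLM_API_KEY must be set when LLM_PROVIDER is {LLM_PROVIDER!r}")
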
25 changes: 10 additions & 15 deletions apiserver/plane/app/views/external/base.py
@@ -24,7 +24,6 @@ class LLMProvider:
"""Base class for LLM provider configurations"""
name: str = ""
models: List[str] = []
api_key_env: str = ""
default_model: str = ""

@classmethod
@@ -38,7 +37,6 @@ def get_config(cls) -> Dict[str, str | List[str]]:
class OpenAIProvider(LLMProvider):
name = "OpenAI"
models = ["gpt-3.5-turbo", "gpt-4o-mini", "gpt-4o", "o1-mini", "o1-preview"]
api_key_env = "OPENAI_API_KEY"
default_model = "gpt-4o-mini"

class AnthropicProvider(LLMProvider):
@@ -53,13 +51,11 @@ class AnthropicProvider(LLMProvider):
"claude-instant-1.2",
"claude-instant-1"
]
api_key_env = "ANTHROPIC_API_KEY"
default_model = "claude-3-sonnet-20240229"

class GeminiProvider(LLMProvider):
name = "Gemini"
models = ["gemini-pro", "gemini-1.5-pro-latest", "gemini-pro-vision"]
api_key_env = "GEMINI_API_KEY"
default_model = "gemini-pro"

SUPPORTED_PROVIDERS = {
@@ -73,14 +69,18 @@ def get_llm_config() -> Tuple[str | None, str | None, str | None]:
Helper to get LLM configuration values, returns:
- api_key, model, provider
"""
provider_key, model = get_configuration_value([
api_key, provider_key, model = get_configuration_value([
{
"key": "LLM_API_KEY",
"default": os.environ.get("LLM_API_KEY", None),
},
{
"key": "LLM_PROVIDER",
"default": os.environ.get("LLM_PROVIDER", "openai"),
},
{
"key": "LLM_MODEL",
"default": None,
"default": os.environ.get("LLM_MODEL", None),
},
])

@@ -89,27 +89,22 @@ def get_llm_config() -> Tuple[str | None, str | None, str | None]:
log_exception(ValueError(f"Unsupported provider: {provider_key}"))
return None, None, None

api_key, _ = get_configuration_value([
{
"key": provider.api_key_env,
"default": os.environ.get(provider.api_key_env, None),
}
])

if not api_key:
log_exception(ValueError(f"Missing API key for provider: {provider.name}"))
return None, None, None

# If no model specified, use provider's default
if not model:
model = provider.default_model

# Validate model is supported by provider
if model not in provider.models:
log_exception(ValueError(
f"Model {model} not supported by {provider.name}. "
f"Supported models: {', '.join(provider.models)}"
))
return None, None, None

return api_key, model, provider_key


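For readers skimming the diff, the hunks above implement the strategy pattern named in the commit title: each provider class declares its models and default, SUPPORTED_PROVIDERS acts as the registry, and get_llm_config() resolves key, model, and provider from one set of LLM_* settings. Below is a self-contained sketch of the same idea, reduced to plain environment-variable lookups; it omits Plane's get_configuration_value and log_exception helpers, and the function name resolve_llm_config plus the registry keys are illustrative assumptions rather than code taken from the repository.

import os
from typing import List, Optional, Tuple


class LLMProvider:
    """Base strategy: each provider declares its supported models and a default."""
    name: str = ""
    models: List[str] = []
    default_model: str = ""


class OpenAIProvider(LLMProvider):
    name = "OpenAI"
    models = ["gpt-3.5-turbo", "gpt-4o-mini", "gpt-4o"]
    default_model = "gpt-4o-mini"


class AnthropicProvider(LLMProvider):
    name = "Anthropic"
    models = ["claude-3-sonnet-20240229", "claude-3-opus-20240229"]
    default_model = "claude-3-sonnet-20240229"


# Registry keyed by the LLM_PROVIDER value; the keys here are illustrative.
SUPPORTED_PROVIDERS = {
    "openai": OpenAIProvider,
    "anthropic": AnthropicProvider,
}


def resolve_llm_config() -> Tuple[Optional[str], Optional[str], Optional[str]]:
    """Return (api_key, model, provider_key), or (None, None, None) if misconfigured."""
    api_key = os.environ.get("LLM_API_KEY")
    provider_key = os.environ.get("LLM_PROVIDER", "openai")
    model = os.environ.get("LLM_MODEL")

    provider = SUPPORTED_PROVIDERS.get(provider_key)
    if provider is None or not api_key:
        return None, None, None

    model = model or provider.default_model      # fall back to the provider's default
    if model not in provider.models:             # reject models the provider does not list
        return None, None, None

    return api_key, model, provider_key


print(resolve_llm_config())

With this shape, supporting another provider means adding one subclass and one registry entry; the lookup and validation code stays unchanged.
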
19 changes: 4 additions & 15 deletions apiserver/plane/license/management/commands/configure_instance.py
@@ -132,20 +132,8 @@ def handle(self, *args, **options):
"is_encrypted": False,
},
{
"key": "OPENAI_API_KEY",
"value": os.environ.get("OPENAI_API_KEY"),
"category": "AI",
"is_encrypted": True,
},
{
"key": "ANTHROPIC_API_KEY",
"value": os.environ.get("ANTHROPIC_API_KEY"),
"category": "AI",
"is_encrypted": True,
},
{
"key": "GEMINI_API_KEY",
"value": os.environ.get("GEMINI_API_KEY"),
"key": "LLM_API_KEY",
"value": os.environ.get("LLM_API_KEY"),
"category": "AI",
"is_encrypted": True,
},
@@ -161,8 +149,9 @@ def handle(self, *args, **options):
"category": "AI",
"is_encrypted": False,
},
# Deprecated, use LLM_MODEL
{
"key": "GPT_ENGINE",
"key": "GPT_ENGINE",
"value": os.environ.get("GPT_ENGINE", "gpt-3.5-turbo"),
"category": "SMTP",
"is_encrypted": False,
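The management command above now seeds a single LLM_API_KEY (encrypted) instead of the three provider-specific keys, alongside the other AI settings, and keeps GPT_ENGINE with a comment marking it deprecated in favor of LLM_MODEL. The actual handle() implementation is not shown in this diff; a rough sketch of such a seeding loop, with a hypothetical in-memory store standing in for Plane's instance configuration model, might look like this:

import os
from dataclasses import dataclass
from typing import Dict, List, Optional


@dataclass
class ConfigEntry:
    key: str
    value: Optional[str]
    category: str
    is_encrypted: bool  # encrypted entries would normally pass through a cipher or secret store


def seed_configuration(store: Dict[str, str], entries: List[ConfigEntry]) -> Dict[str, str]:
    """Insert entries that are not configured yet; never overwrite operator-set values."""
    for entry in entries:
        if entry.key not in store and entry.value is not None:
            store[entry.key] = entry.value
    return store


seeded = seed_configuration(
    store={},
    entries=[
        ConfigEntry("LLM_API_KEY", os.environ.get("LLM_API_KEY"), "AI", True),
        ConfigEntry("LLM_PROVIDER", os.environ.get("LLM_PROVIDER", "openai"), "AI", False),
        ConfigEntry("LLM_MODEL", os.environ.get("LLM_MODEL", "gpt-4o-mini"), "AI", False),
    ],
)
print(seeded)

The never-overwrite guard is an assumption about typical seeding behavior, not something this diff shows.
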
