diff --git a/backend/openedx_ai_extensions/processors/llm_processor.py b/backend/openedx_ai_extensions/processors/llm_processor.py index fe6bf1f..2504790 100644 --- a/backend/openedx_ai_extensions/processors/llm_processor.py +++ b/backend/openedx_ai_extensions/processors/llm_processor.py @@ -4,6 +4,7 @@ import logging +from django.conf import settings from litellm import completion logger = logging.getLogger(__name__) @@ -18,11 +19,14 @@ def __init__(self, config=None): class_name = self.__class__.__name__ self.config = config.get(class_name, {}) + self.config_profile = self.config.get("config", "default") + # Extract API configuration once during initialization - self.api_key = self.config.get("api_key") - self.model = self.config.get("model") - self.temperature = self.config.get("temperature") # No default - self.max_tokens = self.config.get("max_tokens") # No default + self.api_key = settings.AI_EXTENSIONS[self.config_profile]['API_KEY'] + self.model = settings.AI_EXTENSIONS[self.config_profile]['LITELLM_MODEL'] + self.timeout = settings.AI_EXTENSIONS[self.config_profile]['TIMEOUT'] + self.temperature = settings.AI_EXTENSIONS[self.config_profile]['TEMPERATURE'] + self.max_tokens = settings.AI_EXTENSIONS[self.config_profile]['MAX_TOKENS'] if not self.api_key: logger.error("AI API key not configured") @@ -57,6 +61,8 @@ def _call_completion_api(self, system_role, user_content): completion_params["temperature"] = self.temperature if self.max_tokens is not None: completion_params["max_tokens"] = self.max_tokens + if self.timeout is not None: + completion_params["timeout"] = self.timeout response = completion(**completion_params) content = response.choices[0].message.content diff --git a/backend/openedx_ai_extensions/settings/common.py b/backend/openedx_ai_extensions/settings/common.py index 189785a..d1854b5 100644 --- a/backend/openedx_ai_extensions/settings/common.py +++ b/backend/openedx_ai_extensions/settings/common.py @@ -1,6 +1,9 @@ """ Common settings for the 
openedx_ai_extensions application. """ +import logging + +logger = logging.getLogger(__name__) def plugin_settings(settings): # pylint: disable=unused-argument @@ -10,5 +13,3 @@ def plugin_settings(settings): # pylint: disable=unused-argument Args: settings (dict): Django settings object """ - settings.AI_MODEL = 'gpt-4.1-mini' - settings.OPENAI_API_KEY = "make_it_read_from_tutor" diff --git a/backend/openedx_ai_extensions/settings/production.py b/backend/openedx_ai_extensions/settings/production.py index 2b891ca..41bbfe7 100644 --- a/backend/openedx_ai_extensions/settings/production.py +++ b/backend/openedx_ai_extensions/settings/production.py @@ -14,3 +14,22 @@ def plugin_settings(settings): """ # Apply common settings common_settings(settings) + LITELLM_BASE = { + "TIMEOUT": 600, # Request timeout in seconds + "MAX_TOKENS": 4096, # Max tokens per request + "TEMPERATURE": 0.7, # Response randomness (0-1) + } + + # Truthiness guard: hasattr() alone lets an *empty* AI_EXTENSIONS dict + # through, and next(iter({})) raises StopIteration at startup. + if getattr(settings, "AI_EXTENSIONS", None): + first_key = next(iter(settings.AI_EXTENSIONS)) + + # Merge base config into all profiles + merged_extensions = {} + for key, config in settings.AI_EXTENSIONS.items(): + merged_extensions[key] = {**LITELLM_BASE, **config} + + # Make first profile also default + settings.AI_EXTENSIONS = { + "default": {**LITELLM_BASE, **settings.AI_EXTENSIONS[first_key]}, + **merged_extensions + } diff --git a/backend/openedx_ai_extensions/workflows/models.py b/backend/openedx_ai_extensions/workflows/models.py index aa81066..5688065 100644 --- a/backend/openedx_ai_extensions/workflows/models.py +++ b/backend/openedx_ai_extensions/workflows/models.py @@ -5,7 +5,6 @@ import logging from typing import Any, Dict, Optional -from django.conf import settings from django.contrib.auth import get_user_model from django.core.exceptions import ValidationError from django.db import models @@ -79,11 +78,8 @@ def get_config(cls, action: str, course_id: Optional[str] = None): "char_limit": 300, }, 'LLMProcessor': { - 'api_key': 
settings.OPENAI_API_KEY, - 'model': settings.AI_MODEL, - 'temperature': 0.7, - # 'function': "summarize_content", 'function': "explain_like_five", + 'config': "default", }, }, actuator_config={}, # TODO: first I must make the actuator selection dynamic diff --git a/backend/tests/__init__.py b/backend/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/tests/test_api.py b/backend/tests/test_api.py index b685ad8..bf7f854 100644 --- a/backend/tests/test_api.py +++ b/backend/tests/test_api.py @@ -58,3 +58,69 @@ def test_api_urls_are_registered(): # Test that the v1 workflows URL can be reversed url = reverse("openedx_ai_extensions:api:v1:ai_pipelines") assert url == "/openedx-ai-extensions/v1/workflows/" + + +@pytest.mark.django_db +def test_workflows_endpoint_requires_authentication(api_client): # pylint: disable=redefined-outer-name + """ + Test that the workflows endpoint requires authentication. + """ + url = reverse("openedx_ai_extensions:api:v1:ai_pipelines") + + # Test POST without authentication + response = api_client.post(url, {}, format="json") + assert response.status_code == 302 # Redirect to login + + # Test GET without authentication + response = api_client.get(url) + assert response.status_code == 302 # Redirect to login + + +@pytest.mark.django_db +@pytest.mark.usefixtures("user") +def test_workflows_post_with_authentication(api_client, course_key): # pylint: disable=redefined-outer-name + """ + Test POST request to workflows endpoint with authentication. 
+ """ + api_client.login(username="testuser", password="password123") + url = reverse("openedx_ai_extensions:api:v1:ai_pipelines") + + payload = { + "action": "summarize", + "courseId": str(course_key), + "context": {"unitId": "unit-123"}, + "user_input": {"text": "Explain quantum physics"}, + "requestId": "test-request-123", + } + + response = api_client.post(url, payload, format="json") + + # Should return 200 or 500 depending on workflow execution + assert response.status_code in [200, 400, 500] + + # Response should be JSON + assert response["Content-Type"] == "application/json" + + # Check for expected fields in response + data = response.json() + assert "requestId" in data + assert "timestamp" in data + assert "workflow_created" in data + + +@pytest.mark.django_db +@pytest.mark.usefixtures("user", "course_key") +def test_workflows_get_with_authentication(api_client): # pylint: disable=redefined-outer-name + """ + Test GET request to workflows endpoint with authentication. + """ + api_client.login(username="testuser", password="password123") + url = reverse("openedx_ai_extensions:api:v1:ai_pipelines") + + response = api_client.get(url) + + # Should return 200 or error status + assert response.status_code in [200, 400, 500] + + # Response should be JSON + assert response["Content-Type"] == "application/json" diff --git a/tutor/openedx_ai_extensions/patches/openedx-auth b/tutor/openedx_ai_extensions/patches/openedx-auth new file mode 100644 index 0000000..2116bbb --- /dev/null +++ b/tutor/openedx_ai_extensions/patches/openedx-auth @@ -0,0 +1,21 @@ +{%- if AI_EXTENSIONS is defined %} +AI_EXTENSIONS: +{%- for profile_key, profile_config in AI_EXTENSIONS.items() %} + {{ profile_key }}: + {%- for key, value in profile_config.items() %} + {{ key }}: "{{ value }}" + {%- endfor %} +{%- endfor %} +{%- elif AI_EXTENSIONS_OPENAI_API_KEY is defined or AI_EXTENSIONS_ANTHROPIC_API_KEY is defined %} +AI_EXTENSIONS: + {%- if AI_EXTENSIONS_OPENAI_API_KEY is defined %} + openai: 
+ API_KEY: "{{ AI_EXTENSIONS_OPENAI_API_KEY }}" + LITELLM_MODEL: "{{ AI_EXTENSIONS_OPENAI_MODEL | default("gpt-4.1-mini") }}" + {%- endif %} + {%- if AI_EXTENSIONS_ANTHROPIC_API_KEY is defined %} + anthropic: + API_KEY: "{{ AI_EXTENSIONS_ANTHROPIC_API_KEY }}" + LITELLM_MODEL: "{{ AI_EXTENSIONS_ANTHROPIC_MODEL | default("claude-3-haiku-20240307") }}" + {%- endif %} +{% endif %} diff --git a/tutor/openedx_ai_extensions/plugin.py b/tutor/openedx_ai_extensions/plugin.py index a815644..3d18ad5 100644 --- a/tutor/openedx_ai_extensions/plugin.py +++ b/tutor/openedx_ai_extensions/plugin.py @@ -4,9 +4,7 @@ import importlib_resources from tutor import hooks -from tutormfe.hooks import MFE_APPS, PLUGIN_SLOTS - -from .__about__ import __version__ +from tutormfe.hooks import PLUGIN_SLOTS ########################