Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 10 additions & 4 deletions backend/openedx_ai_extensions/processors/llm_processor.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@

import logging

from django.conf import settings
from litellm import completion

logger = logging.getLogger(__name__)
Expand All @@ -18,11 +19,14 @@ def __init__(self, config=None):
class_name = self.__class__.__name__
self.config = config.get(class_name, {})

self.config_profile = self.config.get("config", "default")

# Extract API configuration once during initialization
self.api_key = self.config.get("api_key")
self.model = self.config.get("model")
self.temperature = self.config.get("temperature") # No default
self.max_tokens = self.config.get("max_tokens") # No default
self.api_key = settings.AI_EXTENSIONS[self.config_profile]['API_KEY']
self.model = settings.AI_EXTENSIONS[self.config_profile]['LITELLM_MODEL']
self.timeout = settings.AI_EXTENSIONS[self.config_profile]['TIMEOUT']
self.temperature = settings.AI_EXTENSIONS[self.config_profile]['TEMPERATURE']
self.max_tokens = settings.AI_EXTENSIONS[self.config_profile]['MAX_TOKENS']

if not self.api_key:
logger.error("AI API key not configured")
Expand Down Expand Up @@ -57,6 +61,8 @@ def _call_completion_api(self, system_role, user_content):
completion_params["temperature"] = self.temperature
if self.max_tokens is not None:
completion_params["max_tokens"] = self.max_tokens
if self.timeout is not None:
completion_params["timeout"] = self.timeout

response = completion(**completion_params)
content = response.choices[0].message.content
Expand Down
5 changes: 3 additions & 2 deletions backend/openedx_ai_extensions/settings/common.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,9 @@
"""
Common settings for the openedx_ai_extensions application.
"""
import logging

logger = logging.getLogger(__name__)


def plugin_settings(settings): # pylint: disable=unused-argument
Expand All @@ -10,5 +13,3 @@ def plugin_settings(settings): # pylint: disable=unused-argument
Args:
settings (dict): Django settings object
"""
settings.AI_MODEL = 'gpt-4.1-mini'
settings.OPENAI_API_KEY = "make_it_read_from_tutor"
19 changes: 19 additions & 0 deletions backend/openedx_ai_extensions/settings/production.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,3 +14,22 @@ def plugin_settings(settings):
"""
# Apply common settings
common_settings(settings)
LITELLM_BASE = {
"TIMEOUT": 600, # Request timeout in seconds
"MAX_TOKENS": 4096, # Max tokens per request
"TEMPERATURE": 0.7, # Response randomness (0-1)
}

if hasattr(settings, "AI_EXTENSIONS"):
first_key = next(iter(settings.AI_EXTENSIONS))

# Merge base config into all profiles
merged_extensions = {}
for key, config in settings.AI_EXTENSIONS.items():
merged_extensions[key] = {**LITELLM_BASE, **config}

# Make first profile also default
settings.AI_EXTENSIONS = {
"default": {**LITELLM_BASE, **settings.AI_EXTENSIONS[first_key]},
**merged_extensions
}
6 changes: 1 addition & 5 deletions backend/openedx_ai_extensions/workflows/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@
import logging
from typing import Any, Dict, Optional

from django.conf import settings
from django.contrib.auth import get_user_model
from django.core.exceptions import ValidationError
from django.db import models
Expand Down Expand Up @@ -79,11 +78,8 @@ def get_config(cls, action: str, course_id: Optional[str] = None):
"char_limit": 300,
},
'LLMProcessor': {
'api_key': settings.OPENAI_API_KEY,
'model': settings.AI_MODEL,
'temperature': 0.7,
# 'function': "summarize_content",
'function': "explain_like_five",
'config': "default",
},
},
actuator_config={}, # TODO: first I must make the actuator selection dynamic
Expand Down
Empty file added backend/tests/__init__.py
Empty file.
66 changes: 66 additions & 0 deletions backend/tests/test_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -58,3 +58,69 @@ def test_api_urls_are_registered():
# Test that the v1 workflows URL can be reversed
url = reverse("openedx_ai_extensions:api:v1:ai_pipelines")
assert url == "/openedx-ai-extensions/v1/workflows/"


@pytest.mark.django_db
def test_workflows_endpoint_requires_authentication(api_client):  # pylint: disable=redefined-outer-name
    """
    Verify that unauthenticated requests to the workflows endpoint are rejected.

    Both POST and GET must not be served anonymously; each is redirected
    (HTTP 302) to the login page instead.
    """
    url = reverse("openedx_ai_extensions:api:v1:ai_pipelines")

    # Neither verb may be served without a session: both redirect to login.
    for response in (
        api_client.post(url, {}, format="json"),
        api_client.get(url),
    ):
        assert response.status_code == 302  # Redirect to login


@pytest.mark.django_db
@pytest.mark.usefixtures("user")
def test_workflows_post_with_authentication(api_client, course_key):  # pylint: disable=redefined-outer-name
    """
    Exercise an authenticated POST to the workflows endpoint.

    The workflow itself may succeed or fail in this environment, so any of
    200/400/500 is acceptable; the response must still be JSON and carry the
    envelope fields callers rely on.
    """
    api_client.login(username="testuser", password="password123")

    request_body = {
        "action": "summarize",
        "courseId": str(course_key),
        "context": {"unitId": "unit-123"},
        "user_input": {"text": "Explain quantum physics"},
        "requestId": "test-request-123",
    }
    response = api_client.post(
        reverse("openedx_ai_extensions:api:v1:ai_pipelines"),
        request_body,
        format="json",
    )

    # Status depends on whether the workflow could actually execute here.
    assert response.status_code in (200, 400, 500)
    assert response["Content-Type"] == "application/json"

    # Envelope fields must be present regardless of workflow outcome.
    body = response.json()
    for field in ("requestId", "timestamp", "workflow_created"):
        assert field in body


@pytest.mark.django_db
@pytest.mark.usefixtures("user", "course_key")
def test_workflows_get_with_authentication(api_client):  # pylint: disable=redefined-outer-name
    """
    Exercise an authenticated GET against the workflows endpoint.

    Either a success (200) or a handled error (400/500) is acceptable;
    the body must be JSON in every case.
    """
    api_client.login(username="testuser", password="password123")

    response = api_client.get(reverse("openedx_ai_extensions:api:v1:ai_pipelines"))

    assert response.status_code in (200, 400, 500)
    assert response["Content-Type"] == "application/json"
21 changes: 21 additions & 0 deletions tutor/openedx_ai_extensions/patches/openedx-auth
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
{# Tutor "openedx-auth" patch: expose AI_EXTENSIONS provider profiles to the Open edX settings. #}
{# Preferred form: operator supplies a full AI_EXTENSIONS mapping (profile name -> config dict). #}
{# Fallback form: build one profile per provider from the per-provider API-key variables. #}
{# NOTE(review): every value below is emitted inside double quotes, so numeric settings #}
{# (e.g. TIMEOUT, MAX_TOKENS, TEMPERATURE) arrive as YAML strings — confirm consumers cast them. #}
{%- if AI_EXTENSIONS is defined %}
AI_EXTENSIONS:
{%- for profile_key, profile_config in AI_EXTENSIONS.items() %}
  {{ profile_key }}:
{%- for key, value in profile_config.items() %}
    {{ key }}: "{{ value }}"
{%- endfor %}
{%- endfor %}
{%- elif AI_EXTENSIONS_OPENAI_API_KEY is defined or AI_EXTENSIONS_ANTHROPIC_API_KEY is defined %}
AI_EXTENSIONS:
{%- if AI_EXTENSIONS_OPENAI_API_KEY is defined %}
  openai:
    API_KEY: "{{ AI_EXTENSIONS_OPENAI_API_KEY }}"
    LITELLM_MODEL: "{{ AI_EXTENSIONS_OPENAI_MODEL | default("gpt-4.1-mini") }}"
{%- endif %}
{%- if AI_EXTENSIONS_ANTHROPIC_API_KEY is defined %}
  anthropic:
    API_KEY: "{{ AI_EXTENSIONS_ANTHROPIC_API_KEY }}"
    LITELLM_MODEL: "{{ AI_EXTENSIONS_ANTHROPIC_MODEL | default("claude-3-haiku-20240307") }}"
{%- endif %}
{% endif %}
4 changes: 1 addition & 3 deletions tutor/openedx_ai_extensions/plugin.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,9 +4,7 @@

import importlib_resources
from tutor import hooks
from tutormfe.hooks import MFE_APPS, PLUGIN_SLOTS

from .__about__ import __version__
from tutormfe.hooks import PLUGIN_SLOTS


########################
Expand Down