Skip to content

Commit e4dafbb

Browse files
committed
feat: format the LLM provider settings as profiles.
--- adds a simple key-only configuration for OpenAI and Anthropic, with override capability
1 parent ecdda37 commit e4dafbb

File tree

6 files changed

+35
-40
lines changed

6 files changed

+35
-40
lines changed

backend/openedx_ai_extensions/processors/llm_processor.py

Lines changed: 13 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@
33
"""
44

55
import logging
6+
from django.conf import settings
67

78
from litellm import completion
89

@@ -18,11 +19,14 @@ def __init__(self, config=None):
1819
class_name = self.__class__.__name__
1920
self.config = config.get(class_name, {})
2021

22+
self.config_profile = self.config.get("config", "default")
23+
2124
# Extract API configuration once during initialization
22-
self.api_key = self.config.get("api_key")
23-
self.model = self.config.get("model")
24-
self.temperature = self.config.get("temperature") # No default
25-
self.max_tokens = self.config.get("max_tokens") # No default
25+
self.api_key = settings.AI_EXTENSIONS[self.config_profile]['API_KEY']
26+
self.model = settings.AI_EXTENSIONS[self.config_profile]['LITELLM_MODEL']
27+
self.timeout = settings.AI_EXTENSIONS[self.config_profile]['TIMEOUT']
28+
self.temperature = settings.AI_EXTENSIONS[self.config_profile]['TEMPERATURE']
29+
self.max_tokens = settings.AI_EXTENSIONS[self.config_profile]['MAX_TOKENS']
2630

2731
if not self.api_key:
2832
logger.error("AI API key not configured")
@@ -57,6 +61,11 @@ def _call_completion_api(self, system_role, user_content):
5761
completion_params["temperature"] = self.temperature
5862
if self.max_tokens is not None:
5963
completion_params["max_tokens"] = self.max_tokens
64+
if self.timeout is not None:
65+
completion_params["timeout"] = self.timeout
66+
67+
from pprint import pprint
68+
pprint(completion_params)
6069

6170
response = completion(**completion_params)
6271
content = response.choices[0].message.content
Lines changed: 1 addition & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,7 @@
11
"""
22
Common settings for the openedx_ai_extensions application.
33
"""
4-
import os
54
import logging
6-
from copy import deepcopy
75

86
logger = logging.getLogger(__name__)
97

@@ -15,19 +13,4 @@ def plugin_settings(settings): # pylint: disable=unused-argument
1513
Args:
1614
settings (dict): Django settings object
1715
"""
18-
CONFIG_DEFAULTS = {
19-
"default": {
20-
"API_KEY": "put_your_api_key_here",
21-
"LITELLM_MODEL": "gpt-5-mini",
22-
"TEMPERATURE": 1,
23-
}
24-
}
25-
config = deepcopy(CONFIG_DEFAULTS)
26-
if hasattr(settings, "OPENEDX_AI_EXTENSIONS"):
27-
for section, values in settings.OPENEDX_AI_EXTENSIONS.items():
28-
if section in config:
29-
logger.warning(f"OpenedX AI Extensions settings: {settings.OPENEDX_AI_EXTENSIONS}")
30-
config[section].update(values)
31-
else:
32-
config[section] = values
33-
settings.OPENEDX_AI_EXTENSIONS = config
16+
pass

backend/openedx_ai_extensions/settings/production.py

Lines changed: 19 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -14,3 +14,22 @@ def plugin_settings(settings):
1414
"""
1515
# Apply common settings
1616
common_settings(settings)
17+
LITELLM_BASE = {
18+
"TIMEOUT": 600, # Request timeout in seconds
19+
"MAX_TOKENS": 4096, # Max tokens per request
20+
"TEMPERATURE": 0.7, # Response randomness (0-1)
21+
}
22+
23+
if hasattr(settings, "AI_EXTENSIONS"):
24+
first_key = next(iter(settings.AI_EXTENSIONS))
25+
26+
# Merge base config into all profiles
27+
merged_extensions = {}
28+
for key, config in settings.AI_EXTENSIONS.items():
29+
merged_extensions[key] = {**LITELLM_BASE, **config}
30+
31+
# Make first profile also default
32+
settings.AI_EXTENSIONS = {
33+
"default": {**LITELLM_BASE, **settings.AI_EXTENSIONS[first_key]},
34+
**merged_extensions
35+
}

backend/openedx_ai_extensions/workflows/models.py

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -79,10 +79,8 @@ def get_config(cls, action: str, course_id: Optional[str] = None):
7979
"char_limit": 300,
8080
},
8181
'LLMProcessor': {
82-
'api_key': settings.OPENEDX_AI_EXTENSIONS['default']['API_KEY'],
83-
'model': settings.OPENEDX_AI_EXTENSIONS['default']['LITELLM_MODEL'],
84-
'temperature': settings.OPENEDX_AI_EXTENSIONS['default']['TEMPERATURE'],
85-
'function': "summarize_content",
82+
'function': "explain_like_five",
83+
'config': "default",
8684
},
8785
},
8886
actuator_config={}, # TODO: first I must make the actuator selection dynamic

tutor/openedx_ai_extensions/patches/openedx-common-settings

Lines changed: 0 additions & 1 deletion
This file was deleted.

tutor/openedx_ai_extensions/plugin.py

Lines changed: 0 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -6,19 +6,6 @@
66
from tutor import hooks
77
from tutormfe.hooks import PLUGIN_SLOTS
88

9-
hooks.Filters.CONFIG_DEFAULTS.add_items(
10-
[
11-
# Add your new settings that have default values here.
12-
# Each new setting is a pair: (setting_name, default_value).
13-
("OPENEDX_AI_EXTENSIONS", [{
14-
"default": {
15-
"API_KEY": "put_your_api_key_here",
16-
"LITELLM_MODEL": "gpt-5-mini",
17-
"TEMPERATURE": 1,
18-
}
19-
}]),
20-
]
21-
)
229

2310
########################
2411
# Plugin path management

0 commit comments

Comments
 (0)