feat: dynamically load ollama models #5309
Changes from 2 commits
```diff
@@ -285,6 +285,43 @@ impl Provider for OllamaProvider {
             }
         }))
     }
+
+    /// Fetch the list of available models from Ollama
+    async fn fetch_supported_models(&self) -> Result<Option<Vec<String>>, ProviderError> {
+        let response = self
+            .api_client
+            .response_get("api/tags")
+            .await
+            .map_err(|e| ProviderError::RequestFailed(format!("Failed to fetch models: {}", e)))?;
+
+        if !response.status().is_success() {
+            return Err(ProviderError::RequestFailed(format!(
+                "Failed to fetch models: HTTP {}",
+                response.status()
+            )));
+        }
+
+        let json_response = response.json::<Value>().await.map_err(|e| {
+            ProviderError::RequestFailed(format!("Failed to parse response: {}", e))
+        })?;
+
+        let models = json_response
+            .get("models")
+            .and_then(|m| m.as_array())
+            .ok_or_else(|| {
+                ProviderError::RequestFailed("No models array in response".to_string())
+            })?;
+
+        let mut model_names: Vec<String> = models
+            .iter()
+            .filter_map(|model| model.get("name").and_then(|n| n.as_str()).map(String::from))
+            .collect();
+
+        // Sort alphabetically
+        model_names.sort();
+
+        Ok(Some(model_names))
+    }
 }
 
 impl OllamaProvider {
```
Collaborator: this feels more verbose than it needs to be - do we need to sort the models?
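For reference, here is a minimal sketch (not part of the PR) of the `api/tags` payload shape that the new `fetch_supported_models` parser assumes: a top-level `models` array whose entries carry a `name` field. The model names and the extra `size` field below are illustrative only; the extraction mirrors the `serde_json` chain in the diff above and needs the `serde_json` crate to run.

```rust
use serde_json::{json, Value};

fn main() {
    // Hypothetical `api/tags` response; extra fields are ignored by the parser.
    let json_response: Value = json!({
        "models": [
            { "name": "qwen2.5:latest", "size": 4_683_087_332_u64 },
            { "name": "llama3.2:3b" }
        ]
    });

    // Same extraction the provider performs, minus the error mapping.
    let mut model_names: Vec<String> = json_response
        .get("models")
        .and_then(|m| m.as_array())
        .map(|models| {
            models
                .iter()
                .filter_map(|m| m.get("name").and_then(|n| n.as_str()).map(String::from))
                .collect()
        })
        .unwrap_or_default();

    // The PR sorts alphabetically before returning the list.
    model_names.sort();
    assert_eq!(model_names, vec!["llama3.2:3b", "qwen2.5:latest"]);
}
```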
```diff
@@ -13,7 +13,7 @@ interface LeadWorkerSettingsProps {
 }
 
 export function LeadWorkerSettings({ isOpen, onClose }: LeadWorkerSettingsProps) {
-  const { read, upsert, getProviders, remove } = useConfig();
+  const { read, upsert, getProviders, getProviderModels, remove } = useConfig();
   const { currentModel } = useModelAndProvider();
   const [leadModel, setLeadModel] = useState<string>('');
   const [workerModel, setWorkerModel] = useState<string>('');
@@ -103,13 +103,39 @@ export function LeadWorkerSettings({ isOpen, onClose }: LeadWorkerSettingsProps)
       const providers = await getProviders(false);
       const activeProviders = providers.filter((p) => p.is_configured);
 
-      activeProviders.forEach(({ metadata, name }) => {
-        if (metadata.known_models) {
-          metadata.known_models.forEach((model) => {
+      const modelPromises = activeProviders.map(async (p) => {
+        try {
+          const models = await getProviderModels(p.name);
+          return { provider: p, models, error: null };
+        } catch (error) {
+          return { provider: p, models: null, error };
+        }
+      });
+
+      const results = await Promise.all(modelPromises);
+
+      // Process results and build options
+      results.forEach(({ provider: p, models, error }) => {
+        if (error) {
+          console.error(`Error fetching models for provider ${p.name}:`, error);
+        }
+
+        // Use dynamically fetched models if available
+        if (models && models.length > 0) {
+          models.forEach((modelName) => {
+            options.push({
+              value: modelName,
+              label: `${modelName} (${p.metadata.display_name})`,
+              provider: p.name,
+            });
+          });
+        } else if (p.metadata.known_models && p.metadata.known_models.length > 0) {
+          // Fallback to known_models if no models were fetched or on error
+          p.metadata.known_models.forEach((model) => {
             options.push({
               value: model.name,
-              label: `${model.name} (${metadata.display_name})`,
-              provider: name,
+              label: `${model.name} (${p.metadata.display_name})`,
+              provider: p.name,
             });
           });
         }
@@ -128,7 +154,7 @@ export function LeadWorkerSettings({ isOpen, onClose }: LeadWorkerSettingsProps)
     };
 
     loadConfig();
-  }, [read, getProviders, currentModel, isOpen]);
+  }, [read, getProviders, getProviderModels, currentModel, isOpen]);
 
   // If current models are not in the list (e.g., previously set to custom), switch to custom mode
   useEffect(() => {
```
Review comment: rm