diff --git a/Cargo.lock b/Cargo.lock index e56eee6..02a88ca 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1182,7 +1182,7 @@ checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" [[package]] name = "smartcat" -version = "1.3.0" +version = "1.4.1" dependencies = [ "clap", "device_query", diff --git a/Cargo.toml b/Cargo.toml index b481acb..7116db5 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "smartcat" -version = "1.4.0" +version = "1.4.1" authors = ["Emilien Fugier "] description = ''' Putting a brain behind `cat`. diff --git a/README.md b/README.md index 46d62e4..09f442e 100644 --- a/README.md +++ b/README.md @@ -242,7 +242,7 @@ Three files are used: [ollama] # local API, no key required url = "http://localhost:11434/api/chat" default_model = "phi3" -timeout_seconds = 30 +timeout_seconds = 180 # default timeout if not specified [openai] # each supported api has their own config section with api and url api_key = "" @@ -361,12 +361,14 @@ sc test -v -c src/**/* The recording is then sent to a speech to text model, the resulting transcript is finally added to the prompt and sent to the text model to get an answer. -On linux: -On Mac: -On windows: +On Linux: TODO +On Mac: TODO +On Windows: TODO To debug, you can check the `conversation.toml` file or listen to the `audio.wav` in the smart config home and see what the model heard and transcripted. +This feature should be offered as an extra down the road, totally optional on install. PRs are welcomed! + ## How to help? 
diff --git a/src/config/api.rs b/src/config/api.rs index 029934e..c84bba6 100644 --- a/src/config/api.rs +++ b/src/config/api.rs @@ -64,8 +64,15 @@ pub struct ApiConfig { pub default_model: Option, #[serde(skip_serializing_if = "Option::is_none")] pub version: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub timeout_seconds: Option, + #[serde( + default = "default_timeout_seconds", + skip_serializing_if = "Option::is_none" + )] + pub timeout_seconds: Option, +} + +pub(super) fn default_timeout_seconds() -> Option { + Some(180) } impl Default for ApiConfig { @@ -103,7 +110,7 @@ impl ApiConfig { url: String::from("http://localhost:11434/api/chat"), default_model: Some(String::from("phi3")), version: None, - timeout_seconds: Some(30), + timeout_seconds: Some(180), } } @@ -114,7 +121,7 @@ impl ApiConfig { url: String::from("https://api.openai.com/v1/chat/completions"), default_model: Some(String::from("gpt-4")), version: None, - timeout_seconds: Some(30), + timeout_seconds: None, } } @@ -125,7 +132,7 @@ impl ApiConfig { url: String::from("https://api.mistral.ai/v1/chat/completions"), default_model: Some(String::from("mistral-medium")), version: None, - timeout_seconds: Some(30), + timeout_seconds: None, } } @@ -136,7 +143,7 @@ impl ApiConfig { url: String::from("https://api.groq.com/openai/v1/chat/completions"), default_model: Some(String::from("llama3-70b-8192")), version: None, - timeout_seconds: Some(30), + timeout_seconds: None, } } @@ -147,7 +154,7 @@ impl ApiConfig { url: String::from("https://api.anthropic.com/v1/messages"), default_model: Some(String::from("claude-3-opus-20240229")), version: Some(String::from("2023-06-01")), - timeout_seconds: Some(30), + timeout_seconds: None, } } } diff --git a/src/config/mod.rs b/src/config/mod.rs index 4d16a53..98e5ac6 100644 --- a/src/config/mod.rs +++ b/src/config/mod.rs @@ -107,7 +107,7 @@ fn is_executable_in_path(executable_name: &str) -> bool { mod tests { use crate::{ config::{ - api::{api_keys_path, 
Api, ApiConfig}, + api::{api_keys_path, default_timeout_seconds, Api, ApiConfig}, ensure_config_files, prompt::{prompts_path, Prompt}, resolve_config_path, @@ -293,24 +293,21 @@ mod tests { // Check if the content matches the default values // API - assert_eq!( - api_config.get(&Prompt::default().api.to_string()), - Some(&ApiConfig::default()) - ); - assert_eq!( - api_config.get(&Api::Mistral.to_string()), - Some(&ApiConfig::mistral()) - ); - - assert_eq!( - api_config.get(&Api::Groq.to_string()), - Some(&ApiConfig::groq()) - ); - - assert_eq!( - api_config.get(&Api::Anthropic.to_string()), - Some(&ApiConfig::anthropic()) - ); + for (api, expected_config) in [ + (Prompt::default().api.to_string(), ApiConfig::default()), + (Api::Mistral.to_string(), ApiConfig::mistral()), + (Api::Groq.to_string(), ApiConfig::groq()), + (Api::Anthropic.to_string(), ApiConfig::anthropic()), + ] { + let config = api_config.get(&api).unwrap(); + assert_eq!( + ApiConfig { + timeout_seconds: default_timeout_seconds(), + ..expected_config + }, + *config + ); + } // Prompts let default_prompt = Prompt::default(); diff --git a/src/main.rs b/src/main.rs index b66f2a9..6bd624f 100644 --- a/src/main.rs +++ b/src/main.rs @@ -24,7 +24,7 @@ const DEFAULT_PROMPT_NAME: &str = "default"; #[command( name = "smartcat (sc)", author = "Emilien Fugier", - version = "1.4.0", + version = "1.4.1", about = "Putting a brain behind `cat`. CLI interface to bring language models in the Unix ecosystem 🐈‍⬛", long_about = None, after_help = "Examples: