diff --git a/crates/goose/src/model.rs b/crates/goose/src/model.rs index 22c4473c6e98..7c1096103c53 100644 --- a/crates/goose/src/model.rs +++ b/crates/goose/src/model.rs @@ -28,9 +28,14 @@ static MODEL_SPECIFIC_LIMITS: Lazy<HashMap<&'static str, usize>> = Lazy::new(|| map.insert("llama3.3", 128_000); // x.ai Grok models, https://docs.x.ai/docs/overview + map.insert("grok", 131_072); + + // Groq models, https://console.groq.com/docs/models + map.insert("gemma2-9b", 8_192); + map.insert("kimi-k2", 131_072); + map.insert("qwen3-32b", 131_072); map.insert("grok-3", 131_072); map.insert("grok-4", 256_000); // 256K - map.insert("qwen3-coder", 262_144); // 262K map diff --git a/crates/goose/src/providers/groq.rs b/crates/goose/src/providers/groq.rs index 14e300c02348..e4c843702d90 100644 --- a/crates/goose/src/providers/groq.rs +++ b/crates/goose/src/providers/groq.rs @@ -13,8 +13,13 @@ use std::time::Duration; use url::Url; pub const GROQ_API_HOST: &str = "https://api.groq.com"; -pub const GROQ_DEFAULT_MODEL: &str = "llama-3.3-70b-versatile"; -pub const GROQ_KNOWN_MODELS: &[&str] = &["gemma2-9b-it", "llama-3.3-70b-versatile"]; +pub const GROQ_DEFAULT_MODEL: &str = "moonshotai/kimi-k2-instruct"; +pub const GROQ_KNOWN_MODELS: &[&str] = &[ + "gemma2-9b-it", + "llama-3.3-70b-versatile", + "moonshotai/kimi-k2-instruct", + "qwen/qwen3-32b", +]; pub const GROQ_DOC_URL: &str = "https://console.groq.com/docs/models"; diff --git a/documentation/docs/getting-started/providers.md b/documentation/docs/getting-started/providers.md index edf0b7639c0a..05bf5d3f6a38 100644 --- a/documentation/docs/getting-started/providers.md +++ b/documentation/docs/getting-started/providers.md @@ -247,6 +247,41 @@ These free options are a great way to get started with Goose and explore its cap ::: +### Groq +Groq provides free access to open source models with high-speed inference. To use Groq with Goose, you need an API key from [Groq Console](https://console.groq.com/keys). 
+ +Groq offers several open source models that support tool calling: +- **moonshotai/kimi-k2-instruct** - Mixture-of-Experts model with 1 trillion parameters, optimized for agentic intelligence and tool use +- **qwen/qwen3-32b** - 32.8 billion parameter model with advanced reasoning and multilingual capabilities +- **gemma2-9b-it** - Google's Gemma 2 model with instruction tuning +- **llama-3.3-70b-versatile** - Meta's Llama 3.3 model for versatile applications + +To set up Groq with Goose, follow these steps: + + + + **To update your LLM provider and API key:** + + 1. Click the button in the top-left to open the sidebar. + 2. Click the `Settings` button on the sidebar. + 3. Click the `Models` tab. + 4. Click `Configure Providers`. + 5. Choose `Groq` as your provider from the list. + 6. Click `Configure`, enter your API key, and click `Submit`. + + + + 1. Run: + ```sh + goose configure + ``` + 2. Select `Configure Providers` from the menu. + 3. Follow the prompts to choose `Groq` as the provider. + 4. Enter your API key when prompted. + 5. Enter the Groq model of your choice (e.g., `moonshotai/kimi-k2-instruct`). + + + ### Google Gemini Google Gemini provides a free tier. To start using the Gemini API with Goose, you need an API Key from [Google AI studio](https://aistudio.google.com/app/apikey).