Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 2 additions & 7 deletions crates/goose/src/agents/agent.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1520,13 +1520,9 @@ impl Agent {
self.extension_manager
.suggest_disable_extensions_prompt()
.await,
Some(model_name),
model_name,
false,
);
tracing::debug!(
"Built system prompt with {} characters",
system_prompt.len()
);

let recipe_prompt = prompt_manager.get_recipe_prompt().await;
let tools = self
Expand All @@ -1537,7 +1533,6 @@ impl Agent {
tracing::error!("Failed to get tools for recipe creation: {}", e);
e
})?;
tracing::debug!("Retrieved {} tools for recipe creation", tools.len());

messages.push(Message::user().with_text(recipe_prompt));

Expand Down Expand Up @@ -1752,7 +1747,7 @@ mod tests {

let prompt_manager = agent.prompt_manager.lock().await;
let system_prompt =
prompt_manager.build_system_prompt(vec![], None, Value::Null, None, false);
prompt_manager.build_system_prompt(vec![], None, Value::Null, "gpt-4o", false);
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

why is this hardcoded now?

Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

this is a test, so we have to provide it something. the agent here isn't set up with a model. the test doesn't seem to do much so we could also drop it


let final_output_tool_ref = agent.final_output_tool.lock().await;
let final_output_tool_system_prompt =
Expand Down
152 changes: 41 additions & 111 deletions crates/goose/src/agents/prompt_manager.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,8 @@ use serde_json::Value;
use std::collections::HashMap;

use crate::agents::extension::ExtensionInfo;
use crate::agents::recipe_tools::dynamic_task_tools::should_enabled_subagents;
use crate::agents::router_tools::llm_search_tool_prompt;
use crate::providers::base::get_current_model;
use crate::{config::Config, prompt_template, utils::sanitize_unicode_tags};

pub struct PromptManager {
Expand Down Expand Up @@ -39,35 +39,12 @@ impl PromptManager {
self.system_prompt_override = Some(template);
}

/// Normalize a model name (replace - and / with _, lower case)
fn normalize_model_name(name: &str) -> String {
name.replace(['-', '/', '.'], "_").to_lowercase()
}

/// Map model (normalized) to prompt filenames; returns filename if a key is contained in the normalized model
fn model_prompt_map(model: &str) -> &'static str {
let mut map = HashMap::new();
map.insert("gpt_4_1", "system_gpt_4.1.md");
// Add more mappings as needed
let norm_model = Self::normalize_model_name(model);
for (key, val) in &map {
if norm_model.contains(key) {
return val;
}
}
"system.md"
}

/// Build the final system prompt
///
/// * `extensions_info` – extension information for each extension/MCP
/// * `frontend_instructions` – instructions for the "frontend" tool
pub fn build_system_prompt(
&self,
extensions_info: Vec<ExtensionInfo>,
frontend_instructions: Option<String>,
suggest_disable_extensions_prompt: Value,
model_name: Option<&str>,
model_name: &str,
router_enabled: bool,
) -> String {
let mut context: HashMap<&str, Value> = HashMap::new();
Expand Down Expand Up @@ -113,46 +90,30 @@ impl PromptManager {
Value::String(suggest_disable_extensions_prompt.to_string()),
);

// Add the mode to the context for conditional rendering
let config = Config::global();
let goose_mode = config.get_param("GOOSE_MODE").unwrap_or("auto".to_string());
context.insert("goose_mode", Value::String(goose_mode.clone()));
context.insert("is_autonomous", Value::Bool(goose_mode == "auto"));

// First check the global store, and only if it's not available, fall back to the provided model_name
let model_to_use: Option<String> =
get_current_model().or_else(|| model_name.map(|s| s.to_string()));
context.insert(
"enable_subagents",
Value::Bool(should_enabled_subagents(model_name)),
);

// Conditionally load the override prompt or the global system prompt
let base_prompt = if let Some(override_prompt) = &self.system_prompt_override {
let sanitized_override_prompt = sanitize_unicode_tags(override_prompt);
prompt_template::render_inline_once(&sanitized_override_prompt, &context)
.expect("Prompt should render")
} else if let Some(model) = &model_to_use {
// Use the fuzzy mapping to determine the prompt file, or fall back to legacy logic
let prompt_file = Self::model_prompt_map(model);
match prompt_template::render_global_file(prompt_file, &context) {
Ok(prompt) => prompt,
Err(_) => {
// Fall back to the standard system.md if model-specific one doesn't exist
prompt_template::render_global_file("system.md", &context)
.expect("Prompt should render")
}
}
} else {
prompt_template::render_global_file("system.md", &context)
.expect("Prompt should render")
};
}
.unwrap_or_else(|_| {
"You are a general-purpose AI agent called goose, created by Block".to_string()
});

let mut system_prompt_extras = self.system_prompt_extras.clone();
if goose_mode == "chat" {
system_prompt_extras.push(
"Right now you are in the chat only mode, no access to any tool use and system."
.to_string(),
);
} else {
system_prompt_extras
.push("Right now you are *NOT* in the chat only mode and have access to tool use and system.".to_string());
}

let sanitized_system_prompt_extras: Vec<String> = system_prompt_extras
Expand All @@ -173,74 +134,28 @@ impl PromptManager {

pub async fn get_recipe_prompt(&self) -> String {
let context: HashMap<&str, Value> = HashMap::new();
prompt_template::render_global_file("recipe.md", &context).expect("Prompt should render")
prompt_template::render_global_file("recipe.md", &context)
.unwrap_or_else(|_| "The recipe prompt is busted. Tell the user.".to_string())
}
}

#[cfg(test)]
mod tests {
use super::*;

#[test]
fn test_normalize_model_name() {
assert_eq!(PromptManager::normalize_model_name("gpt-4.1"), "gpt_4_1");
assert_eq!(PromptManager::normalize_model_name("gpt/3.5"), "gpt_3_5");
assert_eq!(
PromptManager::normalize_model_name("GPT-3.5/PLUS"),
"gpt_3_5_plus"
);
}

#[test]
fn test_model_prompt_map_matches() {
// should match prompts based on contained normalized keys
assert_eq!(
PromptManager::model_prompt_map("gpt-4.1"),
"system_gpt_4.1.md"
);

assert_eq!(
PromptManager::model_prompt_map("gpt-4.1-2025-04-14"),
"system_gpt_4.1.md"
);

assert_eq!(
PromptManager::model_prompt_map("openai/gpt-4.1"),
"system_gpt_4.1.md"
);
assert_eq!(
PromptManager::model_prompt_map("goose-gpt-4-1"),
"system_gpt_4.1.md"
);
assert_eq!(
PromptManager::model_prompt_map("gpt-4-1-huge"),
"system_gpt_4.1.md"
);
}

#[test]
fn test_model_prompt_map_none() {
// should return system.md for unrecognized/unsupported model names
assert_eq!(PromptManager::model_prompt_map("llama-3-70b"), "system.md");
assert_eq!(PromptManager::model_prompt_map("goose"), "system.md");
assert_eq!(
PromptManager::model_prompt_map("claude-3.7-sonnet"),
"system.md"
);
assert_eq!(
PromptManager::model_prompt_map("xxx-unknown-model"),
"system.md"
);
}

#[test]
fn test_build_system_prompt_sanitizes_override() {
let mut manager = PromptManager::new();
let malicious_override = "System prompt\u{E0041}\u{E0042}\u{E0043}with hidden text";
manager.set_system_prompt_override(malicious_override.to_string());

let result =
manager.build_system_prompt(vec![], None, Value::String("".to_string()), None, false);
let result = manager.build_system_prompt(
vec![],
None,
Value::String("".to_string()),
"gpt-4o",
false,
);

assert!(!result.contains('\u{E0041}'));
assert!(!result.contains('\u{E0042}'));
Expand All @@ -255,8 +170,13 @@ mod tests {
let malicious_extra = "Extra instruction\u{E0041}\u{E0042}\u{E0043}hidden";
manager.add_system_prompt_extra(malicious_extra.to_string());

let result =
manager.build_system_prompt(vec![], None, Value::String("".to_string()), None, false);
let result = manager.build_system_prompt(
vec![],
None,
Value::String("".to_string()),
"gpt-4o",
false,
);

assert!(!result.contains('\u{E0041}'));
assert!(!result.contains('\u{E0042}'));
Expand All @@ -272,8 +192,13 @@ mod tests {
manager.add_system_prompt_extra("Second\u{E0042}instruction".to_string());
manager.add_system_prompt_extra("Third\u{E0043}instruction".to_string());

let result =
manager.build_system_prompt(vec![], None, Value::String("".to_string()), None, false);
let result = manager.build_system_prompt(
vec![],
None,
Value::String("".to_string()),
"gpt-4o",
false,
);

assert!(!result.contains('\u{E0041}'));
assert!(!result.contains('\u{E0042}'));
Expand All @@ -289,8 +214,13 @@ mod tests {
let legitimate_unicode = "Instruction with 世界 and 🌍 emojis";
manager.add_system_prompt_extra(legitimate_unicode.to_string());

let result =
manager.build_system_prompt(vec![], None, Value::String("".to_string()), None, false);
let result = manager.build_system_prompt(
vec![],
None,
Value::String("".to_string()),
"gpt-4o",
false,
);

assert!(result.contains("世界"));
assert!(result.contains("🌍"));
Expand All @@ -311,7 +241,7 @@ mod tests {
vec![malicious_extension_info],
None,
Value::String("".to_string()),
None,
"gpt-4o",
false,
);

Expand Down
13 changes: 13 additions & 0 deletions crates/goose/src/agents/recipe_tools/dynamic_task_tools.rs
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@ pub struct CreateDynamicTaskParams {

/// How to execute multiple tasks (default: parallel for multiple tasks, sequential for single task)
#[serde(skip_serializing_if = "Option::is_none")]
#[schemars(with = "Option<String>")]
pub execution_mode: Option<ExecutionModeParam>,
}

Expand Down Expand Up @@ -90,6 +91,18 @@ pub struct TaskParameter {
pub return_last_only: Option<bool>,
}

/// Returns whether subagent tooling should be offered for the given model.
///
/// Subagents are only enabled when both hold:
/// 1. `GOOSE_MODE` is `"auto"` (autonomous mode; defaults to `"auto"` when unset), and
/// 2. the model is not a Gemini variant (by `model_name` prefix).
///
/// NOTE(review): the name reads oddly ("should_enable_subagents" would be
/// grammatical) but is kept unchanged for caller compatibility.
pub fn should_enabled_subagents(model_name: &str) -> bool {
    let goose_mode = crate::config::Config::global()
        .get_param("GOOSE_MODE")
        .unwrap_or("auto".to_string());
    // Both conditions must hold; either one failing disables subagents.
    goose_mode == "auto" && !model_name.starts_with("gemini")
}

pub fn create_dynamic_task_tool() -> Tool {
let schema = schema_for!(CreateDynamicTaskParams);
let schema_value =
Expand Down
16 changes: 7 additions & 9 deletions crates/goose/src/agents/reply_parts.rs
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@ use crate::providers::toolshim::{
modify_system_prompt_for_tool_json, OllamaInterpreter,
};

use crate::agents::recipe_tools::dynamic_task_tools::should_enabled_subagents;
use crate::session::SessionManager;
use rmcp::model::Tool;

Expand All @@ -32,23 +33,20 @@ async fn toolshim_postprocess(
}

impl Agent {
/// Prepares tools and system prompt for a provider request
pub async fn prepare_tools_and_prompt(&self) -> anyhow::Result<(Vec<Tool>, Vec<Tool>, String)> {
pub async fn prepare_tools_and_prompt(&self) -> Result<(Vec<Tool>, Vec<Tool>, String)> {
// Get router enabled status
let router_enabled = self.tool_route_manager.is_router_enabled().await;

// Get tools from extension manager
let mut tools = self.list_tools_for_router().await;

let config = crate::config::Config::global();
let is_autonomous = config.get_param("GOOSE_MODE").unwrap_or("auto".to_string()) == "auto";

// If router is disabled and no tools were returned, fall back to regular tools
if !router_enabled && tools.is_empty() {
// Get all tools but filter out subagent tools if not in autonomous mode
tools = self.list_tools(None).await;
if !is_autonomous {
// Filter out subagent-related tools
let provider = self.provider().await?;
let model_name = provider.get_model_config().model_name;

if !should_enabled_subagents(&model_name) {
tools.retain(|tool| {
tool.name != crate::agents::subagent_execution_tool::subagent_execute_task_tool::SUBAGENT_EXECUTE_TASK_TOOL_NAME
&& tool.name != crate::agents::recipe_tools::dynamic_task_tools::DYNAMIC_TASK_TOOL_NAME_PREFIX
Expand Down Expand Up @@ -77,7 +75,7 @@ impl Agent {
self.extension_manager
.suggest_disable_extensions_prompt()
.await,
Some(model_name),
model_name,
router_enabled,
);

Expand Down
Loading
Loading