Skip to content
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
196 changes: 119 additions & 77 deletions crates/goose/src/providers/formats/openai_responses.rs
Original file line number Diff line number Diff line change
Expand Up @@ -277,59 +277,37 @@ pub enum ContentPart {
},
}

fn add_conversation_history(input_items: &mut Vec<Value>, messages: &[Message]) {
fn add_message_items(input_items: &mut Vec<Value>, messages: &[Message]) {
for message in messages.iter().filter(|m| m.is_agent_visible()) {
let has_only_tool_content = message.content.iter().all(|c| {
matches!(
c,
MessageContent::ToolRequest(_) | MessageContent::ToolResponse(_)
)
});

if has_only_tool_content {
continue;
}

if message.role != Role::User && message.role != Role::Assistant {
continue;
}

let role = match message.role {
Role::User => "user",
Role::Assistant => "assistant",
};

let mut content_items = Vec::new();
let mut text_items = Vec::new();

for content in &message.content {
if let MessageContent::Text(text) = content {
if !text.text.is_empty() {
match content {
MessageContent::Text(text) if !text.text.is_empty() => {
let content_type = if message.role == Role::Assistant {
"output_text"
} else {
"input_text"
};
content_items.push(json!({
text_items.push(json!({
"type": content_type,
"text": text.text
}));
}
}
}

if !content_items.is_empty() {
input_items.push(json!({
"role": role,
"content": content_items
}));
}
}
}
MessageContent::ToolRequest(request) if message.role == Role::Assistant => {
if !text_items.is_empty() {
input_items.push(json!({
"role": role,
"content": text_items
}));
text_items = Vec::new();
}

fn add_function_calls(input_items: &mut Vec<Value>, messages: &[Message]) {
for message in messages.iter().filter(|m| m.is_agent_visible()) {
if message.role == Role::Assistant {
for content in &message.content {
if let MessageContent::ToolRequest(request) = content {
if let Ok(tool_call) = &request.tool_call {
let arguments_str = tool_call
.arguments
Expand All @@ -352,57 +330,64 @@ fn add_function_calls(input_items: &mut Vec<Value>, messages: &[Message]) {
}));
}
}
}
}
}
}

fn add_function_call_outputs(input_items: &mut Vec<Value>, messages: &[Message]) {
for message in messages {
for content in &message.content {
if let MessageContent::ToolResponse(response) = content {
match &response.tool_result {
Ok(contents) => {
let text_content: Vec<String> = contents
.content
.iter()
.filter_map(|c| {
if let RawContent::Text(t) = c.deref() {
Some(t.text.clone())
} else {
None
}
})
.collect();
MessageContent::ToolResponse(response) => {
if !text_items.is_empty() {
input_items.push(json!({
"role": role,
"content": text_items
}));
text_items = Vec::new();
}

if !text_content.is_empty() {
match &response.tool_result {
Ok(contents) => {
let text_content: Vec<String> = contents
.content
.iter()
.filter_map(|c| {
if let RawContent::Text(t) = c.deref() {
Some(t.text.clone())
} else {
None
}
})
.collect();

if !text_content.is_empty() {
tracing::debug!(
"Sending function_call_output with call_id: {}",
response.id
);
input_items.push(json!({
"type": "function_call_output",
"call_id": response.id,
"output": text_content.join("\n")
}));
}
}
Err(error_data) => {
tracing::debug!(
"Sending function_call_output with call_id: {}",
"Sending function_call_output error with call_id: {}",
response.id
);
input_items.push(json!({
"type": "function_call_output",
"call_id": response.id,
"output": text_content.join("\n")
"output": format!("Error: {}", error_data.message)
}));
}
}
Err(error_data) => {
// Handle error responses - must send them back to the API
// to avoid "No tool output found" errors
tracing::debug!(
"Sending function_call_output error with call_id: {}",
response.id
);
input_items.push(json!({
"type": "function_call_output",
"call_id": response.id,
"output": format!("Error: {}", error_data.message)
}));
}
}
_ => {}
}
}

if !text_items.is_empty() {
input_items.push(json!({
"role": role,
"content": text_items
}));
}
}
}

Expand All @@ -424,9 +409,7 @@ pub fn create_responses_request(
}));
}

add_conversation_history(&mut input_items, messages);
add_function_calls(&mut input_items, messages);
add_function_call_outputs(&mut input_items, messages);
add_message_items(&mut input_items, messages);

let mut payload = json!({
"model": model_config.model_name,
Expand Down Expand Up @@ -763,7 +746,10 @@ where
mod tests {
use super::*;
use crate::conversation::message::MessageContent;
use crate::model::ModelConfig;
use futures::StreamExt;
use rmcp::model::CallToolRequestParams;
use rmcp::object;

#[tokio::test]
async fn test_responses_stream_ignores_keepalive_event() -> anyhow::Result<()> {
Expand Down Expand Up @@ -831,4 +817,60 @@ mod tests {

Ok(())
}

#[test]
fn test_history_preserves_chronological_order() {
    // Minimal model config: only the model name matters for this test.
    let config = ModelConfig {
        model_name: "gpt-5.2-codex".to_string(),
        context_limit: None,
        temperature: None,
        max_tokens: None,
        toolshim: false,
        toolshim_model: None,
        fast_model_config: None,
        request_params: None,
    };

    // Two assistant turns, each consisting of text followed by a tool call.
    // If history were emitted grouped-by-kind rather than chronologically,
    // the two text items would be adjacent instead of interleaved.
    let first_turn = Message::assistant()
        .with_text("I'll create that file.")
        .with_tool_request(
            "call_1",
            Ok(CallToolRequestParams {
                meta: None,
                task: None,
                name: "shell".into(),
                arguments: Some(object!({"command": "echo hello"})),
            }),
        );
    let second_turn = Message::assistant()
        .with_text("Now let me verify.")
        .with_tool_request(
            "call_2",
            Ok(CallToolRequestParams {
                meta: None,
                task: None,
                name: "shell".into(),
                arguments: Some(object!({"command": "cat file.txt"})),
            }),
        );
    let history = vec![first_turn, second_turn];

    let request = create_responses_request(&config, "", &history, &[]).unwrap();

    // Identify each input item by its "type" field; plain message items
    // carry no "type", so fall back to their "role" instead.
    let mut kinds: Vec<&str> = Vec::new();
    for item in request["input"].as_array().unwrap() {
        let kind = match item.get("type").and_then(|v| v.as_str()) {
            Some(t) => t,
            None => item["role"].as_str().unwrap(),
        };
        kinds.push(kind);
    }

    assert_eq!(
        kinds,
        vec!["assistant", "function_call", "assistant", "function_call"]
    );
}
}