1 change: 1 addition & 0 deletions crates/goose/src/providers/formats/mod.rs
@@ -5,4 +5,5 @@ pub mod gcpvertexai;
pub mod google;
pub mod openai;
pub mod openai_responses;
pub mod openrouter;
pub mod snowflake;
28 changes: 24 additions & 4 deletions crates/goose/src/providers/formats/openai.rs
@@ -1,4 +1,4 @@
use crate::conversation::message::{Message, MessageContent};
use crate::conversation::message::{Message, MessageContent, ProviderMetadata};
use crate::model::ModelConfig;
use crate::providers::base::{ProviderUsage, Usage};
use crate::providers::utils::{
@@ -37,6 +37,7 @@ struct Delta {
content: Option<String>,
role: Option<String>,
tool_calls: Option<Vec<DeltaToolCall>>,
reasoning_details: Option<Vec<Value>>,
}

#[derive(Serialize, Deserialize, Debug)]
@@ -449,6 +450,8 @@ where
try_stream! {
use futures::StreamExt;

let mut accumulated_reasoning: Vec<Value> = Vec::new();

'outer: while let Some(response) = stream.next().await {
if response.as_ref().is_ok_and(|s| s == "data: [DONE]") {
break 'outer;
@@ -464,6 +467,12 @@ where
.ok_or_else(|| anyhow!("unexpected stream format"))?)
.map_err(|e| anyhow!("Failed to parse streaming chunk: {}: {:?}", e, &line))?;

if !chunk.choices.is_empty() {
if let Some(details) = &chunk.choices[0].delta.reasoning_details {
accumulated_reasoning.extend(details.iter().cloned());
}
}

let usage = chunk.usage.as_ref().and_then(|u| {
chunk.model.as_ref().map(|model| {
ProviderUsage {
@@ -486,7 +495,6 @@
}
}

// Check if this chunk already has finish_reason "tool_calls"
let is_complete = chunk.choices[0].finish_reason == Some("tool_calls".to_string());

if !is_complete {
@@ -502,6 +510,9 @@ where
.map_err(|e| anyhow!("Failed to parse streaming chunk: {}: {:?}", e, &line))?;

if !tool_chunk.choices.is_empty() {
if let Some(details) = &tool_chunk.choices[0].delta.reasoning_details {
accumulated_reasoning.extend(details.iter().cloned());
}
if let Some(delta_tool_calls) = &tool_chunk.choices[0].delta.tool_calls {
for delta_call in delta_tool_calls {
if let Some(index) = delta_call.index {
@@ -526,6 +537,14 @@ where
}
}

let metadata: Option<ProviderMetadata> = if !accumulated_reasoning.is_empty() {
let mut map = ProviderMetadata::new();
map.insert("reasoning_details".to_string(), json!(accumulated_reasoning));
Some(map)
} else {
None
};

let mut contents = Vec::new();
let mut sorted_indices: Vec<_> = tool_call_data.keys().cloned().collect();
sorted_indices.sort();
@@ -540,9 +559,10 @@ where

let content = match parsed {
Ok(params) => {
MessageContent::tool_request(
MessageContent::tool_request_with_metadata(
id.clone(),
Ok(CallToolRequestParam { name: function_name.clone().into(), arguments: Some(object(params)) }),
metadata.as_ref(),
)
},
Err(e) => {
@@ -554,7 +574,7 @@ where
)),
data: None,
};
MessageContent::tool_request(id.clone(), Err(error))
MessageContent::tool_request_with_metadata(id.clone(), Err(error), metadata.as_ref())
}
};
contents.push(content);
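
For context, a minimal standalone sketch of the kind of streamed chunk the new reasoning_details field on Delta is meant to capture. The detail object shape is taken from this PR's own tests; the exact payload an upstream model emits may differ.

// Illustrative only: parse one SSE data line the way the streaming loop does,
// then read the delta's reasoning_details, which the new code accumulates.
use serde_json::Value;

fn main() {
    let line = r#"data: {"choices":[{"delta":{"role":"assistant","reasoning_details":[{"type":"encrypted","data":"sig456"}]}}]}"#;
    let chunk: Value = serde_json::from_str(line.strip_prefix("data: ").unwrap()).unwrap();
    let details = chunk["choices"][0]["delta"]["reasoning_details"]
        .as_array()
        .cloned()
        .unwrap_or_default();
    // These entries are what accumulated_reasoning collects and later attaches
    // to each tool request as provider metadata.
    assert_eq!(details.len(), 1);
}
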
152 changes: 152 additions & 0 deletions crates/goose/src/providers/formats/openrouter.rs
@@ -0,0 +1,152 @@
use crate::conversation::message::{Message, MessageContent, ProviderMetadata};
use crate::providers::formats::openai;
use rmcp::model::Role;
use serde_json::{json, Value};

pub const REASONING_DETAILS_KEY: &str = "reasoning_details";

fn has_assistant_content(message: &Message) -> bool {
    message.content.iter().any(|c| match c {
        MessageContent::Text(t) => !t.text.is_empty(),
        MessageContent::Image(_) => true,
        MessageContent::ToolRequest(req) => req.tool_call.is_ok(),
        MessageContent::FrontendToolRequest(req) => req.tool_call.is_ok(),
        _ => false,
    })
}

pub fn extract_reasoning_details(response: &Value) -> Option<Vec<Value>> {
    response
        .get("choices")
        .and_then(|c| c.get(0))
        .and_then(|m| m.get("message"))
        .and_then(|msg| msg.get("reasoning_details"))
        .and_then(|d| d.as_array())
        .cloned()
}

pub fn get_reasoning_details(metadata: &Option<ProviderMetadata>) -> Option<Vec<Value>> {
    metadata
        .as_ref()
        .and_then(|m| m.get(REASONING_DETAILS_KEY))
        .and_then(|v| v.as_array())
        .cloned()
}

pub fn response_to_message(response: &Value) -> anyhow::Result<Message> {
    let mut message = openai::response_to_message(response)?;

    if let Some(details) = extract_reasoning_details(response) {
        for content in &mut message.content {
            if let MessageContent::ToolRequest(req) = content {
                let mut meta = req.metadata.clone().unwrap_or_default();
                meta.insert(REASONING_DETAILS_KEY.to_string(), json!(details));
                req.metadata = Some(meta);
            }
        }
    }

    Ok(message)
}

pub fn add_reasoning_details_to_request(payload: &mut Value, messages: &[Message]) {
    let mut assistant_reasoning: Vec<Option<Vec<Value>>> = messages
        .iter()
        .filter(|m| m.is_agent_visible())
        .filter(|m| m.role == Role::Assistant)
        .filter(|m| has_assistant_content(m))
        .map(|message| {
            message.content.iter().find_map(|c| match c {
                MessageContent::ToolRequest(req) => get_reasoning_details(&req.metadata),
                _ => None,
            })
        })
        .collect();

    if let Some(payload_messages) = payload
        .as_object_mut()
        .and_then(|obj| obj.get_mut("messages"))
        .and_then(|m| m.as_array_mut())
    {
        let mut assistant_idx = 0;
        for payload_msg in payload_messages.iter_mut() {
            if payload_msg.get("role").and_then(|r| r.as_str()) == Some("assistant") {
                if assistant_idx < assistant_reasoning.len() {
                    if let Some(details) = assistant_reasoning
                        .get_mut(assistant_idx)
                        .and_then(|d| d.take())
                    {
                        if let Some(obj) = payload_msg.as_object_mut() {
                            obj.insert("reasoning_details".to_string(), json!(details));
                        }
                    }
                }
                assistant_idx += 1;
            }
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_extract_reasoning_details() {
        let response = json!({
            "choices": [{
                "message": {
                    "content": "Hello",
                    "reasoning_details": [
                        {"type": "text", "text": "Let me think..."},
                        {"type": "encrypted", "data": "abc123signature"}
                    ]
                }
            }]
        });

        let details = extract_reasoning_details(&response).unwrap();
        assert_eq!(details.len(), 2);
    }

    #[test]
    fn test_response_to_message_with_tool_calls() {
        let response = json!({
            "choices": [{
                "message": {
                    "content": null,
                    "tool_calls": [{
                        "id": "call_123",
                        "type": "function",
                        "function": {
                            "name": "get_weather",
                            "arguments": "{\"location\": \"NYC\"}"
                        }
                    }],
                    "reasoning_details": [
                        {"type": "encrypted", "data": "sig456"}
                    ]
                }
            }]
        });

        let message = response_to_message(&response).unwrap();
        assert!(!message.content.is_empty());

        let tool_request = message
            .content
            .iter()
            .find_map(|c| {
                if let MessageContent::ToolRequest(req) = c {
                    Some(req)
                } else {
                    None
                }
            })
            .unwrap();

        assert!(tool_request.metadata.is_some());
        let details = get_reasoning_details(&tool_request.metadata).unwrap();
        assert_eq!(details.len(), 1);
    }
}
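
The provider wiring that calls these helpers is not shown in this diff. Below is a minimal sketch, assuming a request payload already built in the OpenAI chat format and a non-streaming response body, of how the two functions are meant to compose; the call site and function name are illustrative only.

// Sketch only: the surrounding provider code is assumed, not part of this PR's hunks.
use crate::conversation::message::Message;
use crate::providers::formats::openrouter;
use serde_json::Value;

fn round_trip_reasoning(
    payload: &mut Value,   // OpenAI-format request body containing a "messages" array
    history: &[Message],   // prior conversation, including earlier tool-call turns
    response: &Value,      // completion body returned by the API
) -> anyhow::Result<Message> {
    // Before sending: copy reasoning_details saved on earlier assistant turns
    // back onto the matching assistant entries in the outgoing payload.
    openrouter::add_reasoning_details_to_request(payload, history);

    // After receiving: parse the reply so any reasoning_details in the response
    // end up as metadata on the resulting tool requests for the next turn.
    openrouter::response_to_message(response)
}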