18 changes: 12 additions & 6 deletions crates/goose/src/providers/anthropic.rs
@@ -14,11 +14,12 @@ use super::errors::ProviderError;
use super::formats::anthropic::{
create_request, get_usage, response_to_message, response_to_streaming_message,
};
use super::utils::{emit_debug_trace, get_model, map_http_error_to_provider_error};
use super::utils::{get_model, map_http_error_to_provider_error};
use crate::config::declarative_providers::DeclarativeProviderConfig;
use crate::conversation::message::Message;
use crate::model::ModelConfig;
use crate::providers::retry::ProviderRetry;
use crate::providers::utils::RequestLog;
use rmcp::model::Tool;

pub const ANTHROPIC_DEFAULT_MODEL: &str = "claude-sonnet-4-0";
@@ -204,7 +205,8 @@ impl Provider for AnthropicProvider {
usage.input_tokens, usage.output_tokens, usage.total_tokens);

let response_model = get_model(&json_response);
emit_debug_trace(&self.model, &payload, &json_response, &usage);
let mut log = RequestLog::start(&self.model, &payload)?;
log.write(&json_response, Some(&usage))?;
let provider_usage = ProviderUsage::new(response_model, usage);
tracing::debug!(
"🔍 Anthropic non-streaming returning ProviderUsage: {:?}",
@@ -258,22 +260,26 @@ impl Provider for AnthropicProvider {
.insert("stream".to_string(), Value::Bool(true));

let mut request = self.api_client.request("v1/messages");
let mut log = RequestLog::start(&self.model, &payload)?;

for (key, value) in self.get_conditional_headers() {
request = request.header(key, value)?;
}

let response = request.response_post(&payload).await?;
let response = request.response_post(&payload).await.inspect_err(|e| {
let _ = log.error(e);
})?;
if !response.status().is_success() {
let status = response.status();
let error_text = response.text().await.unwrap_or_default();
let error_json = serde_json::from_str::<Value>(&error_text).ok();
return Err(map_http_error_to_provider_error(status, error_json));
let error = map_http_error_to_provider_error(status, error_json);
let _ = log.error(&error);
return Err(error);
}

let stream = response.bytes_stream().map_err(io::Error::other);

let model = self.model.clone();
Ok(Box::pin(try_stream! {
let stream_reader = StreamReader::new(stream);
let framed = tokio_util::codec::FramedRead::new(stream_reader, tokio_util::codec::LinesCodec::new()).map_err(anyhow::Error::from);
@@ -282,7 +288,7 @@ impl Provider for AnthropicProvider {
pin!(message_stream);
while let Some(message) = futures::StreamExt::next(&mut message_stream).await {
let (message, usage) = message.map_err(|e| ProviderError::RequestFailed(format!("Stream decode error: {}", e)))?;
emit_debug_trace(&model, &payload, &message, &usage.as_ref().map(|f| f.usage).unwrap_or_default());
log.write(&message, usage.as_ref().map(|f| f.usage).as_ref())?;
yield (message, usage);
}
}))
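
Taken together, the call sites above pin down the surface of the new `RequestLog` handle in `providers::utils`: `start` captures the model config and outgoing payload, `write` records a response (the full body, or one decoded message per streaming chunk) plus token usage when known, and `error` records failures. As a rough sketch only — the real implementation lives in `crates/goose/src/providers/utils.rs` and is not part of this diff; the struct field, the `Serialize` bound, and the `tracing` backend below are guesses inferred from usage:

```rust
// Hypothetical sketch of RequestLog, inferred from the call sites in this PR.
// The actual type in crates/goose/src/providers/utils.rs may differ.
use anyhow::Result;
use serde_json::Value;

use crate::model::ModelConfig;
use crate::providers::base::Usage;

pub struct RequestLog {
    model_name: String, // assumed field; could equally be a file handle or buffer
}

impl RequestLog {
    /// Open a log entry for one provider request, capturing the outgoing payload.
    pub fn start(model: &ModelConfig, payload: &Value) -> Result<Self> {
        tracing::debug!(model = %model.model_name, payload = %payload, "provider request");
        Ok(Self { model_name: model.model_name.clone() })
    }

    /// Record a response (whole body, or one decoded streaming message) and,
    /// when available, its token usage. Assumed generic over Serialize, since
    /// call sites pass both serde_json::Value and Message; assumes Usage: Debug.
    pub fn write<T: serde::Serialize>(&mut self, response: &T, usage: Option<&Usage>) -> Result<()> {
        let body = serde_json::to_value(response)?;
        tracing::debug!(model = %self.model_name, response = %body, usage = ?usage, "provider response");
        Ok(())
    }

    /// Record a failure so an aborted request still leaves a trace.
    pub fn error(&mut self, err: &impl std::fmt::Display) -> Result<()> {
        tracing::debug!(model = %self.model_name, error = %err, "provider request failed");
        Ok(())
    }
}
```

Note the error paths deliberately discard the logger's own result (`let _ = log.error(e);`) so a failed log write can never mask the underlying provider error.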
6 changes: 4 additions & 2 deletions crates/goose/src/providers/azure.rs
@@ -9,9 +9,10 @@ use super::base::{ConfigKey, Provider, ProviderMetadata, ProviderUsage, Usage};
use super::errors::ProviderError;
use super::formats::openai::{create_request, get_usage, response_to_message};
use super::retry::ProviderRetry;
use super::utils::{emit_debug_trace, get_model, handle_response_openai_compat, ImageFormat};
use super::utils::{get_model, handle_response_openai_compat, ImageFormat};
use crate::conversation::message::Message;
use crate::model::ModelConfig;
use crate::providers::utils::RequestLog;
use rmcp::model::Tool;

pub const AZURE_DEFAULT_MODEL: &str = "gpt-4o";
@@ -156,7 +157,8 @@ impl Provider for AzureProvider {
Usage::default()
});
let response_model = get_model(&response);
emit_debug_trace(model_config, &payload, &response, &usage);
let mut log = RequestLog::start(model_config, &payload)?;
log.write(&response, Some(&usage))?;
Ok((message, ProviderUsage::new(response_model, usage)))
}
}
11 changes: 5 additions & 6 deletions crates/goose/src/providers/bedrock.rs
@@ -5,7 +5,7 @@ use super::errors::ProviderError;
use super::retry::{ProviderRetry, RetryConfig};
use crate::conversation::message::Message;
use crate::model::ModelConfig;
use crate::providers::utils::emit_debug_trace;
use crate::providers::utils::RequestLog;
use anyhow::Result;
use async_trait::async_trait;
use aws_sdk_bedrockruntime::config::ProvideCredentials;
@@ -222,12 +222,11 @@ impl Provider for BedrockProvider {
"messages": messages,
"tools": tools
});
emit_debug_trace(
&self.model,
&debug_payload,
let mut log = RequestLog::start(&self.model, &debug_payload)?;
log.write(
&serde_json::to_value(&message).unwrap_or_default(),
&usage,
);
Some(&usage),
)?;

let provider_usage = ProviderUsage::new(model_name.to_string(), usage);
Ok((message, provider_usage))
5 changes: 3 additions & 2 deletions crates/goose/src/providers/claude_code.rs
@@ -9,7 +9,7 @@ use tokio::process::Command;

use super::base::{ConfigKey, Provider, ProviderMetadata, ProviderUsage, Usage};
use super::errors::ProviderError;
use super::utils::emit_debug_trace;
use super::utils::RequestLog;
use crate::config::Config;
use crate::conversation::message::{Message, MessageContent};
use crate::model::ModelConfig;
@@ -495,13 +495,14 @@ impl Provider for ClaudeCodeProvider {
"system": system,
"messages": messages.len()
});
let mut log = RequestLog::start(model_config, &payload)?;

let response = json!({
"lines": json_lines.len(),
"usage": usage
});

emit_debug_trace(model_config, &payload, &response, &usage);
log.write(&response, Some(&usage))?;

Ok((
message,
5 changes: 3 additions & 2 deletions crates/goose/src/providers/cursor_agent.rs
@@ -9,7 +9,7 @@ use tokio::process::Command;

use super::base::{ConfigKey, Provider, ProviderMetadata, ProviderUsage, Usage};
use super::errors::ProviderError;
use super::utils::emit_debug_trace;
use super::utils::RequestLog;
use crate::conversation::message::{Message, MessageContent};
use crate::model::ModelConfig;
use rmcp::model::Tool;
@@ -433,7 +433,8 @@ impl Provider for CursorAgentProvider {
"usage": usage
});

emit_debug_trace(model_config, &payload, &response, &usage);
let mut log = RequestLog::start(&self.model, &payload)?;
log.write(&response, Some(&usage))?;

Ok((
message,
14 changes: 10 additions & 4 deletions crates/goose/src/providers/databricks.rs
@@ -18,6 +18,7 @@ use super::oauth;
use super::retry::ProviderRetry;
use super::utils::{
get_model, handle_response_openai_compat, map_http_error_to_provider_error, ImageFormat,
RequestLog,
};
use crate::config::ConfigError;
use crate::conversation::message::Message;
@@ -286,6 +287,8 @@ impl Provider for DatabricksProvider {
.expect("payload should have model key")
.remove("model");

let mut log = RequestLog::start(&self.model, &payload)?;

let response = self
.with_retry(|| self.post(payload.clone(), Some(&model_config.model_name)))
.await?;
@@ -296,7 +299,7 @@ impl Provider for DatabricksProvider {
Usage::default()
});
let response_model = get_model(&response);
super::utils::emit_debug_trace(&self.model, &payload, &response, &usage);
log.write(&response, Some(&usage))?;

Ok((message, ProviderUsage::new(response_model, usage)))
}
Expand All @@ -322,6 +325,7 @@ impl Provider for DatabricksProvider {
.insert("stream".to_string(), Value::Bool(true));

let path = self.get_endpoint_path(&model_config.model_name, false);
let mut log = RequestLog::start(&self.model, &payload)?;
let response = self
.with_retry(|| async {
let resp = self.api_client.response_post(&path, &payload).await?;
@@ -335,11 +339,13 @@
}
Ok(resp)
})
.await?;
.await
.inspect_err(|e| {
let _ = log.error(e);
})?;

let stream = response.bytes_stream().map_err(io::Error::other);

let model = self.model.clone();
Ok(Box::pin(try_stream! {
let stream_reader = StreamReader::new(stream);
let framed = FramedRead::new(stream_reader, LinesCodec::new()).map_err(anyhow::Error::from);
@@ -348,7 +354,7 @@ impl Provider for DatabricksProvider {
pin!(message_stream);
while let Some(message) = message_stream.next().await {
let (message, usage) = message.map_err(|e| ProviderError::RequestFailed(format!("Stream decode error: {}", e)))?;
super::utils::emit_debug_trace(&model, &payload, &message, &usage.as_ref().map(|f| f.usage).unwrap_or_default());
log.write(&message, usage.as_ref().map(|f| f.usage).as_ref())?;
yield (message, usage);
}
}))
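
A small point worth calling out in both streaming providers: failures are logged with `Result::inspect_err` (stable since Rust 1.76), which runs a side effect that borrows the error without consuming it, so `?` can still propagate the original value afterwards. A standalone illustration of the pattern, not code from this PR:

```rust
use std::num::ParseIntError;

// inspect_err runs a side effect on the Err variant without consuming it,
// leaving the Result intact for `?` or the caller.
fn parse_port(raw: &str) -> Result<u16, ParseIntError> {
    raw.trim()
        .parse::<u16>()
        .inspect_err(|e| eprintln!("failed to parse {raw:?}: {e}"))
}

fn main() {
    assert_eq!(parse_port(" 8080 "), Ok(8080));
    assert!(parse_port("http").is_err()); // logged to stderr, then returned
}
```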
5 changes: 3 additions & 2 deletions crates/goose/src/providers/gcpvertexai.rs
@@ -21,7 +21,7 @@ use crate::providers::formats::gcpvertexai::{
use crate::providers::formats::gcpvertexai::GcpLocation::Iowa;
use crate::providers::gcpauth::GcpAuth;
use crate::providers::retry::RetryConfig;
use crate::providers::utils::emit_debug_trace;
use crate::providers::utils::RequestLog;
use rmcp::model::Tool;

/// Base URL for GCP Vertex AI documentation
@@ -518,7 +518,8 @@ impl Provider for GcpVertexAIProvider {
let response = self.post(&request, &context).await?;
let usage = get_usage(&response, &context)?;

emit_debug_trace(model_config, &request, &response, &usage);
let mut log = RequestLog::start(model_config, &request)?;
log.write(&response, Some(&usage))?;

// Convert response to message
let message = response_to_message(response, context)?;
22 changes: 14 additions & 8 deletions crates/goose/src/providers/gemini_cli.rs
@@ -8,7 +8,7 @@ use tokio::process::Command;

use super::base::{Provider, ProviderMetadata, ProviderUsage, Usage};
use super::errors::ProviderError;
use super::utils::emit_debug_trace;
use super::utils::RequestLog;
use crate::conversation::message::{Message, MessageContent};

use crate::model::ModelConfig;
@@ -317,12 +317,12 @@ impl Provider for GeminiCliProvider {
}

#[tracing::instrument(
skip(self, model_config, system, messages, tools),
skip(self, _model_config, system, messages, tools),
fields(model_config, input, output, input_tokens, output_tokens, total_tokens)
)]
async fn complete_with_model(
&self,
model_config: &ModelConfig,
_model_config: &ModelConfig,
system: &str,
messages: &[Message],
tools: &[Tool],
@@ -332,10 +332,6 @@
return self.generate_simple_session_description(messages);
}

let lines = self.execute_command(system, messages, tools).await?;

let (message, usage) = self.parse_response(&lines)?;

// Create a dummy payload for debug tracing
let payload = json!({
"command": self.command,
@@ -344,12 +340,22 @@
"messages": messages.len()
});

let mut log = RequestLog::start(&self.model, &payload).map_err(|e| {
ProviderError::RequestFailed(format!("Failed to start request log: {}", e))
})?;

let lines = self.execute_command(system, messages, tools).await?;

let (message, usage) = self.parse_response(&lines)?;

let response = json!({
"lines": lines.len(),
"usage": usage
});

emit_debug_trace(model_config, &payload, &response, &usage);
log.write(&response, Some(&usage)).map_err(|e| {
ProviderError::RequestFailed(format!("Failed to write request log: {}", e))
})?;

Ok((
message,
5 changes: 3 additions & 2 deletions crates/goose/src/providers/githubcopilot.rs
@@ -15,7 +15,7 @@ use super::base::{Provider, ProviderMetadata, ProviderUsage, Usage};
use super::errors::ProviderError;
use super::formats::openai::{create_request, get_usage, response_to_message};
use super::retry::ProviderRetry;
use super::utils::{emit_debug_trace, get_model, handle_response_openai_compat, ImageFormat};
use super::utils::{get_model, handle_response_openai_compat, ImageFormat, RequestLog};

use crate::config::{Config, ConfigError};
use crate::conversation::message::Message;
@@ -408,6 +408,7 @@ impl Provider for GithubCopilotProvider {
tools: &[Tool],
) -> Result<(Message, ProviderUsage), ProviderError> {
let payload = create_request(model_config, system, messages, tools, &ImageFormat::OpenAi)?;
let mut log = RequestLog::start(model_config, &payload)?;

// Make request with retry
let response = self
@@ -424,7 +425,7 @@ impl Provider for GithubCopilotProvider {
Usage::default()
});
let response_model = get_model(&response);
emit_debug_trace(model_config, &payload, &response, &usage);
log.write(&response, Some(&usage))?;
Ok((message, ProviderUsage::new(response_model, usage)))
}

5 changes: 3 additions & 2 deletions crates/goose/src/providers/google.rs
@@ -1,7 +1,7 @@
use super::api_client::{ApiClient, AuthMethod};
use super::errors::ProviderError;
use super::retry::ProviderRetry;
use super::utils::{emit_debug_trace, handle_response_google_compat, unescape_json_values};
use super::utils::{handle_response_google_compat, unescape_json_values, RequestLog};
use crate::conversation::message::Message;

use crate::model::ModelConfig;
@@ -113,6 +113,7 @@ impl Provider for GoogleProvider {
tools: &[Tool],
) -> Result<(Message, ProviderUsage), ProviderError> {
let payload = create_request(model_config, system, messages, tools)?;
let mut log = RequestLog::start(model_config, &payload)?;

// Make request
let response = self
@@ -129,7 +130,7 @@ impl Provider for GoogleProvider {
Some(model_version) => model_version.as_str().unwrap_or_default().to_string(),
None => model_config.model_name.clone(),
};
emit_debug_trace(model_config, &payload, &response, &usage);
log.write(&response, Some(&usage))?;
let provider_usage = ProviderUsage::new(response_model, usage);
Ok((message, provider_usage))
}
5 changes: 3 additions & 2 deletions crates/goose/src/providers/litellm.rs
@@ -8,7 +8,7 @@ use super::base::{ConfigKey, ModelInfo, Provider, ProviderMetadata, ProviderUsag
use super::embedding::EmbeddingCapable;
use super::errors::ProviderError;
use super::retry::ProviderRetry;
use super::utils::{emit_debug_trace, get_model, handle_response_openai_compat, ImageFormat};
use super::utils::{get_model, handle_response_openai_compat, ImageFormat, RequestLog};
use crate::conversation::message::Message;

use crate::model::ModelConfig;
@@ -188,7 +188,8 @@ impl Provider for LiteLLMProvider {
let message = super::formats::openai::response_to_message(&response)?;
let usage = super::formats::openai::get_usage(&response);
let response_model = get_model(&response);
emit_debug_trace(model_config, &payload, &response, &usage);
let mut log = RequestLog::start(model_config, &payload)?;
log.write(&response, Some(&usage))?;
Ok((message, ProviderUsage::new(response_model, usage)))
}

10 changes: 7 additions & 3 deletions crates/goose/src/providers/ollama.rs
@@ -2,7 +2,9 @@ use super::api_client::{ApiClient, AuthMethod};
use super::base::{ConfigKey, MessageStream, Provider, ProviderMetadata, ProviderUsage, Usage};
use super::errors::ProviderError;
use super::retry::ProviderRetry;
use super::utils::{get_model, handle_response_openai_compat, handle_status_openai_compat};
use super::utils::{
get_model, handle_response_openai_compat, handle_status_openai_compat, RequestLog,
};
use crate::config::declarative_providers::DeclarativeProviderConfig;
use crate::conversation::message::Message;
use crate::conversation::Conversation;
@@ -213,7 +215,8 @@ impl Provider for OllamaProvider {
Usage::default()
});
let response_model = get_model(&response);
super::utils::emit_debug_trace(model_config, &payload, &response, &usage);
let mut log = RequestLog::start(model_config, &payload)?;
log.write(&response, Some(&usage))?;
Ok((message, ProviderUsage::new(response_model, usage)))
}

@@ -270,13 +273,14 @@ impl Provider for OllamaProvider {
let model_config = self.model.clone();

Ok(Box::pin(try_stream! {
let mut log = RequestLog::start(&model_config, &payload)?;
let stream_reader = StreamReader::new(stream);
let framed = FramedRead::new(stream_reader, LinesCodec::new()).map_err(anyhow::Error::from);
let message_stream = response_to_streaming_message(framed);
pin!(message_stream);
while let Some(message) = message_stream.next().await {
let (message, usage) = message.map_err(|e| ProviderError::RequestFailed(format!("Stream decode error: {}", e)))?;
super::utils::emit_debug_trace(&model_config, &payload, &message, &usage.as_ref().map(|f| f.usage).unwrap_or_default());
log.write(&message, usage.as_ref().map(|f| &f.usage))?;
yield (message, usage);
}
}))
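
Unlike Anthropic and Databricks, Ollama opens its `RequestLog` inside the `try_stream!` block, so a failed `start` surfaces as the stream's first `Err` item rather than as an error from `stream()` itself. For readers unfamiliar with the macro (from the `async-stream` crate), a minimal standalone example of how `?` and `yield` interact inside `try_stream!`:

```rust
use async_stream::try_stream;
use futures::{pin_mut, Stream, StreamExt};

// Inside try_stream!, every `yield v` becomes an Ok(v) item, and `?` turns a
// failure into the stream's final Err item, ending the stream early.
fn parsed(lines: &'static [&'static str]) -> impl Stream<Item = Result<u32, std::num::ParseIntError>> {
    try_stream! {
        for line in lines {
            let n: u32 = line.parse()?;
            yield n;
        }
    }
}

#[tokio::main]
async fn main() {
    let stream = parsed(&["1", "2", "oops", "4"]);
    pin_mut!(stream);
    while let Some(item) = stream.next().await {
        println!("{item:?}"); // Ok(1), Ok(2), Err(ParseIntError) — "4" is never reached
    }
}
```

Also note the two equivalent adaptors feeding `write` at the streaming call sites: Anthropic and Databricks use `usage.as_ref().map(|f| f.usage).as_ref()` (copy the `Usage` out, then borrow the temporary), while Ollama uses `usage.as_ref().map(|f| &f.usage)` (borrow through); both produce the `Option<&Usage>` the logger expects.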