diff --git a/crates/goose/src/providers/anthropic.rs b/crates/goose/src/providers/anthropic.rs
index bddc4197040f..0558e3fe985b 100644
--- a/crates/goose/src/providers/anthropic.rs
+++ b/crates/goose/src/providers/anthropic.rs
@@ -14,11 +14,12 @@ use super::errors::ProviderError;
 use super::formats::anthropic::{
     create_request, get_usage, response_to_message, response_to_streaming_message,
 };
-use super::utils::{emit_debug_trace, get_model, map_http_error_to_provider_error};
+use super::utils::{get_model, map_http_error_to_provider_error};
 use crate::config::declarative_providers::DeclarativeProviderConfig;
 use crate::conversation::message::Message;
 use crate::model::ModelConfig;
 use crate::providers::retry::ProviderRetry;
+use crate::providers::utils::RequestLog;
 use rmcp::model::Tool;
 
 pub const ANTHROPIC_DEFAULT_MODEL: &str = "claude-sonnet-4-0";
@@ -204,7 +205,8 @@ impl Provider for AnthropicProvider {
             usage.input_tokens, usage.output_tokens, usage.total_tokens);
         let response_model = get_model(&json_response);
-        emit_debug_trace(&self.model, &payload, &json_response, &usage);
+        let mut log = RequestLog::start(&self.model, &payload)?;
+        log.write(&json_response, Some(&usage))?;
         let provider_usage = ProviderUsage::new(response_model, usage);
         tracing::debug!(
             "🔍 Anthropic non-streaming returning ProviderUsage: {:?}",
@@ -258,22 +260,26 @@ impl Provider for AnthropicProvider {
             .insert("stream".to_string(), Value::Bool(true));
         let mut request = self.api_client.request("v1/messages");
+        let mut log = RequestLog::start(&self.model, &payload)?;
 
         for (key, value) in self.get_conditional_headers() {
             request = request.header(key, value)?;
         }
 
-        let response = request.response_post(&payload).await?;
+        let response = request.response_post(&payload).await.inspect_err(|e| {
+            let _ = log.error(e);
+        })?;
 
         if !response.status().is_success() {
             let status = response.status();
             let error_text = response.text().await.unwrap_or_default();
             let error_json = serde_json::from_str::<Value>(&error_text).ok();
-            return Err(map_http_error_to_provider_error(status, error_json));
+            let error = map_http_error_to_provider_error(status, error_json);
+            let _ = log.error(&error);
+            return Err(error);
         }
 
         let stream = response.bytes_stream().map_err(io::Error::other);
-        let model = self.model.clone();
 
         Ok(Box::pin(try_stream! {
             let stream_reader = StreamReader::new(stream);
             let framed = tokio_util::codec::FramedRead::new(stream_reader, tokio_util::codec::LinesCodec::new()).map_err(anyhow::Error::from);
@@ -282,7 +288,7 @@ impl Provider for AnthropicProvider {
             pin!(message_stream);
             while let Some(message) = futures::StreamExt::next(&mut message_stream).await {
                 let (message, usage) = message.map_err(|e| ProviderError::RequestFailed(format!("Stream decode error: {}", e)))?;
-                emit_debug_trace(&model, &payload, &message, &usage.as_ref().map(|f| f.usage).unwrap_or_default());
+                log.write(&message, usage.as_ref().map(|f| f.usage).as_ref())?;
                 yield (message, usage);
             }
         }))
diff --git a/crates/goose/src/providers/azure.rs b/crates/goose/src/providers/azure.rs
index e261c2a8919c..cd7f51a42af6 100644
--- a/crates/goose/src/providers/azure.rs
+++ b/crates/goose/src/providers/azure.rs
@@ -9,9 +9,10 @@ use super::base::{ConfigKey, Provider, ProviderMetadata, ProviderUsage, Usage};
 use super::errors::ProviderError;
 use super::formats::openai::{create_request, get_usage, response_to_message};
 use super::retry::ProviderRetry;
-use super::utils::{emit_debug_trace, get_model, handle_response_openai_compat, ImageFormat};
+use super::utils::{get_model, handle_response_openai_compat, ImageFormat};
 use crate::conversation::message::Message;
 use crate::model::ModelConfig;
+use crate::providers::utils::RequestLog;
 use rmcp::model::Tool;
 
 pub const AZURE_DEFAULT_MODEL: &str = "gpt-4o";
@@ -156,7 +157,8 @@ impl Provider for AzureProvider {
             Usage::default()
         });
         let response_model = get_model(&response);
-        emit_debug_trace(model_config, &payload, &response, &usage);
+        let mut log = RequestLog::start(model_config, &payload)?;
+        log.write(&response, Some(&usage))?;
         Ok((message, ProviderUsage::new(response_model, usage)))
     }
 }
diff --git a/crates/goose/src/providers/bedrock.rs b/crates/goose/src/providers/bedrock.rs
index 7ce5db9fac3b..b5f720692a0a 100644
--- a/crates/goose/src/providers/bedrock.rs
+++ b/crates/goose/src/providers/bedrock.rs
@@ -5,7 +5,7 @@ use super::errors::ProviderError;
 use super::retry::{ProviderRetry, RetryConfig};
 use crate::conversation::message::Message;
 use crate::model::ModelConfig;
-use crate::providers::utils::emit_debug_trace;
+use crate::providers::utils::RequestLog;
 use anyhow::Result;
 use async_trait::async_trait;
 use aws_sdk_bedrockruntime::config::ProvideCredentials;
@@ -222,12 +222,11 @@ impl Provider for BedrockProvider {
             "messages": messages,
             "tools": tools
         });
-        emit_debug_trace(
-            &self.model,
-            &debug_payload,
+        let mut log = RequestLog::start(&self.model, &debug_payload)?;
+        log.write(
             &serde_json::to_value(&message).unwrap_or_default(),
-            &usage,
-        );
+            Some(&usage),
+        )?;
 
         let provider_usage = ProviderUsage::new(model_name.to_string(), usage);
         Ok((message, provider_usage))
diff --git a/crates/goose/src/providers/claude_code.rs b/crates/goose/src/providers/claude_code.rs
index 571004028794..d27be5ce273f 100644
--- a/crates/goose/src/providers/claude_code.rs
+++ b/crates/goose/src/providers/claude_code.rs
@@ -9,7 +9,7 @@ use tokio::process::Command;
 
 use super::base::{ConfigKey, Provider, ProviderMetadata, ProviderUsage, Usage};
 use super::errors::ProviderError;
-use super::utils::emit_debug_trace;
+use super::utils::RequestLog;
 use crate::config::Config;
 use crate::conversation::message::{Message, MessageContent};
 use crate::model::ModelConfig;
@@ -495,13 +495,14 @@ impl Provider for ClaudeCodeProvider {
             "system": system,
             "messages": messages.len()
         });
+        let mut log = RequestLog::start(model_config, &payload)?;
 
         let response = json!({
             "lines": json_lines.len(),
             "usage": usage
         });
 
-        emit_debug_trace(model_config, &payload, &response, &usage);
+        log.write(&response, Some(&usage))?;
 
         Ok((
             message,
diff --git a/crates/goose/src/providers/cursor_agent.rs b/crates/goose/src/providers/cursor_agent.rs
index 0992bd49c61a..b5f4e0b98552 100644
--- a/crates/goose/src/providers/cursor_agent.rs
+++ b/crates/goose/src/providers/cursor_agent.rs
@@ -9,7 +9,7 @@ use tokio::process::Command;
 
 use super::base::{ConfigKey, Provider, ProviderMetadata, ProviderUsage, Usage};
 use super::errors::ProviderError;
-use super::utils::emit_debug_trace;
+use super::utils::RequestLog;
 use crate::conversation::message::{Message, MessageContent};
 use crate::model::ModelConfig;
 use rmcp::model::Tool;
@@ -433,7 +433,8 @@ impl Provider for CursorAgentProvider {
             "usage": usage
         });
 
-        emit_debug_trace(model_config, &payload, &response, &usage);
+        let mut log = RequestLog::start(&self.model, &payload)?;
+        log.write(&response, Some(&usage))?;
 
         Ok((
             message,
diff --git a/crates/goose/src/providers/databricks.rs b/crates/goose/src/providers/databricks.rs
index 40847bb3ed1d..f21bc925a866 100644
--- a/crates/goose/src/providers/databricks.rs
+++ b/crates/goose/src/providers/databricks.rs
@@ -18,6 +18,7 @@ use super::oauth;
 use super::retry::ProviderRetry;
 use super::utils::{
     get_model, handle_response_openai_compat, map_http_error_to_provider_error, ImageFormat,
+    RequestLog,
 };
 use crate::config::ConfigError;
 use crate::conversation::message::Message;
@@ -286,6 +287,8 @@ impl Provider for DatabricksProvider {
             .expect("payload should have model key")
             .remove("model");
 
+        let mut log = RequestLog::start(&self.model, &payload)?;
+
         let response = self
             .with_retry(|| self.post(payload.clone(), Some(&model_config.model_name)))
             .await?;
@@ -296,7 +299,7 @@ impl Provider for DatabricksProvider {
             Usage::default()
         });
         let response_model = get_model(&response);
-        super::utils::emit_debug_trace(&self.model, &payload, &response, &usage);
+        log.write(&response, Some(&usage))?;
         Ok((message, ProviderUsage::new(response_model, usage)))
     }
 
@@ -322,6 +325,7 @@ impl Provider for DatabricksProvider {
             .insert("stream".to_string(), Value::Bool(true));
 
         let path = self.get_endpoint_path(&model_config.model_name, false);
+        let mut log = RequestLog::start(&self.model, &payload)?;
         let response = self
             .with_retry(|| async {
                 let resp = self.api_client.response_post(&path, &payload).await?;
@@ -335,11 +339,13 @@ impl Provider for DatabricksProvider {
                 }
                 Ok(resp)
             })
-            .await?;
+            .await
+            .inspect_err(|e| {
+                let _ = log.error(e);
+            })?;
 
         let stream = response.bytes_stream().map_err(io::Error::other);
-        let model = self.model.clone();
 
         Ok(Box::pin(try_stream! {
             let stream_reader = StreamReader::new(stream);
             let framed = FramedRead::new(stream_reader, LinesCodec::new()).map_err(anyhow::Error::from);
@@ -348,7 +354,7 @@ impl Provider for DatabricksProvider {
             pin!(message_stream);
             while let Some(message) = message_stream.next().await {
                 let (message, usage) = message.map_err(|e| ProviderError::RequestFailed(format!("Stream decode error: {}", e)))?;
-                super::utils::emit_debug_trace(&model, &payload, &message, &usage.as_ref().map(|f| f.usage).unwrap_or_default());
+                log.write(&message, usage.as_ref().map(|f| f.usage).as_ref())?;
                 yield (message, usage);
             }
         }))
diff --git a/crates/goose/src/providers/gcpvertexai.rs b/crates/goose/src/providers/gcpvertexai.rs
index 8c3d4c765df1..e2ac26a31af3 100644
--- a/crates/goose/src/providers/gcpvertexai.rs
+++ b/crates/goose/src/providers/gcpvertexai.rs
@@ -21,7 +21,7 @@ use crate::providers::formats::gcpvertexai::{
 use crate::providers::formats::gcpvertexai::GcpLocation::Iowa;
 use crate::providers::gcpauth::GcpAuth;
 use crate::providers::retry::RetryConfig;
-use crate::providers::utils::emit_debug_trace;
+use crate::providers::utils::RequestLog;
 use rmcp::model::Tool;
 
 /// Base URL for GCP Vertex AI documentation
@@ -518,7 +518,8 @@ impl Provider for GcpVertexAIProvider {
         let response = self.post(&request, &context).await?;
 
         let usage = get_usage(&response, &context)?;
-        emit_debug_trace(model_config, &request, &response, &usage);
+        let mut log = RequestLog::start(model_config, &request)?;
+        log.write(&response, Some(&usage))?;
 
         // Convert response to message
         let message = response_to_message(response, context)?;
diff --git a/crates/goose/src/providers/gemini_cli.rs b/crates/goose/src/providers/gemini_cli.rs
index 22d0a04ebd46..417774a70002 100644
--- a/crates/goose/src/providers/gemini_cli.rs
+++ b/crates/goose/src/providers/gemini_cli.rs
@@ -8,7 +8,7 @@ use tokio::process::Command;
 
 use super::base::{Provider, ProviderMetadata, ProviderUsage, Usage};
 use super::errors::ProviderError;
-use super::utils::emit_debug_trace;
+use super::utils::RequestLog;
 use crate::conversation::message::{Message, MessageContent};
 use crate::model::ModelConfig;
 
@@ -317,12 +317,12 @@ impl Provider for GeminiCliProvider {
     }
 
     #[tracing::instrument(
-        skip(self, model_config, system, messages, tools),
+        skip(self, _model_config, system, messages, tools),
         fields(model_config, input, output, input_tokens, output_tokens, total_tokens)
     )]
     async fn complete_with_model(
         &self,
-        model_config: &ModelConfig,
+        _model_config: &ModelConfig,
         system: &str,
         messages: &[Message],
         tools: &[Tool],
@@ -332,10 +332,6 @@ impl Provider for GeminiCliProvider {
             return self.generate_simple_session_description(messages);
         }
 
-        let lines = self.execute_command(system, messages, tools).await?;
-
-        let (message, usage) = self.parse_response(&lines)?;
-
         // Create a dummy payload for debug tracing
         let payload = json!({
             "command": self.command,
@@ -344,12 +340,22 @@ impl Provider for GeminiCliProvider {
             "messages": messages.len()
         });
 
+        let mut log = RequestLog::start(&self.model, &payload).map_err(|e| {
+            ProviderError::RequestFailed(format!("Failed to start request log: {}", e))
+        })?;
+
+        let lines = self.execute_command(system, messages, tools).await?;
+
+        let (message, usage) = self.parse_response(&lines)?;
+
         let response = json!({
             "lines": lines.len(),
             "usage": usage
         });
 
-        emit_debug_trace(model_config, &payload, &response, &usage);
+        log.write(&response, Some(&usage)).map_err(|e| {
+            ProviderError::RequestFailed(format!("Failed to write request log: {}", e))
+        })?;
 
         Ok((
             message,
diff --git a/crates/goose/src/providers/githubcopilot.rs b/crates/goose/src/providers/githubcopilot.rs
index a6832d529a08..4a822ee7abbe 100644
--- a/crates/goose/src/providers/githubcopilot.rs
+++ b/crates/goose/src/providers/githubcopilot.rs
@@ -15,7 +15,7 @@ use super::base::{Provider, ProviderMetadata, ProviderUsage, Usage};
 use super::errors::ProviderError;
 use super::formats::openai::{create_request, get_usage, response_to_message};
 use super::retry::ProviderRetry;
-use super::utils::{emit_debug_trace, get_model, handle_response_openai_compat, ImageFormat};
+use super::utils::{get_model, handle_response_openai_compat, ImageFormat, RequestLog};
 use crate::config::{Config, ConfigError};
 use crate::conversation::message::Message;
 
@@ -408,6 +408,7 @@ impl Provider for GithubCopilotProvider {
         tools: &[Tool],
     ) -> Result<(Message, ProviderUsage), ProviderError> {
         let payload = create_request(model_config, system, messages, tools, &ImageFormat::OpenAi)?;
+        let mut log = RequestLog::start(model_config, &payload)?;
 
         // Make request with retry
         let response = self
@@ -424,7 +425,7 @@ impl Provider for GithubCopilotProvider {
             Usage::default()
         });
         let response_model = get_model(&response);
-        emit_debug_trace(model_config, &payload, &response, &usage);
+        log.write(&response, Some(&usage))?;
 
         Ok((message, ProviderUsage::new(response_model, usage)))
     }
diff --git a/crates/goose/src/providers/google.rs b/crates/goose/src/providers/google.rs
index 4cb907b02371..01cc45fc3a9f 100644
--- a/crates/goose/src/providers/google.rs
+++ b/crates/goose/src/providers/google.rs
@@ -1,7 +1,7 @@
 use super::api_client::{ApiClient, AuthMethod};
 use super::errors::ProviderError;
 use super::retry::ProviderRetry;
-use super::utils::{emit_debug_trace, handle_response_google_compat, unescape_json_values};
+use super::utils::{handle_response_google_compat, unescape_json_values, RequestLog};
 use crate::conversation::message::Message;
 use crate::model::ModelConfig;
 
@@ -113,6 +113,7 @@ impl Provider for GoogleProvider {
         tools: &[Tool],
     ) -> Result<(Message, ProviderUsage), ProviderError> {
         let payload = create_request(model_config, system, messages, tools)?;
+        let mut log = RequestLog::start(model_config, &payload)?;
 
         // Make request
         let response = self
@@ -129,7 +130,7 @@ impl Provider for GoogleProvider {
             Some(model_version) => model_version.as_str().unwrap_or_default().to_string(),
             None => model_config.model_name.clone(),
         };
-        emit_debug_trace(model_config, &payload, &response, &usage);
+        log.write(&response, Some(&usage))?;
         let provider_usage = ProviderUsage::new(response_model, usage);
         Ok((message, provider_usage))
     }
diff --git a/crates/goose/src/providers/litellm.rs b/crates/goose/src/providers/litellm.rs
index 7926e202e194..a0839724e8fc 100644
--- a/crates/goose/src/providers/litellm.rs
+++ b/crates/goose/src/providers/litellm.rs
@@ -8,7 +8,7 @@ use super::base::{ConfigKey, ModelInfo, Provider, ProviderMetadata, ProviderUsag
 use super::embedding::EmbeddingCapable;
 use super::errors::ProviderError;
 use super::retry::ProviderRetry;
-use super::utils::{emit_debug_trace, get_model, handle_response_openai_compat, ImageFormat};
+use super::utils::{get_model, handle_response_openai_compat, ImageFormat, RequestLog};
 use crate::conversation::message::Message;
 use crate::model::ModelConfig;
 
@@ -188,7 +188,8 @@ impl Provider for LiteLLMProvider {
         let message = super::formats::openai::response_to_message(&response)?;
         let usage = super::formats::openai::get_usage(&response);
         let response_model = get_model(&response);
-        emit_debug_trace(model_config, &payload, &response, &usage);
+        let mut log = RequestLog::start(model_config, &payload)?;
+        log.write(&response, Some(&usage))?;
 
         Ok((message, ProviderUsage::new(response_model, usage)))
     }
diff --git a/crates/goose/src/providers/ollama.rs b/crates/goose/src/providers/ollama.rs
index e1e0b5b20008..c9d36338164f 100644
--- a/crates/goose/src/providers/ollama.rs
+++ b/crates/goose/src/providers/ollama.rs
@@ -2,7 +2,9 @@ use super::api_client::{ApiClient, AuthMethod};
 use super::base::{ConfigKey, MessageStream, Provider, ProviderMetadata, ProviderUsage, Usage};
 use super::errors::ProviderError;
 use super::retry::ProviderRetry;
-use super::utils::{get_model, handle_response_openai_compat, handle_status_openai_compat};
+use super::utils::{
+    get_model, handle_response_openai_compat, handle_status_openai_compat, RequestLog,
+};
 use crate::config::declarative_providers::DeclarativeProviderConfig;
 use crate::conversation::message::Message;
 use crate::conversation::Conversation;
@@ -213,7 +215,8 @@ impl Provider for OllamaProvider {
             Usage::default()
         });
         let response_model = get_model(&response);
-        super::utils::emit_debug_trace(model_config, &payload, &response, &usage);
+        let mut log = RequestLog::start(model_config, &payload)?;
+        log.write(&response, Some(&usage))?;
         Ok((message, ProviderUsage::new(response_model, usage)))
     }
 
@@ -270,13 +273,14 @@ impl Provider for OllamaProvider {
         let model_config = self.model.clone();
 
         Ok(Box::pin(try_stream! {
+            let mut log = RequestLog::start(&model_config, &payload)?;
             let stream_reader = StreamReader::new(stream);
             let framed = FramedRead::new(stream_reader, LinesCodec::new()).map_err(anyhow::Error::from);
             let message_stream = response_to_streaming_message(framed);
             pin!(message_stream);
             while let Some(message) = message_stream.next().await {
                 let (message, usage) = message.map_err(|e| ProviderError::RequestFailed(format!("Stream decode error: {}", e)))?;
-                super::utils::emit_debug_trace(&model_config, &payload, &message, &usage.as_ref().map(|f| f.usage).unwrap_or_default());
+                log.write(&message, usage.as_ref().map(|f| &f.usage))?;
                 yield (message, usage);
             }
         }))
diff --git a/crates/goose/src/providers/openai.rs b/crates/goose/src/providers/openai.rs
index 31e4041b5b2f..c00d8e6a5ed5 100644
--- a/crates/goose/src/providers/openai.rs
+++ b/crates/goose/src/providers/openai.rs
@@ -17,8 +17,7 @@ use super::embedding::{EmbeddingCapable, EmbeddingRequest, EmbeddingResponse};
 use super::errors::ProviderError;
 use super::formats::openai::{create_request, get_usage, response_to_message};
 use super::utils::{
-    emit_debug_trace, get_model, handle_response_openai_compat, handle_status_openai_compat,
-    ImageFormat,
+    get_model, handle_response_openai_compat, handle_status_openai_compat, ImageFormat,
 };
 use crate::config::declarative_providers::DeclarativeProviderConfig;
 use crate::conversation::message::Message;
@@ -26,6 +25,7 @@ use crate::conversation::message::Message;
 use crate::model::ModelConfig;
 use crate::providers::base::MessageStream;
 use crate::providers::formats::openai::response_to_streaming_message;
+use crate::providers::utils::RequestLog;
 use rmcp::model::Tool;
 
 pub const OPEN_AI_DEFAULT_MODEL: &str = "gpt-4o";
@@ -218,7 +218,10 @@ impl Provider for OpenAiProvider {
     ) -> Result<(Message, ProviderUsage), ProviderError> {
         let payload = create_request(model_config, system, messages, tools, &ImageFormat::OpenAi)?;
 
-        let json_response = self.post(&payload).await?;
+        let mut log = RequestLog::start(&self.model, &payload)?;
+        let json_response = self.post(&payload).await.inspect_err(|e| {
+            let _ = log.error(e);
+        })?;
 
         let message = response_to_message(&json_response)?;
         let usage = json_response
@@ -229,7 +232,7 @@ impl Provider for OpenAiProvider {
             Usage::default()
         });
         let model = get_model(&json_response);
-        emit_debug_trace(&self.model, &payload, &json_response, &usage);
+        log.write(&json_response, Some(&usage))?;
 
         Ok((message, ProviderUsage::new(model, usage)))
     }
@@ -282,17 +285,23 @@ impl Provider for OpenAiProvider {
         payload["stream_options"] = json!({
             "include_usage": true,
         });
+        let mut log = RequestLog::start(&self.model, &payload)?;
 
         let response = self
             .api_client
             .response_post(&self.base_path, &payload)
-            .await?;
-        let response = handle_status_openai_compat(response).await?;
+            .await
+            .inspect_err(|e| {
+                let _ = log.error(e);
+            })?;
+        let response = handle_status_openai_compat(response)
+            .await
+            .inspect_err(|e| {
+                let _ = log.error(e);
+            })?;
 
         let stream = response.bytes_stream().map_err(io::Error::other);
-        let model_config = self.model.clone();
-
         Ok(Box::pin(try_stream! {
             let stream_reader = StreamReader::new(stream);
             let framed = FramedRead::new(stream_reader, LinesCodec::new()).map_err(anyhow::Error::from);
@@ -301,7 +310,7 @@ impl Provider for OpenAiProvider {
             pin!(message_stream);
             while let Some(message) = message_stream.next().await {
                 let (message, usage) = message.map_err(|e| ProviderError::RequestFailed(format!("Stream decode error: {}", e)))?;
-                emit_debug_trace(&model_config, &payload, &message, &usage.as_ref().map(|f| f.usage).unwrap_or_default());
+                log.write(&message, usage.as_ref().map(|f| f.usage).as_ref())?;
                 yield (message, usage);
             }
         }))
diff --git a/crates/goose/src/providers/openrouter.rs b/crates/goose/src/providers/openrouter.rs
index 85884a25a514..a3707af89a8e 100644
--- a/crates/goose/src/providers/openrouter.rs
+++ b/crates/goose/src/providers/openrouter.rs
@@ -7,8 +7,8 @@ use super::base::{ConfigKey, Provider, ProviderMetadata, ProviderUsage, Usage};
 use super::errors::ProviderError;
 use super::retry::ProviderRetry;
 use super::utils::{
-    emit_debug_trace, get_model, handle_response_google_compat, handle_response_openai_compat,
-    is_google_model,
+    get_model, handle_response_google_compat, handle_response_openai_compat, is_google_model,
+    RequestLog,
 };
 use crate::conversation::message::Message;
 
@@ -259,6 +259,7 @@ impl Provider for OpenRouterProvider {
     ) -> Result<(Message, ProviderUsage), ProviderError> {
         // Create the base payload
         let payload = create_request_based_on_model(self, system, messages, tools)?;
+        let mut log = RequestLog::start(model_config, &payload)?;
 
         // Make request
         let response = self
@@ -275,7 +276,7 @@ impl Provider for OpenRouterProvider {
             Usage::default()
         });
         let response_model = get_model(&response);
-        emit_debug_trace(model_config, &payload, &response, &usage);
+        log.write(&response, Some(&usage))?;
 
         Ok((message, ProviderUsage::new(response_model, usage)))
     }
diff --git a/crates/goose/src/providers/sagemaker_tgi.rs b/crates/goose/src/providers/sagemaker_tgi.rs
index 5313e7705ba0..9c804147cbd1 100644
--- a/crates/goose/src/providers/sagemaker_tgi.rs
+++ b/crates/goose/src/providers/sagemaker_tgi.rs
@@ -12,7 +12,7 @@ use serde_json::{json, Value};
 use super::base::{ConfigKey, Provider, ProviderMetadata, ProviderUsage, Usage};
 use super::errors::ProviderError;
 use super::retry::ProviderRetry;
-use super::utils::emit_debug_trace;
+use super::utils::RequestLog;
 use crate::conversation::message::{Message, MessageContent};
 use crate::model::ModelConfig;
 
@@ -312,12 +312,11 @@ impl Provider for SageMakerTgiProvider {
             "messages": messages,
             "tools": tools
         });
-        emit_debug_trace(
-            &self.model,
-            &debug_payload,
+        let mut log = RequestLog::start(&self.model, &debug_payload)?;
+        log.write(
             &serde_json::to_value(&message).unwrap_or_default(),
-            &usage,
-        );
+            Some(&usage),
+        )?;
 
         let provider_usage = ProviderUsage::new(model_name.to_string(), usage);
         Ok((message, provider_usage))
diff --git a/crates/goose/src/providers/snowflake.rs b/crates/goose/src/providers/snowflake.rs
index aef57b5ae0ac..0534c52cf849 100644
--- a/crates/goose/src/providers/snowflake.rs
+++ b/crates/goose/src/providers/snowflake.rs
@@ -8,7 +8,7 @@ use super::base::{ConfigKey, Provider, ProviderMetadata, ProviderUsage};
 use super::errors::ProviderError;
 use super::formats::snowflake::{create_request, get_usage, response_to_message};
 use super::retry::ProviderRetry;
-use super::utils::{get_model, map_http_error_to_provider_error, ImageFormat};
+use super::utils::{get_model, map_http_error_to_provider_error, ImageFormat, RequestLog};
 use crate::config::ConfigError;
 use crate::conversation::message::Message;
 
@@ -309,6 +309,8 @@ impl Provider for SnowflakeProvider {
     ) -> Result<(Message, ProviderUsage), ProviderError> {
         let payload = create_request(model_config, system, messages, tools)?;
 
+        let mut log = RequestLog::start(&self.model, &payload)?;
+
         let response = self
             .with_retry(|| async {
                 let payload_clone = payload.clone();
@@ -316,11 +318,11 @@ impl Provider for SnowflakeProvider {
             })
             .await?;
 
-        // Parse response
         let message = response_to_message(&response)?;
         let usage = get_usage(&response)?;
         let response_model = get_model(&response);
-        super::utils::emit_debug_trace(model_config, &payload, &response, &usage);
+
+        log.write(&response, Some(&usage))?;
 
         Ok((message, ProviderUsage::new(response_model, usage)))
     }
diff --git a/crates/goose/src/providers/tetrate.rs b/crates/goose/src/providers/tetrate.rs
index c4d5924b92bb..e771d4916019 100644
--- a/crates/goose/src/providers/tetrate.rs
+++ b/crates/goose/src/providers/tetrate.rs
@@ -15,8 +15,8 @@ use super::errors::ProviderError;
 use super::formats::openai::response_to_streaming_message;
 use super::retry::ProviderRetry;
 use super::utils::{
-    emit_debug_trace, get_model, handle_response_google_compat, handle_response_openai_compat,
-    handle_status_openai_compat, is_google_model,
+    get_model, handle_response_google_compat, handle_response_openai_compat,
+    handle_status_openai_compat, is_google_model, RequestLog,
 };
 use crate::config::signup_tetrate::TETRATE_DEFAULT_MODEL;
 use crate::conversation::message::Message;
@@ -173,6 +173,7 @@ impl Provider for TetrateProvider {
             tools,
             &super::utils::ImageFormat::OpenAi,
         )?;
+        let mut log = RequestLog::start(model_config, &payload)?;
 
         // Make request
         let response = self
@@ -189,7 +190,7 @@ impl Provider for TetrateProvider {
             Usage::default()
         });
         let model = get_model(&response);
-        emit_debug_trace(model_config, &payload, &response, &usage);
+        log.write(&response, Some(&usage))?;
 
         Ok((message, ProviderUsage::new(model, usage)))
     }
@@ -220,7 +221,7 @@ impl Provider for TetrateProvider {
         let response = handle_status_openai_compat(response).await?;
 
         let stream = response.bytes_stream().map_err(io::Error::other);
-        let model_config = self.model.clone();
+        let mut log = RequestLog::start(&self.model, &payload)?;
 
         Ok(Box::pin(try_stream! {
             let stream_reader = StreamReader::new(stream);
@@ -230,7 +231,7 @@ impl Provider for TetrateProvider {
             pin!(message_stream);
             while let Some(message) = message_stream.next().await {
                 let (message, usage) = message.map_err(|e| ProviderError::RequestFailed(format!("Stream decode error: {}", e)))?;
-                emit_debug_trace(&model_config, &payload, &message, &usage.as_ref().map(|f| f.usage).unwrap_or_default());
+                log.write(&message, usage.as_ref().map(|f| f.usage).as_ref())?;
                 yield (message, usage);
             }
         }))
diff --git a/crates/goose/src/providers/utils.rs b/crates/goose/src/providers/utils.rs
index 344e7092439d..6a17a3d7df61 100644
--- a/crates/goose/src/providers/utils.rs
+++ b/crates/goose/src/providers/utils.rs
@@ -3,16 +3,19 @@ use super::errors::GoogleErrorCode;
 use crate::config::paths::Paths;
 use crate::model::ModelConfig;
 use crate::providers::errors::{OpenAIError, ProviderError};
-use anyhow::Result;
+use anyhow::{anyhow, Result};
 use base64::Engine;
 use regex::Regex;
 use reqwest::{Response, StatusCode};
 use rmcp::model::{AnnotateAble, ImageContent, RawImageContent};
 use serde::{Deserialize, Serialize};
 use serde_json::{json, Map, Value};
-use std::io::Read;
-use std::path::Path;
+use std::fmt::Display;
+use std::fs::File;
+use std::io::{BufWriter, Read, Write};
+use std::path::{Path, PathBuf};
 use std::time::Duration;
+use uuid::Uuid;
 
 #[derive(serde::Deserialize)]
 struct OpenAIErrorResponse {
@@ -452,40 +455,95 @@ pub fn unescape_json_values(value: &Value) -> Value {
     }
 }
 
-pub fn emit_debug_trace<T1, T2>(
-    model_config: &ModelConfig,
-    payload: &T1,
-    response: &T2,
-    usage: &Usage,
-) where
-    T1: ?Sized + Serialize,
-    T2: ?Sized + Serialize,
-{
-    let logs_dir = Paths::in_state_dir("logs");
-
-    if let Err(e) = std::fs::create_dir_all(&logs_dir) {
-        tracing::warn!("Failed to create logs directory: {}", e);
-        return;
+pub struct RequestLog {
+    writer: Option<BufWriter<File>>,
+    temp_path: PathBuf,
+}
+
+const LOGS_TO_KEEP: usize = 10;
+
+impl RequestLog {
+    pub fn start<Payload>(model_config: &ModelConfig, payload: &Payload) -> Result<Self>
+    where
+        Payload: Serialize,
+    {
+        let logs_dir = Paths::in_state_dir("logs");
+        std::fs::create_dir_all(&logs_dir)?;
+
+        let request_id = Uuid::new_v4();
+        let temp_name = format!("llm_request.{request_id}.jsonl");
+        let temp_path = logs_dir.join(PathBuf::from(temp_name));
+
+        let mut writer = BufWriter::new(
+            File::options()
+                .write(true)
+                .create(true)
+                .truncate(true)
+                .open(&temp_path)?,
+        );
+
+        let data = serde_json::json!({
+            "model_config": model_config,
+            "input": payload,
+        });
+        writeln!(writer, "{}", serde_json::to_string(&data)?)?;
+
+        Ok(Self {
+            writer: Some(writer),
+            temp_path,
+        })
+    }
+
+    fn write_json(&mut self, line: &serde_json::Value) -> Result<()> {
+        let writer = self
+            .writer
+            .as_mut()
+            .ok_or_else(|| anyhow!("logger is finished"))?;
+        writeln!(writer, "{}", serde_json::to_string(line)?)?;
+        Ok(())
     }
 
-    let log_path = |i| logs_dir.join(format!("llm_request.{}.json", i));
+    pub fn error<E>(&mut self, error: E) -> Result<()>
+    where
+        E: Display,
+    {
+        self.write_json(&serde_json::json!({
+            "error": format!("{}", error),
+        }))
+    }
 
-    for i in (0..4).rev() {
-        let _ = std::fs::rename(log_path(i), log_path(i + 1));
+    pub fn write<Payload>(&mut self, data: &Payload, usage: Option<&Usage>) -> Result<()>
+    where
+        Payload: Serialize,
+    {
+        self.write_json(&serde_json::json!({
+            "data": data,
+            "usage": usage,
+        }))
     }
 
-    let data = serde_json::json!({
-        "model_config": model_config,
-        "input": payload,
-        "output": response,
-        "usage": usage,
-    });
-
-    if let Err(e) = std::fs::write(
-        log_path(0),
-        serde_json::to_string_pretty(&data).unwrap_or_default(),
-    ) {
-        tracing::warn!("Failed to write log file: {}", e);
+    fn finish(&mut self) -> Result<()> {
+        if let Some(mut writer) = self.writer.take() {
+            writer.flush()?;
+            let logs_dir = Paths::in_state_dir("logs");
+            let log_path = |i| logs_dir.join(format!("llm_request.{}.jsonl", i));
+
+            for i in (0..LOGS_TO_KEEP - 1).rev() {
+                let _ = std::fs::rename(log_path(i), log_path(i + 1));
+            }
+
+            std::fs::rename(&self.temp_path, log_path(0))?;
+        }
+        Ok(())
+    }
+}
+
+impl Drop for RequestLog {
+    fn drop(&mut self) {
+        if std::thread::panicking() {
+            return;
+        }
+        let _ = self.finish();
     }
 }
diff --git a/crates/goose/src/providers/xai.rs b/crates/goose/src/providers/xai.rs
index bb52b0e6c09e..ab75680acd64 100644
--- a/crates/goose/src/providers/xai.rs
+++ b/crates/goose/src/providers/xai.rs
@@ -1,7 +1,7 @@
 use super::api_client::{ApiClient, AuthMethod};
 use super::errors::ProviderError;
 use super::retry::ProviderRetry;
-use super::utils::{get_model, handle_response_openai_compat};
+use super::utils::{get_model, handle_response_openai_compat, RequestLog};
 use crate::conversation::message::Message;
 use crate::model::ModelConfig;
 
@@ -110,6 +110,7 @@ impl Provider for XaiProvider {
             &super::utils::ImageFormat::OpenAi,
         )?;
 
+        let mut log = RequestLog::start(&self.model, &payload)?;
         let response = self.with_retry(|| self.post(payload.clone())).await?;
 
         let message = response_to_message(&response)?;
@@ -118,7 +119,7 @@ impl Provider for XaiProvider {
             Usage::default()
        });
         let response_model = get_model(&response);
-        super::utils::emit_debug_trace(model_config, &payload, &response, &usage);
+        log.write(&response, Some(&usage))?;
         Ok((message, ProviderUsage::new(response_model, usage)))
     }
 }
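
Reviewer's note on the new rotation scheme: RequestLog streams each request to a uniquely named temp file as JSONL (one "input" record from start(), then "data"/"usage" or "error" records), and finish() promotes that file to llm_request.0.jsonl while shifting older logs up one slot. Below is a minimal, standalone sketch of just that promotion step; the "logs" directory and the fixed temp-file name are illustrative stand-ins for Paths::in_state_dir("logs") and the Uuid-based temp path used by the real code.

    use std::fs;
    use std::io;
    use std::path::Path;

    const LOGS_TO_KEEP: usize = 10;

    // Shift llm_request.8.jsonl -> .9, ..., .0 -> .1, then move the finished
    // temp file into slot 0. Rename failures (e.g. a slot that does not exist
    // yet) are ignored, mirroring the `let _ =` in RequestLog::finish(); on
    // Unix a rename into an occupied slot replaces it, so at most
    // LOGS_TO_KEEP logs survive.
    fn promote(logs_dir: &Path, temp_path: &Path) -> io::Result<()> {
        let log_path = |i: usize| logs_dir.join(format!("llm_request.{}.jsonl", i));
        for i in (0..LOGS_TO_KEEP - 1).rev() {
            let _ = fs::rename(log_path(i), log_path(i + 1));
        }
        fs::rename(temp_path, log_path(0))
    }

    fn main() -> io::Result<()> {
        let logs_dir = Path::new("logs");
        fs::create_dir_all(logs_dir)?;
        // Stand-in for the first JSONL record that RequestLog::start writes.
        let temp = logs_dir.join("llm_request.example.jsonl");
        fs::write(&temp, "{\"model_config\":{},\"input\":{}}\n")?;
        promote(logs_dir, &temp)
    }

Keeping the write on a temp file until finish() means an aborted request never clobbers the numbered history, which is also why Drop checks std::thread::panicking() before finishing.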